1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{mpsc, oneshot},
21 future::{self, Shared},
22 stream::FuturesOrdered,
23};
24use git::{
25 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
26 blame::Blame,
27 parse_git_remote_url,
28 repository::{
29 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
30 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
31 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
32 },
33 stash::{GitStash, StashEntry},
34 status::{
35 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
36 UnmergedStatus, UnmergedStatusCode,
37 },
38};
39use gpui::{
40 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
41 WeakEntity,
42};
43use language::{
44 Buffer, BufferEvent, Language, LanguageRegistry,
45 proto::{deserialize_version, serialize_version},
46};
47use parking_lot::Mutex;
48use pending_op::{PendingOp, PendingOps};
49use postage::stream::Stream as _;
50use rpc::{
51 AnyProtoClient, TypedEnvelope,
52 proto::{self, git_reset, split_repository_update},
53};
54use serde::Deserialize;
55use std::{
56 cmp::Ordering,
57 collections::{BTreeSet, VecDeque},
58 future::Future,
59 mem,
60 ops::Range,
61 path::{Path, PathBuf},
62 str::FromStr,
63 sync::{
64 Arc,
65 atomic::{self, AtomicU64},
66 },
67 time::Instant,
68};
69use sum_tree::{Edit, SumTree, TreeSet};
70use task::Shell;
71use text::{Bias, BufferId};
72use util::{
73 ResultExt, debug_panic,
74 paths::{PathStyle, SanitizedPath},
75 post_inc,
76 rel_path::RelPath,
77};
78use worktree::{
79 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
80 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
81};
82use zeroize::Zeroize;
83
84pub struct GitStore {
85 state: GitStoreState,
86 buffer_store: Entity<BufferStore>,
87 worktree_store: Entity<WorktreeStore>,
88 repositories: HashMap<RepositoryId, Entity<Repository>>,
89 active_repo_id: Option<RepositoryId>,
90 #[allow(clippy::type_complexity)]
91 loading_diffs:
92 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
93 diffs: HashMap<BufferId, Entity<BufferGitState>>,
94 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
95 _subscriptions: Vec<Subscription>,
96}
97
98#[derive(Default)]
99struct SharedDiffs {
100 unstaged: Option<Entity<BufferDiff>>,
101 uncommitted: Option<Entity<BufferDiff>>,
102}
103
104struct BufferGitState {
105 unstaged_diff: Option<WeakEntity<BufferDiff>>,
106 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
107 conflict_set: Option<WeakEntity<ConflictSet>>,
108 recalculate_diff_task: Option<Task<Result<()>>>,
109 reparse_conflict_markers_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 language_registry: Option<Arc<LanguageRegistry>>,
112 conflict_updated_futures: Vec<oneshot::Sender<()>>,
113 recalculating_tx: postage::watch::Sender<bool>,
114
115 /// These operation counts are used to ensure that head and index text
116 /// values read from the git repository are up-to-date with any hunk staging
117 /// operations that have been performed on the BufferDiff.
118 ///
119 /// The operation count is incremented immediately when the user initiates a
120 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to reflect
122 /// the operation count that prompted the write.
123 hunk_staging_operation_count: usize,
124 hunk_staging_operation_count_as_of_write: usize,
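    // Sketch of the intended invariant (inferred from the doc comment above, not a
    // separate API): while `hunk_staging_operation_count_as_of_write` lags behind
    // `hunk_staging_operation_count`, an index text freshly read from the repository
    // may predate an in-flight staging write and should not be treated as current.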
125
126 head_text: Option<Arc<String>>,
127 index_text: Option<Arc<String>>,
128 head_changed: bool,
129 index_changed: bool,
130 language_changed: bool,
131}
132
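/// Indicates which diff base texts changed for a buffer: the index (staged) text,
/// the HEAD (committed) text, each of them separately, or both set to the same text.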
133#[derive(Clone, Debug)]
134enum DiffBasesChange {
135 SetIndex(Option<String>),
136 SetHead(Option<String>),
137 SetEach {
138 index: Option<String>,
139 head: Option<String>,
140 },
141 SetBoth(Option<String>),
142}
143
144#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
145enum DiffKind {
146 Unstaged,
147 Uncommitted,
148}
149
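/// How git data is obtained for this store: a `Local` store reads repositories from
/// disk via `fs`, while a `Remote` store mirrors state received from the upstream
/// project over `upstream_client`.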
150enum GitStoreState {
151 Local {
152 next_repository_id: Arc<AtomicU64>,
153 downstream: Option<LocalDownstreamState>,
154 project_environment: Entity<ProjectEnvironment>,
155 fs: Arc<dyn Fs>,
156 },
157 Remote {
158 upstream_client: AnyProtoClient,
159 upstream_project_id: u64,
160 downstream: Option<(AnyProtoClient, ProjectId)>,
161 },
162}
163
164enum DownstreamUpdate {
165 UpdateRepository(RepositorySnapshot),
166 RemoveRepository(RepositoryId),
167}
168
169struct LocalDownstreamState {
170 client: AnyProtoClient,
171 project_id: ProjectId,
172 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
173 _task: Task<Result<()>>,
174}
175
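/// A point-in-time snapshot of every repository in the store, keyed by work
/// directory path; used to restore or compare project-wide git state.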
176#[derive(Clone, Debug)]
177pub struct GitStoreCheckpoint {
178 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
179}
180
181#[derive(Clone, Debug, PartialEq, Eq)]
182pub struct StatusEntry {
183 pub repo_path: RepoPath,
184 pub status: FileStatus,
185}
186
187impl StatusEntry {
188 fn to_proto(&self) -> proto::StatusEntry {
189 let simple_status = match self.status {
190 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
191 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
192 FileStatus::Tracked(TrackedStatus {
193 index_status,
194 worktree_status,
195 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
196 worktree_status
197 } else {
198 index_status
199 }),
200 };
201
202 proto::StatusEntry {
203 repo_path: self.repo_path.to_proto(),
204 simple_status,
205 status: Some(status_to_proto(self.status)),
206 }
207 }
208}
209
210impl TryFrom<proto::StatusEntry> for StatusEntry {
211 type Error = anyhow::Error;
212
213 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
214 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
215 let status = status_from_proto(value.simple_status, value.status)?;
216 Ok(Self { repo_path, status })
217 }
218}
219
220impl sum_tree::Item for StatusEntry {
221 type Summary = PathSummary<GitSummary>;
222
223 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
224 PathSummary {
225 max_path: self.repo_path.0.clone(),
226 item_summary: self.status.summary(),
227 }
228 }
229}
230
231impl sum_tree::KeyedItem for StatusEntry {
232 type Key = PathKey;
233
234 fn key(&self) -> Self::Key {
235 PathKey(self.repo_path.0.clone())
236 }
237}
238
239#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
240pub struct RepositoryId(pub u64);
241
242#[derive(Clone, Debug, Default, PartialEq, Eq)]
243pub struct MergeDetails {
244 pub conflicted_paths: TreeSet<RepoPath>,
245 pub message: Option<SharedString>,
246 pub heads: Vec<Option<SharedString>>,
247}
248
249#[derive(Clone, Debug, PartialEq, Eq)]
250pub struct RepositorySnapshot {
251 pub id: RepositoryId,
252 pub statuses_by_path: SumTree<StatusEntry>,
253 pub pending_ops_by_path: SumTree<PendingOps>,
254 pub work_directory_abs_path: Arc<Path>,
255 pub path_style: PathStyle,
256 pub branch: Option<Branch>,
257 pub head_commit: Option<CommitDetails>,
258 pub scan_id: u64,
259 pub merge: MergeDetails,
260 pub remote_origin_url: Option<String>,
261 pub remote_upstream_url: Option<String>,
262 pub stash_entries: GitStash,
263}
264
265type JobId = u64;
266
267#[derive(Clone, Debug, PartialEq, Eq)]
268pub struct JobInfo {
269 pub start: Instant,
270 pub message: SharedString,
271}
272
273pub struct Repository {
274 this: WeakEntity<Self>,
275 snapshot: RepositorySnapshot,
276 commit_message_buffer: Option<Entity<Buffer>>,
277 git_store: WeakEntity<GitStore>,
278 // For a local repository, holds paths that have had worktree events since the last status scan completed,
279 // and that should be examined during the next status scan.
280 paths_needing_status_update: BTreeSet<RepoPath>,
281 job_sender: mpsc::UnboundedSender<GitJob>,
282 active_jobs: HashMap<JobId, JobInfo>,
283 job_id: JobId,
284 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
285 latest_askpass_id: u64,
286}
287
288impl std::ops::Deref for Repository {
289 type Target = RepositorySnapshot;
290
291 fn deref(&self) -> &Self::Target {
292 &self.snapshot
293 }
294}
295
296#[derive(Clone)]
297pub enum RepositoryState {
298 Local {
299 backend: Arc<dyn GitRepository>,
300 environment: Arc<HashMap<String, String>>,
301 },
302 Remote {
303 project_id: ProjectId,
304 client: AnyProtoClient,
305 },
306}
307
308#[derive(Clone, Debug, PartialEq, Eq)]
309pub enum RepositoryEvent {
310 StatusesChanged {
311 // TODO could report which statuses changed here
312 full_scan: bool,
313 },
314 MergeHeadsChanged,
315 BranchChanged,
316 StashEntriesChanged,
317}
318
319#[derive(Clone, Debug)]
320pub struct JobsUpdated;
321
322#[derive(Debug)]
323pub enum GitStoreEvent {
324 ActiveRepositoryChanged(Option<RepositoryId>),
325 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
326 RepositoryAdded,
327 RepositoryRemoved(RepositoryId),
328 IndexWriteError(anyhow::Error),
329 JobsUpdated,
330 ConflictsUpdated,
331}
332
333impl EventEmitter<RepositoryEvent> for Repository {}
334impl EventEmitter<JobsUpdated> for Repository {}
335impl EventEmitter<GitStoreEvent> for GitStore {}
336
337pub struct GitJob {
338 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
339 key: Option<GitJobKey>,
340}
341
342#[derive(PartialEq, Eq)]
343enum GitJobKey {
344 WriteIndex(RepoPath),
345 ReloadBufferDiffBases,
346 RefreshStatuses,
347 ReloadGitState,
348}
349
350impl GitStore {
351 pub fn local(
352 worktree_store: &Entity<WorktreeStore>,
353 buffer_store: Entity<BufferStore>,
354 environment: Entity<ProjectEnvironment>,
355 fs: Arc<dyn Fs>,
356 cx: &mut Context<Self>,
357 ) -> Self {
358 Self::new(
359 worktree_store.clone(),
360 buffer_store,
361 GitStoreState::Local {
362 next_repository_id: Arc::new(AtomicU64::new(1)),
363 downstream: None,
364 project_environment: environment,
365 fs,
366 },
367 cx,
368 )
369 }
370
371 pub fn remote(
372 worktree_store: &Entity<WorktreeStore>,
373 buffer_store: Entity<BufferStore>,
374 upstream_client: AnyProtoClient,
375 project_id: u64,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Remote {
382 upstream_client,
383 upstream_project_id: project_id,
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
414 pub fn init(client: &AnyProtoClient) {
415 client.add_entity_request_handler(Self::handle_get_remotes);
416 client.add_entity_request_handler(Self::handle_get_branches);
417 client.add_entity_request_handler(Self::handle_get_default_branch);
418 client.add_entity_request_handler(Self::handle_change_branch);
419 client.add_entity_request_handler(Self::handle_create_branch);
420 client.add_entity_request_handler(Self::handle_rename_branch);
421 client.add_entity_request_handler(Self::handle_git_init);
422 client.add_entity_request_handler(Self::handle_push);
423 client.add_entity_request_handler(Self::handle_pull);
424 client.add_entity_request_handler(Self::handle_fetch);
425 client.add_entity_request_handler(Self::handle_stage);
426 client.add_entity_request_handler(Self::handle_unstage);
427 client.add_entity_request_handler(Self::handle_stash);
428 client.add_entity_request_handler(Self::handle_stash_pop);
429 client.add_entity_request_handler(Self::handle_stash_apply);
430 client.add_entity_request_handler(Self::handle_stash_drop);
431 client.add_entity_request_handler(Self::handle_commit);
432 client.add_entity_request_handler(Self::handle_reset);
433 client.add_entity_request_handler(Self::handle_show);
434 client.add_entity_request_handler(Self::handle_load_commit_diff);
435 client.add_entity_request_handler(Self::handle_checkout_files);
436 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
437 client.add_entity_request_handler(Self::handle_set_index_text);
438 client.add_entity_request_handler(Self::handle_askpass);
439 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
440 client.add_entity_request_handler(Self::handle_git_diff);
441 client.add_entity_request_handler(Self::handle_tree_diff);
442 client.add_entity_request_handler(Self::handle_get_blob_content);
443 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
444 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
445 client.add_entity_message_handler(Self::handle_update_diff_bases);
446 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
447 client.add_entity_request_handler(Self::handle_blame_buffer);
448 client.add_entity_message_handler(Self::handle_update_repository);
449 client.add_entity_message_handler(Self::handle_remove_repository);
450 client.add_entity_request_handler(Self::handle_git_clone);
451 client.add_entity_request_handler(Self::handle_get_worktrees);
452 client.add_entity_request_handler(Self::handle_create_worktree);
453 }
454
455 pub fn is_local(&self) -> bool {
456 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
459 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
460 let id = repo.read(cx).id;
461 if self.active_repo_id != Some(id) {
462 self.active_repo_id = Some(id);
463 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
464 }
465 }
466 }
467
468 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
469 match &mut self.state {
470 GitStoreState::Remote {
471 downstream: downstream_client,
472 ..
473 } => {
474 for repo in self.repositories.values() {
475 let update = repo.read(cx).snapshot.initial_update(project_id);
476 for update in split_repository_update(update) {
477 client.send(update).log_err();
478 }
479 }
480 *downstream_client = Some((client, ProjectId(project_id)));
481 }
482 GitStoreState::Local {
483 downstream: downstream_client,
484 ..
485 } => {
486 let mut snapshots = HashMap::default();
487 let (updates_tx, mut updates_rx) = mpsc::unbounded();
488 for repo in self.repositories.values() {
489 updates_tx
490 .unbounded_send(DownstreamUpdate::UpdateRepository(
491 repo.read(cx).snapshot.clone(),
492 ))
493 .ok();
494 }
495 *downstream_client = Some(LocalDownstreamState {
496 client: client.clone(),
497 project_id: ProjectId(project_id),
498 updates_tx,
499 _task: cx.spawn(async move |this, cx| {
500 cx.background_spawn(async move {
501 while let Some(update) = updates_rx.next().await {
502 match update {
503 DownstreamUpdate::UpdateRepository(snapshot) => {
504 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
505 {
506 let update =
507 snapshot.build_update(old_snapshot, project_id);
508 *old_snapshot = snapshot;
509 for update in split_repository_update(update) {
510 client.send(update)?;
511 }
512 } else {
513 let update = snapshot.initial_update(project_id);
514 for update in split_repository_update(update) {
515 client.send(update)?;
516 }
517 snapshots.insert(snapshot.id, snapshot);
518 }
519 }
520 DownstreamUpdate::RemoveRepository(id) => {
521 client.send(proto::RemoveRepository {
522 project_id,
523 id: id.to_proto(),
524 })?;
525 }
526 }
527 }
528 anyhow::Ok(())
529 })
530 .await
531 .ok();
532 this.update(cx, |this, _| {
533 if let GitStoreState::Local {
534 downstream: downstream_client,
535 ..
536 } = &mut this.state
537 {
538 downstream_client.take();
539 } else {
540 unreachable!("unshared called on remote store");
541 }
542 })
543 }),
544 });
545 }
546 }
547 }
548
549 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
550 match &mut self.state {
551 GitStoreState::Local {
552 downstream: downstream_client,
553 ..
554 } => {
555 downstream_client.take();
556 }
557 GitStoreState::Remote {
558 downstream: downstream_client,
559 ..
560 } => {
561 downstream_client.take();
562 }
563 }
564 self.shared_diffs.clear();
565 }
566
567 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
568 self.shared_diffs.remove(peer_id);
569 }
570
571 pub fn active_repository(&self) -> Option<Entity<Repository>> {
572 self.active_repo_id
573 .as_ref()
574 .map(|id| self.repositories[id].clone())
575 }
576
577 pub fn open_unstaged_diff(
578 &mut self,
579 buffer: Entity<Buffer>,
580 cx: &mut Context<Self>,
581 ) -> Task<Result<Entity<BufferDiff>>> {
582 let buffer_id = buffer.read(cx).remote_id();
583 if let Some(diff_state) = self.diffs.get(&buffer_id)
584 && let Some(unstaged_diff) = diff_state
585 .read(cx)
586 .unstaged_diff
587 .as_ref()
588 .and_then(|weak| weak.upgrade())
589 {
590 if let Some(task) =
591 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
592 {
593 return cx.background_executor().spawn(async move {
594 task.await;
595 Ok(unstaged_diff)
596 });
597 }
598 return Task::ready(Ok(unstaged_diff));
599 }
600
601 let Some((repo, repo_path)) =
602 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
603 else {
604 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
605 };
606
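        // Deduplicate concurrent requests: the first caller inserts a shared task for
        // this (buffer, diff kind) pair, and subsequent callers clone and await it.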
607 let task = self
608 .loading_diffs
609 .entry((buffer_id, DiffKind::Unstaged))
610 .or_insert_with(|| {
611 let staged_text = repo.update(cx, |repo, cx| {
612 repo.load_staged_text(buffer_id, repo_path, cx)
613 });
614 cx.spawn(async move |this, cx| {
615 Self::open_diff_internal(
616 this,
617 DiffKind::Unstaged,
618 staged_text.await.map(DiffBasesChange::SetIndex),
619 buffer,
620 cx,
621 )
622 .await
623 .map_err(Arc::new)
624 })
625 .shared()
626 })
627 .clone();
628
629 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
630 }
631
632 pub fn open_diff_since(
633 &mut self,
634 oid: Option<git::Oid>,
635 buffer: Entity<Buffer>,
636 repo: Entity<Repository>,
637 languages: Arc<LanguageRegistry>,
638 cx: &mut Context<Self>,
639 ) -> Task<Result<Entity<BufferDiff>>> {
640 cx.spawn(async move |this, cx| {
641 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
642 let content = match oid {
643 None => None,
644 Some(oid) => Some(
645 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
646 .await?,
647 ),
648 };
649 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
650
651 buffer_diff
652 .update(cx, |buffer_diff, cx| {
653 buffer_diff.set_base_text(
654 content.map(Arc::new),
655 buffer_snapshot.language().cloned(),
656 Some(languages.clone()),
657 buffer_snapshot.text,
658 cx,
659 )
660 })?
661 .await?;
662 let unstaged_diff = this
663 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
664 .await?;
665 buffer_diff.update(cx, |buffer_diff, _| {
666 buffer_diff.set_secondary_diff(unstaged_diff);
667 })?;
668
669 this.update(cx, |_, cx| {
670 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
671 .detach();
672 })?;
673
674 Ok(buffer_diff)
675 })
676 }
677
678 pub fn open_uncommitted_diff(
679 &mut self,
680 buffer: Entity<Buffer>,
681 cx: &mut Context<Self>,
682 ) -> Task<Result<Entity<BufferDiff>>> {
683 let buffer_id = buffer.read(cx).remote_id();
684
685 if let Some(diff_state) = self.diffs.get(&buffer_id)
686 && let Some(uncommitted_diff) = diff_state
687 .read(cx)
688 .uncommitted_diff
689 .as_ref()
690 .and_then(|weak| weak.upgrade())
691 {
692 if let Some(task) =
693 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
694 {
695 return cx.background_executor().spawn(async move {
696 task.await;
697 Ok(uncommitted_diff)
698 });
699 }
700 return Task::ready(Ok(uncommitted_diff));
701 }
702
703 let Some((repo, repo_path)) =
704 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
705 else {
706 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
707 };
708
709 let task = self
710 .loading_diffs
711 .entry((buffer_id, DiffKind::Uncommitted))
712 .or_insert_with(|| {
713 let changes = repo.update(cx, |repo, cx| {
714 repo.load_committed_text(buffer_id, repo_path, cx)
715 });
716
717 // todo(lw): hot foreground spawn
718 cx.spawn(async move |this, cx| {
719 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
720 .await
721 .map_err(Arc::new)
722 })
723 .shared()
724 })
725 .clone();
726
727 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
728 }
729
730 async fn open_diff_internal(
731 this: WeakEntity<Self>,
732 kind: DiffKind,
733 texts: Result<DiffBasesChange>,
734 buffer_entity: Entity<Buffer>,
735 cx: &mut AsyncApp,
736 ) -> Result<Entity<BufferDiff>> {
737 let diff_bases_change = match texts {
738 Err(e) => {
739 this.update(cx, |this, cx| {
740 let buffer = buffer_entity.read(cx);
741 let buffer_id = buffer.remote_id();
742 this.loading_diffs.remove(&(buffer_id, kind));
743 })?;
744 return Err(e);
745 }
746 Ok(change) => change,
747 };
748
749 this.update(cx, |this, cx| {
750 let buffer = buffer_entity.read(cx);
751 let buffer_id = buffer.remote_id();
752 let language = buffer.language().cloned();
753 let language_registry = buffer.language_registry();
754 let text_snapshot = buffer.text_snapshot();
755 this.loading_diffs.remove(&(buffer_id, kind));
756
757 let git_store = cx.weak_entity();
758 let diff_state = this
759 .diffs
760 .entry(buffer_id)
761 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
762
763 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
764
765 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
766 diff_state.update(cx, |diff_state, cx| {
767 diff_state.language = language;
768 diff_state.language_registry = language_registry;
769
770 match kind {
771 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
772 DiffKind::Uncommitted => {
773 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
774 diff
775 } else {
776 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
777 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
778 unstaged_diff
779 };
780
781 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
782 diff_state.uncommitted_diff = Some(diff.downgrade())
783 }
784 }
785
786 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
787 let rx = diff_state.wait_for_recalculation();
788
789 anyhow::Ok(async move {
790 if let Some(rx) = rx {
791 rx.await;
792 }
793 Ok(diff)
794 })
795 })
796 })??
797 .await
798 }
799
800 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
801 let diff_state = self.diffs.get(&buffer_id)?;
802 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
803 }
804
805 pub fn get_uncommitted_diff(
806 &self,
807 buffer_id: BufferId,
808 cx: &App,
809 ) -> Option<Entity<BufferDiff>> {
810 let diff_state = self.diffs.get(&buffer_id)?;
811 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
812 }
813
814 pub fn open_conflict_set(
815 &mut self,
816 buffer: Entity<Buffer>,
817 cx: &mut Context<Self>,
818 ) -> Entity<ConflictSet> {
819 log::debug!("open conflict set");
820 let buffer_id = buffer.read(cx).remote_id();
821
822 if let Some(git_state) = self.diffs.get(&buffer_id)
823 && let Some(conflict_set) = git_state
824 .read(cx)
825 .conflict_set
826 .as_ref()
827 .and_then(|weak| weak.upgrade())
828 {
829 let conflict_set = conflict_set;
830 let buffer_snapshot = buffer.read(cx).text_snapshot();
831
832 git_state.update(cx, |state, cx| {
833 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
834 });
835
836 return conflict_set;
837 }
838
839 let is_unmerged = self
840 .repository_and_path_for_buffer_id(buffer_id, cx)
841 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
842 let git_store = cx.weak_entity();
843 let buffer_git_state = self
844 .diffs
845 .entry(buffer_id)
846 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
847 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
848
849 self._subscriptions
850 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
851 cx.emit(GitStoreEvent::ConflictsUpdated);
852 }));
853
854 buffer_git_state.update(cx, |state, cx| {
855 state.conflict_set = Some(conflict_set.downgrade());
856 let buffer_snapshot = buffer.read(cx).text_snapshot();
857 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
858 });
859
860 conflict_set
861 }
862
863 pub fn project_path_git_status(
864 &self,
865 project_path: &ProjectPath,
866 cx: &App,
867 ) -> Option<FileStatus> {
868 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
869 Some(repo.read(cx).status_for_path(&repo_path)?.status)
870 }
871
872 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
873 let mut work_directory_abs_paths = Vec::new();
874 let mut checkpoints = Vec::new();
875 for repository in self.repositories.values() {
876 repository.update(cx, |repository, _| {
877 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
878 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
879 });
880 }
881
882 cx.background_executor().spawn(async move {
883 let checkpoints = future::try_join_all(checkpoints).await?;
884 Ok(GitStoreCheckpoint {
885 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
886 .into_iter()
887 .zip(checkpoints)
888 .collect(),
889 })
890 })
891 }
892
893 pub fn restore_checkpoint(
894 &self,
895 checkpoint: GitStoreCheckpoint,
896 cx: &mut App,
897 ) -> Task<Result<()>> {
898 let repositories_by_work_dir_abs_path = self
899 .repositories
900 .values()
901 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
902 .collect::<HashMap<_, _>>();
903
904 let mut tasks = Vec::new();
905 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
906 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
907 let restore = repository.update(cx, |repository, _| {
908 repository.restore_checkpoint(checkpoint)
909 });
910 tasks.push(async move { restore.await? });
911 }
912 }
913 cx.background_spawn(async move {
914 future::try_join_all(tasks).await?;
915 Ok(())
916 })
917 }
918
919 /// Compares two checkpoints, returning true if they are equal.
920 pub fn compare_checkpoints(
921 &self,
922 left: GitStoreCheckpoint,
923 mut right: GitStoreCheckpoint,
924 cx: &mut App,
925 ) -> Task<Result<bool>> {
926 let repositories_by_work_dir_abs_path = self
927 .repositories
928 .values()
929 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
930 .collect::<HashMap<_, _>>();
931
932 let mut tasks = Vec::new();
933 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
934 if let Some(right_checkpoint) = right
935 .checkpoints_by_work_dir_abs_path
936 .remove(&work_dir_abs_path)
937 {
938 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
939 {
940 let compare = repository.update(cx, |repository, _| {
941 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
942 });
943
944 tasks.push(async move { compare.await? });
945 }
946 } else {
947 return Task::ready(Ok(false));
948 }
949 }
950 cx.background_spawn(async move {
951 Ok(future::try_join_all(tasks)
952 .await?
953 .into_iter()
954 .all(|result| result))
955 })
956 }
957
958 /// Blames a buffer.
959 pub fn blame_buffer(
960 &self,
961 buffer: &Entity<Buffer>,
962 version: Option<clock::Global>,
963 cx: &mut App,
964 ) -> Task<Result<Option<Blame>>> {
965 let buffer = buffer.read(cx);
966 let Some((repo, repo_path)) =
967 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
968 else {
969 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
970 };
971 let content = match &version {
972 Some(version) => buffer.rope_for_version(version),
973 None => buffer.as_rope().clone(),
974 };
975 let version = version.unwrap_or(buffer.version());
976 let buffer_id = buffer.remote_id();
977
978 let rx = repo.update(cx, |repo, _| {
979 repo.send_job(None, move |state, _| async move {
980 match state {
981 RepositoryState::Local { backend, .. } => backend
982 .blame(repo_path.clone(), content)
983 .await
984 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
985 .map(Some),
986 RepositoryState::Remote { project_id, client } => {
987 let response = client
988 .request(proto::BlameBuffer {
989 project_id: project_id.to_proto(),
990 buffer_id: buffer_id.into(),
991 version: serialize_version(&version),
992 })
993 .await?;
994 Ok(deserialize_blame_buffer_response(response))
995 }
996 }
997 })
998 });
999
1000 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1001 }
1002
1003 pub fn get_permalink_to_line(
1004 &self,
1005 buffer: &Entity<Buffer>,
1006 selection: Range<u32>,
1007 cx: &mut App,
1008 ) -> Task<Result<url::Url>> {
1009 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1010 return Task::ready(Err(anyhow!("buffer has no file")));
1011 };
1012
1013 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1014 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1015 cx,
1016 ) else {
1017 // If we're not in a Git repo, check whether this is a Rust source
1018 // file in the Cargo registry (presumably opened with go-to-definition
1019 // from a normal Rust file). If so, we can put together a permalink
1020 // using crate metadata.
1021 if buffer
1022 .read(cx)
1023 .language()
1024 .is_none_or(|lang| lang.name() != "Rust".into())
1025 {
1026 return Task::ready(Err(anyhow!("no permalink available")));
1027 }
1028 let file_path = file.worktree.read(cx).absolutize(&file.path);
1029 return cx.spawn(async move |cx| {
1030 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1031 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1032 .context("no permalink available")
1033 });
1034 };
1035
1036 let buffer_id = buffer.read(cx).remote_id();
1037 let branch = repo.read(cx).branch.clone();
1038 let remote = branch
1039 .as_ref()
1040 .and_then(|b| b.upstream.as_ref())
1041 .and_then(|b| b.remote_name())
1042 .unwrap_or("origin")
1043 .to_string();
1044
1045 let rx = repo.update(cx, |repo, _| {
1046 repo.send_job(None, move |state, cx| async move {
1047 match state {
1048 RepositoryState::Local { backend, .. } => {
1049 let origin_url = backend
1050 .remote_url(&remote)
1051 .with_context(|| format!("remote \"{remote}\" not found"))?;
1052
1053 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1054
1055 let provider_registry =
1056 cx.update(GitHostingProviderRegistry::default_global)?;
1057
1058 let (provider, remote) =
1059 parse_git_remote_url(provider_registry, &origin_url)
1060 .context("parsing Git remote URL")?;
1061
1062 Ok(provider.build_permalink(
1063 remote,
1064 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1065 ))
1066 }
1067 RepositoryState::Remote { project_id, client } => {
1068 let response = client
1069 .request(proto::GetPermalinkToLine {
1070 project_id: project_id.to_proto(),
1071 buffer_id: buffer_id.into(),
1072 selection: Some(proto::Range {
1073 start: selection.start as u64,
1074 end: selection.end as u64,
1075 }),
1076 })
1077 .await?;
1078
1079 url::Url::parse(&response.permalink).context("failed to parse permalink")
1080 }
1081 }
1082 })
1083 });
1084 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1085 }
1086
1087 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1088 match &self.state {
1089 GitStoreState::Local {
1090 downstream: downstream_client,
1091 ..
1092 } => downstream_client
1093 .as_ref()
1094 .map(|state| (state.client.clone(), state.project_id)),
1095 GitStoreState::Remote {
1096 downstream: downstream_client,
1097 ..
1098 } => downstream_client.clone(),
1099 }
1100 }
1101
1102 fn upstream_client(&self) -> Option<AnyProtoClient> {
1103 match &self.state {
1104 GitStoreState::Local { .. } => None,
1105 GitStoreState::Remote {
1106 upstream_client, ..
1107 } => Some(upstream_client.clone()),
1108 }
1109 }
1110
1111 fn on_worktree_store_event(
1112 &mut self,
1113 worktree_store: Entity<WorktreeStore>,
1114 event: &WorktreeStoreEvent,
1115 cx: &mut Context<Self>,
1116 ) {
1117 let GitStoreState::Local {
1118 project_environment,
1119 downstream,
1120 next_repository_id,
1121 fs,
1122 } = &self.state
1123 else {
1124 return;
1125 };
1126
1127 match event {
1128 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1129 if let Some(worktree) = self
1130 .worktree_store
1131 .read(cx)
1132 .worktree_for_id(*worktree_id, cx)
1133 {
1134 let paths_by_git_repo =
1135 self.process_updated_entries(&worktree, updated_entries, cx);
1136 let downstream = downstream
1137 .as_ref()
1138 .map(|downstream| downstream.updates_tx.clone());
1139 cx.spawn(async move |_, cx| {
1140 let paths_by_git_repo = paths_by_git_repo.await;
1141 for (repo, paths) in paths_by_git_repo {
1142 repo.update(cx, |repo, cx| {
1143 repo.paths_changed(paths, downstream.clone(), cx);
1144 })
1145 .ok();
1146 }
1147 })
1148 .detach();
1149 }
1150 }
1151 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1152 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1153 else {
1154 return;
1155 };
1156 if !worktree.read(cx).is_visible() {
1157 log::debug!(
1158 "not adding repositories for local worktree {:?} because it's not visible",
1159 worktree.read(cx).abs_path()
1160 );
1161 return;
1162 }
1163 self.update_repositories_from_worktree(
1164 project_environment.clone(),
1165 next_repository_id.clone(),
1166 downstream
1167 .as_ref()
1168 .map(|downstream| downstream.updates_tx.clone()),
1169 changed_repos.clone(),
1170 fs.clone(),
1171 cx,
1172 );
1173 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1174 }
1175 _ => {}
1176 }
    }

    fn on_repository_event(
1179 &mut self,
1180 repo: Entity<Repository>,
1181 event: &RepositoryEvent,
1182 cx: &mut Context<Self>,
1183 ) {
1184 let id = repo.read(cx).id;
1185 let repo_snapshot = repo.read(cx).snapshot.clone();
1186 for (buffer_id, diff) in self.diffs.iter() {
1187 if let Some((buffer_repo, repo_path)) =
1188 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1189 && buffer_repo == repo
1190 {
1191 diff.update(cx, |diff, cx| {
1192 if let Some(conflict_set) = &diff.conflict_set {
1193 let conflict_status_changed =
1194 conflict_set.update(cx, |conflict_set, cx| {
1195 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1196 conflict_set.set_has_conflict(has_conflict, cx)
1197 })?;
1198 if conflict_status_changed {
1199 let buffer_store = self.buffer_store.read(cx);
1200 if let Some(buffer) = buffer_store.get(*buffer_id) {
1201 let _ = diff
1202 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1203 }
1204 }
1205 }
1206 anyhow::Ok(())
1207 })
1208 .ok();
1209 }
1210 }
1211 cx.emit(GitStoreEvent::RepositoryUpdated(
1212 id,
1213 event.clone(),
1214 self.active_repo_id == Some(id),
1215 ))
1216 }
1217
1218 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1219 cx.emit(GitStoreEvent::JobsUpdated)
1220 }
1221
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1223 fn update_repositories_from_worktree(
1224 &mut self,
1225 project_environment: Entity<ProjectEnvironment>,
1226 next_repository_id: Arc<AtomicU64>,
1227 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1228 updated_git_repositories: UpdatedGitRepositoriesSet,
1229 fs: Arc<dyn Fs>,
1230 cx: &mut Context<Self>,
1231 ) {
1232 let mut removed_ids = Vec::new();
1233 for update in updated_git_repositories.iter() {
1234 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1235 let existing_work_directory_abs_path =
1236 repo.read(cx).work_directory_abs_path.clone();
1237 Some(&existing_work_directory_abs_path)
1238 == update.old_work_directory_abs_path.as_ref()
1239 || Some(&existing_work_directory_abs_path)
1240 == update.new_work_directory_abs_path.as_ref()
1241 }) {
1242 if let Some(new_work_directory_abs_path) =
1243 update.new_work_directory_abs_path.clone()
1244 {
1245 existing.update(cx, |existing, cx| {
1246 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1247 existing.schedule_scan(updates_tx.clone(), cx);
1248 });
1249 } else {
1250 removed_ids.push(*id);
1251 }
1252 } else if let UpdatedGitRepository {
1253 new_work_directory_abs_path: Some(work_directory_abs_path),
1254 dot_git_abs_path: Some(dot_git_abs_path),
1255 repository_dir_abs_path: Some(repository_dir_abs_path),
1256 common_dir_abs_path: Some(common_dir_abs_path),
1257 ..
1258 } = update
1259 {
1260 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1261 let git_store = cx.weak_entity();
1262 let repo = cx.new(|cx| {
1263 let mut repo = Repository::local(
1264 id,
1265 work_directory_abs_path.clone(),
1266 dot_git_abs_path.clone(),
1267 repository_dir_abs_path.clone(),
1268 common_dir_abs_path.clone(),
1269 project_environment.downgrade(),
1270 fs.clone(),
1271 git_store,
1272 cx,
1273 );
1274 repo.schedule_scan(updates_tx.clone(), cx);
1275 repo
1276 });
1277 self._subscriptions
1278 .push(cx.subscribe(&repo, Self::on_repository_event));
1279 self._subscriptions
1280 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1281 self.repositories.insert(id, repo);
1282 cx.emit(GitStoreEvent::RepositoryAdded);
1283 self.active_repo_id.get_or_insert_with(|| {
1284 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1285 id
1286 });
1287 }
1288 }
1289
1290 for id in removed_ids {
1291 if self.active_repo_id == Some(id) {
1292 self.active_repo_id = None;
1293 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1294 }
1295 self.repositories.remove(&id);
1296 if let Some(updates_tx) = updates_tx.as_ref() {
1297 updates_tx
1298 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1299 .ok();
1300 }
1301 }
1302 }
1303
1304 fn on_buffer_store_event(
1305 &mut self,
1306 _: Entity<BufferStore>,
1307 event: &BufferStoreEvent,
1308 cx: &mut Context<Self>,
1309 ) {
1310 match event {
1311 BufferStoreEvent::BufferAdded(buffer) => {
1312 cx.subscribe(buffer, |this, buffer, event, cx| {
1313 if let BufferEvent::LanguageChanged = event {
1314 let buffer_id = buffer.read(cx).remote_id();
1315 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1316 diff_state.update(cx, |diff_state, cx| {
1317 diff_state.buffer_language_changed(buffer, cx);
1318 });
1319 }
1320 }
1321 })
1322 .detach();
1323 }
1324 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1325 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1326 diffs.remove(buffer_id);
1327 }
1328 }
1329 BufferStoreEvent::BufferDropped(buffer_id) => {
1330 self.diffs.remove(buffer_id);
1331 for diffs in self.shared_diffs.values_mut() {
1332 diffs.remove(buffer_id);
1333 }
1334 }
1335
1336 _ => {}
1337 }
1338 }
1339
1340 pub fn recalculate_buffer_diffs(
1341 &mut self,
1342 buffers: Vec<Entity<Buffer>>,
1343 cx: &mut Context<Self>,
1344 ) -> impl Future<Output = ()> + use<> {
1345 let mut futures = Vec::new();
1346 for buffer in buffers {
1347 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1348 let buffer = buffer.read(cx).text_snapshot();
1349 diff_state.update(cx, |diff_state, cx| {
1350 diff_state.recalculate_diffs(buffer.clone(), cx);
1351 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1352 });
1353 futures.push(diff_state.update(cx, |diff_state, cx| {
1354 diff_state
1355 .reparse_conflict_markers(buffer, cx)
1356 .map(|_| {})
1357 .boxed()
1358 }));
1359 }
1360 }
1361 async move {
1362 futures::future::join_all(futures).await;
1363 }
1364 }
1365
1366 fn on_buffer_diff_event(
1367 &mut self,
1368 diff: Entity<buffer_diff::BufferDiff>,
1369 event: &BufferDiffEvent,
1370 cx: &mut Context<Self>,
1371 ) {
1372 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1373 let buffer_id = diff.read(cx).buffer_id;
1374 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1375 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1376 diff_state.hunk_staging_operation_count += 1;
1377 diff_state.hunk_staging_operation_count
1378 });
1379 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1380 let recv = repo.update(cx, |repo, cx| {
1381 log::debug!("hunks changed for {}", path.as_unix_str());
1382 repo.spawn_set_index_text_job(
1383 path,
1384 new_index_text.as_ref().map(|rope| rope.to_string()),
1385 Some(hunk_staging_operation_count),
1386 cx,
1387 )
1388 });
1389 let diff = diff.downgrade();
1390 cx.spawn(async move |this, cx| {
1391 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1392 diff.update(cx, |diff, cx| {
1393 diff.clear_pending_hunks(cx);
1394 })
1395 .ok();
1396 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1397 .ok();
1398 }
1399 })
1400 .detach();
1401 }
1402 }
1403 }
1404 }
1405
1406 fn local_worktree_git_repos_changed(
1407 &mut self,
1408 worktree: Entity<Worktree>,
1409 changed_repos: &UpdatedGitRepositoriesSet,
1410 cx: &mut Context<Self>,
1411 ) {
1412 log::debug!("local worktree repos changed");
1413 debug_assert!(worktree.read(cx).is_local());
1414
1415 for repository in self.repositories.values() {
1416 repository.update(cx, |repository, cx| {
1417 let repo_abs_path = &repository.work_directory_abs_path;
1418 if changed_repos.iter().any(|update| {
1419 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1420 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1421 }) {
1422 repository.reload_buffer_diff_bases(cx);
1423 }
1424 });
1425 }
1426 }
1427
1428 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1429 &self.repositories
1430 }
1431
1432 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1433 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1434 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1435 Some(status.status)
1436 }
1437
1438 pub fn repository_and_path_for_buffer_id(
1439 &self,
1440 buffer_id: BufferId,
1441 cx: &App,
1442 ) -> Option<(Entity<Repository>, RepoPath)> {
1443 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1444 let project_path = buffer.read(cx).project_path(cx)?;
1445 self.repository_and_path_for_project_path(&project_path, cx)
1446 }
1447
1448 pub fn repository_and_path_for_project_path(
1449 &self,
1450 path: &ProjectPath,
1451 cx: &App,
1452 ) -> Option<(Entity<Repository>, RepoPath)> {
1453 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
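        // When repositories are nested, pick the one whose work directory is deepest
        // (it sorts last), i.e. the innermost repository containing this path.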
1454 self.repositories
1455 .values()
1456 .filter_map(|repo| {
1457 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1458 Some((repo.clone(), repo_path))
1459 })
1460 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1461 }
1462
1463 pub fn git_init(
1464 &self,
1465 path: Arc<Path>,
1466 fallback_branch_name: String,
1467 cx: &App,
1468 ) -> Task<Result<()>> {
1469 match &self.state {
1470 GitStoreState::Local { fs, .. } => {
1471 let fs = fs.clone();
1472 cx.background_executor()
1473 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1474 }
1475 GitStoreState::Remote {
1476 upstream_client,
1477 upstream_project_id: project_id,
1478 ..
1479 } => {
1480 let client = upstream_client.clone();
1481 let project_id = *project_id;
1482 cx.background_executor().spawn(async move {
1483 client
1484 .request(proto::GitInit {
                            project_id,
1486 abs_path: path.to_string_lossy().into_owned(),
1487 fallback_branch_name,
1488 })
1489 .await?;
1490 Ok(())
1491 })
1492 }
1493 }
1494 }
1495
1496 pub fn git_clone(
1497 &self,
1498 repo: String,
1499 path: impl Into<Arc<std::path::Path>>,
1500 cx: &App,
1501 ) -> Task<Result<()>> {
1502 let path = path.into();
1503 match &self.state {
1504 GitStoreState::Local { fs, .. } => {
1505 let fs = fs.clone();
1506 cx.background_executor()
1507 .spawn(async move { fs.git_clone(&repo, &path).await })
1508 }
1509 GitStoreState::Remote {
1510 upstream_client,
1511 upstream_project_id,
1512 ..
1513 } => {
1514 if upstream_client.is_via_collab() {
1515 return Task::ready(Err(anyhow!(
1516 "Git Clone isn't supported for project guests"
1517 )));
1518 }
1519 let request = upstream_client.request(proto::GitClone {
1520 project_id: *upstream_project_id,
1521 abs_path: path.to_string_lossy().into_owned(),
1522 remote_repo: repo,
1523 });
1524
1525 cx.background_spawn(async move {
1526 let result = request.await?;
1527
1528 match result.success {
1529 true => Ok(()),
1530 false => Err(anyhow!("Git Clone failed")),
1531 }
1532 })
1533 }
1534 }
1535 }
1536
1537 async fn handle_update_repository(
1538 this: Entity<Self>,
1539 envelope: TypedEnvelope<proto::UpdateRepository>,
1540 mut cx: AsyncApp,
1541 ) -> Result<()> {
1542 this.update(&mut cx, |this, cx| {
1543 let path_style = this.worktree_store.read(cx).path_style();
1544 let mut update = envelope.payload;
1545
1546 let id = RepositoryId::from_proto(update.id);
1547 let client = this.upstream_client().context("no upstream client")?;
1548
1549 let mut repo_subscription = None;
1550 let repo = this.repositories.entry(id).or_insert_with(|| {
1551 let git_store = cx.weak_entity();
1552 let repo = cx.new(|cx| {
1553 Repository::remote(
1554 id,
1555 Path::new(&update.abs_path).into(),
1556 path_style,
1557 ProjectId(update.project_id),
1558 client,
1559 git_store,
1560 cx,
1561 )
1562 });
1563 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1564 cx.emit(GitStoreEvent::RepositoryAdded);
1565 repo
1566 });
1567 this._subscriptions.extend(repo_subscription);
1568
1569 repo.update(cx, {
1570 let update = update.clone();
1571 |repo, cx| repo.apply_remote_update(update, cx)
1572 })?;
1573
1574 this.active_repo_id.get_or_insert_with(|| {
1575 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1576 id
1577 });
1578
1579 if let Some((client, project_id)) = this.downstream_client() {
1580 update.project_id = project_id.to_proto();
1581 client.send(update).log_err();
1582 }
1583 Ok(())
1584 })?
1585 }
1586
1587 async fn handle_remove_repository(
1588 this: Entity<Self>,
1589 envelope: TypedEnvelope<proto::RemoveRepository>,
1590 mut cx: AsyncApp,
1591 ) -> Result<()> {
1592 this.update(&mut cx, |this, cx| {
1593 let mut update = envelope.payload;
1594 let id = RepositoryId::from_proto(update.id);
1595 this.repositories.remove(&id);
1596 if let Some((client, project_id)) = this.downstream_client() {
1597 update.project_id = project_id.to_proto();
1598 client.send(update).log_err();
1599 }
1600 if this.active_repo_id == Some(id) {
1601 this.active_repo_id = None;
1602 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1603 }
1604 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1605 })
1606 }
1607
1608 async fn handle_git_init(
1609 this: Entity<Self>,
1610 envelope: TypedEnvelope<proto::GitInit>,
1611 cx: AsyncApp,
1612 ) -> Result<proto::Ack> {
1613 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1614 let name = envelope.payload.fallback_branch_name;
1615 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1616 .await?;
1617
1618 Ok(proto::Ack {})
1619 }
1620
1621 async fn handle_git_clone(
1622 this: Entity<Self>,
1623 envelope: TypedEnvelope<proto::GitClone>,
1624 cx: AsyncApp,
1625 ) -> Result<proto::GitCloneResponse> {
1626 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1627 let repo_name = envelope.payload.remote_repo;
1628 let result = cx
1629 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1630 .await;
1631
1632 Ok(proto::GitCloneResponse {
1633 success: result.is_ok(),
1634 })
1635 }
1636
1637 async fn handle_fetch(
1638 this: Entity<Self>,
1639 envelope: TypedEnvelope<proto::Fetch>,
1640 mut cx: AsyncApp,
1641 ) -> Result<proto::RemoteMessageResponse> {
1642 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1643 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1644 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1645 let askpass_id = envelope.payload.askpass_id;
1646
1647 let askpass = make_remote_delegate(
1648 this,
1649 envelope.payload.project_id,
1650 repository_id,
1651 askpass_id,
1652 &mut cx,
1653 );
1654
1655 let remote_output = repository_handle
1656 .update(&mut cx, |repository_handle, cx| {
1657 repository_handle.fetch(fetch_options, askpass, cx)
1658 })?
1659 .await??;
1660
1661 Ok(proto::RemoteMessageResponse {
1662 stdout: remote_output.stdout,
1663 stderr: remote_output.stderr,
1664 })
1665 }
1666
1667 async fn handle_push(
1668 this: Entity<Self>,
1669 envelope: TypedEnvelope<proto::Push>,
1670 mut cx: AsyncApp,
1671 ) -> Result<proto::RemoteMessageResponse> {
1672 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1673 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1674
1675 let askpass_id = envelope.payload.askpass_id;
1676 let askpass = make_remote_delegate(
1677 this,
1678 envelope.payload.project_id,
1679 repository_id,
1680 askpass_id,
1681 &mut cx,
1682 );
1683
1684 let options = envelope
1685 .payload
1686 .options
1687 .as_ref()
1688 .map(|_| match envelope.payload.options() {
1689 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1690 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1691 });
1692
1693 let branch_name = envelope.payload.branch_name.into();
1694 let remote_name = envelope.payload.remote_name.into();
1695
1696 let remote_output = repository_handle
1697 .update(&mut cx, |repository_handle, cx| {
1698 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1699 })?
1700 .await??;
1701 Ok(proto::RemoteMessageResponse {
1702 stdout: remote_output.stdout,
1703 stderr: remote_output.stderr,
1704 })
1705 }
1706
1707 async fn handle_pull(
1708 this: Entity<Self>,
1709 envelope: TypedEnvelope<proto::Pull>,
1710 mut cx: AsyncApp,
1711 ) -> Result<proto::RemoteMessageResponse> {
1712 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1713 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1714 let askpass_id = envelope.payload.askpass_id;
1715 let askpass = make_remote_delegate(
1716 this,
1717 envelope.payload.project_id,
1718 repository_id,
1719 askpass_id,
1720 &mut cx,
1721 );
1722
1723 let branch_name = envelope.payload.branch_name.into();
1724 let remote_name = envelope.payload.remote_name.into();
1725
1726 let remote_message = repository_handle
1727 .update(&mut cx, |repository_handle, cx| {
1728 repository_handle.pull(branch_name, remote_name, askpass, cx)
1729 })?
1730 .await??;
1731
1732 Ok(proto::RemoteMessageResponse {
1733 stdout: remote_message.stdout,
1734 stderr: remote_message.stderr,
1735 })
1736 }
1737
1738 async fn handle_stage(
1739 this: Entity<Self>,
1740 envelope: TypedEnvelope<proto::Stage>,
1741 mut cx: AsyncApp,
1742 ) -> Result<proto::Ack> {
1743 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1744 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1745
1746 let entries = envelope
1747 .payload
1748 .paths
1749 .into_iter()
1750 .map(|path| RepoPath::new(&path))
1751 .collect::<Result<Vec<_>>>()?;
1752
1753 repository_handle
1754 .update(&mut cx, |repository_handle, cx| {
1755 repository_handle.stage_entries(entries, cx)
1756 })?
1757 .await?;
1758 Ok(proto::Ack {})
1759 }
1760
1761 async fn handle_unstage(
1762 this: Entity<Self>,
1763 envelope: TypedEnvelope<proto::Unstage>,
1764 mut cx: AsyncApp,
1765 ) -> Result<proto::Ack> {
1766 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1767 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1768
1769 let entries = envelope
1770 .payload
1771 .paths
1772 .into_iter()
1773 .map(|path| RepoPath::new(&path))
1774 .collect::<Result<Vec<_>>>()?;
1775
1776 repository_handle
1777 .update(&mut cx, |repository_handle, cx| {
1778 repository_handle.unstage_entries(entries, cx)
1779 })?
1780 .await?;
1781
1782 Ok(proto::Ack {})
1783 }
1784
1785 async fn handle_stash(
1786 this: Entity<Self>,
1787 envelope: TypedEnvelope<proto::Stash>,
1788 mut cx: AsyncApp,
1789 ) -> Result<proto::Ack> {
1790 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1791 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1792
1793 let entries = envelope
1794 .payload
1795 .paths
1796 .into_iter()
1797 .map(|path| RepoPath::new(&path))
1798 .collect::<Result<Vec<_>>>()?;
1799
1800 repository_handle
1801 .update(&mut cx, |repository_handle, cx| {
1802 repository_handle.stash_entries(entries, cx)
1803 })?
1804 .await?;
1805
1806 Ok(proto::Ack {})
1807 }
1808
1809 async fn handle_stash_pop(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::StashPop>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::Ack> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1817
1818 repository_handle
1819 .update(&mut cx, |repository_handle, cx| {
1820 repository_handle.stash_pop(stash_index, cx)
1821 })?
1822 .await?;
1823
1824 Ok(proto::Ack {})
1825 }
1826
1827 async fn handle_stash_apply(
1828 this: Entity<Self>,
1829 envelope: TypedEnvelope<proto::StashApply>,
1830 mut cx: AsyncApp,
1831 ) -> Result<proto::Ack> {
1832 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1833 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1834 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1835
1836 repository_handle
1837 .update(&mut cx, |repository_handle, cx| {
1838 repository_handle.stash_apply(stash_index, cx)
1839 })?
1840 .await?;
1841
1842 Ok(proto::Ack {})
1843 }
1844
1845 async fn handle_stash_drop(
1846 this: Entity<Self>,
1847 envelope: TypedEnvelope<proto::StashDrop>,
1848 mut cx: AsyncApp,
1849 ) -> Result<proto::Ack> {
1850 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1851 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1852 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1853
1854 repository_handle
1855 .update(&mut cx, |repository_handle, cx| {
1856 repository_handle.stash_drop(stash_index, cx)
1857 })?
1858 .await??;
1859
1860 Ok(proto::Ack {})
1861 }
1862
1863 async fn handle_set_index_text(
1864 this: Entity<Self>,
1865 envelope: TypedEnvelope<proto::SetIndexText>,
1866 mut cx: AsyncApp,
1867 ) -> Result<proto::Ack> {
1868 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1869 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1870 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1871
1872 repository_handle
1873 .update(&mut cx, |repository_handle, cx| {
1874 repository_handle.spawn_set_index_text_job(
1875 repo_path,
1876 envelope.payload.text,
1877 None,
1878 cx,
1879 )
1880 })?
1881 .await??;
1882 Ok(proto::Ack {})
1883 }
1884
1885 async fn handle_commit(
1886 this: Entity<Self>,
1887 envelope: TypedEnvelope<proto::Commit>,
1888 mut cx: AsyncApp,
1889 ) -> Result<proto::Ack> {
1890 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1891 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1892
1893 let message = SharedString::from(envelope.payload.message);
1894 let name = envelope.payload.name.map(SharedString::from);
1895 let email = envelope.payload.email.map(SharedString::from);
1896 let options = envelope.payload.options.unwrap_or_default();
1897
1898 repository_handle
1899 .update(&mut cx, |repository_handle, cx| {
1900 repository_handle.commit(
1901 message,
1902 name.zip(email),
1903 CommitOptions {
1904 amend: options.amend,
1905 signoff: options.signoff,
1906 },
1907 cx,
1908 )
1909 })?
1910 .await??;
1911 Ok(proto::Ack {})
1912 }
1913
1914 async fn handle_get_remotes(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::GetRemotes>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::GetRemotesResponse> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921
1922 let branch_name = envelope.payload.branch_name;
1923
1924 let remotes = repository_handle
1925 .update(&mut cx, |repository_handle, _| {
1926 repository_handle.get_remotes(branch_name)
1927 })?
1928 .await??;
1929
1930 Ok(proto::GetRemotesResponse {
1931 remotes: remotes
1932 .into_iter()
1933 .map(|remotes| proto::get_remotes_response::Remote {
1934 name: remotes.name.to_string(),
1935 })
1936 .collect::<Vec<_>>(),
1937 })
1938 }
1939
1940 async fn handle_get_worktrees(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::GitWorktreesResponse> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947
1948 let worktrees = repository_handle
1949 .update(&mut cx, |repository_handle, _| {
1950 repository_handle.worktrees()
1951 })?
1952 .await??;
1953
1954 Ok(proto::GitWorktreesResponse {
1955 worktrees: worktrees
1956 .into_iter()
1957 .map(|worktree| worktree_to_proto(&worktree))
1958 .collect::<Vec<_>>(),
1959 })
1960 }
1961
1962 async fn handle_create_worktree(
1963 this: Entity<Self>,
1964 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1965 mut cx: AsyncApp,
1966 ) -> Result<proto::Ack> {
1967 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1968 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1969 let directory = PathBuf::from(envelope.payload.directory);
1970 let name = envelope.payload.name;
1971 let commit = envelope.payload.commit;
1972
1973 repository_handle
1974 .update(&mut cx, |repository_handle, _| {
1975 repository_handle.create_worktree(name, directory, commit)
1976 })?
1977 .await??;
1978
1979 Ok(proto::Ack {})
1980 }
1981
1982 async fn handle_get_branches(
1983 this: Entity<Self>,
1984 envelope: TypedEnvelope<proto::GitGetBranches>,
1985 mut cx: AsyncApp,
1986 ) -> Result<proto::GitBranchesResponse> {
1987 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1988 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1989
1990 let branches = repository_handle
1991 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1992 .await??;
1993
1994 Ok(proto::GitBranchesResponse {
1995 branches: branches
1996 .into_iter()
1997 .map(|branch| branch_to_proto(&branch))
1998 .collect::<Vec<_>>(),
1999 })
2000 }
2001 async fn handle_get_default_branch(
2002 this: Entity<Self>,
2003 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2004 mut cx: AsyncApp,
2005 ) -> Result<proto::GetDefaultBranchResponse> {
2006 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2007 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2008
2009 let branch = repository_handle
2010 .update(&mut cx, |repository_handle, _| {
2011 repository_handle.default_branch()
2012 })?
2013 .await??
2014 .map(Into::into);
2015
2016 Ok(proto::GetDefaultBranchResponse { branch })
2017 }
2018 async fn handle_create_branch(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::GitCreateBranch>,
2021 mut cx: AsyncApp,
2022 ) -> Result<proto::Ack> {
2023 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2024 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2025 let branch_name = envelope.payload.branch_name;
2026
2027 repository_handle
2028 .update(&mut cx, |repository_handle, _| {
2029 repository_handle.create_branch(branch_name)
2030 })?
2031 .await??;
2032
2033 Ok(proto::Ack {})
2034 }
2035
2036 async fn handle_change_branch(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::GitChangeBranch>,
2039 mut cx: AsyncApp,
2040 ) -> Result<proto::Ack> {
2041 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2042 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2043 let branch_name = envelope.payload.branch_name;
2044
2045 repository_handle
2046 .update(&mut cx, |repository_handle, _| {
2047 repository_handle.change_branch(branch_name)
2048 })?
2049 .await??;
2050
2051 Ok(proto::Ack {})
2052 }
2053
2054 async fn handle_rename_branch(
2055 this: Entity<Self>,
2056 envelope: TypedEnvelope<proto::GitRenameBranch>,
2057 mut cx: AsyncApp,
2058 ) -> Result<proto::Ack> {
2059 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2060 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2061 let branch = envelope.payload.branch;
2062 let new_name = envelope.payload.new_name;
2063
2064 repository_handle
2065 .update(&mut cx, |repository_handle, _| {
2066 repository_handle.rename_branch(branch, new_name)
2067 })?
2068 .await??;
2069
2070 Ok(proto::Ack {})
2071 }
2072
2073 async fn handle_show(
2074 this: Entity<Self>,
2075 envelope: TypedEnvelope<proto::GitShow>,
2076 mut cx: AsyncApp,
2077 ) -> Result<proto::GitCommitDetails> {
2078 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2079 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2080
2081 let commit = repository_handle
2082 .update(&mut cx, |repository_handle, _| {
2083 repository_handle.show(envelope.payload.commit)
2084 })?
2085 .await??;
2086 Ok(proto::GitCommitDetails {
2087 sha: commit.sha.into(),
2088 message: commit.message.into(),
2089 commit_timestamp: commit.commit_timestamp,
2090 author_email: commit.author_email.into(),
2091 author_name: commit.author_name.into(),
2092 })
2093 }
2094
2095 async fn handle_load_commit_diff(
2096 this: Entity<Self>,
2097 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2098 mut cx: AsyncApp,
2099 ) -> Result<proto::LoadCommitDiffResponse> {
2100 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2101 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2102
2103 let commit_diff = repository_handle
2104 .update(&mut cx, |repository_handle, _| {
2105 repository_handle.load_commit_diff(envelope.payload.commit)
2106 })?
2107 .await??;
2108 Ok(proto::LoadCommitDiffResponse {
2109 files: commit_diff
2110 .files
2111 .into_iter()
2112 .map(|file| proto::CommitFile {
2113 path: file.path.to_proto(),
2114 old_text: file.old_text,
2115 new_text: file.new_text,
2116 })
2117 .collect(),
2118 })
2119 }
2120
2121 async fn handle_reset(
2122 this: Entity<Self>,
2123 envelope: TypedEnvelope<proto::GitReset>,
2124 mut cx: AsyncApp,
2125 ) -> Result<proto::Ack> {
2126 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2127 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2128
2129 let mode = match envelope.payload.mode() {
2130 git_reset::ResetMode::Soft => ResetMode::Soft,
2131 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2132 };
2133
2134 repository_handle
2135 .update(&mut cx, |repository_handle, cx| {
2136 repository_handle.reset(envelope.payload.commit, mode, cx)
2137 })?
2138 .await??;
2139 Ok(proto::Ack {})
2140 }
2141
2142 async fn handle_checkout_files(
2143 this: Entity<Self>,
2144 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2145 mut cx: AsyncApp,
2146 ) -> Result<proto::Ack> {
2147 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2148 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2149 let paths = envelope
2150 .payload
2151 .paths
2152 .iter()
2153 .map(|s| RepoPath::from_proto(s))
2154 .collect::<Result<Vec<_>>>()?;
2155
2156 repository_handle
2157 .update(&mut cx, |repository_handle, cx| {
2158 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2159 })?
2160 .await??;
2161 Ok(proto::Ack {})
2162 }
2163
2164 async fn handle_open_commit_message_buffer(
2165 this: Entity<Self>,
2166 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2167 mut cx: AsyncApp,
2168 ) -> Result<proto::OpenBufferResponse> {
2169 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2170 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2171 let buffer = repository
2172 .update(&mut cx, |repository, cx| {
2173 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2174 })?
2175 .await?;
2176
2177 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2178 this.update(&mut cx, |this, cx| {
2179 this.buffer_store.update(cx, |buffer_store, cx| {
2180 buffer_store
2181 .create_buffer_for_peer(
2182 &buffer,
2183 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2184 cx,
2185 )
2186 .detach_and_log_err(cx);
2187 })
2188 })?;
2189
2190 Ok(proto::OpenBufferResponse {
2191 buffer_id: buffer_id.to_proto(),
2192 })
2193 }
2194
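    /// Handles a forwarded askpass prompt: the delegate registered for this askpass
    /// session is temporarily taken out of the shared map, asked for the password, and
    /// then re-inserted so that later prompts in the same session can still be answered.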
2195 async fn handle_askpass(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::AskPassRequest>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::AskPassResponse> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202
2203 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2204 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2205 debug_panic!("no askpass found");
2206 anyhow::bail!("no askpass found");
2207 };
2208
2209 let response = askpass
2210 .ask_password(envelope.payload.prompt)
2211 .await
2212 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2213
2214 delegates
2215 .lock()
2216 .insert(envelope.payload.askpass_id, askpass);
2217
        // In fact, we don't quite know what we're doing here, as we're sending the askpass password unencrypted, but...
2219 Ok(proto::AskPassResponse {
2220 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2221 })
2222 }
2223
2224 async fn handle_check_for_pushed_commits(
2225 this: Entity<Self>,
2226 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2227 mut cx: AsyncApp,
2228 ) -> Result<proto::CheckForPushedCommitsResponse> {
2229 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2230 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2231
2232 let branches = repository_handle
2233 .update(&mut cx, |repository_handle, _| {
2234 repository_handle.check_for_pushed_commits()
2235 })?
2236 .await??;
2237 Ok(proto::CheckForPushedCommitsResponse {
2238 pushed_to: branches
2239 .into_iter()
2240 .map(|commit| commit.to_string())
2241 .collect(),
2242 })
2243 }
2244
2245 async fn handle_git_diff(
2246 this: Entity<Self>,
2247 envelope: TypedEnvelope<proto::GitDiff>,
2248 mut cx: AsyncApp,
2249 ) -> Result<proto::GitDiffResponse> {
2250 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2251 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2252 let diff_type = match envelope.payload.diff_type() {
2253 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2254 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2255 };
2256
2257 let mut diff = repository_handle
2258 .update(&mut cx, |repository_handle, cx| {
2259 repository_handle.diff(diff_type, cx)
2260 })?
2261 .await??;
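        // Cap the diff sent back over the wire at roughly one megabyte (counted in characters).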
2262 const ONE_MB: usize = 1_000_000;
2263 if diff.len() > ONE_MB {
2264 diff = diff.chars().take(ONE_MB).collect()
2265 }
2266
2267 Ok(proto::GitDiffResponse { diff })
2268 }
2269
2270 async fn handle_tree_diff(
2271 this: Entity<Self>,
2272 request: TypedEnvelope<proto::GetTreeDiff>,
2273 mut cx: AsyncApp,
2274 ) -> Result<proto::GetTreeDiffResponse> {
2275 let repository_id = RepositoryId(request.payload.repository_id);
2276 let diff_type = if request.payload.is_merge {
2277 DiffTreeType::MergeBase {
2278 base: request.payload.base.into(),
2279 head: request.payload.head.into(),
2280 }
2281 } else {
2282 DiffTreeType::Since {
2283 base: request.payload.base.into(),
2284 head: request.payload.head.into(),
2285 }
2286 };
2287
2288 let diff = this
2289 .update(&mut cx, |this, cx| {
2290 let repository = this.repositories().get(&repository_id)?;
2291 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2292 })?
2293 .context("missing repository")?
2294 .await??;
2295
2296 Ok(proto::GetTreeDiffResponse {
2297 entries: diff
2298 .entries
2299 .into_iter()
2300 .map(|(path, status)| proto::TreeDiffStatus {
2301 path: path.0.to_proto(),
2302 status: match status {
2303 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2304 TreeDiffStatus::Modified { .. } => {
2305 proto::tree_diff_status::Status::Modified.into()
2306 }
2307 TreeDiffStatus::Deleted { .. } => {
2308 proto::tree_diff_status::Status::Deleted.into()
2309 }
2310 },
2311 oid: match status {
2312 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2313 Some(old.to_string())
2314 }
2315 TreeDiffStatus::Added => None,
2316 },
2317 })
2318 .collect(),
2319 })
2320 }
2321
2322 async fn handle_get_blob_content(
2323 this: Entity<Self>,
2324 request: TypedEnvelope<proto::GetBlobContent>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GetBlobContentResponse> {
2327 let oid = git::Oid::from_str(&request.payload.oid)?;
2328 let repository_id = RepositoryId(request.payload.repository_id);
2329 let content = this
2330 .update(&mut cx, |this, cx| {
2331 let repository = this.repositories().get(&repository_id)?;
2332 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2333 })?
2334 .context("missing repository")?
2335 .await?;
2336 Ok(proto::GetBlobContentResponse { content })
2337 }
2338
2339 async fn handle_open_unstaged_diff(
2340 this: Entity<Self>,
2341 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2342 mut cx: AsyncApp,
2343 ) -> Result<proto::OpenUnstagedDiffResponse> {
2344 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2345 let diff = this
2346 .update(&mut cx, |this, cx| {
2347 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2348 Some(this.open_unstaged_diff(buffer, cx))
2349 })?
2350 .context("missing buffer")?
2351 .await?;
2352 this.update(&mut cx, |this, _| {
2353 let shared_diffs = this
2354 .shared_diffs
2355 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2356 .or_default();
2357 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2358 })?;
2359 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2360 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2361 }
2362
2363 async fn handle_open_uncommitted_diff(
2364 this: Entity<Self>,
2365 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2366 mut cx: AsyncApp,
2367 ) -> Result<proto::OpenUncommittedDiffResponse> {
2368 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2369 let diff = this
2370 .update(&mut cx, |this, cx| {
2371 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2372 Some(this.open_uncommitted_diff(buffer, cx))
2373 })?
2374 .context("missing buffer")?
2375 .await?;
2376 this.update(&mut cx, |this, _| {
2377 let shared_diffs = this
2378 .shared_diffs
2379 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2380 .or_default();
2381 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2382 })?;
2383 diff.read_with(&cx, |diff, cx| {
2384 use proto::open_uncommitted_diff_response::Mode;
2385
2386 let unstaged_diff = diff.secondary_diff();
2387 let index_snapshot = unstaged_diff.and_then(|diff| {
2388 let diff = diff.read(cx);
2389 diff.base_text_exists().then(|| diff.base_text())
2390 });
2391
2392 let mode;
2393 let staged_text;
2394 let committed_text;
2395 if diff.base_text_exists() {
2396 let committed_snapshot = diff.base_text();
2397 committed_text = Some(committed_snapshot.text());
2398 if let Some(index_text) = index_snapshot {
2399 if index_text.remote_id() == committed_snapshot.remote_id() {
2400 mode = Mode::IndexMatchesHead;
2401 staged_text = None;
2402 } else {
2403 mode = Mode::IndexAndHead;
2404 staged_text = Some(index_text.text());
2405 }
2406 } else {
2407 mode = Mode::IndexAndHead;
2408 staged_text = None;
2409 }
2410 } else {
2411 mode = Mode::IndexAndHead;
2412 committed_text = None;
2413 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2414 }
2415
2416 proto::OpenUncommittedDiffResponse {
2417 committed_text,
2418 staged_text,
2419 mode: mode.into(),
2420 }
2421 })
2422 }
2423
2424 async fn handle_update_diff_bases(
2425 this: Entity<Self>,
2426 request: TypedEnvelope<proto::UpdateDiffBases>,
2427 mut cx: AsyncApp,
2428 ) -> Result<()> {
2429 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2430 this.update(&mut cx, |this, cx| {
2431 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2432 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2433 {
2434 let buffer = buffer.read(cx).text_snapshot();
2435 diff_state.update(cx, |diff_state, cx| {
2436 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2437 })
2438 }
2439 })
2440 }
2441
2442 async fn handle_blame_buffer(
2443 this: Entity<Self>,
2444 envelope: TypedEnvelope<proto::BlameBuffer>,
2445 mut cx: AsyncApp,
2446 ) -> Result<proto::BlameBufferResponse> {
2447 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2448 let version = deserialize_version(&envelope.payload.version);
2449 let buffer = this.read_with(&cx, |this, cx| {
2450 this.buffer_store.read(cx).get_existing(buffer_id)
2451 })??;
2452 buffer
2453 .update(&mut cx, |buffer, _| {
2454 buffer.wait_for_version(version.clone())
2455 })?
2456 .await?;
2457 let blame = this
2458 .update(&mut cx, |this, cx| {
2459 this.blame_buffer(&buffer, Some(version), cx)
2460 })?
2461 .await?;
2462 Ok(serialize_blame_buffer_response(blame))
2463 }
2464
2465 async fn handle_get_permalink_to_line(
2466 this: Entity<Self>,
2467 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2468 mut cx: AsyncApp,
2469 ) -> Result<proto::GetPermalinkToLineResponse> {
2470 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2471 // let version = deserialize_version(&envelope.payload.version);
2472 let selection = {
2473 let proto_selection = envelope
2474 .payload
2475 .selection
                .context("no selection provided to get permalink for")?;
2477 proto_selection.start as u32..proto_selection.end as u32
2478 };
2479 let buffer = this.read_with(&cx, |this, cx| {
2480 this.buffer_store.read(cx).get_existing(buffer_id)
2481 })??;
2482 let permalink = this
2483 .update(&mut cx, |this, cx| {
2484 this.get_permalink_to_line(&buffer, selection, cx)
2485 })?
2486 .await?;
2487 Ok(proto::GetPermalinkToLineResponse {
2488 permalink: permalink.to_string(),
2489 })
2490 }
2491
2492 fn repository_for_request(
2493 this: &Entity<Self>,
2494 id: RepositoryId,
2495 cx: &mut AsyncApp,
2496 ) -> Result<Entity<Repository>> {
2497 this.read_with(cx, |this, _| {
2498 this.repositories
2499 .get(&id)
2500 .context("missing repository handle")
2501 .cloned()
2502 })?
2503 }
2504
2505 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2506 self.repositories
2507 .iter()
2508 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2509 .collect()
2510 }
2511
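    /// Groups the updated worktree entries by the repository whose work directory
    /// contains them, assigning each path to its innermost repository. The grouping runs
    /// on the background executor so that large batches of updates don't block the main
    /// thread.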
2512 fn process_updated_entries(
2513 &self,
2514 worktree: &Entity<Worktree>,
2515 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2516 cx: &mut App,
2517 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2518 let path_style = worktree.read(cx).path_style();
2519 let mut repo_paths = self
2520 .repositories
2521 .values()
2522 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2523 .collect::<Vec<_>>();
2524 let mut entries: Vec<_> = updated_entries
2525 .iter()
2526 .map(|(path, _, _)| path.clone())
2527 .collect();
2528 entries.sort();
2529 let worktree = worktree.read(cx);
2530
2531 let entries = entries
2532 .into_iter()
2533 .map(|path| worktree.absolutize(&path))
2534 .collect::<Arc<[_]>>();
2535
2536 let executor = cx.background_executor().clone();
2537 cx.background_executor().spawn(async move {
2538 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2539 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2540 let mut tasks = FuturesOrdered::new();
2541 for (repo_path, repo) in repo_paths.into_iter().rev() {
2542 let entries = entries.clone();
2543 let task = executor.spawn(async move {
2544 // Find all repository paths that belong to this repo
2545 let mut ix = entries.partition_point(|path| path < &*repo_path);
2546 if ix == entries.len() {
2547 return None;
2548 };
2549
2550 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form one contiguous range.
2552 while let Some(path) = entries.get(ix)
2553 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2554 &repo_path, path, path_style,
2555 )
2556 {
2557 paths.push((repo_path, ix));
2558 ix += 1;
2559 }
2560 if paths.is_empty() {
2561 None
2562 } else {
2563 Some((repo, paths))
2564 }
2565 });
2566 tasks.push_back(task);
2567 }
2568
2569 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2570 let mut path_was_used = vec![false; entries.len()];
2571 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned in deepest-first order, so a nested repository is
            // processed before any repository that contains it; we always want to assign
            // a path to its innermost repository.
2574 for t in tasks {
2575 let Some((repo, paths)) = t else {
2576 continue;
2577 };
2578 let entry = paths_by_git_repo.entry(repo).or_default();
2579 for (repo_path, ix) in paths {
2580 if path_was_used[ix] {
2581 continue;
2582 }
2583 path_was_used[ix] = true;
2584 entry.push(repo_path);
2585 }
2586 }
2587
2588 paths_by_git_repo
2589 })
2590 }
2591}
2592
2593impl BufferGitState {
2594 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2595 Self {
2596 unstaged_diff: Default::default(),
2597 uncommitted_diff: Default::default(),
2598 recalculate_diff_task: Default::default(),
2599 language: Default::default(),
2600 language_registry: Default::default(),
2601 recalculating_tx: postage::watch::channel_with(false).0,
2602 hunk_staging_operation_count: 0,
2603 hunk_staging_operation_count_as_of_write: 0,
2604 head_text: Default::default(),
2605 index_text: Default::default(),
2606 head_changed: Default::default(),
2607 index_changed: Default::default(),
2608 language_changed: Default::default(),
2609 conflict_updated_futures: Default::default(),
2610 conflict_set: Default::default(),
2611 reparse_conflict_markers_task: Default::default(),
2612 }
2613 }
2614
2615 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2616 self.language = buffer.read(cx).language().cloned();
2617 self.language_changed = true;
2618 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2619 }
2620
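    /// Re-parses git conflict markers in `buffer` on a background task and applies the
    /// new snapshot to this buffer's `ConflictSet`. The returned receiver fires once the
    /// conflict set has been updated; if there is no conflict set, or it never recorded a
    /// conflict, the sender is dropped without firing.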
2621 fn reparse_conflict_markers(
2622 &mut self,
2623 buffer: text::BufferSnapshot,
2624 cx: &mut Context<Self>,
2625 ) -> oneshot::Receiver<()> {
2626 let (tx, rx) = oneshot::channel();
2627
2628 let Some(conflict_set) = self
2629 .conflict_set
2630 .as_ref()
2631 .and_then(|conflict_set| conflict_set.upgrade())
2632 else {
2633 return rx;
2634 };
2635
2636 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2637 if conflict_set.has_conflict {
2638 Some(conflict_set.snapshot())
2639 } else {
2640 None
2641 }
2642 });
2643
2644 if let Some(old_snapshot) = old_snapshot {
2645 self.conflict_updated_futures.push(tx);
2646 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2647 let (snapshot, changed_range) = cx
2648 .background_spawn(async move {
2649 let new_snapshot = ConflictSet::parse(&buffer);
2650 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2651 (new_snapshot, changed_range)
2652 })
2653 .await;
2654 this.update(cx, |this, cx| {
2655 if let Some(conflict_set) = &this.conflict_set {
2656 conflict_set
2657 .update(cx, |conflict_set, cx| {
2658 conflict_set.set_snapshot(snapshot, changed_range, cx);
2659 })
2660 .ok();
2661 }
2662 let futures = std::mem::take(&mut this.conflict_updated_futures);
2663 for tx in futures {
2664 tx.send(()).ok();
2665 }
2666 })
2667 }))
2668 }
2669
2670 rx
2671 }
2672
2673 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2674 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2675 }
2676
2677 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2678 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2679 }
2680
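    /// Translates an `UpdateDiffBases` message from the remote peer into a
    /// `DiffBasesChange` and applies it, which triggers a diff recalculation.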
2681 fn handle_base_texts_updated(
2682 &mut self,
2683 buffer: text::BufferSnapshot,
2684 message: proto::UpdateDiffBases,
2685 cx: &mut Context<Self>,
2686 ) {
2687 use proto::update_diff_bases::Mode;
2688
2689 let Some(mode) = Mode::from_i32(message.mode) else {
2690 return;
2691 };
2692
2693 let diff_bases_change = match mode {
2694 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2695 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2696 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2697 Mode::IndexAndHead => DiffBasesChange::SetEach {
2698 index: message.staged_text,
2699 head: message.committed_text,
2700 },
2701 };
2702
2703 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2704 }
2705
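    /// If a diff recalculation is currently in flight, returns a future that resolves
    /// once it finishes; otherwise returns `None`.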
2706 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2707 if *self.recalculating_tx.borrow() {
2708 let mut rx = self.recalculating_tx.subscribe();
2709 Some(async move {
2710 loop {
2711 let is_recalculating = rx.recv().await;
2712 if is_recalculating != Some(true) {
2713 break;
2714 }
2715 }
2716 })
2717 } else {
2718 None
2719 }
2720 }
2721
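    /// Applies a change to the index and/or HEAD base texts, normalizing their line
    /// endings, and then recalculates the affected diffs.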
2722 fn diff_bases_changed(
2723 &mut self,
2724 buffer: text::BufferSnapshot,
2725 diff_bases_change: Option<DiffBasesChange>,
2726 cx: &mut Context<Self>,
2727 ) {
2728 match diff_bases_change {
2729 Some(DiffBasesChange::SetIndex(index)) => {
2730 self.index_text = index.map(|mut index| {
2731 text::LineEnding::normalize(&mut index);
2732 Arc::new(index)
2733 });
2734 self.index_changed = true;
2735 }
2736 Some(DiffBasesChange::SetHead(head)) => {
2737 self.head_text = head.map(|mut head| {
2738 text::LineEnding::normalize(&mut head);
2739 Arc::new(head)
2740 });
2741 self.head_changed = true;
2742 }
2743 Some(DiffBasesChange::SetBoth(text)) => {
2744 let text = text.map(|mut text| {
2745 text::LineEnding::normalize(&mut text);
2746 Arc::new(text)
2747 });
2748 self.head_text = text.clone();
2749 self.index_text = text;
2750 self.head_changed = true;
2751 self.index_changed = true;
2752 }
2753 Some(DiffBasesChange::SetEach { index, head }) => {
2754 self.index_text = index.map(|mut index| {
2755 text::LineEnding::normalize(&mut index);
2756 Arc::new(index)
2757 });
2758 self.index_changed = true;
2759 self.head_text = head.map(|mut head| {
2760 text::LineEnding::normalize(&mut head);
2761 Arc::new(head)
2762 });
2763 self.head_changed = true;
2764 }
2765 None => {}
2766 }
2767
2768 self.recalculate_diffs(buffer, cx)
2769 }
2770
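    /// Spawns a task that recomputes the unstaged and uncommitted diffs against the
    /// current base texts. When the index matches HEAD the unstaged result is reused for
    /// the uncommitted diff, and the attempt is abandoned if hunk staging operations are
    /// still settling, leaving a later recalculation to pick up the final state.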
2771 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2772 *self.recalculating_tx.borrow_mut() = true;
2773
2774 let language = self.language.clone();
2775 let language_registry = self.language_registry.clone();
2776 let unstaged_diff = self.unstaged_diff();
2777 let uncommitted_diff = self.uncommitted_diff();
2778 let head = self.head_text.clone();
2779 let index = self.index_text.clone();
2780 let index_changed = self.index_changed;
2781 let head_changed = self.head_changed;
2782 let language_changed = self.language_changed;
2783 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2784 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2785 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2786 (None, None) => true,
2787 _ => false,
2788 };
2789 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2790 log::debug!(
2791 "start recalculating diffs for buffer {}",
2792 buffer.remote_id()
2793 );
2794
2795 let mut new_unstaged_diff = None;
2796 if let Some(unstaged_diff) = &unstaged_diff {
2797 new_unstaged_diff = Some(
2798 BufferDiff::update_diff(
2799 unstaged_diff.clone(),
2800 buffer.clone(),
2801 index,
2802 index_changed,
2803 language_changed,
2804 language.clone(),
2805 language_registry.clone(),
2806 cx,
2807 )
2808 .await?,
2809 );
2810 }
2811
2812 let mut new_uncommitted_diff = None;
2813 if let Some(uncommitted_diff) = &uncommitted_diff {
2814 new_uncommitted_diff = if index_matches_head {
2815 new_unstaged_diff.clone()
2816 } else {
2817 Some(
2818 BufferDiff::update_diff(
2819 uncommitted_diff.clone(),
2820 buffer.clone(),
2821 head,
2822 head_changed,
2823 language_changed,
2824 language.clone(),
2825 language_registry.clone(),
2826 cx,
2827 )
2828 .await?,
2829 )
2830 }
2831 }
2832
2833 let cancel = this.update(cx, |this, _| {
2834 // This checks whether all pending stage/unstage operations
2835 // have quiesced (i.e. both the corresponding write and the
2836 // read of that write have completed). If not, then we cancel
2837 // this recalculation attempt to avoid invalidating pending
2838 // state too quickly; another recalculation will come along
2839 // later and clear the pending state once the state of the index has settled.
2840 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2841 *this.recalculating_tx.borrow_mut() = false;
2842 true
2843 } else {
2844 false
2845 }
2846 })?;
2847 if cancel {
2848 log::debug!(
2849 concat!(
                        "aborting recalculating diffs for buffer {} ",
2851 "due to subsequent hunk operations",
2852 ),
2853 buffer.remote_id()
2854 );
2855 return Ok(());
2856 }
2857
2858 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2859 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2860 {
2861 unstaged_diff.update(cx, |diff, cx| {
2862 if language_changed {
2863 diff.language_changed(cx);
2864 }
2865 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2866 })?
2867 } else {
2868 None
2869 };
2870
2871 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2872 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2873 {
2874 uncommitted_diff.update(cx, |diff, cx| {
2875 if language_changed {
2876 diff.language_changed(cx);
2877 }
2878 diff.set_snapshot_with_secondary(
2879 new_uncommitted_diff,
2880 &buffer,
2881 unstaged_changed_range,
2882 true,
2883 cx,
2884 );
2885 })?;
2886 }
2887
2888 log::debug!(
2889 "finished recalculating diffs for buffer {}",
2890 buffer.remote_id()
2891 );
2892
2893 if let Some(this) = this.upgrade() {
2894 this.update(cx, |this, _| {
2895 this.index_changed = false;
2896 this.head_changed = false;
2897 this.language_changed = false;
2898 *this.recalculating_tx.borrow_mut() = false;
2899 })?;
2900 }
2901
2902 Ok(())
2903 }));
2904 }
2905}
2906
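/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream client as
/// `proto::AskPassRequest`s, converts the reply into an `EncryptedPassword`, and zeroizes
/// the plaintext copy of the response.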
2907fn make_remote_delegate(
2908 this: Entity<GitStore>,
2909 project_id: u64,
2910 repository_id: RepositoryId,
2911 askpass_id: u64,
2912 cx: &mut AsyncApp,
2913) -> AskPassDelegate {
2914 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2915 this.update(cx, |this, cx| {
2916 let Some((client, _)) = this.downstream_client() else {
2917 return;
2918 };
2919 let response = client.request(proto::AskPassRequest {
2920 project_id,
2921 repository_id: repository_id.to_proto(),
2922 askpass_id,
2923 prompt,
2924 });
2925 cx.spawn(async move |_, _| {
2926 let mut response = response.await?.response;
2927 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2928 .ok();
2929 response.zeroize();
2930 anyhow::Ok(())
2931 })
2932 .detach_and_log_err(cx);
2933 })
2934 .log_err();
2935 })
2936}
2937
2938impl RepositoryId {
2939 pub fn to_proto(self) -> u64 {
2940 self.0
2941 }
2942
2943 pub fn from_proto(id: u64) -> Self {
2944 RepositoryId(id)
2945 }
2946}
2947
2948impl RepositorySnapshot {
2949 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2950 Self {
2951 id,
2952 statuses_by_path: Default::default(),
2953 pending_ops_by_path: Default::default(),
2954 work_directory_abs_path,
2955 branch: None,
2956 head_commit: None,
2957 scan_id: 0,
2958 merge: Default::default(),
2959 remote_origin_url: None,
2960 remote_upstream_url: None,
2961 stash_entries: Default::default(),
2962 path_style,
2963 }
2964 }
2965
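    /// Builds an `UpdateRepository` message describing this repository's complete state:
    /// every status entry, plus branch, head commit, merge, and stash information.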
2966 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2967 proto::UpdateRepository {
2968 branch_summary: self.branch.as_ref().map(branch_to_proto),
2969 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2970 updated_statuses: self
2971 .statuses_by_path
2972 .iter()
2973 .map(|entry| entry.to_proto())
2974 .collect(),
2975 removed_statuses: Default::default(),
2976 current_merge_conflicts: self
2977 .merge
2978 .conflicted_paths
2979 .iter()
2980 .map(|repo_path| repo_path.to_proto())
2981 .collect(),
2982 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2983 project_id,
2984 id: self.id.to_proto(),
2985 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2986 entry_ids: vec![self.id.to_proto()],
2987 scan_id: self.scan_id,
2988 is_last_update: true,
2989 stash_entries: self
2990 .stash_entries
2991 .entries
2992 .iter()
2993 .map(stash_to_proto)
2994 .collect(),
2995 }
2996 }
2997
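    /// Builds an `UpdateRepository` message containing only the delta against `old`,
    /// computed by merging the two sorted status lists and reporting entries that were
    /// added, changed, or removed.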
2998 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2999 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3000 let mut removed_statuses: Vec<String> = Vec::new();
3001
3002 let mut new_statuses = self.statuses_by_path.iter().peekable();
3003 let mut old_statuses = old.statuses_by_path.iter().peekable();
3004
3005 let mut current_new_entry = new_statuses.next();
3006 let mut current_old_entry = old_statuses.next();
3007 loop {
3008 match (current_new_entry, current_old_entry) {
3009 (Some(new_entry), Some(old_entry)) => {
3010 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3011 Ordering::Less => {
3012 updated_statuses.push(new_entry.to_proto());
3013 current_new_entry = new_statuses.next();
3014 }
3015 Ordering::Equal => {
3016 if new_entry.status != old_entry.status {
3017 updated_statuses.push(new_entry.to_proto());
3018 }
3019 current_old_entry = old_statuses.next();
3020 current_new_entry = new_statuses.next();
3021 }
3022 Ordering::Greater => {
3023 removed_statuses.push(old_entry.repo_path.to_proto());
3024 current_old_entry = old_statuses.next();
3025 }
3026 }
3027 }
3028 (None, Some(old_entry)) => {
3029 removed_statuses.push(old_entry.repo_path.to_proto());
3030 current_old_entry = old_statuses.next();
3031 }
3032 (Some(new_entry), None) => {
3033 updated_statuses.push(new_entry.to_proto());
3034 current_new_entry = new_statuses.next();
3035 }
3036 (None, None) => break,
3037 }
3038 }
3039
3040 proto::UpdateRepository {
3041 branch_summary: self.branch.as_ref().map(branch_to_proto),
3042 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3043 updated_statuses,
3044 removed_statuses,
3045 current_merge_conflicts: self
3046 .merge
3047 .conflicted_paths
3048 .iter()
3049 .map(|path| path.to_proto())
3050 .collect(),
3051 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3052 project_id,
3053 id: self.id.to_proto(),
3054 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3055 entry_ids: vec![],
3056 scan_id: self.scan_id,
3057 is_last_update: true,
3058 stash_entries: self
3059 .stash_entries
3060 .entries
3061 .iter()
3062 .map(stash_to_proto)
3063 .collect(),
3064 }
3065 }
3066
3067 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3068 self.statuses_by_path.iter().cloned()
3069 }
3070
3071 pub fn status_summary(&self) -> GitSummary {
3072 self.statuses_by_path.summary().item_summary
3073 }
3074
3075 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3076 self.statuses_by_path
3077 .get(&PathKey(path.0.clone()), ())
3078 .cloned()
3079 }
3080
3081 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3082 self.pending_ops_by_path
3083 .get(&PathKey(path.0.clone()), ())
3084 .cloned()
3085 }
3086
3087 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3088 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3089 }
3090
3091 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3092 self.path_style
3093 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3094 .unwrap()
3095 .into()
3096 }
3097
3098 #[inline]
3099 fn abs_path_to_repo_path_inner(
3100 work_directory_abs_path: &Path,
3101 abs_path: &Path,
3102 path_style: PathStyle,
3103 ) -> Option<RepoPath> {
3104 abs_path
3105 .strip_prefix(&work_directory_abs_path)
3106 .ok()
3107 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3108 }
3109
3110 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3111 self.merge.conflicted_paths.contains(repo_path)
3112 }
3113
3114 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3115 let had_conflict_on_last_merge_head_change =
3116 self.merge.conflicted_paths.contains(repo_path);
3117 let has_conflict_currently = self
3118 .status_for_path(repo_path)
3119 .is_some_and(|entry| entry.status.is_conflicted());
3120 had_conflict_on_last_merge_head_change || has_conflict_currently
3121 }
3122
3123 /// This is the name that will be displayed in the repository selector for this repository.
3124 pub fn display_name(&self) -> SharedString {
3125 self.work_directory_abs_path
3126 .file_name()
3127 .unwrap_or_default()
3128 .to_string_lossy()
3129 .to_string()
3130 .into()
3131 }
3132}
3133
3134pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3135 proto::StashEntry {
3136 oid: entry.oid.as_bytes().to_vec(),
3137 message: entry.message.clone(),
3138 branch: entry.branch.clone(),
3139 index: entry.index as u64,
3140 timestamp: entry.timestamp,
3141 }
3142}
3143
3144pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3145 Ok(StashEntry {
3146 oid: Oid::from_bytes(&entry.oid)?,
3147 message: entry.message.clone(),
3148 index: entry.index as usize,
3149 branch: entry.branch.clone(),
3150 timestamp: entry.timestamp,
3151 })
3152}
3153
3154impl MergeDetails {
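    /// Loads merge-related state by rev-parsing MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD,
    /// REVERT_HEAD, and APPLY_HEAD, returning the new details along with whether the merge
    /// heads changed. Conflicted paths are only recomputed when the heads change, and new
    /// heads are not recorded until `git status` actually reports conflicts, so a scan
    /// taken mid-merge doesn't lock in an empty conflict set.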
3155 async fn load(
3156 backend: &Arc<dyn GitRepository>,
3157 status: &SumTree<StatusEntry>,
3158 prev_snapshot: &RepositorySnapshot,
3159 ) -> Result<(MergeDetails, bool)> {
3160 log::debug!("load merge details");
3161 let message = backend.merge_message().await;
3162 let heads = backend
3163 .revparse_batch(vec![
3164 "MERGE_HEAD".into(),
3165 "CHERRY_PICK_HEAD".into(),
3166 "REBASE_HEAD".into(),
3167 "REVERT_HEAD".into(),
3168 "APPLY_HEAD".into(),
3169 ])
3170 .await
3171 .log_err()
3172 .unwrap_or_default()
3173 .into_iter()
3174 .map(|opt| opt.map(SharedString::from))
3175 .collect::<Vec<_>>();
3176 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3177 let conflicted_paths = if merge_heads_changed {
3178 let current_conflicted_paths = TreeSet::from_ordered_entries(
3179 status
3180 .iter()
3181 .filter(|entry| entry.status.is_conflicted())
3182 .map(|entry| entry.repo_path.clone()),
3183 );
3184
3185 // It can happen that we run a scan while a lengthy merge is in progress
3186 // that will eventually result in conflicts, but before those conflicts
3187 // are reported by `git status`. Since for the moment we only care about
3188 // the merge heads state for the purposes of tracking conflicts, don't update
3189 // this state until we see some conflicts.
3190 if heads.iter().any(Option::is_some)
3191 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3192 && current_conflicted_paths.is_empty()
3193 {
3194 log::debug!("not updating merge heads because no conflicts found");
3195 return Ok((
3196 MergeDetails {
3197 message: message.map(SharedString::from),
3198 ..prev_snapshot.merge.clone()
3199 },
3200 false,
3201 ));
3202 }
3203
3204 current_conflicted_paths
3205 } else {
3206 prev_snapshot.merge.conflicted_paths.clone()
3207 };
3208 let details = MergeDetails {
3209 conflicted_paths,
3210 message: message.map(SharedString::from),
3211 heads,
3212 };
3213 Ok((details, merge_heads_changed))
3214 }
3215}
3216
3217impl Repository {
3218 pub fn snapshot(&self) -> RepositorySnapshot {
3219 self.snapshot.clone()
3220 }
3221
3222 fn local(
3223 id: RepositoryId,
3224 work_directory_abs_path: Arc<Path>,
3225 dot_git_abs_path: Arc<Path>,
3226 repository_dir_abs_path: Arc<Path>,
3227 common_dir_abs_path: Arc<Path>,
3228 project_environment: WeakEntity<ProjectEnvironment>,
3229 fs: Arc<dyn Fs>,
3230 git_store: WeakEntity<GitStore>,
3231 cx: &mut Context<Self>,
3232 ) -> Self {
3233 let snapshot =
3234 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3235 Repository {
3236 this: cx.weak_entity(),
3237 git_store,
3238 snapshot,
3239 commit_message_buffer: None,
3240 askpass_delegates: Default::default(),
3241 paths_needing_status_update: Default::default(),
3242 latest_askpass_id: 0,
3243 job_sender: Repository::spawn_local_git_worker(
3244 work_directory_abs_path,
3245 dot_git_abs_path,
3246 repository_dir_abs_path,
3247 common_dir_abs_path,
3248 project_environment,
3249 fs,
3250 cx,
3251 ),
3252 job_id: 0,
3253 active_jobs: Default::default(),
3254 }
3255 }
3256
3257 fn remote(
3258 id: RepositoryId,
3259 work_directory_abs_path: Arc<Path>,
3260 path_style: PathStyle,
3261 project_id: ProjectId,
3262 client: AnyProtoClient,
3263 git_store: WeakEntity<GitStore>,
3264 cx: &mut Context<Self>,
3265 ) -> Self {
3266 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3267 Self {
3268 this: cx.weak_entity(),
3269 snapshot,
3270 commit_message_buffer: None,
3271 git_store,
3272 paths_needing_status_update: Default::default(),
3273 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3274 askpass_delegates: Default::default(),
3275 latest_askpass_id: 0,
3276 active_jobs: Default::default(),
3277 job_id: 0,
3278 }
3279 }
3280
3281 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3282 self.git_store.upgrade()
3283 }
3284
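    /// Reloads the index and HEAD texts for every open buffer that belongs to this
    /// repository, sends any changed bases to the downstream client, and applies them to
    /// the corresponding diff states. Only meaningful for local repositories.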
3285 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3286 let this = cx.weak_entity();
3287 let git_store = self.git_store.clone();
3288 let _ = self.send_keyed_job(
3289 Some(GitJobKey::ReloadBufferDiffBases),
3290 None,
3291 |state, mut cx| async move {
3292 let RepositoryState::Local { backend, .. } = state else {
3293 log::error!("tried to recompute diffs for a non-local repository");
3294 return Ok(());
3295 };
3296
3297 let Some(this) = this.upgrade() else {
3298 return Ok(());
3299 };
3300
3301 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3302 git_store.update(cx, |git_store, cx| {
3303 git_store
3304 .diffs
3305 .iter()
3306 .filter_map(|(buffer_id, diff_state)| {
3307 let buffer_store = git_store.buffer_store.read(cx);
3308 let buffer = buffer_store.get(*buffer_id)?;
3309 let file = File::from_dyn(buffer.read(cx).file())?;
3310 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3311 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3312 log::debug!(
3313 "start reload diff bases for repo path {}",
3314 repo_path.as_unix_str()
3315 );
3316 diff_state.update(cx, |diff_state, _| {
3317 let has_unstaged_diff = diff_state
3318 .unstaged_diff
3319 .as_ref()
3320 .is_some_and(|diff| diff.is_upgradable());
3321 let has_uncommitted_diff = diff_state
3322 .uncommitted_diff
3323 .as_ref()
3324 .is_some_and(|set| set.is_upgradable());
3325
3326 Some((
3327 buffer,
3328 repo_path,
3329 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3330 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3331 ))
3332 })
3333 })
3334 .collect::<Vec<_>>()
3335 })
3336 })??;
3337
3338 let buffer_diff_base_changes = cx
3339 .background_spawn(async move {
3340 let mut changes = Vec::new();
3341 for (buffer, repo_path, current_index_text, current_head_text) in
3342 &repo_diff_state_updates
3343 {
3344 let index_text = if current_index_text.is_some() {
3345 backend.load_index_text(repo_path.clone()).await
3346 } else {
3347 None
3348 };
3349 let head_text = if current_head_text.is_some() {
3350 backend.load_committed_text(repo_path.clone()).await
3351 } else {
3352 None
3353 };
3354
3355 let change =
3356 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3357 (Some(current_index), Some(current_head)) => {
3358 let index_changed =
3359 index_text.as_ref() != current_index.as_deref();
3360 let head_changed =
3361 head_text.as_ref() != current_head.as_deref();
3362 if index_changed && head_changed {
3363 if index_text == head_text {
3364 Some(DiffBasesChange::SetBoth(head_text))
3365 } else {
3366 Some(DiffBasesChange::SetEach {
3367 index: index_text,
3368 head: head_text,
3369 })
3370 }
3371 } else if index_changed {
3372 Some(DiffBasesChange::SetIndex(index_text))
3373 } else if head_changed {
3374 Some(DiffBasesChange::SetHead(head_text))
3375 } else {
3376 None
3377 }
3378 }
3379 (Some(current_index), None) => {
3380 let index_changed =
3381 index_text.as_ref() != current_index.as_deref();
3382 index_changed
3383 .then_some(DiffBasesChange::SetIndex(index_text))
3384 }
3385 (None, Some(current_head)) => {
3386 let head_changed =
3387 head_text.as_ref() != current_head.as_deref();
3388 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3389 }
3390 (None, None) => None,
3391 };
3392
3393 changes.push((buffer.clone(), change))
3394 }
3395 changes
3396 })
3397 .await;
3398
3399 git_store.update(&mut cx, |git_store, cx| {
3400 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3401 let buffer_snapshot = buffer.read(cx).text_snapshot();
3402 let buffer_id = buffer_snapshot.remote_id();
3403 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3404 continue;
3405 };
3406
3407 let downstream_client = git_store.downstream_client();
3408 diff_state.update(cx, |diff_state, cx| {
3409 use proto::update_diff_bases::Mode;
3410
3411 if let Some((diff_bases_change, (client, project_id))) =
3412 diff_bases_change.clone().zip(downstream_client)
3413 {
3414 let (staged_text, committed_text, mode) = match diff_bases_change {
3415 DiffBasesChange::SetIndex(index) => {
3416 (index, None, Mode::IndexOnly)
3417 }
3418 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3419 DiffBasesChange::SetEach { index, head } => {
3420 (index, head, Mode::IndexAndHead)
3421 }
3422 DiffBasesChange::SetBoth(text) => {
3423 (None, text, Mode::IndexMatchesHead)
3424 }
3425 };
3426 client
3427 .send(proto::UpdateDiffBases {
3428 project_id: project_id.to_proto(),
3429 buffer_id: buffer_id.to_proto(),
3430 staged_text,
3431 committed_text,
3432 mode: mode as i32,
3433 })
3434 .log_err();
3435 }
3436
3437 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3438 });
3439 }
3440 })
3441 },
3442 );
3443 }
3444
3445 pub fn send_job<F, Fut, R>(
3446 &mut self,
3447 status: Option<SharedString>,
3448 job: F,
3449 ) -> oneshot::Receiver<R>
3450 where
3451 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3452 Fut: Future<Output = R> + 'static,
3453 R: Send + 'static,
3454 {
3455 self.send_keyed_job(None, status, job)
3456 }
3457
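    /// Enqueues a job on this repository's git worker, optionally tagged with a
    /// `GitJobKey` identifying related work (for example, index writes for the same
    /// path). While the job runs its status message is tracked in `active_jobs`, and the
    /// returned receiver yields the job's result.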
3458 fn send_keyed_job<F, Fut, R>(
3459 &mut self,
3460 key: Option<GitJobKey>,
3461 status: Option<SharedString>,
3462 job: F,
3463 ) -> oneshot::Receiver<R>
3464 where
3465 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3466 Fut: Future<Output = R> + 'static,
3467 R: Send + 'static,
3468 {
3469 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3470 let job_id = post_inc(&mut self.job_id);
3471 let this = self.this.clone();
3472 self.job_sender
3473 .unbounded_send(GitJob {
3474 key,
3475 job: Box::new(move |state, cx: &mut AsyncApp| {
3476 let job = job(state, cx.clone());
3477 cx.spawn(async move |cx| {
3478 if let Some(s) = status.clone() {
3479 this.update(cx, |this, cx| {
3480 this.active_jobs.insert(
3481 job_id,
3482 JobInfo {
3483 start: Instant::now(),
3484 message: s.clone(),
3485 },
3486 );
3487
3488 cx.notify();
3489 })
3490 .ok();
3491 }
3492 let result = job.await;
3493
3494 this.update(cx, |this, cx| {
3495 this.active_jobs.remove(&job_id);
3496 cx.notify();
3497 })
3498 .ok();
3499
3500 result_tx.send(result).ok();
3501 })
3502 }),
3503 })
3504 .ok();
3505 result_rx
3506 }
3507
3508 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3509 let Some(git_store) = self.git_store.upgrade() else {
3510 return;
3511 };
3512 let entity = cx.entity();
3513 git_store.update(cx, |git_store, cx| {
3514 let Some((&id, _)) = git_store
3515 .repositories
3516 .iter()
3517 .find(|(_, handle)| *handle == &entity)
3518 else {
3519 return;
3520 };
3521 git_store.active_repo_id = Some(id);
3522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3523 });
3524 }
3525
3526 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3527 self.snapshot.status()
3528 }
3529
3530 pub fn cached_stash(&self) -> GitStash {
3531 self.snapshot.stash_entries.clone()
3532 }
3533
3534 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3535 let git_store = self.git_store.upgrade()?;
3536 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3537 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3538 let abs_path = SanitizedPath::new(&abs_path);
3539 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3540 Some(ProjectPath {
3541 worktree_id: worktree.read(cx).id(),
3542 path: relative_path,
3543 })
3544 }
3545
3546 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3547 let git_store = self.git_store.upgrade()?;
3548 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3549 let abs_path = worktree_store.absolutize(path, cx)?;
3550 self.snapshot.abs_path_to_repo_path(&abs_path)
3551 }
3552
3553 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3554 other
3555 .read(cx)
3556 .snapshot
3557 .work_directory_abs_path
3558 .starts_with(&self.snapshot.work_directory_abs_path)
3559 }
3560
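    /// Returns the buffer used to compose this repository's commit message, creating it
    /// on first use. For remote repositories the buffer is opened via the host and
    /// awaited as a remote buffer; in both cases the "Git Commit" language is applied
    /// when a language registry is provided.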
3561 pub fn open_commit_buffer(
3562 &mut self,
3563 languages: Option<Arc<LanguageRegistry>>,
3564 buffer_store: Entity<BufferStore>,
3565 cx: &mut Context<Self>,
3566 ) -> Task<Result<Entity<Buffer>>> {
3567 let id = self.id;
3568 if let Some(buffer) = self.commit_message_buffer.clone() {
3569 return Task::ready(Ok(buffer));
3570 }
3571 let this = cx.weak_entity();
3572
3573 let rx = self.send_job(None, move |state, mut cx| async move {
3574 let Some(this) = this.upgrade() else {
3575 bail!("git store was dropped");
3576 };
3577 match state {
3578 RepositoryState::Local { .. } => {
3579 this.update(&mut cx, |_, cx| {
3580 Self::open_local_commit_buffer(languages, buffer_store, cx)
3581 })?
3582 .await
3583 }
3584 RepositoryState::Remote { project_id, client } => {
3585 let request = client.request(proto::OpenCommitMessageBuffer {
3586 project_id: project_id.0,
3587 repository_id: id.to_proto(),
3588 });
3589 let response = request.await.context("requesting to open commit buffer")?;
3590 let buffer_id = BufferId::new(response.buffer_id)?;
3591 let buffer = buffer_store
3592 .update(&mut cx, |buffer_store, cx| {
3593 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3594 })?
3595 .await?;
3596 if let Some(language_registry) = languages {
3597 let git_commit_language =
3598 language_registry.language_for_name("Git Commit").await?;
3599 buffer.update(&mut cx, |buffer, cx| {
3600 buffer.set_language(Some(git_commit_language), cx);
3601 })?;
3602 }
3603 this.update(&mut cx, |this, _| {
3604 this.commit_message_buffer = Some(buffer.clone());
3605 })?;
3606 Ok(buffer)
3607 }
3608 }
3609 });
3610
3611 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3612 }
3613
3614 fn open_local_commit_buffer(
3615 language_registry: Option<Arc<LanguageRegistry>>,
3616 buffer_store: Entity<BufferStore>,
3617 cx: &mut Context<Self>,
3618 ) -> Task<Result<Entity<Buffer>>> {
3619 cx.spawn(async move |repository, cx| {
3620 let buffer = buffer_store
3621 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3622 .await?;
3623
3624 if let Some(language_registry) = language_registry {
3625 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3626 buffer.update(cx, |buffer, cx| {
3627 buffer.set_language(Some(git_commit_language), cx);
3628 })?;
3629 }
3630
3631 repository.update(cx, |repository, _| {
3632 repository.commit_message_buffer = Some(buffer.clone());
3633 })?;
3634 Ok(buffer)
3635 })
3636 }
3637
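    /// Restores the given paths to their contents at `commit`, either through the local
    /// git backend or by forwarding a `GitCheckoutFiles` request to the host.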
3638 pub fn checkout_files(
3639 &mut self,
3640 commit: &str,
3641 paths: Vec<RepoPath>,
3642 _cx: &mut App,
3643 ) -> oneshot::Receiver<Result<()>> {
3644 let commit = commit.to_string();
3645 let id = self.id;
3646
3647 self.send_job(
3648 Some(format!("git checkout {}", commit).into()),
3649 move |git_repo, _| async move {
3650 match git_repo {
3651 RepositoryState::Local {
3652 backend,
3653 environment,
3654 ..
3655 } => {
3656 backend
3657 .checkout_files(commit, paths, environment.clone())
3658 .await
3659 }
3660 RepositoryState::Remote { project_id, client } => {
3661 client
3662 .request(proto::GitCheckoutFiles {
3663 project_id: project_id.0,
3664 repository_id: id.to_proto(),
3665 commit,
3666 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3667 })
3668 .await?;
3669
3670 Ok(())
3671 }
3672 }
3673 },
3674 )
3675 }
3676
3677 pub fn reset(
3678 &mut self,
3679 commit: String,
3680 reset_mode: ResetMode,
3681 _cx: &mut App,
3682 ) -> oneshot::Receiver<Result<()>> {
3683 let id = self.id;
3684
3685 self.send_job(None, move |git_repo, _| async move {
3686 match git_repo {
3687 RepositoryState::Local {
3688 backend,
3689 environment,
3690 ..
3691 } => backend.reset(commit, reset_mode, environment).await,
3692 RepositoryState::Remote { project_id, client } => {
3693 client
3694 .request(proto::GitReset {
3695 project_id: project_id.0,
3696 repository_id: id.to_proto(),
3697 commit,
3698 mode: match reset_mode {
3699 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3700 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3701 },
3702 })
3703 .await?;
3704
3705 Ok(())
3706 }
3707 }
3708 })
3709 }
3710
3711 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3712 let id = self.id;
3713 self.send_job(None, move |git_repo, _cx| async move {
3714 match git_repo {
3715 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3716 RepositoryState::Remote { project_id, client } => {
3717 let resp = client
3718 .request(proto::GitShow {
3719 project_id: project_id.0,
3720 repository_id: id.to_proto(),
3721 commit,
3722 })
3723 .await?;
3724
3725 Ok(CommitDetails {
3726 sha: resp.sha.into(),
3727 message: resp.message.into(),
3728 commit_timestamp: resp.commit_timestamp,
3729 author_email: resp.author_email.into(),
3730 author_name: resp.author_name.into(),
3731 })
3732 }
3733 }
3734 })
3735 }
3736
3737 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3738 let id = self.id;
3739 self.send_job(None, move |git_repo, cx| async move {
3740 match git_repo {
3741 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3742 RepositoryState::Remote {
3743 client, project_id, ..
3744 } => {
3745 let response = client
3746 .request(proto::LoadCommitDiff {
3747 project_id: project_id.0,
3748 repository_id: id.to_proto(),
3749 commit,
3750 })
3751 .await?;
3752 Ok(CommitDiff {
3753 files: response
3754 .files
3755 .into_iter()
3756 .map(|file| {
3757 Ok(CommitFile {
3758 path: RepoPath::from_proto(&file.path)?,
3759 old_text: file.old_text,
3760 new_text: file.new_text,
3761 })
3762 })
3763 .collect::<Result<Vec<_>>>()?,
3764 })
3765 }
3766 }
3767 })
3768 }
3769
3770 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3771 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3772 }
3773
3774 fn save_buffers<'a>(
3775 &self,
3776 entries: impl IntoIterator<Item = &'a RepoPath>,
3777 cx: &mut Context<Self>,
3778 ) -> Vec<Task<anyhow::Result<()>>> {
3779 let mut save_futures = Vec::new();
3780 if let Some(buffer_store) = self.buffer_store(cx) {
3781 buffer_store.update(cx, |buffer_store, cx| {
3782 for path in entries {
3783 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3784 continue;
3785 };
3786 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3787 && buffer
3788 .read(cx)
3789 .file()
3790 .is_some_and(|file| file.disk_state().exists())
3791 && buffer.read(cx).has_unsaved_edits()
3792 {
3793 save_futures.push(buffer_store.save_buffer(buffer, cx));
3794 }
3795 }
3796 })
3797 }
3798 save_futures
3799 }
3800
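/// Stages the given paths. Open buffers with unsaved edits for these paths are
/// saved first, and a pending "staged" operation is recorded per path so the
/// UI can reflect the change before the underlying git job completes.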
3801 pub fn stage_entries(
3802 &mut self,
3803 entries: Vec<RepoPath>,
3804 cx: &mut Context<Self>,
3805 ) -> Task<anyhow::Result<()>> {
3806 if entries.is_empty() {
3807 return Task::ready(Ok(()));
3808 }
3809 let id = self.id;
3810 let save_tasks = self.save_buffers(&entries, cx);
3811 let paths = entries
3812 .iter()
3813 .map(|p| p.as_unix_str())
3814 .collect::<Vec<_>>()
3815 .join(" ");
3816 let status = format!("git add {paths}");
3817 let job_key = match entries.len() {
3818 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3819 _ => None,
3820 };
3821
3822 let mut ids = Vec::with_capacity(entries.len());
3823 let mut edits = Vec::with_capacity(entries.len());
3824
3825 for entry in &entries {
3826 let id = self
3827 .snapshot
3828 .pending_ops_by_path
3829 .summary()
3830 .item_summary
3831 .max_id
3832 + 1;
3833 let op = PendingOp {
3834 id,
3835 git_status: pending_op::GitStatus::Staged,
3836 job_status: pending_op::JobStatus::Started,
3837 };
3838 let mut ops = self
3839 .snapshot
3840 .pending_ops_for_path(entry)
3841 .unwrap_or_else(|| PendingOps::new(entry));
3842 ops.ops.push(op);
3843 edits.push(sum_tree::Edit::Insert(ops));
3844 ids.push((id, entry.clone()));
3845 }
3846 self.snapshot.pending_ops_by_path.edit(edits, ());
3847
3848 cx.spawn(async move |this, cx| {
3849 for save_task in save_tasks {
3850 save_task.await?;
3851 }
3852
3853 let res = this
3854 .update(cx, |this, _| {
3855 this.send_keyed_job(
3856 job_key,
3857 Some(status.into()),
3858 move |git_repo, _cx| async move {
3859 match git_repo {
3860 RepositoryState::Local {
3861 backend,
3862 environment,
3863 ..
3864 } => backend.stage_paths(entries, environment.clone()).await,
3865 RepositoryState::Remote { project_id, client } => {
3866 client
3867 .request(proto::Stage {
3868 project_id: project_id.0,
3869 repository_id: id.to_proto(),
3870 paths: entries
3871 .into_iter()
3872 .map(|repo_path| repo_path.to_proto())
3873 .collect(),
3874 })
3875 .await
3876 .context("sending stage request")?;
3877
3878 Ok(())
3879 }
3880 }
3881 },
3882 )
3883 })?
3884 .await;
3885
3886 let (job_status, res) = match res {
3887 Ok(res) => (pending_op::JobStatus::Finished, res),
3888 Err(err) => (err.into(), Ok(())),
3889 };
3890 res?;
3891
3892 this.update(cx, |this, _| {
3893 let mut edits = Vec::with_capacity(ids.len());
3894 for (id, entry) in ids {
3895 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
3896 if let Some(op) = ops.op_by_id_mut(id) {
3897 op.job_status = job_status;
3898 }
3899 edits.push(sum_tree::Edit::Insert(ops));
3900 }
3901 }
3902 this.snapshot.pending_ops_by_path.edit(edits, ());
3903 })?;
3904
3905 Ok(())
3906 })
3907 }
3908
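/// Unstages the given paths. Mirrors `stage_entries`: dirty buffers are saved
/// first and a pending "unstaged" operation is recorded per path.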
3909 pub fn unstage_entries(
3910 &mut self,
3911 entries: Vec<RepoPath>,
3912 cx: &mut Context<Self>,
3913 ) -> Task<anyhow::Result<()>> {
3914 if entries.is_empty() {
3915 return Task::ready(Ok(()));
3916 }
3917 let id = self.id;
3918 let save_tasks = self.save_buffers(&entries, cx);
3919 let paths = entries
3920 .iter()
3921 .map(|p| p.as_unix_str())
3922 .collect::<Vec<_>>()
3923 .join(" ");
3924 let status = format!("git reset {paths}");
3925 let job_key = match entries.len() {
3926 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3927 _ => None,
3928 };
3929
3930 let mut ids = Vec::with_capacity(entries.len());
3931 let mut edits = Vec::with_capacity(entries.len());
3932
3933 for entry in &entries {
3934 let id = self
3935 .snapshot
3936 .pending_ops_by_path
3937 .summary()
3938 .item_summary
3939 .max_id
3940 + 1;
3941 let op = PendingOp {
3942 id,
3943 git_status: pending_op::GitStatus::Unstaged,
3944 job_status: pending_op::JobStatus::Started,
3945 };
3946 let mut ops = self
3947 .snapshot
3948 .pending_ops_for_path(entry)
3949 .unwrap_or_else(|| PendingOps::new(entry));
3950 ops.ops.push(op);
3951 edits.push(sum_tree::Edit::Insert(ops));
3952 ids.push((id, entry.clone()));
3953 }
3954 self.snapshot.pending_ops_by_path.edit(edits, ());
3955
3956 cx.spawn(async move |this, cx| {
3957 for save_task in save_tasks {
3958 save_task.await?;
3959 }
3960
3961 let res = this
3962 .update(cx, |this, _| {
3963 this.send_keyed_job(
3964 job_key,
3965 Some(status.into()),
3966 move |git_repo, _cx| async move {
3967 match git_repo {
3968 RepositoryState::Local {
3969 backend,
3970 environment,
3971 ..
3972 } => backend.unstage_paths(entries, environment).await,
3973 RepositoryState::Remote { project_id, client } => {
3974 client
3975 .request(proto::Unstage {
3976 project_id: project_id.0,
3977 repository_id: id.to_proto(),
3978 paths: entries
3979 .into_iter()
3980 .map(|repo_path| repo_path.to_proto())
3981 .collect(),
3982 })
3983 .await
3984 .context("sending unstage request")?;
3985
3986 Ok(())
3987 }
3988 }
3989 },
3990 )
3991 })?
3992 .await;
3993
3994 let (job_status, res) = match res {
3995 Ok(res) => (pending_op::JobStatus::Finished, res),
3996 Err(err) => (err.into(), Ok(())),
3997 };
3998 res?;
3999
4000 this.update(cx, |this, _| {
4001 let mut edits = Vec::with_capacity(ids.len());
4002 for (id, entry) in ids {
4003 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
4004 if let Some(op) = ops.op_by_id_mut(id) {
4005 op.job_status = job_status;
4006 }
4007 edits.push(sum_tree::Edit::Insert(ops));
4008 }
4009 }
4010 this.snapshot.pending_ops_by_path.edit(edits, ());
4011 })?;
4012
4013 Ok(())
4014 })
4015 }
4016
4017 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4018 let to_stage = self
4019 .cached_status()
4020 .filter(|entry| !entry.status.staging().is_fully_staged())
4021 .map(|entry| entry.repo_path)
4022 .collect();
4023 self.stage_entries(to_stage, cx)
4024 }
4025
4026 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4027 let to_unstage = self
4028 .cached_status()
4029 .filter(|entry| entry.status.staging().has_staged())
4030 .map(|entry| entry.repo_path)
4031 .collect();
4032 self.unstage_entries(to_unstage, cx)
4033 }
4034
4035 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4036 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4037
4038 self.stash_entries(to_stash, cx)
4039 }
4040
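/// Stashes the working-tree changes for the given paths.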
4041 pub fn stash_entries(
4042 &mut self,
4043 entries: Vec<RepoPath>,
4044 cx: &mut Context<Self>,
4045 ) -> Task<anyhow::Result<()>> {
4046 let id = self.id;
4047
4048 cx.spawn(async move |this, cx| {
4049 this.update(cx, |this, _| {
4050 this.send_job(None, move |git_repo, _cx| async move {
4051 match git_repo {
4052 RepositoryState::Local {
4053 backend,
4054 environment,
4055 ..
4056 } => backend.stash_paths(entries, environment).await,
4057 RepositoryState::Remote { project_id, client } => {
4058 client
4059 .request(proto::Stash {
4060 project_id: project_id.0,
4061 repository_id: id.to_proto(),
4062 paths: entries
4063 .into_iter()
4064 .map(|repo_path| repo_path.to_proto())
4065 .collect(),
4066 })
4067 .await
4068 .context("sending stash request")?;
4069 Ok(())
4070 }
4071 }
4072 })
4073 })?
4074 .await??;
4075 Ok(())
4076 })
4077 }
4078
4079 pub fn stash_pop(
4080 &mut self,
4081 index: Option<usize>,
4082 cx: &mut Context<Self>,
4083 ) -> Task<anyhow::Result<()>> {
4084 let id = self.id;
4085 cx.spawn(async move |this, cx| {
4086 this.update(cx, |this, _| {
4087 this.send_job(None, move |git_repo, _cx| async move {
4088 match git_repo {
4089 RepositoryState::Local {
4090 backend,
4091 environment,
4092 ..
4093 } => backend.stash_pop(index, environment).await,
4094 RepositoryState::Remote { project_id, client } => {
4095 client
4096 .request(proto::StashPop {
4097 project_id: project_id.0,
4098 repository_id: id.to_proto(),
4099 stash_index: index.map(|i| i as u64),
4100 })
4101 .await
4102 .context("sending stash pop request")?;
4103 Ok(())
4104 }
4105 }
4106 })
4107 })?
4108 .await??;
4109 Ok(())
4110 })
4111 }
4112
4113 pub fn stash_apply(
4114 &mut self,
4115 index: Option<usize>,
4116 cx: &mut Context<Self>,
4117 ) -> Task<anyhow::Result<()>> {
4118 let id = self.id;
4119 cx.spawn(async move |this, cx| {
4120 this.update(cx, |this, _| {
4121 this.send_job(None, move |git_repo, _cx| async move {
4122 match git_repo {
4123 RepositoryState::Local {
4124 backend,
4125 environment,
4126 ..
4127 } => backend.stash_apply(index, environment).await,
4128 RepositoryState::Remote { project_id, client } => {
4129 client
4130 .request(proto::StashApply {
4131 project_id: project_id.0,
4132 repository_id: id.to_proto(),
4133 stash_index: index.map(|i| i as u64),
4134 })
4135 .await
4136 .context("sending stash apply request")?;
4137 Ok(())
4138 }
4139 }
4140 })
4141 })?
4142 .await??;
4143 Ok(())
4144 })
4145 }
4146
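/// Drops a stash entry. For local repositories the cached stash list is
/// refreshed afterwards and the updated snapshot is sent downstream.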
4147 pub fn stash_drop(
4148 &mut self,
4149 index: Option<usize>,
4150 cx: &mut Context<Self>,
4151 ) -> oneshot::Receiver<anyhow::Result<()>> {
4152 let id = self.id;
4153 let updates_tx = self
4154 .git_store()
4155 .and_then(|git_store| match &git_store.read(cx).state {
4156 GitStoreState::Local { downstream, .. } => downstream
4157 .as_ref()
4158 .map(|downstream| downstream.updates_tx.clone()),
4159 _ => None,
4160 });
4161 let this = cx.weak_entity();
4162 self.send_job(None, move |git_repo, mut cx| async move {
4163 match git_repo {
4164 RepositoryState::Local {
4165 backend,
4166 environment,
4167 ..
4168 } => {
4169 // TODO: it would be nice not to have to refresh the stash entries manually here.
4170 let result = backend.stash_drop(index, environment).await;
4171 if result.is_ok()
4172 && let Ok(stash_entries) = backend.stash_entries().await
4173 {
4174 let snapshot = this.update(&mut cx, |this, cx| {
4175 this.snapshot.stash_entries = stash_entries;
4176 cx.emit(RepositoryEvent::StashEntriesChanged);
4177 this.snapshot.clone()
4178 })?;
4179 if let Some(updates_tx) = updates_tx {
4180 updates_tx
4181 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4182 .ok();
4183 }
4184 }
4185
4186 result
4187 }
4188 RepositoryState::Remote { project_id, client } => {
4189 client
4190 .request(proto::StashDrop {
4191 project_id: project_id.0,
4192 repository_id: id.to_proto(),
4193 stash_index: index.map(|i| i as u64),
4194 })
4195 .await
4196 .context("sending stash drop request")?;
4197 Ok(())
4198 }
4199 }
4200 })
4201 }
4202
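/// Commits the currently staged changes with the given message, optionally
/// overriding the author name and email and honoring the amend and signoff
/// options.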
4203 pub fn commit(
4204 &mut self,
4205 message: SharedString,
4206 name_and_email: Option<(SharedString, SharedString)>,
4207 options: CommitOptions,
4208 _cx: &mut App,
4209 ) -> oneshot::Receiver<Result<()>> {
4210 let id = self.id;
4211
4212 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4213 match git_repo {
4214 RepositoryState::Local {
4215 backend,
4216 environment,
4217 ..
4218 } => {
4219 backend
4220 .commit(message, name_and_email, options, environment)
4221 .await
4222 }
4223 RepositoryState::Remote { project_id, client } => {
4224 let (name, email) = name_and_email.unzip();
4225 client
4226 .request(proto::Commit {
4227 project_id: project_id.0,
4228 repository_id: id.to_proto(),
4229 message: String::from(message),
4230 name: name.map(String::from),
4231 email: email.map(String::from),
4232 options: Some(proto::commit::CommitOptions {
4233 amend: options.amend,
4234 signoff: options.signoff,
4235 }),
4236 })
4237 .await
4238 .context("sending commit request")?;
4239
4240 Ok(())
4241 }
4242 }
4243 })
4244 }
4245
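/// Runs `git fetch` with the given options. The askpass delegate answers
/// credential prompts; for remote projects it is registered so the host can
/// relay prompts back to this client.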
4246 pub fn fetch(
4247 &mut self,
4248 fetch_options: FetchOptions,
4249 askpass: AskPassDelegate,
4250 _cx: &mut App,
4251 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4252 let askpass_delegates = self.askpass_delegates.clone();
4253 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4254 let id = self.id;
4255
4256 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4257 match git_repo {
4258 RepositoryState::Local {
4259 backend,
4260 environment,
4261 ..
4262 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4263 RepositoryState::Remote { project_id, client } => {
4264 askpass_delegates.lock().insert(askpass_id, askpass);
4265 let _defer = util::defer(|| {
4266 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4267 debug_assert!(askpass_delegate.is_some());
4268 });
4269
4270 let response = client
4271 .request(proto::Fetch {
4272 project_id: project_id.0,
4273 repository_id: id.to_proto(),
4274 askpass_id,
4275 remote: fetch_options.to_proto(),
4276 })
4277 .await
4278 .context("sending fetch request")?;
4279
4280 Ok(RemoteCommandOutput {
4281 stdout: response.stdout,
4282 stderr: response.stderr,
4283 })
4284 }
4285 }
4286 })
4287 }
4288
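/// Pushes `branch` to `remote`, optionally with `--set-upstream` or
/// `--force-with-lease`. After a successful local push, the head branch is
/// re-read and the updated snapshot is sent downstream.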
4289 pub fn push(
4290 &mut self,
4291 branch: SharedString,
4292 remote: SharedString,
4293 options: Option<PushOptions>,
4294 askpass: AskPassDelegate,
4295 cx: &mut Context<Self>,
4296 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4297 let askpass_delegates = self.askpass_delegates.clone();
4298 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4299 let id = self.id;
4300
4301 let args = options
4302 .map(|option| match option {
4303 PushOptions::SetUpstream => " --set-upstream",
4304 PushOptions::Force => " --force-with-lease",
4305 })
4306 .unwrap_or("");
4307
4308 let updates_tx = self
4309 .git_store()
4310 .and_then(|git_store| match &git_store.read(cx).state {
4311 GitStoreState::Local { downstream, .. } => downstream
4312 .as_ref()
4313 .map(|downstream| downstream.updates_tx.clone()),
4314 _ => None,
4315 });
4316
4317 let this = cx.weak_entity();
4318 self.send_job(
4319 Some(format!("git push{} {} {}", args, remote, branch).into()),
4320 move |git_repo, mut cx| async move {
4321 match git_repo {
4322 RepositoryState::Local {
4323 backend,
4324 environment,
4325 ..
4326 } => {
4327 let result = backend
4328 .push(
4329 branch.to_string(),
4330 remote.to_string(),
4331 options,
4332 askpass,
4333 environment.clone(),
4334 cx.clone(),
4335 )
4336 .await;
4337 // TODO: it would be nice not to have to refresh the head branch manually here.
4338 if result.is_ok() {
4339 let branches = backend.branches().await?;
4340 let branch = branches.into_iter().find(|branch| branch.is_head);
4341 log::info!("head branch after scan is {branch:?}");
4342 let snapshot = this.update(&mut cx, |this, cx| {
4343 this.snapshot.branch = branch;
4344 cx.emit(RepositoryEvent::BranchChanged);
4345 this.snapshot.clone()
4346 })?;
4347 if let Some(updates_tx) = updates_tx {
4348 updates_tx
4349 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4350 .ok();
4351 }
4352 }
4353 result
4354 }
4355 RepositoryState::Remote { project_id, client } => {
4356 askpass_delegates.lock().insert(askpass_id, askpass);
4357 let _defer = util::defer(|| {
4358 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4359 debug_assert!(askpass_delegate.is_some());
4360 });
4361 let response = client
4362 .request(proto::Push {
4363 project_id: project_id.0,
4364 repository_id: id.to_proto(),
4365 askpass_id,
4366 branch_name: branch.to_string(),
4367 remote_name: remote.to_string(),
4368 options: options.map(|options| match options {
4369 PushOptions::Force => proto::push::PushOptions::Force,
4370 PushOptions::SetUpstream => {
4371 proto::push::PushOptions::SetUpstream
4372 }
4373 }
4374 as i32),
4375 })
4376 .await
4377 .context("sending push request")?;
4378
4379 Ok(RemoteCommandOutput {
4380 stdout: response.stdout,
4381 stderr: response.stderr,
4382 })
4383 }
4384 }
4385 },
4386 )
4387 }
4388
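/// Pulls `branch` from `remote`, using the askpass delegate for credential
/// prompts.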
4389 pub fn pull(
4390 &mut self,
4391 branch: SharedString,
4392 remote: SharedString,
4393 askpass: AskPassDelegate,
4394 _cx: &mut App,
4395 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4396 let askpass_delegates = self.askpass_delegates.clone();
4397 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4398 let id = self.id;
4399
4400 self.send_job(
4401 Some(format!("git pull {} {}", remote, branch).into()),
4402 move |git_repo, cx| async move {
4403 match git_repo {
4404 RepositoryState::Local {
4405 backend,
4406 environment,
4407 ..
4408 } => {
4409 backend
4410 .pull(
4411 branch.to_string(),
4412 remote.to_string(),
4413 askpass,
4414 environment.clone(),
4415 cx,
4416 )
4417 .await
4418 }
4419 RepositoryState::Remote { project_id, client } => {
4420 askpass_delegates.lock().insert(askpass_id, askpass);
4421 let _defer = util::defer(|| {
4422 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4423 debug_assert!(askpass_delegate.is_some());
4424 });
4425 let response = client
4426 .request(proto::Pull {
4427 project_id: project_id.0,
4428 repository_id: id.to_proto(),
4429 askpass_id,
4430 branch_name: branch.to_string(),
4431 remote_name: remote.to_string(),
4432 })
4433 .await
4434 .context("sending pull request")?;
4435
4436 Ok(RemoteCommandOutput {
4437 stdout: response.stdout,
4438 stderr: response.stderr,
4439 })
4440 }
4441 }
4442 },
4443 )
4444 }
4445
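/// Enqueues a job that writes `content` as the index text for `path`. Jobs are
/// keyed by path, so an older queued write is skipped when a newer write for
/// the same path is already waiting.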
4446 fn spawn_set_index_text_job(
4447 &mut self,
4448 path: RepoPath,
4449 content: Option<String>,
4450 hunk_staging_operation_count: Option<usize>,
4451 cx: &mut Context<Self>,
4452 ) -> oneshot::Receiver<anyhow::Result<()>> {
4453 let id = self.id;
4454 let this = cx.weak_entity();
4455 let git_store = self.git_store.clone();
4456 self.send_keyed_job(
4457 Some(GitJobKey::WriteIndex(path.clone())),
4458 None,
4459 move |git_repo, mut cx| async move {
4460 log::debug!(
4461 "start updating index text for buffer {}",
4462 path.as_unix_str()
4463 );
4464 match git_repo {
4465 RepositoryState::Local {
4466 backend,
4467 environment,
4468 ..
4469 } => {
4470 backend
4471 .set_index_text(path.clone(), content, environment.clone())
4472 .await?;
4473 }
4474 RepositoryState::Remote { project_id, client } => {
4475 client
4476 .request(proto::SetIndexText {
4477 project_id: project_id.0,
4478 repository_id: id.to_proto(),
4479 path: path.to_proto(),
4480 text: content,
4481 })
4482 .await?;
4483 }
4484 }
4485 log::debug!(
4486 "finish updating index text for buffer {}",
4487 path.as_unix_str()
4488 );
4489
4490 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4491 let project_path = this
4492 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4493 .ok()
4494 .flatten();
4495 git_store.update(&mut cx, |git_store, cx| {
4496 let buffer_id = git_store
4497 .buffer_store
4498 .read(cx)
4499 .get_by_path(&project_path?)?
4500 .read(cx)
4501 .remote_id();
4502 let diff_state = git_store.diffs.get(&buffer_id)?;
4503 diff_state.update(cx, |diff_state, _| {
4504 diff_state.hunk_staging_operation_count_as_of_write =
4505 hunk_staging_operation_count;
4506 });
4507 Some(())
4508 })?;
4509 }
4510 Ok(())
4511 },
4512 )
4513 }
4514
4515 pub fn get_remotes(
4516 &mut self,
4517 branch_name: Option<String>,
4518 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4519 let id = self.id;
4520 self.send_job(None, move |repo, _cx| async move {
4521 match repo {
4522 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4523 RepositoryState::Remote { project_id, client } => {
4524 let response = client
4525 .request(proto::GetRemotes {
4526 project_id: project_id.0,
4527 repository_id: id.to_proto(),
4528 branch_name,
4529 })
4530 .await?;
4531
4532 let remotes = response
4533 .remotes
4534 .into_iter()
4535 .map(|remote| git::repository::Remote {
4536 name: remote.name.into(),
4537 })
4538 .collect();
4539
4540 Ok(remotes)
4541 }
4542 }
4543 })
4544 }
4545
4546 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4547 let id = self.id;
4548 self.send_job(None, move |repo, _| async move {
4549 match repo {
4550 RepositoryState::Local { backend, .. } => backend.branches().await,
4551 RepositoryState::Remote { project_id, client } => {
4552 let response = client
4553 .request(proto::GitGetBranches {
4554 project_id: project_id.0,
4555 repository_id: id.to_proto(),
4556 })
4557 .await?;
4558
4559 let branches = response
4560 .branches
4561 .into_iter()
4562 .map(|branch| proto_to_branch(&branch))
4563 .collect();
4564
4565 Ok(branches)
4566 }
4567 }
4568 })
4569 }
4570
4571 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4572 let id = self.id;
4573 self.send_job(None, move |repo, _| async move {
4574 match repo {
4575 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4576 RepositoryState::Remote { project_id, client } => {
4577 let response = client
4578 .request(proto::GitGetWorktrees {
4579 project_id: project_id.0,
4580 repository_id: id.to_proto(),
4581 })
4582 .await?;
4583
4584 let worktrees = response
4585 .worktrees
4586 .into_iter()
4587 .map(|worktree| proto_to_worktree(&worktree))
4588 .collect();
4589
4590 Ok(worktrees)
4591 }
4592 }
4593 })
4594 }
4595
4596 pub fn create_worktree(
4597 &mut self,
4598 name: String,
4599 path: PathBuf,
4600 commit: Option<String>,
4601 ) -> oneshot::Receiver<Result<()>> {
4602 let id = self.id;
4603 self.send_job(
4604 Some("git worktree add".into()),
4605 move |repo, _cx| async move {
4606 match repo {
4607 RepositoryState::Local { backend, .. } => {
4608 backend.create_worktree(name, path, commit).await
4609 }
4610 RepositoryState::Remote { project_id, client } => {
4611 client
4612 .request(proto::GitCreateWorktree {
4613 project_id: project_id.0,
4614 repository_id: id.to_proto(),
4615 name,
4616 directory: path.to_string_lossy().to_string(),
4617 commit,
4618 })
4619 .await?;
4620
4621 Ok(())
4622 }
4623 }
4624 },
4625 )
4626 }
4627
4628 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4629 let id = self.id;
4630 self.send_job(None, move |repo, _| async move {
4631 match repo {
4632 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4633 RepositoryState::Remote { project_id, client } => {
4634 let response = client
4635 .request(proto::GetDefaultBranch {
4636 project_id: project_id.0,
4637 repository_id: id.to_proto(),
4638 })
4639 .await?;
4640
4641 anyhow::Ok(response.branch.map(SharedString::from))
4642 }
4643 }
4644 })
4645 }
4646
4647 pub fn diff_tree(
4648 &mut self,
4649 diff_type: DiffTreeType,
4650 _cx: &App,
4651 ) -> oneshot::Receiver<Result<TreeDiff>> {
4652 let repository_id = self.snapshot.id;
4653 self.send_job(None, move |repo, _cx| async move {
4654 match repo {
4655 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4656 RepositoryState::Remote { client, project_id } => {
4657 let response = client
4658 .request(proto::GetTreeDiff {
4659 project_id: project_id.0,
4660 repository_id: repository_id.0,
4661 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4662 base: diff_type.base().to_string(),
4663 head: diff_type.head().to_string(),
4664 })
4665 .await?;
4666
4667 let entries = response
4668 .entries
4669 .into_iter()
4670 .filter_map(|entry| {
4671 let status = match entry.status() {
4672 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4673 proto::tree_diff_status::Status::Modified => {
4674 TreeDiffStatus::Modified {
4675 old: git::Oid::from_str(
4676 &entry.oid.context("missing oid").log_err()?,
4677 )
4678 .log_err()?,
4679 }
4680 }
4681 proto::tree_diff_status::Status::Deleted => {
4682 TreeDiffStatus::Deleted {
4683 old: git::Oid::from_str(
4684 &entry.oid.context("missing oid").log_err()?,
4685 )
4686 .log_err()?,
4687 }
4688 }
4689 };
4690 Some((
4691 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4692 status,
4693 ))
4694 })
4695 .collect();
4696
4697 Ok(TreeDiff { entries })
4698 }
4699 }
4700 })
4701 }
4702
4703 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4704 let id = self.id;
4705 self.send_job(None, move |repo, _cx| async move {
4706 match repo {
4707 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4708 RepositoryState::Remote { project_id, client } => {
4709 let response = client
4710 .request(proto::GitDiff {
4711 project_id: project_id.0,
4712 repository_id: id.to_proto(),
4713 diff_type: match diff_type {
4714 DiffType::HeadToIndex => {
4715 proto::git_diff::DiffType::HeadToIndex.into()
4716 }
4717 DiffType::HeadToWorktree => {
4718 proto::git_diff::DiffType::HeadToWorktree.into()
4719 }
4720 },
4721 })
4722 .await?;
4723
4724 Ok(response.diff)
4725 }
4726 }
4727 })
4728 }
4729
4730 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4731 let id = self.id;
4732 self.send_job(
4733 Some(format!("git switch -c {branch_name}").into()),
4734 move |repo, _cx| async move {
4735 match repo {
4736 RepositoryState::Local { backend, .. } => {
4737 backend.create_branch(branch_name).await
4738 }
4739 RepositoryState::Remote { project_id, client } => {
4740 client
4741 .request(proto::GitCreateBranch {
4742 project_id: project_id.0,
4743 repository_id: id.to_proto(),
4744 branch_name,
4745 })
4746 .await?;
4747
4748 Ok(())
4749 }
4750 }
4751 },
4752 )
4753 }
4754
4755 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4756 let id = self.id;
4757 self.send_job(
4758 Some(format!("git switch {branch_name}").into()),
4759 move |repo, _cx| async move {
4760 match repo {
4761 RepositoryState::Local { backend, .. } => {
4762 backend.change_branch(branch_name).await
4763 }
4764 RepositoryState::Remote { project_id, client } => {
4765 client
4766 .request(proto::GitChangeBranch {
4767 project_id: project_id.0,
4768 repository_id: id.to_proto(),
4769 branch_name,
4770 })
4771 .await?;
4772
4773 Ok(())
4774 }
4775 }
4776 },
4777 )
4778 }
4779
4780 pub fn rename_branch(
4781 &mut self,
4782 branch: String,
4783 new_name: String,
4784 ) -> oneshot::Receiver<Result<()>> {
4785 let id = self.id;
4786 self.send_job(
4787 Some(format!("git branch -m {branch} {new_name}").into()),
4788 move |repo, _cx| async move {
4789 match repo {
4790 RepositoryState::Local { backend, .. } => {
4791 backend.rename_branch(branch, new_name).await
4792 }
4793 RepositoryState::Remote { project_id, client } => {
4794 client
4795 .request(proto::GitRenameBranch {
4796 project_id: project_id.0,
4797 repository_id: id.to_proto(),
4798 branch,
4799 new_name,
4800 })
4801 .await?;
4802
4803 Ok(())
4804 }
4805 }
4806 },
4807 )
4808 }
4809
4810 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4811 let id = self.id;
4812 self.send_job(None, move |repo, _cx| async move {
4813 match repo {
4814 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4815 RepositoryState::Remote { project_id, client } => {
4816 let response = client
4817 .request(proto::CheckForPushedCommits {
4818 project_id: project_id.0,
4819 repository_id: id.to_proto(),
4820 })
4821 .await?;
4822
4823 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4824
4825 Ok(branches)
4826 }
4827 }
4828 })
4829 }
4830
4831 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4832 self.send_job(None, |repo, _cx| async move {
4833 match repo {
4834 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4835 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4836 }
4837 })
4838 }
4839
4840 pub fn restore_checkpoint(
4841 &mut self,
4842 checkpoint: GitRepositoryCheckpoint,
4843 ) -> oneshot::Receiver<Result<()>> {
4844 self.send_job(None, move |repo, _cx| async move {
4845 match repo {
4846 RepositoryState::Local { backend, .. } => {
4847 backend.restore_checkpoint(checkpoint).await
4848 }
4849 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4850 }
4851 })
4852 }
4853
4854 pub(crate) fn apply_remote_update(
4855 &mut self,
4856 update: proto::UpdateRepository,
4857 cx: &mut Context<Self>,
4858 ) -> Result<()> {
4859 let conflicted_paths = TreeSet::from_ordered_entries(
4860 update
4861 .current_merge_conflicts
4862 .into_iter()
4863 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4864 );
4865 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4866 let new_head_commit = update
4867 .head_commit_details
4868 .as_ref()
4869 .map(proto_to_commit_details);
4870 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4871 cx.emit(RepositoryEvent::BranchChanged)
4872 }
4873 self.snapshot.branch = new_branch;
4874 self.snapshot.head_commit = new_head_commit;
4875
4876 self.snapshot.merge.conflicted_paths = conflicted_paths;
4877 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4878 let new_stash_entries = GitStash {
4879 entries: update
4880 .stash_entries
4881 .iter()
4882 .filter_map(|entry| proto_to_stash(entry).ok())
4883 .collect(),
4884 };
4885 if self.snapshot.stash_entries != new_stash_entries {
4886 cx.emit(RepositoryEvent::StashEntriesChanged)
4887 }
4888 self.snapshot.stash_entries = new_stash_entries;
4889
4890 let edits = update
4891 .removed_statuses
4892 .into_iter()
4893 .filter_map(|path| {
4894 Some(sum_tree::Edit::Remove(PathKey(
4895 RelPath::from_proto(&path).log_err()?,
4896 )))
4897 })
4898 .chain(
4899 update
4900 .updated_statuses
4901 .into_iter()
4902 .filter_map(|updated_status| {
4903 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4904 }),
4905 )
4906 .collect::<Vec<_>>();
4907 if !edits.is_empty() {
4908 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4909 }
4910 self.snapshot.statuses_by_path.edit(edits, ());
4911 if update.is_last_update {
4912 self.snapshot.scan_id = update.scan_id;
4913 }
4914 Ok(())
4915 }
4916
4917 pub fn compare_checkpoints(
4918 &mut self,
4919 left: GitRepositoryCheckpoint,
4920 right: GitRepositoryCheckpoint,
4921 ) -> oneshot::Receiver<Result<bool>> {
4922 self.send_job(None, move |repo, _cx| async move {
4923 match repo {
4924 RepositoryState::Local { backend, .. } => {
4925 backend.compare_checkpoints(left, right).await
4926 }
4927 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4928 }
4929 })
4930 }
4931
4932 pub fn diff_checkpoints(
4933 &mut self,
4934 base_checkpoint: GitRepositoryCheckpoint,
4935 target_checkpoint: GitRepositoryCheckpoint,
4936 ) -> oneshot::Receiver<Result<String>> {
4937 self.send_job(None, move |repo, _cx| async move {
4938 match repo {
4939 RepositoryState::Local { backend, .. } => {
4940 backend
4941 .diff_checkpoints(base_checkpoint, target_checkpoint)
4942 .await
4943 }
4944 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4945 }
4946 })
4947 }
4948
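/// Schedules a full status rescan as a keyed job, so repeated requests collapse
/// into one. The resulting snapshot replaces the current one and is forwarded
/// downstream when the repository is shared.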
4949 fn schedule_scan(
4950 &mut self,
4951 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4952 cx: &mut Context<Self>,
4953 ) {
4954 let this = cx.weak_entity();
4955 let _ = self.send_keyed_job(
4956 Some(GitJobKey::ReloadGitState),
4957 None,
4958 |state, mut cx| async move {
4959 log::debug!("run scheduled git status scan");
4960
4961 let Some(this) = this.upgrade() else {
4962 return Ok(());
4963 };
4964 let RepositoryState::Local { backend, .. } = state else {
4965 bail!("not a local repository")
4966 };
4967 let (snapshot, events) = this
4968 .update(&mut cx, |this, _| {
4969 this.paths_needing_status_update.clear();
4970 compute_snapshot(
4971 this.id,
4972 this.work_directory_abs_path.clone(),
4973 this.snapshot.clone(),
4974 backend.clone(),
4975 )
4976 })?
4977 .await?;
4978 this.update(&mut cx, |this, cx| {
4979 this.snapshot = snapshot.clone();
4980 for event in events {
4981 cx.emit(event);
4982 }
4983 })?;
4984 if let Some(updates_tx) = updates_tx {
4985 updates_tx
4986 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4987 .ok();
4988 }
4989 Ok(())
4990 },
4991 );
4992 }
4993
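/// Spawns the job loop for a local repository: resolves the directory
/// environment and the git binary, opens the backend, then runs queued git jobs
/// one at a time, skipping a keyed job when a newer job with the same key is
/// already queued.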
4994 fn spawn_local_git_worker(
4995 work_directory_abs_path: Arc<Path>,
4996 dot_git_abs_path: Arc<Path>,
4997 _repository_dir_abs_path: Arc<Path>,
4998 _common_dir_abs_path: Arc<Path>,
4999 project_environment: WeakEntity<ProjectEnvironment>,
5000 fs: Arc<dyn Fs>,
5001 cx: &mut Context<Self>,
5002 ) -> mpsc::UnboundedSender<GitJob> {
5003 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5004
5005 cx.spawn(async move |_, cx| {
5006 let environment = project_environment
5007 .upgrade()
5008 .context("missing project environment")?
5009 .update(cx, |project_environment, cx| {
5010 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5011 })?
5012 .await
5013 .unwrap_or_else(|| {
5014 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5015 HashMap::default()
5016 });
5017 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5018 let backend = cx
5019 .background_spawn(async move {
5020 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5021 .or_else(|| which::which("git").ok());
5022 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5023 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5024 })
5025 .await?;
5026
5027 if let Some(git_hosting_provider_registry) =
5028 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5029 {
5030 git_hosting_providers::register_additional_providers(
5031 git_hosting_provider_registry,
5032 backend.clone(),
5033 );
5034 }
5035
5036 let state = RepositoryState::Local {
5037 backend,
5038 environment: Arc::new(environment),
5039 };
5040 let mut jobs = VecDeque::new();
5041 loop {
5042 while let Ok(Some(next_job)) = job_rx.try_next() {
5043 jobs.push_back(next_job);
5044 }
5045
5046 if let Some(job) = jobs.pop_front() {
5047 if let Some(current_key) = &job.key
5048 && jobs
5049 .iter()
5050 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5051 {
5052 continue;
5053 }
5054 (job.job)(state.clone(), cx).await;
5055 } else if let Some(job) = job_rx.next().await {
5056 jobs.push_back(job);
5057 } else {
5058 break;
5059 }
5060 }
5061 anyhow::Ok(())
5062 })
5063 .detach_and_log_err(cx);
5064
5065 job_tx
5066 }
5067
5068 fn spawn_remote_git_worker(
5069 project_id: ProjectId,
5070 client: AnyProtoClient,
5071 cx: &mut Context<Self>,
5072 ) -> mpsc::UnboundedSender<GitJob> {
5073 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5074
5075 cx.spawn(async move |_, cx| {
5076 let state = RepositoryState::Remote { project_id, client };
5077 let mut jobs = VecDeque::new();
5078 loop {
5079 while let Ok(Some(next_job)) = job_rx.try_next() {
5080 jobs.push_back(next_job);
5081 }
5082
5083 if let Some(job) = jobs.pop_front() {
5084 if let Some(current_key) = &job.key
5085 && jobs
5086 .iter()
5087 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5088 {
5089 continue;
5090 }
5091 (job.job)(state.clone(), cx).await;
5092 } else if let Some(job) = job_rx.next().await {
5093 jobs.push_back(job);
5094 } else {
5095 break;
5096 }
5097 }
5098 anyhow::Ok(())
5099 })
5100 .detach_and_log_err(cx);
5101
5102 job_tx
5103 }
5104
5105 fn load_staged_text(
5106 &mut self,
5107 buffer_id: BufferId,
5108 repo_path: RepoPath,
5109 cx: &App,
5110 ) -> Task<Result<Option<String>>> {
5111 let rx = self.send_job(None, move |state, _| async move {
5112 match state {
5113 RepositoryState::Local { backend, .. } => {
5114 anyhow::Ok(backend.load_index_text(repo_path).await)
5115 }
5116 RepositoryState::Remote { project_id, client } => {
5117 let response = client
5118 .request(proto::OpenUnstagedDiff {
5119 project_id: project_id.to_proto(),
5120 buffer_id: buffer_id.to_proto(),
5121 })
5122 .await?;
5123 Ok(response.staged_text)
5124 }
5125 }
5126 });
5127 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5128 }
5129
5130 fn load_committed_text(
5131 &mut self,
5132 buffer_id: BufferId,
5133 repo_path: RepoPath,
5134 cx: &App,
5135 ) -> Task<Result<DiffBasesChange>> {
5136 let rx = self.send_job(None, move |state, _| async move {
5137 match state {
5138 RepositoryState::Local { backend, .. } => {
5139 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5140 let staged_text = backend.load_index_text(repo_path).await;
5141 let diff_bases_change = if committed_text == staged_text {
5142 DiffBasesChange::SetBoth(committed_text)
5143 } else {
5144 DiffBasesChange::SetEach {
5145 index: staged_text,
5146 head: committed_text,
5147 }
5148 };
5149 anyhow::Ok(diff_bases_change)
5150 }
5151 RepositoryState::Remote { project_id, client } => {
5152 use proto::open_uncommitted_diff_response::Mode;
5153
5154 let response = client
5155 .request(proto::OpenUncommittedDiff {
5156 project_id: project_id.to_proto(),
5157 buffer_id: buffer_id.to_proto(),
5158 })
5159 .await?;
5160 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5161 let bases = match mode {
5162 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5163 Mode::IndexAndHead => DiffBasesChange::SetEach {
5164 head: response.committed_text,
5165 index: response.staged_text,
5166 },
5167 };
5168 Ok(bases)
5169 }
5170 }
5171 });
5172
5173 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5174 }

5175 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5176 let repository_id = self.snapshot.id;
5177 let rx = self.send_job(None, move |state, _| async move {
5178 match state {
5179 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5180 RepositoryState::Remote { client, project_id } => {
5181 let response = client
5182 .request(proto::GetBlobContent {
5183 project_id: project_id.to_proto(),
5184 repository_id: repository_id.0,
5185 oid: oid.to_string(),
5186 })
5187 .await?;
5188 Ok(response.content)
5189 }
5190 }
5191 });
5192 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5193 }
5194
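/// Called when files belonging to this repository change on disk. Accumulates
/// the changed paths and schedules a keyed job that re-runs `git status` for
/// just those paths, applying any resulting status edits to the snapshot.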
5195 fn paths_changed(
5196 &mut self,
5197 paths: Vec<RepoPath>,
5198 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5199 cx: &mut Context<Self>,
5200 ) {
5201 self.paths_needing_status_update.extend(paths);
5202
5203 let this = cx.weak_entity();
5204 let _ = self.send_keyed_job(
5205 Some(GitJobKey::RefreshStatuses),
5206 None,
5207 |state, mut cx| async move {
5208 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5209 (
5210 this.snapshot.clone(),
5211 mem::take(&mut this.paths_needing_status_update),
5212 )
5213 })?;
5214 let RepositoryState::Local { backend, .. } = state else {
5215 bail!("not a local repository")
5216 };
5217
5218 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5219 if paths.is_empty() {
5220 return Ok(());
5221 }
5222 let statuses = backend.status(&paths).await?;
5223 let stash_entries = backend.stash_entries().await?;
5224
5225 let changed_path_statuses = cx
5226 .background_spawn(async move {
5227 let mut changed_path_statuses = Vec::new();
5228 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5229 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5230
5231 for (repo_path, status) in &*statuses.entries {
5232 changed_paths.remove(repo_path);
5233 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5234 && cursor.item().is_some_and(|entry| entry.status == *status)
5235 {
5236 continue;
5237 }
5238
5239 changed_path_statuses.push(Edit::Insert(StatusEntry {
5240 repo_path: repo_path.clone(),
5241 status: *status,
5242 }));
5243 }
5244 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5245 for path in changed_paths.into_iter() {
5246 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5247 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5248 }
5249 }
5250 changed_path_statuses
5251 })
5252 .await;
5253
5254 this.update(&mut cx, |this, cx| {
5255 if this.snapshot.stash_entries != stash_entries {
5256 cx.emit(RepositoryEvent::StashEntriesChanged);
5257 this.snapshot.stash_entries = stash_entries;
5258 }
5259
5260 if !changed_path_statuses.is_empty() {
5261 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5262 this.snapshot
5263 .statuses_by_path
5264 .edit(changed_path_statuses, ());
5265 this.snapshot.scan_id += 1;
5266 }
5267
5268 if let Some(updates_tx) = updates_tx {
5269 updates_tx
5270 .unbounded_send(DownstreamUpdate::UpdateRepository(
5271 this.snapshot.clone(),
5272 ))
5273 .ok();
5274 }
5275 })
5276 },
5277 );
5278 }
5279
5280 /// Returns the currently running git command and when it started, if any.
5281 pub fn current_job(&self) -> Option<JobInfo> {
5282 self.active_jobs.values().next().cloned()
5283 }
5284
5285 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5286 self.send_job(None, |_, _| async {})
5287 }
5288}
5289
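/// Builds a permalink for a file inside a crate vendored from a cargo registry
/// by reading `.cargo_vcs_info.json` and `Cargo.toml` from the package root to
/// recover the upstream repository and commit sha.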
5290fn get_permalink_in_rust_registry_src(
5291 provider_registry: Arc<GitHostingProviderRegistry>,
5292 path: PathBuf,
5293 selection: Range<u32>,
5294) -> Result<url::Url> {
5295 #[derive(Deserialize)]
5296 struct CargoVcsGit {
5297 sha1: String,
5298 }
5299
5300 #[derive(Deserialize)]
5301 struct CargoVcsInfo {
5302 git: CargoVcsGit,
5303 path_in_vcs: String,
5304 }
5305
5306 #[derive(Deserialize)]
5307 struct CargoPackage {
5308 repository: String,
5309 }
5310
5311 #[derive(Deserialize)]
5312 struct CargoToml {
5313 package: CargoPackage,
5314 }
5315
5316 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5317 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5318 Some((dir, json))
5319 }) else {
5320 bail!("No .cargo_vcs_info.json found in parent directories")
5321 };
5322 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5323 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5324 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5325 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5326 .context("parsing package.repository field of manifest")?;
5327 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5328 let permalink = provider.build_permalink(
5329 remote,
5330 BuildPermalinkParams::new(
5331 &cargo_vcs_info.git.sha1,
5332 &RepoPath(
5333 RelPath::new(&path, PathStyle::local())
5334 .context("invalid path")?
5335 .into_arc(),
5336 ),
5337 Some(selection),
5338 ),
5339 );
5340 Ok(permalink)
5341}
5342
5343fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5344 let Some(blame) = blame else {
5345 return proto::BlameBufferResponse {
5346 blame_response: None,
5347 };
5348 };
5349
5350 let entries = blame
5351 .entries
5352 .into_iter()
5353 .map(|entry| proto::BlameEntry {
5354 sha: entry.sha.as_bytes().into(),
5355 start_line: entry.range.start,
5356 end_line: entry.range.end,
5357 original_line_number: entry.original_line_number,
5358 author: entry.author,
5359 author_mail: entry.author_mail,
5360 author_time: entry.author_time,
5361 author_tz: entry.author_tz,
5362 committer: entry.committer_name,
5363 committer_mail: entry.committer_email,
5364 committer_time: entry.committer_time,
5365 committer_tz: entry.committer_tz,
5366 summary: entry.summary,
5367 previous: entry.previous,
5368 filename: entry.filename,
5369 })
5370 .collect::<Vec<_>>();
5371
5372 let messages = blame
5373 .messages
5374 .into_iter()
5375 .map(|(oid, message)| proto::CommitMessage {
5376 oid: oid.as_bytes().into(),
5377 message,
5378 })
5379 .collect::<Vec<_>>();
5380
5381 proto::BlameBufferResponse {
5382 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5383 entries,
5384 messages,
5385 remote_url: blame.remote_url,
5386 }),
5387 }
5388}
5389
5390fn deserialize_blame_buffer_response(
5391 response: proto::BlameBufferResponse,
5392) -> Option<git::blame::Blame> {
5393 let response = response.blame_response?;
5394 let entries = response
5395 .entries
5396 .into_iter()
5397 .filter_map(|entry| {
5398 Some(git::blame::BlameEntry {
5399 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5400 range: entry.start_line..entry.end_line,
5401 original_line_number: entry.original_line_number,
5402 committer_name: entry.committer,
5403 committer_time: entry.committer_time,
5404 committer_tz: entry.committer_tz,
5405 committer_email: entry.committer_mail,
5406 author: entry.author,
5407 author_mail: entry.author_mail,
5408 author_time: entry.author_time,
5409 author_tz: entry.author_tz,
5410 summary: entry.summary,
5411 previous: entry.previous,
5412 filename: entry.filename,
5413 })
5414 })
5415 .collect::<Vec<_>>();
5416
5417 let messages = response
5418 .messages
5419 .into_iter()
5420 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5421 .collect::<HashMap<_, _>>();
5422
5423 Some(Blame {
5424 entries,
5425 messages,
5426 remote_url: response.remote_url,
5427 })
5428}
5429
5430fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5431 proto::Branch {
5432 is_head: branch.is_head,
5433 ref_name: branch.ref_name.to_string(),
5434 unix_timestamp: branch
5435 .most_recent_commit
5436 .as_ref()
5437 .map(|commit| commit.commit_timestamp as u64),
5438 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5439 ref_name: upstream.ref_name.to_string(),
5440 tracking: upstream
5441 .tracking
5442 .status()
5443 .map(|upstream| proto::UpstreamTracking {
5444 ahead: upstream.ahead as u64,
5445 behind: upstream.behind as u64,
5446 }),
5447 }),
5448 most_recent_commit: branch
5449 .most_recent_commit
5450 .as_ref()
5451 .map(|commit| proto::CommitSummary {
5452 sha: commit.sha.to_string(),
5453 subject: commit.subject.to_string(),
5454 commit_timestamp: commit.commit_timestamp,
5455 author_name: commit.author_name.to_string(),
5456 }),
5457 }
5458}
5459
5460fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5461 proto::Worktree {
5462 path: worktree.path.to_string_lossy().to_string(),
5463 ref_name: worktree.ref_name.to_string(),
5464 sha: worktree.sha.to_string(),
5465 }
5466}
5467
5468fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5469 git::repository::Worktree {
5470 path: PathBuf::from(proto.path.clone()),
5471 ref_name: proto.ref_name.clone().into(),
5472 sha: proto.sha.clone().into(),
5473 }
5474}
5475
5476fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5477 git::repository::Branch {
5478 is_head: proto.is_head,
5479 ref_name: proto.ref_name.clone().into(),
5480 upstream: proto
5481 .upstream
5482 .as_ref()
5483 .map(|upstream| git::repository::Upstream {
5484 ref_name: upstream.ref_name.to_string().into(),
5485 tracking: upstream
5486 .tracking
5487 .as_ref()
5488 .map(|tracking| {
5489 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5490 ahead: tracking.ahead as u32,
5491 behind: tracking.behind as u32,
5492 })
5493 })
5494 .unwrap_or(git::repository::UpstreamTracking::Gone),
5495 }),
5496 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5497 git::repository::CommitSummary {
5498 sha: commit.sha.to_string().into(),
5499 subject: commit.subject.to_string().into(),
5500 commit_timestamp: commit.commit_timestamp,
5501 author_name: commit.author_name.to_string().into(),
5502 has_parent: true,
5503 }
5504 }),
5505 }
5506}
5507
5508fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5509 proto::GitCommitDetails {
5510 sha: commit.sha.to_string(),
5511 message: commit.message.to_string(),
5512 commit_timestamp: commit.commit_timestamp,
5513 author_email: commit.author_email.to_string(),
5514 author_name: commit.author_name.to_string(),
5515 }
5516}
5517
5518fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5519 CommitDetails {
5520 sha: proto.sha.clone().into(),
5521 message: proto.message.clone().into(),
5522 commit_timestamp: proto.commit_timestamp,
5523 author_email: proto.author_email.clone().into(),
5524 author_name: proto.author_name.clone().into(),
5525 }
5526}
5527
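/// Builds a fresh `RepositorySnapshot` from the backend (branch, head commit,
/// statuses, stash, merge state) and reports which `RepositoryEvent`s should be
/// emitted relative to the previous snapshot.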
5528async fn compute_snapshot(
5529 id: RepositoryId,
5530 work_directory_abs_path: Arc<Path>,
5531 prev_snapshot: RepositorySnapshot,
5532 backend: Arc<dyn GitRepository>,
5533) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5534 let mut events = Vec::new();
5535 let branches = backend.branches().await?;
5536 let branch = branches.into_iter().find(|branch| branch.is_head);
5537 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5538 let stash_entries = backend.stash_entries().await?;
5539 let statuses_by_path = SumTree::from_iter(
5540 statuses
5541 .entries
5542 .iter()
5543 .map(|(repo_path, status)| StatusEntry {
5544 repo_path: repo_path.clone(),
5545 status: *status,
5546 }),
5547 (),
5548 );
5549 let (merge_details, merge_heads_changed) =
5550 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5551 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5552
5553 let pending_ops_by_path = prev_snapshot.pending_ops_by_path.clone();
5554
5555 if merge_heads_changed {
5556 events.push(RepositoryEvent::MergeHeadsChanged);
5557 }
5558
5559 if statuses_by_path != prev_snapshot.statuses_by_path {
5560 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5561 }
5562
5563 // Resolve the head commit directly; useful when `branch` is None (detached HEAD).
5564 let head_commit = match backend.head_sha().await {
5565 Some(head_sha) => backend.show(head_sha).await.log_err(),
5566 None => None,
5567 };
5568
5569 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5570 events.push(RepositoryEvent::BranchChanged);
5571 }
5572
5573 // Used by edit prediction data collection
5574 let remote_origin_url = backend.remote_url("origin");
5575 let remote_upstream_url = backend.remote_url("upstream");
5576
5577 let snapshot = RepositorySnapshot {
5578 id,
5579 statuses_by_path,
5580 pending_ops_by_path,
5581 work_directory_abs_path,
5582 path_style: prev_snapshot.path_style,
5583 scan_id: prev_snapshot.scan_id + 1,
5584 branch,
5585 head_commit,
5586 merge: merge_details,
5587 remote_origin_url,
5588 remote_upstream_url,
5589 stash_entries,
5590 };
5591
5592 Ok((snapshot, events))
5593}
5594
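/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no structured variant is present.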
5595fn status_from_proto(
5596 simple_status: i32,
5597 status: Option<proto::GitFileStatus>,
5598) -> anyhow::Result<FileStatus> {
5599 use proto::git_file_status::Variant;
5600
5601 let Some(variant) = status.and_then(|status| status.variant) else {
5602 let code = proto::GitStatus::from_i32(simple_status)
5603 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5604 let result = match code {
5605 proto::GitStatus::Added => TrackedStatus {
5606 worktree_status: StatusCode::Added,
5607 index_status: StatusCode::Unmodified,
5608 }
5609 .into(),
5610 proto::GitStatus::Modified => TrackedStatus {
5611 worktree_status: StatusCode::Modified,
5612 index_status: StatusCode::Unmodified,
5613 }
5614 .into(),
5615 proto::GitStatus::Conflict => UnmergedStatus {
5616 first_head: UnmergedStatusCode::Updated,
5617 second_head: UnmergedStatusCode::Updated,
5618 }
5619 .into(),
5620 proto::GitStatus::Deleted => TrackedStatus {
5621 worktree_status: StatusCode::Deleted,
5622 index_status: StatusCode::Unmodified,
5623 }
5624 .into(),
5625 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5626 };
5627 return Ok(result);
5628 };
5629
5630 let result = match variant {
5631 Variant::Untracked(_) => FileStatus::Untracked,
5632 Variant::Ignored(_) => FileStatus::Ignored,
5633 Variant::Unmerged(unmerged) => {
5634 let [first_head, second_head] =
5635 [unmerged.first_head, unmerged.second_head].map(|head| {
5636 let code = proto::GitStatus::from_i32(head)
5637 .with_context(|| format!("Invalid git status code: {head}"))?;
5638 let result = match code {
5639 proto::GitStatus::Added => UnmergedStatusCode::Added,
5640 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5641 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5642 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5643 };
5644 Ok(result)
5645 });
5646 let [first_head, second_head] = [first_head?, second_head?];
5647 UnmergedStatus {
5648 first_head,
5649 second_head,
5650 }
5651 .into()
5652 }
5653 Variant::Tracked(tracked) => {
5654 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5655 .map(|status| {
5656 let code = proto::GitStatus::from_i32(status)
5657 .with_context(|| format!("Invalid git status code: {status}"))?;
5658 let result = match code {
5659 proto::GitStatus::Modified => StatusCode::Modified,
5660 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5661 proto::GitStatus::Added => StatusCode::Added,
5662 proto::GitStatus::Deleted => StatusCode::Deleted,
5663 proto::GitStatus::Renamed => StatusCode::Renamed,
5664 proto::GitStatus::Copied => StatusCode::Copied,
5665 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5666 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5667 };
5668 Ok(result)
5669 });
5670 let [index_status, worktree_status] = [index_status?, worktree_status?];
5671 TrackedStatus {
5672 index_status,
5673 worktree_status,
5674 }
5675 .into()
5676 }
5677 };
5678 Ok(result)
5679}
5680
5681fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5682 use proto::git_file_status::{Tracked, Unmerged, Variant};
5683
5684 let variant = match status {
5685 FileStatus::Untracked => Variant::Untracked(Default::default()),
5686 FileStatus::Ignored => Variant::Ignored(Default::default()),
5687 FileStatus::Unmerged(UnmergedStatus {
5688 first_head,
5689 second_head,
5690 }) => Variant::Unmerged(Unmerged {
5691 first_head: unmerged_status_to_proto(first_head),
5692 second_head: unmerged_status_to_proto(second_head),
5693 }),
5694 FileStatus::Tracked(TrackedStatus {
5695 index_status,
5696 worktree_status,
5697 }) => Variant::Tracked(Tracked {
5698 index_status: tracked_status_to_proto(index_status),
5699 worktree_status: tracked_status_to_proto(worktree_status),
5700 }),
5701 };
5702 proto::GitFileStatus {
5703 variant: Some(variant),
5704 }
5705}
5706
5707fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5708 match code {
5709 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5710 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5711 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5712 }
5713}
5714
5715fn tracked_status_to_proto(code: StatusCode) -> i32 {
5716 match code {
5717 StatusCode::Added => proto::GitStatus::Added as _,
5718 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5719 StatusCode::Modified => proto::GitStatus::Modified as _,
5720 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5721 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5722 StatusCode::Copied => proto::GitStatus::Copied as _,
5723 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5724 }
5725}