1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use std::{
59 cmp::Ordering,
60 collections::{BTreeSet, VecDeque},
61 future::Future,
62 mem,
63 ops::Range,
64 path::{Path, PathBuf},
65 str::FromStr,
66 sync::{
67 Arc,
68 atomic::{self, AtomicU64},
69 },
70 time::Instant,
71};
72use sum_tree::{Edit, SumTree, TreeSet};
73use task::Shell;
74use text::{Bias, BufferId};
75use util::{
76 ResultExt, debug_panic,
77 paths::{PathStyle, SanitizedPath},
78 post_inc,
79 rel_path::RelPath,
80};
81use worktree::{
82 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
83 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
84};
85use zeroize::Zeroize;
86
87pub struct GitStore {
88 state: GitStoreState,
89 buffer_store: Entity<BufferStore>,
90 worktree_store: Entity<WorktreeStore>,
91 repositories: HashMap<RepositoryId, Entity<Repository>>,
92 active_repo_id: Option<RepositoryId>,
93 #[allow(clippy::type_complexity)]
94 loading_diffs:
95 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
96 diffs: HashMap<BufferId, Entity<BufferGitState>>,
97 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
98 _subscriptions: Vec<Subscription>,
99}
100
101#[derive(Default)]
102struct SharedDiffs {
103 unstaged: Option<Entity<BufferDiff>>,
104 uncommitted: Option<Entity<BufferDiff>>,
105}
106
107struct BufferGitState {
108 unstaged_diff: Option<WeakEntity<BufferDiff>>,
109 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
110 conflict_set: Option<WeakEntity<ConflictSet>>,
111 recalculate_diff_task: Option<Task<Result<()>>>,
112 reparse_conflict_markers_task: Option<Task<Result<()>>>,
113 language: Option<Arc<Language>>,
114 language_registry: Option<Arc<LanguageRegistry>>,
115 conflict_updated_futures: Vec<oneshot::Sender<()>>,
116 recalculating_tx: postage::watch::Sender<bool>,
117
118 /// These operation counts are used to ensure that head and index text
119 /// values read from the git repository are up-to-date with any hunk staging
120 /// operations that have been performed on the BufferDiff.
121 ///
122 /// The operation count is incremented immediately when the user initiates a
123 /// hunk stage/unstage operation. Then, upon finishing writing the new index
124 /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to reflect
125 /// the operation count that prompted the write.
126 hunk_staging_operation_count: usize,
127 hunk_staging_operation_count_as_of_write: usize,
128
129 head_text: Option<Arc<String>>,
130 index_text: Option<Arc<String>>,
131 head_changed: bool,
132 index_changed: bool,
133 language_changed: bool,
134}
135
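/// Which diff base texts changed for a buffer: only the index text, only the
/// HEAD text, each one independently, or both set to the same content.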
136#[derive(Clone, Debug)]
137enum DiffBasesChange {
138 SetIndex(Option<String>),
139 SetHead(Option<String>),
140 SetEach {
141 index: Option<String>,
142 head: Option<String>,
143 },
144 SetBoth(Option<String>),
145}
146
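/// The kind of diff being tracked for a buffer: `Unstaged` compares the buffer
/// against the index, `Uncommitted` compares it against HEAD.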
147#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
148enum DiffKind {
149 Unstaged,
150 Uncommitted,
151}
152
153enum GitStoreState {
154 Local {
155 next_repository_id: Arc<AtomicU64>,
156 downstream: Option<LocalDownstreamState>,
157 project_environment: Entity<ProjectEnvironment>,
158 fs: Arc<dyn Fs>,
159 },
160 Remote {
161 upstream_client: AnyProtoClient,
162 upstream_project_id: u64,
163 downstream: Option<(AnyProtoClient, ProjectId)>,
164 },
165}
166
167enum DownstreamUpdate {
168 UpdateRepository(RepositorySnapshot),
169 RemoveRepository(RepositoryId),
170}
171
172struct LocalDownstreamState {
173 client: AnyProtoClient,
174 project_id: ProjectId,
175 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
176 _task: Task<Result<()>>,
177}
178
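/// A checkpoint of every repository in the store, keyed by each repository's
/// work-directory path.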
179#[derive(Clone, Debug)]
180pub struct GitStoreCheckpoint {
181 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
182}
183
184#[derive(Clone, Debug, PartialEq, Eq)]
185pub struct StatusEntry {
186 pub repo_path: RepoPath,
187 pub status: FileStatus,
188}
189
190impl StatusEntry {
191 fn to_proto(&self) -> proto::StatusEntry {
192 let simple_status = match self.status {
193 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
194 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
195 FileStatus::Tracked(TrackedStatus {
196 index_status,
197 worktree_status,
198 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
199 worktree_status
200 } else {
201 index_status
202 }),
203 };
204
205 proto::StatusEntry {
206 repo_path: self.repo_path.to_proto(),
207 simple_status,
208 status: Some(status_to_proto(self.status)),
209 }
210 }
211}
212
213impl TryFrom<proto::StatusEntry> for StatusEntry {
214 type Error = anyhow::Error;
215
216 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
217 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
218 let status = status_from_proto(value.simple_status, value.status)?;
219 Ok(Self { repo_path, status })
220 }
221}
222
223impl sum_tree::Item for StatusEntry {
224 type Summary = PathSummary<GitSummary>;
225
226 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
227 PathSummary {
228 max_path: self.repo_path.0.clone(),
229 item_summary: self.status.summary(),
230 }
231 }
232}
233
234impl sum_tree::KeyedItem for StatusEntry {
235 type Key = PathKey;
236
237 fn key(&self) -> Self::Key {
238 PathKey(self.repo_path.0.clone())
239 }
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
243pub struct RepositoryId(pub u64);
244
245#[derive(Clone, Debug, Default, PartialEq, Eq)]
246pub struct MergeDetails {
247 pub conflicted_paths: TreeSet<RepoPath>,
248 pub message: Option<SharedString>,
249 pub heads: Vec<Option<SharedString>>,
250}
251
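/// A point-in-time view of a repository's git state: per-path statuses and
/// pending operations, branch and head commit, merge details, remote URLs, and
/// stash entries.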
252#[derive(Clone, Debug, PartialEq, Eq)]
253pub struct RepositorySnapshot {
254 pub id: RepositoryId,
255 pub statuses_by_path: SumTree<StatusEntry>,
256 pub pending_ops_by_path: SumTree<PendingOps>,
257 pub work_directory_abs_path: Arc<Path>,
258 pub path_style: PathStyle,
259 pub branch: Option<Branch>,
260 pub head_commit: Option<CommitDetails>,
261 pub scan_id: u64,
262 pub merge: MergeDetails,
263 pub remote_origin_url: Option<String>,
264 pub remote_upstream_url: Option<String>,
265 pub stash_entries: GitStash,
266}
267
268type JobId = u64;
269
270#[derive(Clone, Debug, PartialEq, Eq)]
271pub struct JobInfo {
272 pub start: Instant,
273 pub message: SharedString,
274}
275
276pub struct Repository {
277 this: WeakEntity<Self>,
278 snapshot: RepositorySnapshot,
279 commit_message_buffer: Option<Entity<Buffer>>,
280 git_store: WeakEntity<GitStore>,
281 // For a local repository, holds paths that have had worktree events since the last status scan completed,
282 // and that should be examined during the next status scan.
283 paths_needing_status_update: BTreeSet<RepoPath>,
284 job_sender: mpsc::UnboundedSender<GitJob>,
285 active_jobs: HashMap<JobId, JobInfo>,
286 job_id: JobId,
287 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
288 latest_askpass_id: u64,
289}
290
291impl std::ops::Deref for Repository {
292 type Target = RepositorySnapshot;
293
294 fn deref(&self) -> &Self::Target {
295 &self.snapshot
296 }
297}
298
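/// How git operations are carried out for a repository: directly against a
/// local git backend, or by forwarding requests to the upstream client that
/// hosts the project.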
299#[derive(Clone)]
300pub enum RepositoryState {
301 Local {
302 backend: Arc<dyn GitRepository>,
303 environment: Arc<HashMap<String, String>>,
304 },
305 Remote {
306 project_id: ProjectId,
307 client: AnyProtoClient,
308 },
309}
310
311#[derive(Clone, Debug, PartialEq, Eq)]
312pub enum RepositoryEvent {
313 StatusesChanged {
314 // TODO could report which statuses changed here
315 full_scan: bool,
316 },
317 MergeHeadsChanged,
318 BranchChanged,
319 StashEntriesChanged,
320}
321
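/// Event emitted by a [`Repository`] when its set of running git jobs changes.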
322#[derive(Clone, Debug)]
323pub struct JobsUpdated;
324
325#[derive(Debug)]
326pub enum GitStoreEvent {
327 ActiveRepositoryChanged(Option<RepositoryId>),
328 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
329 RepositoryAdded,
330 RepositoryRemoved(RepositoryId),
331 IndexWriteError(anyhow::Error),
332 JobsUpdated,
333 ConflictsUpdated,
334}
335
336impl EventEmitter<RepositoryEvent> for Repository {}
337impl EventEmitter<JobsUpdated> for Repository {}
338impl EventEmitter<GitStoreEvent> for GitStore {}
339
340pub struct GitJob {
341 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
342 key: Option<GitJobKey>,
343}
344
345#[derive(PartialEq, Eq)]
346enum GitJobKey {
347 WriteIndex(RepoPath),
348 ReloadBufferDiffBases,
349 RefreshStatuses,
350 ReloadGitState,
351}
352
353impl GitStore {
354 pub fn local(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 environment: Entity<ProjectEnvironment>,
358 fs: Arc<dyn Fs>,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Local {
365 next_repository_id: Arc::new(AtomicU64::new(1)),
366 downstream: None,
367 project_environment: environment,
368 fs,
369 },
370 cx,
371 )
372 }
373
374 pub fn remote(
375 worktree_store: &Entity<WorktreeStore>,
376 buffer_store: Entity<BufferStore>,
377 upstream_client: AnyProtoClient,
378 project_id: u64,
379 cx: &mut Context<Self>,
380 ) -> Self {
381 Self::new(
382 worktree_store.clone(),
383 buffer_store,
384 GitStoreState::Remote {
385 upstream_client,
386 upstream_project_id: project_id,
387 downstream: None,
388 },
389 cx,
390 )
391 }
392
393 fn new(
394 worktree_store: Entity<WorktreeStore>,
395 buffer_store: Entity<BufferStore>,
396 state: GitStoreState,
397 cx: &mut Context<Self>,
398 ) -> Self {
399 let _subscriptions = vec![
400 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
401 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
402 ];
403
404 GitStore {
405 state,
406 buffer_store,
407 worktree_store,
408 repositories: HashMap::default(),
409 active_repo_id: None,
410 _subscriptions,
411 loading_diffs: HashMap::default(),
412 shared_diffs: HashMap::default(),
413 diffs: HashMap::default(),
414 }
415 }
416
417 pub fn init(client: &AnyProtoClient) {
418 client.add_entity_request_handler(Self::handle_get_remotes);
419 client.add_entity_request_handler(Self::handle_get_branches);
420 client.add_entity_request_handler(Self::handle_get_default_branch);
421 client.add_entity_request_handler(Self::handle_change_branch);
422 client.add_entity_request_handler(Self::handle_create_branch);
423 client.add_entity_request_handler(Self::handle_rename_branch);
424 client.add_entity_request_handler(Self::handle_git_init);
425 client.add_entity_request_handler(Self::handle_push);
426 client.add_entity_request_handler(Self::handle_pull);
427 client.add_entity_request_handler(Self::handle_fetch);
428 client.add_entity_request_handler(Self::handle_stage);
429 client.add_entity_request_handler(Self::handle_unstage);
430 client.add_entity_request_handler(Self::handle_stash);
431 client.add_entity_request_handler(Self::handle_stash_pop);
432 client.add_entity_request_handler(Self::handle_stash_apply);
433 client.add_entity_request_handler(Self::handle_stash_drop);
434 client.add_entity_request_handler(Self::handle_commit);
435 client.add_entity_request_handler(Self::handle_reset);
436 client.add_entity_request_handler(Self::handle_show);
437 client.add_entity_request_handler(Self::handle_load_commit_diff);
438 client.add_entity_request_handler(Self::handle_checkout_files);
439 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
440 client.add_entity_request_handler(Self::handle_set_index_text);
441 client.add_entity_request_handler(Self::handle_askpass);
442 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
443 client.add_entity_request_handler(Self::handle_git_diff);
444 client.add_entity_request_handler(Self::handle_tree_diff);
445 client.add_entity_request_handler(Self::handle_get_blob_content);
446 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
447 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
448 client.add_entity_message_handler(Self::handle_update_diff_bases);
449 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
450 client.add_entity_request_handler(Self::handle_blame_buffer);
451 client.add_entity_message_handler(Self::handle_update_repository);
452 client.add_entity_message_handler(Self::handle_remove_repository);
453 client.add_entity_request_handler(Self::handle_git_clone);
454 client.add_entity_request_handler(Self::handle_get_worktrees);
455 client.add_entity_request_handler(Self::handle_create_worktree);
456 }
457
458 pub fn is_local(&self) -> bool {
459 matches!(self.state, GitStoreState::Local { .. })
460 }
461 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
462 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
463 let id = repo.read(cx).id;
464 if self.active_repo_id != Some(id) {
465 self.active_repo_id = Some(id);
466 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
467 }
468 }
469 }
470
471 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
472 match &mut self.state {
473 GitStoreState::Remote {
474 downstream: downstream_client,
475 ..
476 } => {
477 for repo in self.repositories.values() {
478 let update = repo.read(cx).snapshot.initial_update(project_id);
479 for update in split_repository_update(update) {
480 client.send(update).log_err();
481 }
482 }
483 *downstream_client = Some((client, ProjectId(project_id)));
484 }
485 GitStoreState::Local {
486 downstream: downstream_client,
487 ..
488 } => {
489 let mut snapshots = HashMap::default();
490 let (updates_tx, mut updates_rx) = mpsc::unbounded();
491 for repo in self.repositories.values() {
492 updates_tx
493 .unbounded_send(DownstreamUpdate::UpdateRepository(
494 repo.read(cx).snapshot.clone(),
495 ))
496 .ok();
497 }
498 *downstream_client = Some(LocalDownstreamState {
499 client: client.clone(),
500 project_id: ProjectId(project_id),
501 updates_tx,
502 _task: cx.spawn(async move |this, cx| {
503 cx.background_spawn(async move {
504 while let Some(update) = updates_rx.next().await {
505 match update {
506 DownstreamUpdate::UpdateRepository(snapshot) => {
507 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
508 {
509 let update =
510 snapshot.build_update(old_snapshot, project_id);
511 *old_snapshot = snapshot;
512 for update in split_repository_update(update) {
513 client.send(update)?;
514 }
515 } else {
516 let update = snapshot.initial_update(project_id);
517 for update in split_repository_update(update) {
518 client.send(update)?;
519 }
520 snapshots.insert(snapshot.id, snapshot);
521 }
522 }
523 DownstreamUpdate::RemoveRepository(id) => {
524 client.send(proto::RemoveRepository {
525 project_id,
526 id: id.to_proto(),
527 })?;
528 }
529 }
530 }
531 anyhow::Ok(())
532 })
533 .await
534 .ok();
535 this.update(cx, |this, _| {
536 if let GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } = &mut this.state
540 {
541 downstream_client.take();
542 } else {
543 unreachable!("unshared called on remote store");
544 }
545 })
546 }),
547 });
548 }
549 }
550 }
551
552 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
553 match &mut self.state {
554 GitStoreState::Local {
555 downstream: downstream_client,
556 ..
557 } => {
558 downstream_client.take();
559 }
560 GitStoreState::Remote {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 }
567 self.shared_diffs.clear();
568 }
569
570 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
571 self.shared_diffs.remove(peer_id);
572 }
573
574 pub fn active_repository(&self) -> Option<Entity<Repository>> {
575 self.active_repo_id
576 .as_ref()
577 .map(|id| self.repositories[id].clone())
578 }
579
580 pub fn open_unstaged_diff(
581 &mut self,
582 buffer: Entity<Buffer>,
583 cx: &mut Context<Self>,
584 ) -> Task<Result<Entity<BufferDiff>>> {
585 let buffer_id = buffer.read(cx).remote_id();
586 if let Some(diff_state) = self.diffs.get(&buffer_id)
587 && let Some(unstaged_diff) = diff_state
588 .read(cx)
589 .unstaged_diff
590 .as_ref()
591 .and_then(|weak| weak.upgrade())
592 {
593 if let Some(task) =
594 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
595 {
596 return cx.background_executor().spawn(async move {
597 task.await;
598 Ok(unstaged_diff)
599 });
600 }
601 return Task::ready(Ok(unstaged_diff));
602 }
603
604 let Some((repo, repo_path)) =
605 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
606 else {
607 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
608 };
609
610 let task = self
611 .loading_diffs
612 .entry((buffer_id, DiffKind::Unstaged))
613 .or_insert_with(|| {
614 let staged_text = repo.update(cx, |repo, cx| {
615 repo.load_staged_text(buffer_id, repo_path, cx)
616 });
617 cx.spawn(async move |this, cx| {
618 Self::open_diff_internal(
619 this,
620 DiffKind::Unstaged,
621 staged_text.await.map(DiffBasesChange::SetIndex),
622 buffer,
623 cx,
624 )
625 .await
626 .map_err(Arc::new)
627 })
628 .shared()
629 })
630 .clone();
631
632 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
633 }
634
635 pub fn open_diff_since(
636 &mut self,
637 oid: Option<git::Oid>,
638 buffer: Entity<Buffer>,
639 repo: Entity<Repository>,
640 languages: Arc<LanguageRegistry>,
641 cx: &mut Context<Self>,
642 ) -> Task<Result<Entity<BufferDiff>>> {
643 cx.spawn(async move |this, cx| {
644 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
645 let content = match oid {
646 None => None,
647 Some(oid) => Some(
648 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
649 .await?,
650 ),
651 };
652 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
653
654 buffer_diff
655 .update(cx, |buffer_diff, cx| {
656 buffer_diff.set_base_text(
657 content.map(Arc::new),
658 buffer_snapshot.language().cloned(),
659 Some(languages.clone()),
660 buffer_snapshot.text,
661 cx,
662 )
663 })?
664 .await?;
665 let unstaged_diff = this
666 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
667 .await?;
668 buffer_diff.update(cx, |buffer_diff, _| {
669 buffer_diff.set_secondary_diff(unstaged_diff);
670 })?;
671
672 this.update(cx, |_, cx| {
673 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
674 .detach();
675 })?;
676
677 Ok(buffer_diff)
678 })
679 }
680
681 pub fn open_uncommitted_diff(
682 &mut self,
683 buffer: Entity<Buffer>,
684 cx: &mut Context<Self>,
685 ) -> Task<Result<Entity<BufferDiff>>> {
686 let buffer_id = buffer.read(cx).remote_id();
687
688 if let Some(diff_state) = self.diffs.get(&buffer_id)
689 && let Some(uncommitted_diff) = diff_state
690 .read(cx)
691 .uncommitted_diff
692 .as_ref()
693 .and_then(|weak| weak.upgrade())
694 {
695 if let Some(task) =
696 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
697 {
698 return cx.background_executor().spawn(async move {
699 task.await;
700 Ok(uncommitted_diff)
701 });
702 }
703 return Task::ready(Ok(uncommitted_diff));
704 }
705
706 let Some((repo, repo_path)) =
707 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
708 else {
709 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
710 };
711
712 let task = self
713 .loading_diffs
714 .entry((buffer_id, DiffKind::Uncommitted))
715 .or_insert_with(|| {
716 let changes = repo.update(cx, |repo, cx| {
717 repo.load_committed_text(buffer_id, repo_path, cx)
718 });
719
720 // todo(lw): hot foreground spawn
721 cx.spawn(async move |this, cx| {
722 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
723 .await
724 .map_err(Arc::new)
725 })
726 .shared()
727 })
728 .clone();
729
730 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
731 }
732
733 async fn open_diff_internal(
734 this: WeakEntity<Self>,
735 kind: DiffKind,
736 texts: Result<DiffBasesChange>,
737 buffer_entity: Entity<Buffer>,
738 cx: &mut AsyncApp,
739 ) -> Result<Entity<BufferDiff>> {
740 let diff_bases_change = match texts {
741 Err(e) => {
742 this.update(cx, |this, cx| {
743 let buffer = buffer_entity.read(cx);
744 let buffer_id = buffer.remote_id();
745 this.loading_diffs.remove(&(buffer_id, kind));
746 })?;
747 return Err(e);
748 }
749 Ok(change) => change,
750 };
751
752 this.update(cx, |this, cx| {
753 let buffer = buffer_entity.read(cx);
754 let buffer_id = buffer.remote_id();
755 let language = buffer.language().cloned();
756 let language_registry = buffer.language_registry();
757 let text_snapshot = buffer.text_snapshot();
758 this.loading_diffs.remove(&(buffer_id, kind));
759
760 let git_store = cx.weak_entity();
761 let diff_state = this
762 .diffs
763 .entry(buffer_id)
764 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
765
766 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
767
768 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
769 diff_state.update(cx, |diff_state, cx| {
770 diff_state.language = language;
771 diff_state.language_registry = language_registry;
772
773 match kind {
774 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
775 DiffKind::Uncommitted => {
776 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
777 diff
778 } else {
779 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
780 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
781 unstaged_diff
782 };
783
784 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
785 diff_state.uncommitted_diff = Some(diff.downgrade())
786 }
787 }
788
789 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
790 let rx = diff_state.wait_for_recalculation();
791
792 anyhow::Ok(async move {
793 if let Some(rx) = rx {
794 rx.await;
795 }
796 Ok(diff)
797 })
798 })
799 })??
800 .await
801 }
802
803 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
804 let diff_state = self.diffs.get(&buffer_id)?;
805 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
806 }
807
808 pub fn get_uncommitted_diff(
809 &self,
810 buffer_id: BufferId,
811 cx: &App,
812 ) -> Option<Entity<BufferDiff>> {
813 let diff_state = self.diffs.get(&buffer_id)?;
814 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
815 }
816
817 pub fn open_conflict_set(
818 &mut self,
819 buffer: Entity<Buffer>,
820 cx: &mut Context<Self>,
821 ) -> Entity<ConflictSet> {
822 log::debug!("open conflict set");
823 let buffer_id = buffer.read(cx).remote_id();
824
825 if let Some(git_state) = self.diffs.get(&buffer_id)
826 && let Some(conflict_set) = git_state
827 .read(cx)
828 .conflict_set
829 .as_ref()
830 .and_then(|weak| weak.upgrade())
831 {
832 let conflict_set = conflict_set;
833 let buffer_snapshot = buffer.read(cx).text_snapshot();
834
835 git_state.update(cx, |state, cx| {
836 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
837 });
838
839 return conflict_set;
840 }
841
842 let is_unmerged = self
843 .repository_and_path_for_buffer_id(buffer_id, cx)
844 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
845 let git_store = cx.weak_entity();
846 let buffer_git_state = self
847 .diffs
848 .entry(buffer_id)
849 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
850 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
851
852 self._subscriptions
853 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
854 cx.emit(GitStoreEvent::ConflictsUpdated);
855 }));
856
857 buffer_git_state.update(cx, |state, cx| {
858 state.conflict_set = Some(conflict_set.downgrade());
859 let buffer_snapshot = buffer.read(cx).text_snapshot();
860 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
861 });
862
863 conflict_set
864 }
865
866 pub fn project_path_git_status(
867 &self,
868 project_path: &ProjectPath,
869 cx: &App,
870 ) -> Option<FileStatus> {
871 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
872 Some(repo.read(cx).status_for_path(&repo_path)?.status)
873 }
874
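/// Captures a checkpoint of every repository in the store, which can later be
/// compared against or restored with [`Self::compare_checkpoints`] and
/// [`Self::restore_checkpoint`].
///
/// A minimal usage sketch (assuming an async context with a `cx: &mut AsyncApp`
/// and a `git_store: Entity<GitStore>` handle):
///
/// ```ignore
/// let checkpoint = git_store
///     .update(cx, |store, cx| store.checkpoint(cx))?
///     .await?;
/// // ...modify the working tree...
/// git_store
///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
///     .await?;
/// ```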
875 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
876 let mut work_directory_abs_paths = Vec::new();
877 let mut checkpoints = Vec::new();
878 for repository in self.repositories.values() {
879 repository.update(cx, |repository, _| {
880 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
881 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
882 });
883 }
884
885 cx.background_executor().spawn(async move {
886 let checkpoints = future::try_join_all(checkpoints).await?;
887 Ok(GitStoreCheckpoint {
888 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
889 .into_iter()
890 .zip(checkpoints)
891 .collect(),
892 })
893 })
894 }
895
896 pub fn restore_checkpoint(
897 &self,
898 checkpoint: GitStoreCheckpoint,
899 cx: &mut App,
900 ) -> Task<Result<()>> {
901 let repositories_by_work_dir_abs_path = self
902 .repositories
903 .values()
904 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
905 .collect::<HashMap<_, _>>();
906
907 let mut tasks = Vec::new();
908 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
909 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
910 let restore = repository.update(cx, |repository, _| {
911 repository.restore_checkpoint(checkpoint)
912 });
913 tasks.push(async move { restore.await? });
914 }
915 }
916 cx.background_spawn(async move {
917 future::try_join_all(tasks).await?;
918 Ok(())
919 })
920 }
921
922 /// Compares two checkpoints, returning true if they are equal.
923 pub fn compare_checkpoints(
924 &self,
925 left: GitStoreCheckpoint,
926 mut right: GitStoreCheckpoint,
927 cx: &mut App,
928 ) -> Task<Result<bool>> {
929 let repositories_by_work_dir_abs_path = self
930 .repositories
931 .values()
932 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
933 .collect::<HashMap<_, _>>();
934
935 let mut tasks = Vec::new();
936 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
937 if let Some(right_checkpoint) = right
938 .checkpoints_by_work_dir_abs_path
939 .remove(&work_dir_abs_path)
940 {
941 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
942 {
943 let compare = repository.update(cx, |repository, _| {
944 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
945 });
946
947 tasks.push(async move { compare.await? });
948 }
949 } else {
950 return Task::ready(Ok(false));
951 }
952 }
953 cx.background_spawn(async move {
954 Ok(future::try_join_all(tasks)
955 .await?
956 .into_iter()
957 .all(|result| result))
958 })
959 }
960
961 /// Blames a buffer.
962 pub fn blame_buffer(
963 &self,
964 buffer: &Entity<Buffer>,
965 version: Option<clock::Global>,
966 cx: &mut App,
967 ) -> Task<Result<Option<Blame>>> {
968 let buffer = buffer.read(cx);
969 let Some((repo, repo_path)) =
970 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
971 else {
972 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
973 };
974 let content = match &version {
975 Some(version) => buffer.rope_for_version(version),
976 None => buffer.as_rope().clone(),
977 };
978 let version = version.unwrap_or(buffer.version());
979 let buffer_id = buffer.remote_id();
980
981 let rx = repo.update(cx, |repo, _| {
982 repo.send_job(None, move |state, _| async move {
983 match state {
984 RepositoryState::Local { backend, .. } => backend
985 .blame(repo_path.clone(), content)
986 .await
987 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
988 .map(Some),
989 RepositoryState::Remote { project_id, client } => {
990 let response = client
991 .request(proto::BlameBuffer {
992 project_id: project_id.to_proto(),
993 buffer_id: buffer_id.into(),
994 version: serialize_version(&version),
995 })
996 .await?;
997 Ok(deserialize_blame_buffer_response(response))
998 }
999 }
1000 })
1001 });
1002
1003 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1004 }
1005
1006 pub fn get_permalink_to_line(
1007 &self,
1008 buffer: &Entity<Buffer>,
1009 selection: Range<u32>,
1010 cx: &mut App,
1011 ) -> Task<Result<url::Url>> {
1012 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1013 return Task::ready(Err(anyhow!("buffer has no file")));
1014 };
1015
1016 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1017 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1018 cx,
1019 ) else {
1020 // If we're not in a Git repo, check whether this is a Rust source
1021 // file in the Cargo registry (presumably opened with go-to-definition
1022 // from a normal Rust file). If so, we can put together a permalink
1023 // using crate metadata.
1024 if buffer
1025 .read(cx)
1026 .language()
1027 .is_none_or(|lang| lang.name() != "Rust".into())
1028 {
1029 return Task::ready(Err(anyhow!("no permalink available")));
1030 }
1031 let file_path = file.worktree.read(cx).absolutize(&file.path);
1032 return cx.spawn(async move |cx| {
1033 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1034 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1035 .context("no permalink available")
1036 });
1037 };
1038
1039 let buffer_id = buffer.read(cx).remote_id();
1040 let branch = repo.read(cx).branch.clone();
1041 let remote = branch
1042 .as_ref()
1043 .and_then(|b| b.upstream.as_ref())
1044 .and_then(|b| b.remote_name())
1045 .unwrap_or("origin")
1046 .to_string();
1047
1048 let rx = repo.update(cx, |repo, _| {
1049 repo.send_job(None, move |state, cx| async move {
1050 match state {
1051 RepositoryState::Local { backend, .. } => {
1052 let origin_url = backend
1053 .remote_url(&remote)
1054 .with_context(|| format!("remote \"{remote}\" not found"))?;
1055
1056 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1057
1058 let provider_registry =
1059 cx.update(GitHostingProviderRegistry::default_global)?;
1060
1061 let (provider, remote) =
1062 parse_git_remote_url(provider_registry, &origin_url)
1063 .context("parsing Git remote URL")?;
1064
1065 Ok(provider.build_permalink(
1066 remote,
1067 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1068 ))
1069 }
1070 RepositoryState::Remote { project_id, client } => {
1071 let response = client
1072 .request(proto::GetPermalinkToLine {
1073 project_id: project_id.to_proto(),
1074 buffer_id: buffer_id.into(),
1075 selection: Some(proto::Range {
1076 start: selection.start as u64,
1077 end: selection.end as u64,
1078 }),
1079 })
1080 .await?;
1081
1082 url::Url::parse(&response.permalink).context("failed to parse permalink")
1083 }
1084 }
1085 })
1086 });
1087 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1088 }
1089
1090 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1091 match &self.state {
1092 GitStoreState::Local {
1093 downstream: downstream_client,
1094 ..
1095 } => downstream_client
1096 .as_ref()
1097 .map(|state| (state.client.clone(), state.project_id)),
1098 GitStoreState::Remote {
1099 downstream: downstream_client,
1100 ..
1101 } => downstream_client.clone(),
1102 }
1103 }
1104
1105 fn upstream_client(&self) -> Option<AnyProtoClient> {
1106 match &self.state {
1107 GitStoreState::Local { .. } => None,
1108 GitStoreState::Remote {
1109 upstream_client, ..
1110 } => Some(upstream_client.clone()),
1111 }
1112 }
1113
1114 fn on_worktree_store_event(
1115 &mut self,
1116 worktree_store: Entity<WorktreeStore>,
1117 event: &WorktreeStoreEvent,
1118 cx: &mut Context<Self>,
1119 ) {
1120 let GitStoreState::Local {
1121 project_environment,
1122 downstream,
1123 next_repository_id,
1124 fs,
1125 } = &self.state
1126 else {
1127 return;
1128 };
1129
1130 match event {
1131 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1132 if let Some(worktree) = self
1133 .worktree_store
1134 .read(cx)
1135 .worktree_for_id(*worktree_id, cx)
1136 {
1137 let paths_by_git_repo =
1138 self.process_updated_entries(&worktree, updated_entries, cx);
1139 let downstream = downstream
1140 .as_ref()
1141 .map(|downstream| downstream.updates_tx.clone());
1142 cx.spawn(async move |_, cx| {
1143 let paths_by_git_repo = paths_by_git_repo.await;
1144 for (repo, paths) in paths_by_git_repo {
1145 repo.update(cx, |repo, cx| {
1146 repo.paths_changed(paths, downstream.clone(), cx);
1147 })
1148 .ok();
1149 }
1150 })
1151 .detach();
1152 }
1153 }
1154 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1155 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1156 else {
1157 return;
1158 };
1159 if !worktree.read(cx).is_visible() {
1160 log::debug!(
1161 "not adding repositories for local worktree {:?} because it's not visible",
1162 worktree.read(cx).abs_path()
1163 );
1164 return;
1165 }
1166 self.update_repositories_from_worktree(
1167 project_environment.clone(),
1168 next_repository_id.clone(),
1169 downstream
1170 .as_ref()
1171 .map(|downstream| downstream.updates_tx.clone()),
1172 changed_repos.clone(),
1173 fs.clone(),
1174 cx,
1175 );
1176 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1177 }
1178 _ => {}
1179 }
1180 }
1181 fn on_repository_event(
1182 &mut self,
1183 repo: Entity<Repository>,
1184 event: &RepositoryEvent,
1185 cx: &mut Context<Self>,
1186 ) {
1187 let id = repo.read(cx).id;
1188 let repo_snapshot = repo.read(cx).snapshot.clone();
1189 for (buffer_id, diff) in self.diffs.iter() {
1190 if let Some((buffer_repo, repo_path)) =
1191 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1192 && buffer_repo == repo
1193 {
1194 diff.update(cx, |diff, cx| {
1195 if let Some(conflict_set) = &diff.conflict_set {
1196 let conflict_status_changed =
1197 conflict_set.update(cx, |conflict_set, cx| {
1198 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1199 conflict_set.set_has_conflict(has_conflict, cx)
1200 })?;
1201 if conflict_status_changed {
1202 let buffer_store = self.buffer_store.read(cx);
1203 if let Some(buffer) = buffer_store.get(*buffer_id) {
1204 let _ = diff
1205 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1206 }
1207 }
1208 }
1209 anyhow::Ok(())
1210 })
1211 .ok();
1212 }
1213 }
1214 cx.emit(GitStoreEvent::RepositoryUpdated(
1215 id,
1216 event.clone(),
1217 self.active_repo_id == Some(id),
1218 ))
1219 }
1220
1221 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1222 cx.emit(GitStoreEvent::JobsUpdated)
1223 }
1224
1225 /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1226 fn update_repositories_from_worktree(
1227 &mut self,
1228 project_environment: Entity<ProjectEnvironment>,
1229 next_repository_id: Arc<AtomicU64>,
1230 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1231 updated_git_repositories: UpdatedGitRepositoriesSet,
1232 fs: Arc<dyn Fs>,
1233 cx: &mut Context<Self>,
1234 ) {
1235 let mut removed_ids = Vec::new();
1236 for update in updated_git_repositories.iter() {
1237 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1238 let existing_work_directory_abs_path =
1239 repo.read(cx).work_directory_abs_path.clone();
1240 Some(&existing_work_directory_abs_path)
1241 == update.old_work_directory_abs_path.as_ref()
1242 || Some(&existing_work_directory_abs_path)
1243 == update.new_work_directory_abs_path.as_ref()
1244 }) {
1245 if let Some(new_work_directory_abs_path) =
1246 update.new_work_directory_abs_path.clone()
1247 {
1248 existing.update(cx, |existing, cx| {
1249 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1250 existing.schedule_scan(updates_tx.clone(), cx);
1251 });
1252 } else {
1253 removed_ids.push(*id);
1254 }
1255 } else if let UpdatedGitRepository {
1256 new_work_directory_abs_path: Some(work_directory_abs_path),
1257 dot_git_abs_path: Some(dot_git_abs_path),
1258 repository_dir_abs_path: Some(repository_dir_abs_path),
1259 common_dir_abs_path: Some(common_dir_abs_path),
1260 ..
1261 } = update
1262 {
1263 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1264 let git_store = cx.weak_entity();
1265 let repo = cx.new(|cx| {
1266 let mut repo = Repository::local(
1267 id,
1268 work_directory_abs_path.clone(),
1269 dot_git_abs_path.clone(),
1270 repository_dir_abs_path.clone(),
1271 common_dir_abs_path.clone(),
1272 project_environment.downgrade(),
1273 fs.clone(),
1274 git_store,
1275 cx,
1276 );
1277 repo.schedule_scan(updates_tx.clone(), cx);
1278 repo
1279 });
1280 self._subscriptions
1281 .push(cx.subscribe(&repo, Self::on_repository_event));
1282 self._subscriptions
1283 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1284 self.repositories.insert(id, repo);
1285 cx.emit(GitStoreEvent::RepositoryAdded);
1286 self.active_repo_id.get_or_insert_with(|| {
1287 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1288 id
1289 });
1290 }
1291 }
1292
1293 for id in removed_ids {
1294 if self.active_repo_id == Some(id) {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 self.repositories.remove(&id);
1299 if let Some(updates_tx) = updates_tx.as_ref() {
1300 updates_tx
1301 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1302 .ok();
1303 }
1304 }
1305 }
1306
1307 fn on_buffer_store_event(
1308 &mut self,
1309 _: Entity<BufferStore>,
1310 event: &BufferStoreEvent,
1311 cx: &mut Context<Self>,
1312 ) {
1313 match event {
1314 BufferStoreEvent::BufferAdded(buffer) => {
1315 cx.subscribe(buffer, |this, buffer, event, cx| {
1316 if let BufferEvent::LanguageChanged = event {
1317 let buffer_id = buffer.read(cx).remote_id();
1318 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1319 diff_state.update(cx, |diff_state, cx| {
1320 diff_state.buffer_language_changed(buffer, cx);
1321 });
1322 }
1323 }
1324 })
1325 .detach();
1326 }
1327 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1328 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1329 diffs.remove(buffer_id);
1330 }
1331 }
1332 BufferStoreEvent::BufferDropped(buffer_id) => {
1333 self.diffs.remove(buffer_id);
1334 for diffs in self.shared_diffs.values_mut() {
1335 diffs.remove(buffer_id);
1336 }
1337 }
1338
1339 _ => {}
1340 }
1341 }
1342
1343 pub fn recalculate_buffer_diffs(
1344 &mut self,
1345 buffers: Vec<Entity<Buffer>>,
1346 cx: &mut Context<Self>,
1347 ) -> impl Future<Output = ()> + use<> {
1348 let mut futures = Vec::new();
1349 for buffer in buffers {
1350 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1351 let buffer = buffer.read(cx).text_snapshot();
1352 diff_state.update(cx, |diff_state, cx| {
1353 diff_state.recalculate_diffs(buffer.clone(), cx);
1354 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1355 });
1356 futures.push(diff_state.update(cx, |diff_state, cx| {
1357 diff_state
1358 .reparse_conflict_markers(buffer, cx)
1359 .map(|_| {})
1360 .boxed()
1361 }));
1362 }
1363 }
1364 async move {
1365 futures::future::join_all(futures).await;
1366 }
1367 }
1368
1369 fn on_buffer_diff_event(
1370 &mut self,
1371 diff: Entity<buffer_diff::BufferDiff>,
1372 event: &BufferDiffEvent,
1373 cx: &mut Context<Self>,
1374 ) {
1375 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1376 let buffer_id = diff.read(cx).buffer_id;
1377 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1378 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1379 diff_state.hunk_staging_operation_count += 1;
1380 diff_state.hunk_staging_operation_count
1381 });
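// The write job below persists the new index text; if it fails, clear the
// optimistic pending hunks on the diff and report an index write error.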
1382 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1383 let recv = repo.update(cx, |repo, cx| {
1384 log::debug!("hunks changed for {}", path.as_unix_str());
1385 repo.spawn_set_index_text_job(
1386 path,
1387 new_index_text.as_ref().map(|rope| rope.to_string()),
1388 Some(hunk_staging_operation_count),
1389 cx,
1390 )
1391 });
1392 let diff = diff.downgrade();
1393 cx.spawn(async move |this, cx| {
1394 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1395 diff.update(cx, |diff, cx| {
1396 diff.clear_pending_hunks(cx);
1397 })
1398 .ok();
1399 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1400 .ok();
1401 }
1402 })
1403 .detach();
1404 }
1405 }
1406 }
1407 }
1408
1409 fn local_worktree_git_repos_changed(
1410 &mut self,
1411 worktree: Entity<Worktree>,
1412 changed_repos: &UpdatedGitRepositoriesSet,
1413 cx: &mut Context<Self>,
1414 ) {
1415 log::debug!("local worktree repos changed");
1416 debug_assert!(worktree.read(cx).is_local());
1417
1418 for repository in self.repositories.values() {
1419 repository.update(cx, |repository, cx| {
1420 let repo_abs_path = &repository.work_directory_abs_path;
1421 if changed_repos.iter().any(|update| {
1422 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1423 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1424 }) {
1425 repository.reload_buffer_diff_bases(cx);
1426 }
1427 });
1428 }
1429 }
1430
1431 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1432 &self.repositories
1433 }
1434
1435 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1436 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1437 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1438 Some(status.status)
1439 }
1440
1441 pub fn repository_and_path_for_buffer_id(
1442 &self,
1443 buffer_id: BufferId,
1444 cx: &App,
1445 ) -> Option<(Entity<Repository>, RepoPath)> {
1446 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1447 let project_path = buffer.read(cx).project_path(cx)?;
1448 self.repository_and_path_for_project_path(&project_path, cx)
1449 }
1450
1451 pub fn repository_and_path_for_project_path(
1452 &self,
1453 path: &ProjectPath,
1454 cx: &App,
1455 ) -> Option<(Entity<Repository>, RepoPath)> {
1456 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
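// Multiple repositories may contain this path (e.g. nested repositories);
// `max_by_key` picks the innermost one, since a more deeply nested work
// directory compares greater than its ancestors.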
1457 self.repositories
1458 .values()
1459 .filter_map(|repo| {
1460 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1461 Some((repo.clone(), repo_path))
1462 })
1463 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1464 }
1465
1466 pub fn git_init(
1467 &self,
1468 path: Arc<Path>,
1469 fallback_branch_name: String,
1470 cx: &App,
1471 ) -> Task<Result<()>> {
1472 match &self.state {
1473 GitStoreState::Local { fs, .. } => {
1474 let fs = fs.clone();
1475 cx.background_executor()
1476 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1477 }
1478 GitStoreState::Remote {
1479 upstream_client,
1480 upstream_project_id: project_id,
1481 ..
1482 } => {
1483 let client = upstream_client.clone();
1484 let project_id = *project_id;
1485 cx.background_executor().spawn(async move {
1486 client
1487 .request(proto::GitInit {
1488 project_id,
1489 abs_path: path.to_string_lossy().into_owned(),
1490 fallback_branch_name,
1491 })
1492 .await?;
1493 Ok(())
1494 })
1495 }
1496 }
1497 }
1498
1499 pub fn git_clone(
1500 &self,
1501 repo: String,
1502 path: impl Into<Arc<std::path::Path>>,
1503 cx: &App,
1504 ) -> Task<Result<()>> {
1505 let path = path.into();
1506 match &self.state {
1507 GitStoreState::Local { fs, .. } => {
1508 let fs = fs.clone();
1509 cx.background_executor()
1510 .spawn(async move { fs.git_clone(&repo, &path).await })
1511 }
1512 GitStoreState::Remote {
1513 upstream_client,
1514 upstream_project_id,
1515 ..
1516 } => {
1517 if upstream_client.is_via_collab() {
1518 return Task::ready(Err(anyhow!(
1519 "Git Clone isn't supported for project guests"
1520 )));
1521 }
1522 let request = upstream_client.request(proto::GitClone {
1523 project_id: *upstream_project_id,
1524 abs_path: path.to_string_lossy().into_owned(),
1525 remote_repo: repo,
1526 });
1527
1528 cx.background_spawn(async move {
1529 let result = request.await?;
1530
1531 match result.success {
1532 true => Ok(()),
1533 false => Err(anyhow!("Git Clone failed")),
1534 }
1535 })
1536 }
1537 }
1538 }
1539
1540 async fn handle_update_repository(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::UpdateRepository>,
1543 mut cx: AsyncApp,
1544 ) -> Result<()> {
1545 this.update(&mut cx, |this, cx| {
1546 let path_style = this.worktree_store.read(cx).path_style();
1547 let mut update = envelope.payload;
1548
1549 let id = RepositoryId::from_proto(update.id);
1550 let client = this.upstream_client().context("no upstream client")?;
1551
1552 let mut repo_subscription = None;
1553 let repo = this.repositories.entry(id).or_insert_with(|| {
1554 let git_store = cx.weak_entity();
1555 let repo = cx.new(|cx| {
1556 Repository::remote(
1557 id,
1558 Path::new(&update.abs_path).into(),
1559 path_style,
1560 ProjectId(update.project_id),
1561 client,
1562 git_store,
1563 cx,
1564 )
1565 });
1566 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1567 cx.emit(GitStoreEvent::RepositoryAdded);
1568 repo
1569 });
1570 this._subscriptions.extend(repo_subscription);
1571
1572 repo.update(cx, {
1573 let update = update.clone();
1574 |repo, cx| repo.apply_remote_update(update, cx)
1575 })?;
1576
1577 this.active_repo_id.get_or_insert_with(|| {
1578 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1579 id
1580 });
1581
1582 if let Some((client, project_id)) = this.downstream_client() {
1583 update.project_id = project_id.to_proto();
1584 client.send(update).log_err();
1585 }
1586 Ok(())
1587 })?
1588 }
1589
1590 async fn handle_remove_repository(
1591 this: Entity<Self>,
1592 envelope: TypedEnvelope<proto::RemoveRepository>,
1593 mut cx: AsyncApp,
1594 ) -> Result<()> {
1595 this.update(&mut cx, |this, cx| {
1596 let mut update = envelope.payload;
1597 let id = RepositoryId::from_proto(update.id);
1598 this.repositories.remove(&id);
1599 if let Some((client, project_id)) = this.downstream_client() {
1600 update.project_id = project_id.to_proto();
1601 client.send(update).log_err();
1602 }
1603 if this.active_repo_id == Some(id) {
1604 this.active_repo_id = None;
1605 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1606 }
1607 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1608 })
1609 }
1610
1611 async fn handle_git_init(
1612 this: Entity<Self>,
1613 envelope: TypedEnvelope<proto::GitInit>,
1614 cx: AsyncApp,
1615 ) -> Result<proto::Ack> {
1616 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1617 let name = envelope.payload.fallback_branch_name;
1618 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1619 .await?;
1620
1621 Ok(proto::Ack {})
1622 }
1623
1624 async fn handle_git_clone(
1625 this: Entity<Self>,
1626 envelope: TypedEnvelope<proto::GitClone>,
1627 cx: AsyncApp,
1628 ) -> Result<proto::GitCloneResponse> {
1629 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1630 let repo_name = envelope.payload.remote_repo;
1631 let result = cx
1632 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1633 .await;
1634
1635 Ok(proto::GitCloneResponse {
1636 success: result.is_ok(),
1637 })
1638 }
1639
1640 async fn handle_fetch(
1641 this: Entity<Self>,
1642 envelope: TypedEnvelope<proto::Fetch>,
1643 mut cx: AsyncApp,
1644 ) -> Result<proto::RemoteMessageResponse> {
1645 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1646 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1647 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1648 let askpass_id = envelope.payload.askpass_id;
1649
1650 let askpass = make_remote_delegate(
1651 this,
1652 envelope.payload.project_id,
1653 repository_id,
1654 askpass_id,
1655 &mut cx,
1656 );
1657
1658 let remote_output = repository_handle
1659 .update(&mut cx, |repository_handle, cx| {
1660 repository_handle.fetch(fetch_options, askpass, cx)
1661 })?
1662 .await??;
1663
1664 Ok(proto::RemoteMessageResponse {
1665 stdout: remote_output.stdout,
1666 stderr: remote_output.stderr,
1667 })
1668 }
1669
1670 async fn handle_push(
1671 this: Entity<Self>,
1672 envelope: TypedEnvelope<proto::Push>,
1673 mut cx: AsyncApp,
1674 ) -> Result<proto::RemoteMessageResponse> {
1675 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1676 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1677
1678 let askpass_id = envelope.payload.askpass_id;
1679 let askpass = make_remote_delegate(
1680 this,
1681 envelope.payload.project_id,
1682 repository_id,
1683 askpass_id,
1684 &mut cx,
1685 );
1686
1687 let options = envelope
1688 .payload
1689 .options
1690 .as_ref()
1691 .map(|_| match envelope.payload.options() {
1692 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1693 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1694 });
1695
1696 let branch_name = envelope.payload.branch_name.into();
1697 let remote_name = envelope.payload.remote_name.into();
1698
1699 let remote_output = repository_handle
1700 .update(&mut cx, |repository_handle, cx| {
1701 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1702 })?
1703 .await??;
1704 Ok(proto::RemoteMessageResponse {
1705 stdout: remote_output.stdout,
1706 stderr: remote_output.stderr,
1707 })
1708 }
1709
1710 async fn handle_pull(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::Pull>,
1713 mut cx: AsyncApp,
1714 ) -> Result<proto::RemoteMessageResponse> {
1715 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1716 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1717 let askpass_id = envelope.payload.askpass_id;
1718 let askpass = make_remote_delegate(
1719 this,
1720 envelope.payload.project_id,
1721 repository_id,
1722 askpass_id,
1723 &mut cx,
1724 );
1725
1726 let branch_name = envelope.payload.branch_name.into();
1727 let remote_name = envelope.payload.remote_name.into();
1728
1729 let remote_message = repository_handle
1730 .update(&mut cx, |repository_handle, cx| {
1731 repository_handle.pull(branch_name, remote_name, askpass, cx)
1732 })?
1733 .await??;
1734
1735 Ok(proto::RemoteMessageResponse {
1736 stdout: remote_message.stdout,
1737 stderr: remote_message.stderr,
1738 })
1739 }
1740
1741 async fn handle_stage(
1742 this: Entity<Self>,
1743 envelope: TypedEnvelope<proto::Stage>,
1744 mut cx: AsyncApp,
1745 ) -> Result<proto::Ack> {
1746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1748
1749 let entries = envelope
1750 .payload
1751 .paths
1752 .into_iter()
1753 .map(|path| RepoPath::new(&path))
1754 .collect::<Result<Vec<_>>>()?;
1755
1756 repository_handle
1757 .update(&mut cx, |repository_handle, cx| {
1758 repository_handle.stage_entries(entries, cx)
1759 })?
1760 .await?;
1761 Ok(proto::Ack {})
1762 }
1763
1764 async fn handle_unstage(
1765 this: Entity<Self>,
1766 envelope: TypedEnvelope<proto::Unstage>,
1767 mut cx: AsyncApp,
1768 ) -> Result<proto::Ack> {
1769 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1770 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1771
1772 let entries = envelope
1773 .payload
1774 .paths
1775 .into_iter()
1776 .map(|path| RepoPath::new(&path))
1777 .collect::<Result<Vec<_>>>()?;
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.unstage_entries(entries, cx)
1782 })?
1783 .await?;
1784
1785 Ok(proto::Ack {})
1786 }
1787
1788 async fn handle_stash(
1789 this: Entity<Self>,
1790 envelope: TypedEnvelope<proto::Stash>,
1791 mut cx: AsyncApp,
1792 ) -> Result<proto::Ack> {
1793 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1794 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1795
1796 let entries = envelope
1797 .payload
1798 .paths
1799 .into_iter()
1800 .map(|path| RepoPath::new(&path))
1801 .collect::<Result<Vec<_>>>()?;
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.stash_entries(entries, cx)
1806 })?
1807 .await?;
1808
1809 Ok(proto::Ack {})
1810 }
1811
1812 async fn handle_stash_pop(
1813 this: Entity<Self>,
1814 envelope: TypedEnvelope<proto::StashPop>,
1815 mut cx: AsyncApp,
1816 ) -> Result<proto::Ack> {
1817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1819 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1820
1821 repository_handle
1822 .update(&mut cx, |repository_handle, cx| {
1823 repository_handle.stash_pop(stash_index, cx)
1824 })?
1825 .await?;
1826
1827 Ok(proto::Ack {})
1828 }
1829
1830 async fn handle_stash_apply(
1831 this: Entity<Self>,
1832 envelope: TypedEnvelope<proto::StashApply>,
1833 mut cx: AsyncApp,
1834 ) -> Result<proto::Ack> {
1835 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1836 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1837 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1838
1839 repository_handle
1840 .update(&mut cx, |repository_handle, cx| {
1841 repository_handle.stash_apply(stash_index, cx)
1842 })?
1843 .await?;
1844
1845 Ok(proto::Ack {})
1846 }
1847
1848 async fn handle_stash_drop(
1849 this: Entity<Self>,
1850 envelope: TypedEnvelope<proto::StashDrop>,
1851 mut cx: AsyncApp,
1852 ) -> Result<proto::Ack> {
1853 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1854 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1855 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1856
1857 repository_handle
1858 .update(&mut cx, |repository_handle, cx| {
1859 repository_handle.stash_drop(stash_index, cx)
1860 })?
1861 .await??;
1862
1863 Ok(proto::Ack {})
1864 }
1865
1866 async fn handle_set_index_text(
1867 this: Entity<Self>,
1868 envelope: TypedEnvelope<proto::SetIndexText>,
1869 mut cx: AsyncApp,
1870 ) -> Result<proto::Ack> {
1871 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1872 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1873 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1874
1875 repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.spawn_set_index_text_job(
1878 repo_path,
1879 envelope.payload.text,
1880 None,
1881 cx,
1882 )
1883 })?
1884 .await??;
1885 Ok(proto::Ack {})
1886 }
1887
1888 async fn handle_commit(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::Commit>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let message = SharedString::from(envelope.payload.message);
1897 let name = envelope.payload.name.map(SharedString::from);
1898 let email = envelope.payload.email.map(SharedString::from);
1899 let options = envelope.payload.options.unwrap_or_default();
1900
1901 repository_handle
1902 .update(&mut cx, |repository_handle, cx| {
1903 repository_handle.commit(
1904 message,
1905 name.zip(email),
1906 CommitOptions {
1907 amend: options.amend,
1908 signoff: options.signoff,
1909 },
1910 cx,
1911 )
1912 })?
1913 .await??;
1914 Ok(proto::Ack {})
1915 }
1916
1917 async fn handle_get_remotes(
1918 this: Entity<Self>,
1919 envelope: TypedEnvelope<proto::GetRemotes>,
1920 mut cx: AsyncApp,
1921 ) -> Result<proto::GetRemotesResponse> {
1922 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1923 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1924
1925 let branch_name = envelope.payload.branch_name;
1926
1927 let remotes = repository_handle
1928 .update(&mut cx, |repository_handle, _| {
1929 repository_handle.get_remotes(branch_name)
1930 })?
1931 .await??;
1932
1933 Ok(proto::GetRemotesResponse {
1934 remotes: remotes
1935 .into_iter()
1936 .map(|remotes| proto::get_remotes_response::Remote {
1937 name: remotes.name.to_string(),
1938 })
1939 .collect::<Vec<_>>(),
1940 })
1941 }
1942
1943 async fn handle_get_worktrees(
1944 this: Entity<Self>,
1945 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1946 mut cx: AsyncApp,
1947 ) -> Result<proto::GitWorktreesResponse> {
1948 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1949 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1950
1951 let worktrees = repository_handle
1952 .update(&mut cx, |repository_handle, _| {
1953 repository_handle.worktrees()
1954 })?
1955 .await??;
1956
1957 Ok(proto::GitWorktreesResponse {
1958 worktrees: worktrees
1959 .into_iter()
1960 .map(|worktree| worktree_to_proto(&worktree))
1961 .collect::<Vec<_>>(),
1962 })
1963 }
1964
1965 async fn handle_create_worktree(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::Ack> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972 let directory = PathBuf::from(envelope.payload.directory);
1973 let name = envelope.payload.name;
1974 let commit = envelope.payload.commit;
1975
1976 repository_handle
1977 .update(&mut cx, |repository_handle, _| {
1978 repository_handle.create_worktree(name, directory, commit)
1979 })?
1980 .await??;
1981
1982 Ok(proto::Ack {})
1983 }
1984
1985 async fn handle_get_branches(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::GitGetBranches>,
1988 mut cx: AsyncApp,
1989 ) -> Result<proto::GitBranchesResponse> {
1990 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1991 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1992
1993 let branches = repository_handle
1994 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1995 .await??;
1996
1997 Ok(proto::GitBranchesResponse {
1998 branches: branches
1999 .into_iter()
2000 .map(|branch| branch_to_proto(&branch))
2001 .collect::<Vec<_>>(),
2002 })
    }

    async fn handle_get_default_branch(
2005 this: Entity<Self>,
2006 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2007 mut cx: AsyncApp,
2008 ) -> Result<proto::GetDefaultBranchResponse> {
2009 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2010 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2011
2012 let branch = repository_handle
2013 .update(&mut cx, |repository_handle, _| {
2014 repository_handle.default_branch()
2015 })?
2016 .await??
2017 .map(Into::into);
2018
2019 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2022 this: Entity<Self>,
2023 envelope: TypedEnvelope<proto::GitCreateBranch>,
2024 mut cx: AsyncApp,
2025 ) -> Result<proto::Ack> {
2026 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2027 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2028 let branch_name = envelope.payload.branch_name;
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, _| {
2032 repository_handle.create_branch(branch_name)
2033 })?
2034 .await??;
2035
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_change_branch(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::GitChangeBranch>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let branch_name = envelope.payload.branch_name;
2047
2048 repository_handle
2049 .update(&mut cx, |repository_handle, _| {
2050 repository_handle.change_branch(branch_name)
2051 })?
2052 .await??;
2053
2054 Ok(proto::Ack {})
2055 }
2056
2057 async fn handle_rename_branch(
2058 this: Entity<Self>,
2059 envelope: TypedEnvelope<proto::GitRenameBranch>,
2060 mut cx: AsyncApp,
2061 ) -> Result<proto::Ack> {
2062 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2063 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2064 let branch = envelope.payload.branch;
2065 let new_name = envelope.payload.new_name;
2066
2067 repository_handle
2068 .update(&mut cx, |repository_handle, _| {
2069 repository_handle.rename_branch(branch, new_name)
2070 })?
2071 .await??;
2072
2073 Ok(proto::Ack {})
2074 }
2075
2076 async fn handle_show(
2077 this: Entity<Self>,
2078 envelope: TypedEnvelope<proto::GitShow>,
2079 mut cx: AsyncApp,
2080 ) -> Result<proto::GitCommitDetails> {
2081 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2082 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2083
2084 let commit = repository_handle
2085 .update(&mut cx, |repository_handle, _| {
2086 repository_handle.show(envelope.payload.commit)
2087 })?
2088 .await??;
2089 Ok(proto::GitCommitDetails {
2090 sha: commit.sha.into(),
2091 message: commit.message.into(),
2092 commit_timestamp: commit.commit_timestamp,
2093 author_email: commit.author_email.into(),
2094 author_name: commit.author_name.into(),
2095 })
2096 }
2097
2098 async fn handle_load_commit_diff(
2099 this: Entity<Self>,
2100 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2101 mut cx: AsyncApp,
2102 ) -> Result<proto::LoadCommitDiffResponse> {
2103 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2104 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2105
2106 let commit_diff = repository_handle
2107 .update(&mut cx, |repository_handle, _| {
2108 repository_handle.load_commit_diff(envelope.payload.commit)
2109 })?
2110 .await??;
2111 Ok(proto::LoadCommitDiffResponse {
2112 files: commit_diff
2113 .files
2114 .into_iter()
2115 .map(|file| proto::CommitFile {
2116 path: file.path.to_proto(),
2117 old_text: file.old_text,
2118 new_text: file.new_text,
2119 })
2120 .collect(),
2121 })
2122 }
2123
2124 async fn handle_reset(
2125 this: Entity<Self>,
2126 envelope: TypedEnvelope<proto::GitReset>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::Ack> {
2129 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2130 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2131
2132 let mode = match envelope.payload.mode() {
2133 git_reset::ResetMode::Soft => ResetMode::Soft,
2134 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2135 };
2136
2137 repository_handle
2138 .update(&mut cx, |repository_handle, cx| {
2139 repository_handle.reset(envelope.payload.commit, mode, cx)
2140 })?
2141 .await??;
2142 Ok(proto::Ack {})
2143 }
2144
2145 async fn handle_checkout_files(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152 let paths = envelope
2153 .payload
2154 .paths
2155 .iter()
2156 .map(|s| RepoPath::from_proto(s))
2157 .collect::<Result<Vec<_>>>()?;
2158
2159 repository_handle
2160 .update(&mut cx, |repository_handle, cx| {
2161 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2162 })?
2163 .await?;
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_open_commit_message_buffer(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::OpenBufferResponse> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174 let buffer = repository
2175 .update(&mut cx, |repository, cx| {
2176 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2177 })?
2178 .await?;
2179
2180 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2181 this.update(&mut cx, |this, cx| {
2182 this.buffer_store.update(cx, |buffer_store, cx| {
2183 buffer_store
2184 .create_buffer_for_peer(
2185 &buffer,
2186 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2187 cx,
2188 )
2189 .detach_and_log_err(cx);
2190 })
2191 })?;
2192
2193 Ok(proto::OpenBufferResponse {
2194 buffer_id: buffer_id.to_proto(),
2195 })
2196 }
2197
2198 async fn handle_askpass(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::AskPassRequest>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::AskPassResponse> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205
2206 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2207 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2208 debug_panic!("no askpass found");
2209 anyhow::bail!("no askpass found");
2210 };
2211
2212 let response = askpass
2213 .ask_password(envelope.payload.prompt)
2214 .await
2215 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2216
2217 delegates
2218 .lock()
2219 .insert(envelope.payload.askpass_id, askpass);
2220
        // Note: the askpass password is decrypted and sent over the protocol unencrypted
        // here, despite the reassuring name of the marker type.
2222 Ok(proto::AskPassResponse {
2223 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2224 })
2225 }
2226
2227 async fn handle_check_for_pushed_commits(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::CheckForPushedCommitsResponse> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234
2235 let branches = repository_handle
2236 .update(&mut cx, |repository_handle, _| {
2237 repository_handle.check_for_pushed_commits()
2238 })?
2239 .await??;
2240 Ok(proto::CheckForPushedCommitsResponse {
2241 pushed_to: branches
2242 .into_iter()
2243 .map(|commit| commit.to_string())
2244 .collect(),
2245 })
2246 }
2247
2248 async fn handle_git_diff(
2249 this: Entity<Self>,
2250 envelope: TypedEnvelope<proto::GitDiff>,
2251 mut cx: AsyncApp,
2252 ) -> Result<proto::GitDiffResponse> {
2253 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2254 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2255 let diff_type = match envelope.payload.diff_type() {
2256 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2257 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2258 };
2259
2260 let mut diff = repository_handle
2261 .update(&mut cx, |repository_handle, cx| {
2262 repository_handle.diff(diff_type, cx)
2263 })?
2264 .await??;
2265 const ONE_MB: usize = 1_000_000;
2266 if diff.len() > ONE_MB {
2267 diff = diff.chars().take(ONE_MB).collect()
2268 }
2269
2270 Ok(proto::GitDiffResponse { diff })
2271 }
2272
2273 async fn handle_tree_diff(
2274 this: Entity<Self>,
2275 request: TypedEnvelope<proto::GetTreeDiff>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::GetTreeDiffResponse> {
2278 let repository_id = RepositoryId(request.payload.repository_id);
2279 let diff_type = if request.payload.is_merge {
2280 DiffTreeType::MergeBase {
2281 base: request.payload.base.into(),
2282 head: request.payload.head.into(),
2283 }
2284 } else {
2285 DiffTreeType::Since {
2286 base: request.payload.base.into(),
2287 head: request.payload.head.into(),
2288 }
2289 };
2290
2291 let diff = this
2292 .update(&mut cx, |this, cx| {
2293 let repository = this.repositories().get(&repository_id)?;
2294 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2295 })?
2296 .context("missing repository")?
2297 .await??;
2298
2299 Ok(proto::GetTreeDiffResponse {
2300 entries: diff
2301 .entries
2302 .into_iter()
2303 .map(|(path, status)| proto::TreeDiffStatus {
2304 path: path.0.to_proto(),
2305 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2307 TreeDiffStatus::Modified { .. } => {
2308 proto::tree_diff_status::Status::Modified.into()
2309 }
2310 TreeDiffStatus::Deleted { .. } => {
2311 proto::tree_diff_status::Status::Deleted.into()
2312 }
2313 },
2314 oid: match status {
2315 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2316 Some(old.to_string())
2317 }
2318 TreeDiffStatus::Added => None,
2319 },
2320 })
2321 .collect(),
2322 })
2323 }
2324
2325 async fn handle_get_blob_content(
2326 this: Entity<Self>,
2327 request: TypedEnvelope<proto::GetBlobContent>,
2328 mut cx: AsyncApp,
2329 ) -> Result<proto::GetBlobContentResponse> {
2330 let oid = git::Oid::from_str(&request.payload.oid)?;
2331 let repository_id = RepositoryId(request.payload.repository_id);
2332 let content = this
2333 .update(&mut cx, |this, cx| {
2334 let repository = this.repositories().get(&repository_id)?;
2335 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2336 })?
2337 .context("missing repository")?
2338 .await?;
2339 Ok(proto::GetBlobContentResponse { content })
2340 }
2341
2342 async fn handle_open_unstaged_diff(
2343 this: Entity<Self>,
2344 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2345 mut cx: AsyncApp,
2346 ) -> Result<proto::OpenUnstagedDiffResponse> {
2347 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2348 let diff = this
2349 .update(&mut cx, |this, cx| {
2350 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2351 Some(this.open_unstaged_diff(buffer, cx))
2352 })?
2353 .context("missing buffer")?
2354 .await?;
2355 this.update(&mut cx, |this, _| {
2356 let shared_diffs = this
2357 .shared_diffs
2358 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2359 .or_default();
2360 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2361 })?;
2362 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2363 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2364 }
2365
2366 async fn handle_open_uncommitted_diff(
2367 this: Entity<Self>,
2368 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2369 mut cx: AsyncApp,
2370 ) -> Result<proto::OpenUncommittedDiffResponse> {
2371 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2372 let diff = this
2373 .update(&mut cx, |this, cx| {
2374 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2375 Some(this.open_uncommitted_diff(buffer, cx))
2376 })?
2377 .context("missing buffer")?
2378 .await?;
2379 this.update(&mut cx, |this, _| {
2380 let shared_diffs = this
2381 .shared_diffs
2382 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2383 .or_default();
2384 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2385 })?;
2386 diff.read_with(&cx, |diff, cx| {
2387 use proto::open_uncommitted_diff_response::Mode;
2388
2389 let unstaged_diff = diff.secondary_diff();
2390 let index_snapshot = unstaged_diff.and_then(|diff| {
2391 let diff = diff.read(cx);
2392 diff.base_text_exists().then(|| diff.base_text())
2393 });
2394
2395 let mode;
2396 let staged_text;
2397 let committed_text;
2398 if diff.base_text_exists() {
2399 let committed_snapshot = diff.base_text();
2400 committed_text = Some(committed_snapshot.text());
2401 if let Some(index_text) = index_snapshot {
2402 if index_text.remote_id() == committed_snapshot.remote_id() {
2403 mode = Mode::IndexMatchesHead;
2404 staged_text = None;
2405 } else {
2406 mode = Mode::IndexAndHead;
2407 staged_text = Some(index_text.text());
2408 }
2409 } else {
2410 mode = Mode::IndexAndHead;
2411 staged_text = None;
2412 }
2413 } else {
2414 mode = Mode::IndexAndHead;
2415 committed_text = None;
2416 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2417 }
2418
2419 proto::OpenUncommittedDiffResponse {
2420 committed_text,
2421 staged_text,
2422 mode: mode.into(),
2423 }
2424 })
2425 }
2426
2427 async fn handle_update_diff_bases(
2428 this: Entity<Self>,
2429 request: TypedEnvelope<proto::UpdateDiffBases>,
2430 mut cx: AsyncApp,
2431 ) -> Result<()> {
2432 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2433 this.update(&mut cx, |this, cx| {
2434 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2435 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2436 {
2437 let buffer = buffer.read(cx).text_snapshot();
2438 diff_state.update(cx, |diff_state, cx| {
2439 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2440 })
2441 }
2442 })
2443 }
2444
2445 async fn handle_blame_buffer(
2446 this: Entity<Self>,
2447 envelope: TypedEnvelope<proto::BlameBuffer>,
2448 mut cx: AsyncApp,
2449 ) -> Result<proto::BlameBufferResponse> {
2450 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2451 let version = deserialize_version(&envelope.payload.version);
2452 let buffer = this.read_with(&cx, |this, cx| {
2453 this.buffer_store.read(cx).get_existing(buffer_id)
2454 })??;
2455 buffer
2456 .update(&mut cx, |buffer, _| {
2457 buffer.wait_for_version(version.clone())
2458 })?
2459 .await?;
2460 let blame = this
2461 .update(&mut cx, |this, cx| {
2462 this.blame_buffer(&buffer, Some(version), cx)
2463 })?
2464 .await?;
2465 Ok(serialize_blame_buffer_response(blame))
2466 }
2467
2468 async fn handle_get_permalink_to_line(
2469 this: Entity<Self>,
2470 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2471 mut cx: AsyncApp,
2472 ) -> Result<proto::GetPermalinkToLineResponse> {
2473 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2474 // let version = deserialize_version(&envelope.payload.version);
2475 let selection = {
2476 let proto_selection = envelope
2477 .payload
2478 .selection
                .context("no selection provided to get permalink for")?;
2480 proto_selection.start as u32..proto_selection.end as u32
2481 };
2482 let buffer = this.read_with(&cx, |this, cx| {
2483 this.buffer_store.read(cx).get_existing(buffer_id)
2484 })??;
2485 let permalink = this
2486 .update(&mut cx, |this, cx| {
2487 this.get_permalink_to_line(&buffer, selection, cx)
2488 })?
2489 .await?;
2490 Ok(proto::GetPermalinkToLineResponse {
2491 permalink: permalink.to_string(),
2492 })
2493 }
2494
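    /// Looks up the repository entity referenced by an incoming RPC request,
    /// returning an error if no repository with that id exists in this store.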
2495 fn repository_for_request(
2496 this: &Entity<Self>,
2497 id: RepositoryId,
2498 cx: &mut AsyncApp,
2499 ) -> Result<Entity<Repository>> {
2500 this.read_with(cx, |this, _| {
2501 this.repositories
2502 .get(&id)
2503 .context("missing repository handle")
2504 .cloned()
2505 })?
2506 }
2507
2508 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2509 self.repositories
2510 .iter()
2511 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2512 .collect()
2513 }
2514
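    /// Groups updated worktree entries by the repository whose work directory contains
    /// them, converting each absolute path into a `RepoPath` on the background executor.
    /// When repositories are nested, a path is attributed only to its innermost repository.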
2515 fn process_updated_entries(
2516 &self,
2517 worktree: &Entity<Worktree>,
2518 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2519 cx: &mut App,
2520 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2521 let path_style = worktree.read(cx).path_style();
2522 let mut repo_paths = self
2523 .repositories
2524 .values()
2525 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2526 .collect::<Vec<_>>();
2527 let mut entries: Vec<_> = updated_entries
2528 .iter()
2529 .map(|(path, _, _)| path.clone())
2530 .collect();
2531 entries.sort();
2532 let worktree = worktree.read(cx);
2533
2534 let entries = entries
2535 .into_iter()
2536 .map(|path| worktree.absolutize(&path))
2537 .collect::<Arc<[_]>>();
2538
2539 let executor = cx.background_executor().clone();
2540 cx.background_executor().spawn(async move {
2541 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2542 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2543 let mut tasks = FuturesOrdered::new();
2544 for (repo_path, repo) in repo_paths.into_iter().rev() {
2545 let entries = entries.clone();
2546 let task = executor.spawn(async move {
2547 // Find all repository paths that belong to this repo
2548 let mut ix = entries.partition_point(|path| path < &*repo_path);
2549 if ix == entries.len() {
2550 return None;
2551 };
2552
2553 let mut paths = Vec::new();
                    // All paths prefixed by a given repo constitute a contiguous range.
2555 while let Some(path) = entries.get(ix)
2556 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2557 &repo_path, path, path_style,
2558 )
2559 {
2560 paths.push((repo_path, ix));
2561 ix += 1;
2562 }
2563 if paths.is_empty() {
2564 None
2565 } else {
2566 Some((repo, paths))
2567 }
2568 });
2569 tasks.push_back(task);
2570 }
2571
2572 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2573 let mut path_was_used = vec![false; entries.len()];
2574 let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks were spawned for repositories in reverse sorted order, so more specific
            // (deeper) work directories come first; we always want to assign a path to its
            // innermost repository.
2577 for t in tasks {
2578 let Some((repo, paths)) = t else {
2579 continue;
2580 };
2581 let entry = paths_by_git_repo.entry(repo).or_default();
2582 for (repo_path, ix) in paths {
2583 if path_was_used[ix] {
2584 continue;
2585 }
2586 path_was_used[ix] = true;
2587 entry.push(repo_path);
2588 }
2589 }
2590
2591 paths_by_git_repo
2592 })
2593 }
2594}
2595
2596impl BufferGitState {
2597 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2598 Self {
2599 unstaged_diff: Default::default(),
2600 uncommitted_diff: Default::default(),
2601 recalculate_diff_task: Default::default(),
2602 language: Default::default(),
2603 language_registry: Default::default(),
2604 recalculating_tx: postage::watch::channel_with(false).0,
2605 hunk_staging_operation_count: 0,
2606 hunk_staging_operation_count_as_of_write: 0,
2607 head_text: Default::default(),
2608 index_text: Default::default(),
2609 head_changed: Default::default(),
2610 index_changed: Default::default(),
2611 language_changed: Default::default(),
2612 conflict_updated_futures: Default::default(),
2613 conflict_set: Default::default(),
2614 reparse_conflict_markers_task: Default::default(),
2615 }
2616 }
2617
2618 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2619 self.language = buffer.read(cx).language().cloned();
2620 self.language_changed = true;
2621 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2622 }
2623
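    /// Re-parses merge conflict markers in the given buffer snapshot on a background task
    /// and applies the result to the associated `ConflictSet`. The returned receiver fires
    /// once the new snapshot has been applied; it is dropped without firing when there is
    /// no conflict set or no existing conflict to update.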
2624 fn reparse_conflict_markers(
2625 &mut self,
2626 buffer: text::BufferSnapshot,
2627 cx: &mut Context<Self>,
2628 ) -> oneshot::Receiver<()> {
2629 let (tx, rx) = oneshot::channel();
2630
2631 let Some(conflict_set) = self
2632 .conflict_set
2633 .as_ref()
2634 .and_then(|conflict_set| conflict_set.upgrade())
2635 else {
2636 return rx;
2637 };
2638
2639 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2640 if conflict_set.has_conflict {
2641 Some(conflict_set.snapshot())
2642 } else {
2643 None
2644 }
2645 });
2646
2647 if let Some(old_snapshot) = old_snapshot {
2648 self.conflict_updated_futures.push(tx);
2649 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2650 let (snapshot, changed_range) = cx
2651 .background_spawn(async move {
2652 let new_snapshot = ConflictSet::parse(&buffer);
2653 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2654 (new_snapshot, changed_range)
2655 })
2656 .await;
2657 this.update(cx, |this, cx| {
2658 if let Some(conflict_set) = &this.conflict_set {
2659 conflict_set
2660 .update(cx, |conflict_set, cx| {
2661 conflict_set.set_snapshot(snapshot, changed_range, cx);
2662 })
2663 .ok();
2664 }
2665 let futures = std::mem::take(&mut this.conflict_updated_futures);
2666 for tx in futures {
2667 tx.send(()).ok();
2668 }
2669 })
2670 }))
2671 }
2672
2673 rx
2674 }
2675
2676 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2677 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2678 }
2679
2680 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2681 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2682 }
2683
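    /// Applies updated index and/or HEAD base texts received over the collaboration
    /// protocol, then triggers a diff recalculation for the buffer.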
2684 fn handle_base_texts_updated(
2685 &mut self,
2686 buffer: text::BufferSnapshot,
2687 message: proto::UpdateDiffBases,
2688 cx: &mut Context<Self>,
2689 ) {
2690 use proto::update_diff_bases::Mode;
2691
2692 let Some(mode) = Mode::from_i32(message.mode) else {
2693 return;
2694 };
2695
2696 let diff_bases_change = match mode {
2697 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2698 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2699 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2700 Mode::IndexAndHead => DiffBasesChange::SetEach {
2701 index: message.staged_text,
2702 head: message.committed_text,
2703 },
2704 };
2705
2706 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2707 }
2708
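    /// If a diff recalculation is currently in progress, returns a future that resolves
    /// once it completes; otherwise returns `None`.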
2709 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2710 if *self.recalculating_tx.borrow() {
2711 let mut rx = self.recalculating_tx.subscribe();
2712 Some(async move {
2713 loop {
2714 let is_recalculating = rx.recv().await;
2715 if is_recalculating != Some(true) {
2716 break;
2717 }
2718 }
2719 })
2720 } else {
2721 None
2722 }
2723 }
2724
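    /// Stores the new index and/or HEAD base texts (normalizing their line endings) and
    /// recalculates the diffs that depend on them.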
2725 fn diff_bases_changed(
2726 &mut self,
2727 buffer: text::BufferSnapshot,
2728 diff_bases_change: Option<DiffBasesChange>,
2729 cx: &mut Context<Self>,
2730 ) {
2731 match diff_bases_change {
2732 Some(DiffBasesChange::SetIndex(index)) => {
2733 self.index_text = index.map(|mut index| {
2734 text::LineEnding::normalize(&mut index);
2735 Arc::new(index)
2736 });
2737 self.index_changed = true;
2738 }
2739 Some(DiffBasesChange::SetHead(head)) => {
2740 self.head_text = head.map(|mut head| {
2741 text::LineEnding::normalize(&mut head);
2742 Arc::new(head)
2743 });
2744 self.head_changed = true;
2745 }
2746 Some(DiffBasesChange::SetBoth(text)) => {
2747 let text = text.map(|mut text| {
2748 text::LineEnding::normalize(&mut text);
2749 Arc::new(text)
2750 });
2751 self.head_text = text.clone();
2752 self.index_text = text;
2753 self.head_changed = true;
2754 self.index_changed = true;
2755 }
2756 Some(DiffBasesChange::SetEach { index, head }) => {
2757 self.index_text = index.map(|mut index| {
2758 text::LineEnding::normalize(&mut index);
2759 Arc::new(index)
2760 });
2761 self.index_changed = true;
2762 self.head_text = head.map(|mut head| {
2763 text::LineEnding::normalize(&mut head);
2764 Arc::new(head)
2765 });
2766 self.head_changed = true;
2767 }
2768 None => {}
2769 }
2770
2771 self.recalculate_diffs(buffer, cx)
2772 }
2773
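    /// Spawns a task that recomputes the unstaged and uncommitted diffs against the
    /// current base texts. When the index matches HEAD, the unstaged diff is reused as
    /// the uncommitted diff. The task cancels itself if hunk staging operations are still
    /// settling, deferring to a later recalculation.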
2774 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2775 *self.recalculating_tx.borrow_mut() = true;
2776
2777 let language = self.language.clone();
2778 let language_registry = self.language_registry.clone();
2779 let unstaged_diff = self.unstaged_diff();
2780 let uncommitted_diff = self.uncommitted_diff();
2781 let head = self.head_text.clone();
2782 let index = self.index_text.clone();
2783 let index_changed = self.index_changed;
2784 let head_changed = self.head_changed;
2785 let language_changed = self.language_changed;
2786 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2787 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2788 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2789 (None, None) => true,
2790 _ => false,
2791 };
2792 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2793 log::debug!(
2794 "start recalculating diffs for buffer {}",
2795 buffer.remote_id()
2796 );
2797
2798 let mut new_unstaged_diff = None;
2799 if let Some(unstaged_diff) = &unstaged_diff {
2800 new_unstaged_diff = Some(
2801 BufferDiff::update_diff(
2802 unstaged_diff.clone(),
2803 buffer.clone(),
2804 index,
2805 index_changed,
2806 language_changed,
2807 language.clone(),
2808 language_registry.clone(),
2809 cx,
2810 )
2811 .await?,
2812 );
2813 }
2814
2815 let mut new_uncommitted_diff = None;
2816 if let Some(uncommitted_diff) = &uncommitted_diff {
2817 new_uncommitted_diff = if index_matches_head {
2818 new_unstaged_diff.clone()
2819 } else {
2820 Some(
2821 BufferDiff::update_diff(
2822 uncommitted_diff.clone(),
2823 buffer.clone(),
2824 head,
2825 head_changed,
2826 language_changed,
2827 language.clone(),
2828 language_registry.clone(),
2829 cx,
2830 )
2831 .await?,
2832 )
2833 }
2834 }
2835
2836 let cancel = this.update(cx, |this, _| {
2837 // This checks whether all pending stage/unstage operations
2838 // have quiesced (i.e. both the corresponding write and the
2839 // read of that write have completed). If not, then we cancel
2840 // this recalculation attempt to avoid invalidating pending
2841 // state too quickly; another recalculation will come along
2842 // later and clear the pending state once the state of the index has settled.
2843 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2844 *this.recalculating_tx.borrow_mut() = false;
2845 true
2846 } else {
2847 false
2848 }
2849 })?;
2850 if cancel {
2851 log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
2856 buffer.remote_id()
2857 );
2858 return Ok(());
2859 }
2860
2861 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2862 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2863 {
2864 unstaged_diff.update(cx, |diff, cx| {
2865 if language_changed {
2866 diff.language_changed(cx);
2867 }
2868 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2869 })?
2870 } else {
2871 None
2872 };
2873
2874 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2875 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2876 {
2877 uncommitted_diff.update(cx, |diff, cx| {
2878 if language_changed {
2879 diff.language_changed(cx);
2880 }
2881 diff.set_snapshot_with_secondary(
2882 new_uncommitted_diff,
2883 &buffer,
2884 unstaged_changed_range,
2885 true,
2886 cx,
2887 );
2888 })?;
2889 }
2890
2891 log::debug!(
2892 "finished recalculating diffs for buffer {}",
2893 buffer.remote_id()
2894 );
2895
2896 if let Some(this) = this.upgrade() {
2897 this.update(cx, |this, _| {
2898 this.index_changed = false;
2899 this.head_changed = false;
2900 this.language_changed = false;
2901 *this.recalculating_tx.borrow_mut() = false;
2902 })?;
2903 }
2904
2905 Ok(())
2906 }));
2907 }
2908}
2909
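/// Creates an `AskPassDelegate` that forwards askpass prompts to the downstream client
/// over RPC and relays the returned password back to the requesting git job.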
2910fn make_remote_delegate(
2911 this: Entity<GitStore>,
2912 project_id: u64,
2913 repository_id: RepositoryId,
2914 askpass_id: u64,
2915 cx: &mut AsyncApp,
2916) -> AskPassDelegate {
2917 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2918 this.update(cx, |this, cx| {
2919 let Some((client, _)) = this.downstream_client() else {
2920 return;
2921 };
2922 let response = client.request(proto::AskPassRequest {
2923 project_id,
2924 repository_id: repository_id.to_proto(),
2925 askpass_id,
2926 prompt,
2927 });
2928 cx.spawn(async move |_, _| {
2929 let mut response = response.await?.response;
2930 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2931 .ok();
2932 response.zeroize();
2933 anyhow::Ok(())
2934 })
2935 .detach_and_log_err(cx);
2936 })
2937 .log_err();
2938 })
2939}
2940
2941impl RepositoryId {
2942 pub fn to_proto(self) -> u64 {
2943 self.0
2944 }
2945
2946 pub fn from_proto(id: u64) -> Self {
2947 RepositoryId(id)
2948 }
2949}
2950
2951impl RepositorySnapshot {
2952 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2953 Self {
2954 id,
2955 statuses_by_path: Default::default(),
2956 pending_ops_by_path: Default::default(),
2957 work_directory_abs_path,
2958 branch: None,
2959 head_commit: None,
2960 scan_id: 0,
2961 merge: Default::default(),
2962 remote_origin_url: None,
2963 remote_upstream_url: None,
2964 stash_entries: Default::default(),
2965 path_style,
2966 }
2967 }
2968
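    /// Builds the initial `UpdateRepository` message for a collaborator, containing this
    /// repository's full set of statuses, merge state, and stash entries.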
2969 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2970 proto::UpdateRepository {
2971 branch_summary: self.branch.as_ref().map(branch_to_proto),
2972 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2973 updated_statuses: self
2974 .statuses_by_path
2975 .iter()
2976 .map(|entry| entry.to_proto())
2977 .collect(),
2978 removed_statuses: Default::default(),
2979 current_merge_conflicts: self
2980 .merge
2981 .conflicted_paths
2982 .iter()
2983 .map(|repo_path| repo_path.to_proto())
2984 .collect(),
2985 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2986 project_id,
2987 id: self.id.to_proto(),
2988 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2989 entry_ids: vec![self.id.to_proto()],
2990 scan_id: self.scan_id,
2991 is_last_update: true,
2992 stash_entries: self
2993 .stash_entries
2994 .entries
2995 .iter()
2996 .map(stash_to_proto)
2997 .collect(),
2998 }
2999 }
3000
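    /// Builds an incremental `UpdateRepository` message by comparing this snapshot with
    /// `old`: new or changed statuses are reported as updates, and statuses that are no
    /// longer present are reported as removals.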
3001 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3002 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3003 let mut removed_statuses: Vec<String> = Vec::new();
3004
3005 let mut new_statuses = self.statuses_by_path.iter().peekable();
3006 let mut old_statuses = old.statuses_by_path.iter().peekable();
3007
3008 let mut current_new_entry = new_statuses.next();
3009 let mut current_old_entry = old_statuses.next();
3010 loop {
3011 match (current_new_entry, current_old_entry) {
3012 (Some(new_entry), Some(old_entry)) => {
3013 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3014 Ordering::Less => {
3015 updated_statuses.push(new_entry.to_proto());
3016 current_new_entry = new_statuses.next();
3017 }
3018 Ordering::Equal => {
3019 if new_entry.status != old_entry.status {
3020 updated_statuses.push(new_entry.to_proto());
3021 }
3022 current_old_entry = old_statuses.next();
3023 current_new_entry = new_statuses.next();
3024 }
3025 Ordering::Greater => {
3026 removed_statuses.push(old_entry.repo_path.to_proto());
3027 current_old_entry = old_statuses.next();
3028 }
3029 }
3030 }
3031 (None, Some(old_entry)) => {
3032 removed_statuses.push(old_entry.repo_path.to_proto());
3033 current_old_entry = old_statuses.next();
3034 }
3035 (Some(new_entry), None) => {
3036 updated_statuses.push(new_entry.to_proto());
3037 current_new_entry = new_statuses.next();
3038 }
3039 (None, None) => break,
3040 }
3041 }
3042
3043 proto::UpdateRepository {
3044 branch_summary: self.branch.as_ref().map(branch_to_proto),
3045 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3046 updated_statuses,
3047 removed_statuses,
3048 current_merge_conflicts: self
3049 .merge
3050 .conflicted_paths
3051 .iter()
3052 .map(|path| path.to_proto())
3053 .collect(),
3054 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3055 project_id,
3056 id: self.id.to_proto(),
3057 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3058 entry_ids: vec![],
3059 scan_id: self.scan_id,
3060 is_last_update: true,
3061 stash_entries: self
3062 .stash_entries
3063 .entries
3064 .iter()
3065 .map(stash_to_proto)
3066 .collect(),
3067 }
3068 }
3069
3070 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3071 self.statuses_by_path.iter().cloned()
3072 }
3073
3074 pub fn status_summary(&self) -> GitSummary {
3075 self.statuses_by_path.summary().item_summary
3076 }
3077
3078 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3079 self.statuses_by_path
3080 .get(&PathKey(path.0.clone()), ())
3081 .cloned()
3082 }
3083
3084 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3085 self.pending_ops_by_path
3086 .get(&PathKey(path.0.clone()), ())
3087 .cloned()
3088 }
3089
3090 pub fn new_pending_op(&self, git_status: pending_op::GitStatus) -> PendingOp {
3091 let id = self.pending_ops_by_path.summary().item_summary.max_id + 1;
3092 PendingOp {
3093 id,
3094 git_status,
3095 job_status: pending_op::JobStatus::Started,
3096 }
3097 }
3098
3099 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3100 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3101 }
3102
3103 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3104 self.path_style
3105 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3106 .unwrap()
3107 .into()
3108 }
3109
3110 #[inline]
3111 fn abs_path_to_repo_path_inner(
3112 work_directory_abs_path: &Path,
3113 abs_path: &Path,
3114 path_style: PathStyle,
3115 ) -> Option<RepoPath> {
3116 abs_path
3117 .strip_prefix(&work_directory_abs_path)
3118 .ok()
3119 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3120 }
3121
3122 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3123 self.merge.conflicted_paths.contains(repo_path)
3124 }
3125
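    /// Returns whether the path is currently reported as conflicted, or was conflicted
    /// when the merge heads last changed.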
3126 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3127 let had_conflict_on_last_merge_head_change =
3128 self.merge.conflicted_paths.contains(repo_path);
3129 let has_conflict_currently = self
3130 .status_for_path(repo_path)
3131 .is_some_and(|entry| entry.status.is_conflicted());
3132 had_conflict_on_last_merge_head_change || has_conflict_currently
3133 }
3134
3135 /// This is the name that will be displayed in the repository selector for this repository.
3136 pub fn display_name(&self) -> SharedString {
3137 self.work_directory_abs_path
3138 .file_name()
3139 .unwrap_or_default()
3140 .to_string_lossy()
3141 .to_string()
3142 .into()
3143 }
3144}
3145
3146pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3147 proto::StashEntry {
3148 oid: entry.oid.as_bytes().to_vec(),
3149 message: entry.message.clone(),
3150 branch: entry.branch.clone(),
3151 index: entry.index as u64,
3152 timestamp: entry.timestamp,
3153 }
3154}
3155
3156pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3157 Ok(StashEntry {
3158 oid: Oid::from_bytes(&entry.oid)?,
3159 message: entry.message.clone(),
3160 index: entry.index as usize,
3161 branch: entry.branch.clone(),
3162 timestamp: entry.timestamp,
3163 })
3164}
3165
3166impl MergeDetails {
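    /// Loads the current merge state from the repository: the merge message, any
    /// in-progress operation heads (merge, cherry-pick, rebase, revert, apply), and the
    /// set of conflicted paths. Returns the details along with a flag indicating whether
    /// the merge heads changed relative to `prev_snapshot`.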
3167 async fn load(
3168 backend: &Arc<dyn GitRepository>,
3169 status: &SumTree<StatusEntry>,
3170 prev_snapshot: &RepositorySnapshot,
3171 ) -> Result<(MergeDetails, bool)> {
3172 log::debug!("load merge details");
3173 let message = backend.merge_message().await;
3174 let heads = backend
3175 .revparse_batch(vec![
3176 "MERGE_HEAD".into(),
3177 "CHERRY_PICK_HEAD".into(),
3178 "REBASE_HEAD".into(),
3179 "REVERT_HEAD".into(),
3180 "APPLY_HEAD".into(),
3181 ])
3182 .await
3183 .log_err()
3184 .unwrap_or_default()
3185 .into_iter()
3186 .map(|opt| opt.map(SharedString::from))
3187 .collect::<Vec<_>>();
3188 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3189 let conflicted_paths = if merge_heads_changed {
3190 let current_conflicted_paths = TreeSet::from_ordered_entries(
3191 status
3192 .iter()
3193 .filter(|entry| entry.status.is_conflicted())
3194 .map(|entry| entry.repo_path.clone()),
3195 );
3196
3197 // It can happen that we run a scan while a lengthy merge is in progress
3198 // that will eventually result in conflicts, but before those conflicts
3199 // are reported by `git status`. Since for the moment we only care about
3200 // the merge heads state for the purposes of tracking conflicts, don't update
3201 // this state until we see some conflicts.
3202 if heads.iter().any(Option::is_some)
3203 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3204 && current_conflicted_paths.is_empty()
3205 {
3206 log::debug!("not updating merge heads because no conflicts found");
3207 return Ok((
3208 MergeDetails {
3209 message: message.map(SharedString::from),
3210 ..prev_snapshot.merge.clone()
3211 },
3212 false,
3213 ));
3214 }
3215
3216 current_conflicted_paths
3217 } else {
3218 prev_snapshot.merge.conflicted_paths.clone()
3219 };
3220 let details = MergeDetails {
3221 conflicted_paths,
3222 message: message.map(SharedString::from),
3223 heads,
3224 };
3225 Ok((details, merge_heads_changed))
3226 }
3227}
3228
3229impl Repository {
3230 pub fn snapshot(&self) -> RepositorySnapshot {
3231 self.snapshot.clone()
3232 }
3233
3234 fn local(
3235 id: RepositoryId,
3236 work_directory_abs_path: Arc<Path>,
3237 dot_git_abs_path: Arc<Path>,
3238 repository_dir_abs_path: Arc<Path>,
3239 common_dir_abs_path: Arc<Path>,
3240 project_environment: WeakEntity<ProjectEnvironment>,
3241 fs: Arc<dyn Fs>,
3242 git_store: WeakEntity<GitStore>,
3243 cx: &mut Context<Self>,
3244 ) -> Self {
3245 let snapshot =
3246 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3247 Repository {
3248 this: cx.weak_entity(),
3249 git_store,
3250 snapshot,
3251 commit_message_buffer: None,
3252 askpass_delegates: Default::default(),
3253 paths_needing_status_update: Default::default(),
3254 latest_askpass_id: 0,
3255 job_sender: Repository::spawn_local_git_worker(
3256 work_directory_abs_path,
3257 dot_git_abs_path,
3258 repository_dir_abs_path,
3259 common_dir_abs_path,
3260 project_environment,
3261 fs,
3262 cx,
3263 ),
3264 job_id: 0,
3265 active_jobs: Default::default(),
3266 }
3267 }
3268
3269 fn remote(
3270 id: RepositoryId,
3271 work_directory_abs_path: Arc<Path>,
3272 path_style: PathStyle,
3273 project_id: ProjectId,
3274 client: AnyProtoClient,
3275 git_store: WeakEntity<GitStore>,
3276 cx: &mut Context<Self>,
3277 ) -> Self {
3278 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3279 Self {
3280 this: cx.weak_entity(),
3281 snapshot,
3282 commit_message_buffer: None,
3283 git_store,
3284 paths_needing_status_update: Default::default(),
3285 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3286 askpass_delegates: Default::default(),
3287 latest_askpass_id: 0,
3288 active_jobs: Default::default(),
3289 job_id: 0,
3290 }
3291 }
3292
3293 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3294 self.git_store.upgrade()
3295 }
3296
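    /// Reloads the index and HEAD base texts for every open buffer belonging to this
    /// repository, notifying downstream clients of any changes and triggering diff
    /// recalculation where needed.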
3297 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3298 let this = cx.weak_entity();
3299 let git_store = self.git_store.clone();
3300 let _ = self.send_keyed_job(
3301 Some(GitJobKey::ReloadBufferDiffBases),
3302 None,
3303 |state, mut cx| async move {
3304 let RepositoryState::Local { backend, .. } = state else {
3305 log::error!("tried to recompute diffs for a non-local repository");
3306 return Ok(());
3307 };
3308
3309 let Some(this) = this.upgrade() else {
3310 return Ok(());
3311 };
3312
3313 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3314 git_store.update(cx, |git_store, cx| {
3315 git_store
3316 .diffs
3317 .iter()
3318 .filter_map(|(buffer_id, diff_state)| {
3319 let buffer_store = git_store.buffer_store.read(cx);
3320 let buffer = buffer_store.get(*buffer_id)?;
3321 let file = File::from_dyn(buffer.read(cx).file())?;
3322 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3323 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3324 log::debug!(
3325 "start reload diff bases for repo path {}",
3326 repo_path.as_unix_str()
3327 );
3328 diff_state.update(cx, |diff_state, _| {
3329 let has_unstaged_diff = diff_state
3330 .unstaged_diff
3331 .as_ref()
3332 .is_some_and(|diff| diff.is_upgradable());
3333 let has_uncommitted_diff = diff_state
3334 .uncommitted_diff
3335 .as_ref()
3336 .is_some_and(|set| set.is_upgradable());
3337
3338 Some((
3339 buffer,
3340 repo_path,
3341 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3342 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3343 ))
3344 })
3345 })
3346 .collect::<Vec<_>>()
3347 })
3348 })??;
3349
3350 let buffer_diff_base_changes = cx
3351 .background_spawn(async move {
3352 let mut changes = Vec::new();
3353 for (buffer, repo_path, current_index_text, current_head_text) in
3354 &repo_diff_state_updates
3355 {
3356 let index_text = if current_index_text.is_some() {
3357 backend.load_index_text(repo_path.clone()).await
3358 } else {
3359 None
3360 };
3361 let head_text = if current_head_text.is_some() {
3362 backend.load_committed_text(repo_path.clone()).await
3363 } else {
3364 None
3365 };
3366
3367 let change =
3368 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3369 (Some(current_index), Some(current_head)) => {
3370 let index_changed =
3371 index_text.as_ref() != current_index.as_deref();
3372 let head_changed =
3373 head_text.as_ref() != current_head.as_deref();
3374 if index_changed && head_changed {
3375 if index_text == head_text {
3376 Some(DiffBasesChange::SetBoth(head_text))
3377 } else {
3378 Some(DiffBasesChange::SetEach {
3379 index: index_text,
3380 head: head_text,
3381 })
3382 }
3383 } else if index_changed {
3384 Some(DiffBasesChange::SetIndex(index_text))
3385 } else if head_changed {
3386 Some(DiffBasesChange::SetHead(head_text))
3387 } else {
3388 None
3389 }
3390 }
3391 (Some(current_index), None) => {
3392 let index_changed =
3393 index_text.as_ref() != current_index.as_deref();
3394 index_changed
3395 .then_some(DiffBasesChange::SetIndex(index_text))
3396 }
3397 (None, Some(current_head)) => {
3398 let head_changed =
3399 head_text.as_ref() != current_head.as_deref();
3400 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3401 }
3402 (None, None) => None,
3403 };
3404
3405 changes.push((buffer.clone(), change))
3406 }
3407 changes
3408 })
3409 .await;
3410
3411 git_store.update(&mut cx, |git_store, cx| {
3412 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3413 let buffer_snapshot = buffer.read(cx).text_snapshot();
3414 let buffer_id = buffer_snapshot.remote_id();
3415 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3416 continue;
3417 };
3418
3419 let downstream_client = git_store.downstream_client();
3420 diff_state.update(cx, |diff_state, cx| {
3421 use proto::update_diff_bases::Mode;
3422
3423 if let Some((diff_bases_change, (client, project_id))) =
3424 diff_bases_change.clone().zip(downstream_client)
3425 {
3426 let (staged_text, committed_text, mode) = match diff_bases_change {
3427 DiffBasesChange::SetIndex(index) => {
3428 (index, None, Mode::IndexOnly)
3429 }
3430 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3431 DiffBasesChange::SetEach { index, head } => {
3432 (index, head, Mode::IndexAndHead)
3433 }
3434 DiffBasesChange::SetBoth(text) => {
3435 (None, text, Mode::IndexMatchesHead)
3436 }
3437 };
3438 client
3439 .send(proto::UpdateDiffBases {
3440 project_id: project_id.to_proto(),
3441 buffer_id: buffer_id.to_proto(),
3442 staged_text,
3443 committed_text,
3444 mode: mode as i32,
3445 })
3446 .log_err();
3447 }
3448
3449 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3450 });
3451 }
3452 })
3453 },
3454 );
3455 }
3456
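    /// Enqueues a job on this repository's git worker queue and returns a receiver for
    /// its result. While the job runs, the optional status message is recorded as an
    /// active job on the repository.
    ///
    /// A minimal usage sketch (mirroring callers elsewhere in this file; the status
    /// string and closure body are illustrative only):
    ///
    /// ```ignore
    /// let rx = self.send_job(Some("git example".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => { /* call into `backend` here */ }
    ///         RepositoryState::Remote { .. } => { /* issue an RPC request instead */ }
    ///     }
    /// });
    /// ```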
3457 pub fn send_job<F, Fut, R>(
3458 &mut self,
3459 status: Option<SharedString>,
3460 job: F,
3461 ) -> oneshot::Receiver<R>
3462 where
3463 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3464 Fut: Future<Output = R> + 'static,
3465 R: Send + 'static,
3466 {
3467 self.send_keyed_job(None, status, job)
3468 }
3469
3470 fn send_keyed_job<F, Fut, R>(
3471 &mut self,
3472 key: Option<GitJobKey>,
3473 status: Option<SharedString>,
3474 job: F,
3475 ) -> oneshot::Receiver<R>
3476 where
3477 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3478 Fut: Future<Output = R> + 'static,
3479 R: Send + 'static,
3480 {
3481 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3482 let job_id = post_inc(&mut self.job_id);
3483 let this = self.this.clone();
3484 self.job_sender
3485 .unbounded_send(GitJob {
3486 key,
3487 job: Box::new(move |state, cx: &mut AsyncApp| {
3488 let job = job(state, cx.clone());
3489 cx.spawn(async move |cx| {
3490 if let Some(s) = status.clone() {
3491 this.update(cx, |this, cx| {
3492 this.active_jobs.insert(
3493 job_id,
3494 JobInfo {
3495 start: Instant::now(),
3496 message: s.clone(),
3497 },
3498 );
3499
3500 cx.notify();
3501 })
3502 .ok();
3503 }
3504 let result = job.await;
3505
3506 this.update(cx, |this, cx| {
3507 this.active_jobs.remove(&job_id);
3508 cx.notify();
3509 })
3510 .ok();
3511
3512 result_tx.send(result).ok();
3513 })
3514 }),
3515 })
3516 .ok();
3517 result_rx
3518 }
3519
3520 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3521 let Some(git_store) = self.git_store.upgrade() else {
3522 return;
3523 };
3524 let entity = cx.entity();
3525 git_store.update(cx, |git_store, cx| {
3526 let Some((&id, _)) = git_store
3527 .repositories
3528 .iter()
3529 .find(|(_, handle)| *handle == &entity)
3530 else {
3531 return;
3532 };
3533 git_store.active_repo_id = Some(id);
3534 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3535 });
3536 }
3537
3538 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3539 self.snapshot.status()
3540 }
3541
3542 pub fn cached_stash(&self) -> GitStash {
3543 self.snapshot.stash_entries.clone()
3544 }
3545
3546 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3547 let git_store = self.git_store.upgrade()?;
3548 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3549 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3550 let abs_path = SanitizedPath::new(&abs_path);
3551 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3552 Some(ProjectPath {
3553 worktree_id: worktree.read(cx).id(),
3554 path: relative_path,
3555 })
3556 }
3557
3558 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3559 let git_store = self.git_store.upgrade()?;
3560 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3561 let abs_path = worktree_store.absolutize(path, cx)?;
3562 self.snapshot.abs_path_to_repo_path(&abs_path)
3563 }
3564
3565 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3566 other
3567 .read(cx)
3568 .snapshot
3569 .work_directory_abs_path
3570 .starts_with(&self.snapshot.work_directory_abs_path)
3571 }
3572
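    /// Returns the buffer used to compose commit messages for this repository, creating
    /// it on first use (locally, or by asking the remote host when collaborating).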
3573 pub fn open_commit_buffer(
3574 &mut self,
3575 languages: Option<Arc<LanguageRegistry>>,
3576 buffer_store: Entity<BufferStore>,
3577 cx: &mut Context<Self>,
3578 ) -> Task<Result<Entity<Buffer>>> {
3579 let id = self.id;
3580 if let Some(buffer) = self.commit_message_buffer.clone() {
3581 return Task::ready(Ok(buffer));
3582 }
3583 let this = cx.weak_entity();
3584
3585 let rx = self.send_job(None, move |state, mut cx| async move {
3586 let Some(this) = this.upgrade() else {
3587 bail!("git store was dropped");
3588 };
3589 match state {
3590 RepositoryState::Local { .. } => {
3591 this.update(&mut cx, |_, cx| {
3592 Self::open_local_commit_buffer(languages, buffer_store, cx)
3593 })?
3594 .await
3595 }
3596 RepositoryState::Remote { project_id, client } => {
3597 let request = client.request(proto::OpenCommitMessageBuffer {
3598 project_id: project_id.0,
3599 repository_id: id.to_proto(),
3600 });
3601 let response = request.await.context("requesting to open commit buffer")?;
3602 let buffer_id = BufferId::new(response.buffer_id)?;
3603 let buffer = buffer_store
3604 .update(&mut cx, |buffer_store, cx| {
3605 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3606 })?
3607 .await?;
3608 if let Some(language_registry) = languages {
3609 let git_commit_language =
3610 language_registry.language_for_name("Git Commit").await?;
3611 buffer.update(&mut cx, |buffer, cx| {
3612 buffer.set_language(Some(git_commit_language), cx);
3613 })?;
3614 }
3615 this.update(&mut cx, |this, _| {
3616 this.commit_message_buffer = Some(buffer.clone());
3617 })?;
3618 Ok(buffer)
3619 }
3620 }
3621 });
3622
3623 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3624 }
3625
3626 fn open_local_commit_buffer(
3627 language_registry: Option<Arc<LanguageRegistry>>,
3628 buffer_store: Entity<BufferStore>,
3629 cx: &mut Context<Self>,
3630 ) -> Task<Result<Entity<Buffer>>> {
3631 cx.spawn(async move |repository, cx| {
3632 let buffer = buffer_store
3633 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3634 .await?;
3635
3636 if let Some(language_registry) = language_registry {
3637 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3638 buffer.update(cx, |buffer, cx| {
3639 buffer.set_language(Some(git_commit_language), cx);
3640 })?;
3641 }
3642
3643 repository.update(cx, |repository, _| {
3644 repository.commit_message_buffer = Some(buffer.clone());
3645 })?;
3646 Ok(buffer)
3647 })
3648 }
3649
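    /// Restores the given paths to their contents at `commit`, tracking each path as a
    /// pending `Reverted` operation while the checkout runs.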
3650 pub fn checkout_files(
3651 &mut self,
3652 commit: &str,
3653 paths: Vec<RepoPath>,
3654 cx: &mut Context<Self>,
3655 ) -> Task<Result<()>> {
3656 let commit = commit.to_string();
3657 let id = self.id;
3658
3659 self.spawn_job_with_tracking(
3660 paths.clone(),
3661 pending_op::GitStatus::Reverted,
3662 cx,
3663 async move |this, cx| {
3664 this.update(cx, |this, _cx| {
3665 this.send_job(
3666 Some(format!("git checkout {}", commit).into()),
3667 move |git_repo, _| async move {
3668 match git_repo {
3669 RepositoryState::Local {
3670 backend,
3671 environment,
3672 ..
3673 } => {
3674 backend
3675 .checkout_files(commit, paths, environment.clone())
3676 .await
3677 }
3678 RepositoryState::Remote { project_id, client } => {
3679 client
3680 .request(proto::GitCheckoutFiles {
3681 project_id: project_id.0,
3682 repository_id: id.to_proto(),
3683 commit,
3684 paths: paths
3685 .into_iter()
3686 .map(|p| p.to_proto())
3687 .collect(),
3688 })
3689 .await?;
3690
3691 Ok(())
3692 }
3693 }
3694 },
3695 )
3696 })?
3697 .await?
3698 },
3699 )
3700 }
3701
3702 pub fn reset(
3703 &mut self,
3704 commit: String,
3705 reset_mode: ResetMode,
3706 _cx: &mut App,
3707 ) -> oneshot::Receiver<Result<()>> {
3708 let id = self.id;
3709
3710 self.send_job(None, move |git_repo, _| async move {
3711 match git_repo {
3712 RepositoryState::Local {
3713 backend,
3714 environment,
3715 ..
3716 } => backend.reset(commit, reset_mode, environment).await,
3717 RepositoryState::Remote { project_id, client } => {
3718 client
3719 .request(proto::GitReset {
3720 project_id: project_id.0,
3721 repository_id: id.to_proto(),
3722 commit,
3723 mode: match reset_mode {
3724 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3725 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3726 },
3727 })
3728 .await?;
3729
3730 Ok(())
3731 }
3732 }
3733 })
3734 }
3735
3736 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3737 let id = self.id;
3738 self.send_job(None, move |git_repo, _cx| async move {
3739 match git_repo {
3740 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3741 RepositoryState::Remote { project_id, client } => {
3742 let resp = client
3743 .request(proto::GitShow {
3744 project_id: project_id.0,
3745 repository_id: id.to_proto(),
3746 commit,
3747 })
3748 .await?;
3749
3750 Ok(CommitDetails {
3751 sha: resp.sha.into(),
3752 message: resp.message.into(),
3753 commit_timestamp: resp.commit_timestamp,
3754 author_email: resp.author_email.into(),
3755 author_name: resp.author_name.into(),
3756 })
3757 }
3758 }
3759 })
3760 }
3761
3762 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3763 let id = self.id;
3764 self.send_job(None, move |git_repo, cx| async move {
3765 match git_repo {
3766 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3767 RepositoryState::Remote {
3768 client, project_id, ..
3769 } => {
3770 let response = client
3771 .request(proto::LoadCommitDiff {
3772 project_id: project_id.0,
3773 repository_id: id.to_proto(),
3774 commit,
3775 })
3776 .await?;
3777 Ok(CommitDiff {
3778 files: response
3779 .files
3780 .into_iter()
3781 .map(|file| {
3782 Ok(CommitFile {
3783 path: RepoPath::from_proto(&file.path)?,
3784 old_text: file.old_text,
3785 new_text: file.new_text,
3786 })
3787 })
3788 .collect::<Result<Vec<_>>>()?,
3789 })
3790 }
3791 }
3792 })
3793 }
3794
3795 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3796 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3797 }
3798
3799 fn save_buffers<'a>(
3800 &self,
3801 entries: impl IntoIterator<Item = &'a RepoPath>,
3802 cx: &mut Context<Self>,
3803 ) -> Vec<Task<anyhow::Result<()>>> {
3804 let mut save_futures = Vec::new();
3805 if let Some(buffer_store) = self.buffer_store(cx) {
3806 buffer_store.update(cx, |buffer_store, cx| {
3807 for path in entries {
3808 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3809 continue;
3810 };
3811 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3812 && buffer
3813 .read(cx)
3814 .file()
3815 .is_some_and(|file| file.disk_state().exists())
3816 && buffer.read(cx).has_unsaved_edits()
3817 {
3818 save_futures.push(buffer_store.save_buffer(buffer, cx));
3819 }
3820 }
3821 })
3822 }
3823 save_futures
3824 }
3825
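    /// Stages the given paths, saving any buffers with unsaved edits first.
    /// Single-path operations are keyed so that index writes for the same
    /// file are not interleaved.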
3826 pub fn stage_entries(
3827 &mut self,
3828 entries: Vec<RepoPath>,
3829 cx: &mut Context<Self>,
3830 ) -> Task<anyhow::Result<()>> {
3831 if entries.is_empty() {
3832 return Task::ready(Ok(()));
3833 }
3834 let id = self.id;
3835 let save_tasks = self.save_buffers(&entries, cx);
3836 let paths = entries
3837 .iter()
3838 .map(|p| p.as_unix_str())
3839 .collect::<Vec<_>>()
3840 .join(" ");
3841 let status = format!("git add {paths}");
3842 let job_key = match entries.len() {
3843 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3844 _ => None,
3845 };
3846
3847 self.spawn_job_with_tracking(
3848 entries.clone(),
3849 pending_op::GitStatus::Staged,
3850 cx,
3851 async move |this, cx| {
3852 for save_task in save_tasks {
3853 save_task.await?;
3854 }
3855
3856 this.update(cx, |this, _| {
3857 this.send_keyed_job(
3858 job_key,
3859 Some(status.into()),
3860 move |git_repo, _cx| async move {
3861 match git_repo {
3862 RepositoryState::Local {
3863 backend,
3864 environment,
3865 ..
3866 } => backend.stage_paths(entries, environment.clone()).await,
3867 RepositoryState::Remote { project_id, client } => {
3868 client
3869 .request(proto::Stage {
3870 project_id: project_id.0,
3871 repository_id: id.to_proto(),
3872 paths: entries
3873 .into_iter()
3874 .map(|repo_path| repo_path.to_proto())
3875 .collect(),
3876 })
3877 .await
3878 .context("sending stage request")?;
3879
3880 Ok(())
3881 }
3882 }
3883 },
3884 )
3885 })?
3886 .await?
3887 },
3888 )
3889 }
3890
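    /// Unstages the given paths, saving any buffers with unsaved edits first.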
3891 pub fn unstage_entries(
3892 &mut self,
3893 entries: Vec<RepoPath>,
3894 cx: &mut Context<Self>,
3895 ) -> Task<anyhow::Result<()>> {
3896 if entries.is_empty() {
3897 return Task::ready(Ok(()));
3898 }
3899 let id = self.id;
3900 let save_tasks = self.save_buffers(&entries, cx);
3901 let paths = entries
3902 .iter()
3903 .map(|p| p.as_unix_str())
3904 .collect::<Vec<_>>()
3905 .join(" ");
3906 let status = format!("git reset {paths}");
3907 let job_key = match entries.len() {
3908 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3909 _ => None,
3910 };
3911
3912 self.spawn_job_with_tracking(
3913 entries.clone(),
3914 pending_op::GitStatus::Unstaged,
3915 cx,
3916 async move |this, cx| {
3917 for save_task in save_tasks {
3918 save_task.await?;
3919 }
3920
3921 this.update(cx, |this, _| {
3922 this.send_keyed_job(
3923 job_key,
3924 Some(status.into()),
3925 move |git_repo, _cx| async move {
3926 match git_repo {
3927 RepositoryState::Local {
3928 backend,
3929 environment,
3930 ..
3931 } => backend.unstage_paths(entries, environment).await,
3932 RepositoryState::Remote { project_id, client } => {
3933 client
3934 .request(proto::Unstage {
3935 project_id: project_id.0,
3936 repository_id: id.to_proto(),
3937 paths: entries
3938 .into_iter()
3939 .map(|repo_path| repo_path.to_proto())
3940 .collect(),
3941 })
3942 .await
3943 .context("sending unstage request")?;
3944
3945 Ok(())
3946 }
3947 }
3948 },
3949 )
3950 })?
3951 .await?
3952 },
3953 )
3954 }
3955
3956 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3957 let to_stage = self
3958 .cached_status()
3959 .filter(|entry| !entry.status.staging().is_fully_staged())
3960 .map(|entry| entry.repo_path)
3961 .collect();
3962 self.stage_entries(to_stage, cx)
3963 }
3964
3965 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3966 let to_unstage = self
3967 .cached_status()
3968 .filter(|entry| entry.status.staging().has_staged())
3969 .map(|entry| entry.repo_path)
3970 .collect();
3971 self.unstage_entries(to_unstage, cx)
3972 }
3973
3974 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3975 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3976
3977 self.stash_entries(to_stash, cx)
3978 }
3979
3980 pub fn stash_entries(
3981 &mut self,
3982 entries: Vec<RepoPath>,
3983 cx: &mut Context<Self>,
3984 ) -> Task<anyhow::Result<()>> {
3985 let id = self.id;
3986
3987 cx.spawn(async move |this, cx| {
3988 this.update(cx, |this, _| {
3989 this.send_job(None, move |git_repo, _cx| async move {
3990 match git_repo {
3991 RepositoryState::Local {
3992 backend,
3993 environment,
3994 ..
3995 } => backend.stash_paths(entries, environment).await,
3996 RepositoryState::Remote { project_id, client } => {
3997 client
3998 .request(proto::Stash {
3999 project_id: project_id.0,
4000 repository_id: id.to_proto(),
4001 paths: entries
4002 .into_iter()
4003 .map(|repo_path| repo_path.to_proto())
4004 .collect(),
4005 })
4006 .await
4007 .context("sending stash request")?;
4008 Ok(())
4009 }
4010 }
4011 })
4012 })?
4013 .await??;
4014 Ok(())
4015 })
4016 }
4017
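    /// Pops a stash entry; when `index` is `None`, the most recent entry is used.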
4018 pub fn stash_pop(
4019 &mut self,
4020 index: Option<usize>,
4021 cx: &mut Context<Self>,
4022 ) -> Task<anyhow::Result<()>> {
4023 let id = self.id;
4024 cx.spawn(async move |this, cx| {
4025 this.update(cx, |this, _| {
4026 this.send_job(None, move |git_repo, _cx| async move {
4027 match git_repo {
4028 RepositoryState::Local {
4029 backend,
4030 environment,
4031 ..
4032 } => backend.stash_pop(index, environment).await,
4033 RepositoryState::Remote { project_id, client } => {
4034 client
4035 .request(proto::StashPop {
4036 project_id: project_id.0,
4037 repository_id: id.to_proto(),
4038 stash_index: index.map(|i| i as u64),
4039 })
4040 .await
4041 .context("sending stash pop request")?;
4042 Ok(())
4043 }
4044 }
4045 })
4046 })?
4047 .await??;
4048 Ok(())
4049 })
4050 }
4051
4052 pub fn stash_apply(
4053 &mut self,
4054 index: Option<usize>,
4055 cx: &mut Context<Self>,
4056 ) -> Task<anyhow::Result<()>> {
4057 let id = self.id;
4058 cx.spawn(async move |this, cx| {
4059 this.update(cx, |this, _| {
4060 this.send_job(None, move |git_repo, _cx| async move {
4061 match git_repo {
4062 RepositoryState::Local {
4063 backend,
4064 environment,
4065 ..
4066 } => backend.stash_apply(index, environment).await,
4067 RepositoryState::Remote { project_id, client } => {
4068 client
4069 .request(proto::StashApply {
4070 project_id: project_id.0,
4071 repository_id: id.to_proto(),
4072 stash_index: index.map(|i| i as u64),
4073 })
4074 .await
4075 .context("sending stash apply request")?;
4076 Ok(())
4077 }
4078 }
4079 })
4080 })?
4081 .await??;
4082 Ok(())
4083 })
4084 }
4085
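    /// Drops a stash entry (the most recent when `index` is `None`) and, for
    /// local repositories, refreshes the cached stash list on success.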
4086 pub fn stash_drop(
4087 &mut self,
4088 index: Option<usize>,
4089 cx: &mut Context<Self>,
4090 ) -> oneshot::Receiver<anyhow::Result<()>> {
4091 let id = self.id;
4092 let updates_tx = self
4093 .git_store()
4094 .and_then(|git_store| match &git_store.read(cx).state {
4095 GitStoreState::Local { downstream, .. } => downstream
4096 .as_ref()
4097 .map(|downstream| downstream.updates_tx.clone()),
4098 _ => None,
4099 });
4100 let this = cx.weak_entity();
4101 self.send_job(None, move |git_repo, mut cx| async move {
4102 match git_repo {
4103 RepositoryState::Local {
4104 backend,
4105 environment,
4106 ..
4107 } => {
4108 // TODO would be nice to not have to do this manually
4109 let result = backend.stash_drop(index, environment).await;
4110 if result.is_ok()
4111 && let Ok(stash_entries) = backend.stash_entries().await
4112 {
4113 let snapshot = this.update(&mut cx, |this, cx| {
4114 this.snapshot.stash_entries = stash_entries;
4115 cx.emit(RepositoryEvent::StashEntriesChanged);
4116 this.snapshot.clone()
4117 })?;
4118 if let Some(updates_tx) = updates_tx {
4119 updates_tx
4120 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4121 .ok();
4122 }
4123 }
4124
4125 result
4126 }
4127 RepositoryState::Remote { project_id, client } => {
4128 client
4129 .request(proto::StashDrop {
4130 project_id: project_id.0,
4131 repository_id: id.to_proto(),
4132 stash_index: index.map(|i| i as u64),
4133 })
4134 .await
                        .context("sending stash drop request")?;
4136 Ok(())
4137 }
4138 }
4139 })
4140 }
4141
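    /// Creates a commit with the given message, optionally overriding the
    /// author name and email and applying the provided [`CommitOptions`].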
4142 pub fn commit(
4143 &mut self,
4144 message: SharedString,
4145 name_and_email: Option<(SharedString, SharedString)>,
4146 options: CommitOptions,
4147 _cx: &mut App,
4148 ) -> oneshot::Receiver<Result<()>> {
4149 let id = self.id;
4150
4151 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4152 match git_repo {
4153 RepositoryState::Local {
4154 backend,
4155 environment,
4156 ..
4157 } => {
4158 backend
4159 .commit(message, name_and_email, options, environment)
4160 .await
4161 }
4162 RepositoryState::Remote { project_id, client } => {
4163 let (name, email) = name_and_email.unzip();
4164 client
4165 .request(proto::Commit {
4166 project_id: project_id.0,
4167 repository_id: id.to_proto(),
4168 message: String::from(message),
4169 name: name.map(String::from),
4170 email: email.map(String::from),
4171 options: Some(proto::commit::CommitOptions {
4172 amend: options.amend,
4173 signoff: options.signoff,
4174 }),
4175 })
4176 .await
4177 .context("sending commit request")?;
4178
4179 Ok(())
4180 }
4181 }
4182 })
4183 }
4184
4185 pub fn fetch(
4186 &mut self,
4187 fetch_options: FetchOptions,
4188 askpass: AskPassDelegate,
4189 _cx: &mut App,
4190 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4191 let askpass_delegates = self.askpass_delegates.clone();
4192 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4193 let id = self.id;
4194
4195 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4196 match git_repo {
4197 RepositoryState::Local {
4198 backend,
4199 environment,
4200 ..
4201 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4202 RepositoryState::Remote { project_id, client } => {
4203 askpass_delegates.lock().insert(askpass_id, askpass);
4204 let _defer = util::defer(|| {
4205 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4206 debug_assert!(askpass_delegate.is_some());
4207 });
4208
4209 let response = client
4210 .request(proto::Fetch {
4211 project_id: project_id.0,
4212 repository_id: id.to_proto(),
4213 askpass_id,
4214 remote: fetch_options.to_proto(),
4215 })
4216 .await
4217 .context("sending fetch request")?;
4218
4219 Ok(RemoteCommandOutput {
4220 stdout: response.stdout,
4221 stderr: response.stderr,
4222 })
4223 }
4224 }
4225 })
4226 }
4227
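    /// Pushes `branch` to `remote`. For local repositories, the cached branch
    /// state is refreshed on success and forwarded to any downstream clients.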
4228 pub fn push(
4229 &mut self,
4230 branch: SharedString,
4231 remote: SharedString,
4232 options: Option<PushOptions>,
4233 askpass: AskPassDelegate,
4234 cx: &mut Context<Self>,
4235 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4236 let askpass_delegates = self.askpass_delegates.clone();
4237 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4238 let id = self.id;
4239
4240 let args = options
4241 .map(|option| match option {
4242 PushOptions::SetUpstream => " --set-upstream",
4243 PushOptions::Force => " --force-with-lease",
4244 })
4245 .unwrap_or("");
4246
4247 let updates_tx = self
4248 .git_store()
4249 .and_then(|git_store| match &git_store.read(cx).state {
4250 GitStoreState::Local { downstream, .. } => downstream
4251 .as_ref()
4252 .map(|downstream| downstream.updates_tx.clone()),
4253 _ => None,
4254 });
4255
4256 let this = cx.weak_entity();
4257 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4259 move |git_repo, mut cx| async move {
4260 match git_repo {
4261 RepositoryState::Local {
4262 backend,
4263 environment,
4264 ..
4265 } => {
4266 let result = backend
4267 .push(
4268 branch.to_string(),
4269 remote.to_string(),
4270 options,
4271 askpass,
4272 environment.clone(),
4273 cx.clone(),
4274 )
4275 .await;
4276 // TODO would be nice to not have to do this manually
4277 if result.is_ok() {
4278 let branches = backend.branches().await?;
4279 let branch = branches.into_iter().find(|branch| branch.is_head);
4280 log::info!("head branch after scan is {branch:?}");
4281 let snapshot = this.update(&mut cx, |this, cx| {
4282 this.snapshot.branch = branch;
4283 cx.emit(RepositoryEvent::BranchChanged);
4284 this.snapshot.clone()
4285 })?;
4286 if let Some(updates_tx) = updates_tx {
4287 updates_tx
4288 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4289 .ok();
4290 }
4291 }
4292 result
4293 }
4294 RepositoryState::Remote { project_id, client } => {
4295 askpass_delegates.lock().insert(askpass_id, askpass);
4296 let _defer = util::defer(|| {
4297 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4298 debug_assert!(askpass_delegate.is_some());
4299 });
4300 let response = client
4301 .request(proto::Push {
4302 project_id: project_id.0,
4303 repository_id: id.to_proto(),
4304 askpass_id,
4305 branch_name: branch.to_string(),
4306 remote_name: remote.to_string(),
4307 options: options.map(|options| match options {
4308 PushOptions::Force => proto::push::PushOptions::Force,
4309 PushOptions::SetUpstream => {
4310 proto::push::PushOptions::SetUpstream
4311 }
4312 }
4313 as i32),
4314 })
4315 .await
4316 .context("sending push request")?;
4317
4318 Ok(RemoteCommandOutput {
4319 stdout: response.stdout,
4320 stderr: response.stderr,
4321 })
4322 }
4323 }
4324 },
4325 )
4326 }
4327
4328 pub fn pull(
4329 &mut self,
4330 branch: SharedString,
4331 remote: SharedString,
4332 askpass: AskPassDelegate,
4333 _cx: &mut App,
4334 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4335 let askpass_delegates = self.askpass_delegates.clone();
4336 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4337 let id = self.id;
4338
4339 self.send_job(
4340 Some(format!("git pull {} {}", remote, branch).into()),
4341 move |git_repo, cx| async move {
4342 match git_repo {
4343 RepositoryState::Local {
4344 backend,
4345 environment,
4346 ..
4347 } => {
4348 backend
4349 .pull(
4350 branch.to_string(),
4351 remote.to_string(),
4352 askpass,
4353 environment.clone(),
4354 cx,
4355 )
4356 .await
4357 }
4358 RepositoryState::Remote { project_id, client } => {
4359 askpass_delegates.lock().insert(askpass_id, askpass);
4360 let _defer = util::defer(|| {
4361 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4362 debug_assert!(askpass_delegate.is_some());
4363 });
4364 let response = client
4365 .request(proto::Pull {
4366 project_id: project_id.0,
4367 repository_id: id.to_proto(),
4368 askpass_id,
4369 branch_name: branch.to_string(),
4370 remote_name: remote.to_string(),
4371 })
4372 .await
4373 .context("sending pull request")?;
4374
4375 Ok(RemoteCommandOutput {
4376 stdout: response.stdout,
4377 stderr: response.stderr,
4378 })
4379 }
4380 }
4381 },
4382 )
4383 }
4384
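    /// Queues a write of `content` to the index entry for `path`, keyed so
    /// that index writes for the same file run in order. Once the write
    /// lands, the provided hunk-staging operation count, if any, is recorded
    /// on the buffer's diff state.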
4385 fn spawn_set_index_text_job(
4386 &mut self,
4387 path: RepoPath,
4388 content: Option<String>,
4389 hunk_staging_operation_count: Option<usize>,
4390 cx: &mut Context<Self>,
4391 ) -> oneshot::Receiver<anyhow::Result<()>> {
4392 let id = self.id;
4393 let this = cx.weak_entity();
4394 let git_store = self.git_store.clone();
4395 self.send_keyed_job(
4396 Some(GitJobKey::WriteIndex(path.clone())),
4397 None,
4398 move |git_repo, mut cx| async move {
4399 log::debug!(
4400 "start updating index text for buffer {}",
4401 path.as_unix_str()
4402 );
4403 match git_repo {
4404 RepositoryState::Local {
4405 backend,
4406 environment,
4407 ..
4408 } => {
4409 backend
4410 .set_index_text(path.clone(), content, environment.clone())
4411 .await?;
4412 }
4413 RepositoryState::Remote { project_id, client } => {
4414 client
4415 .request(proto::SetIndexText {
4416 project_id: project_id.0,
4417 repository_id: id.to_proto(),
4418 path: path.to_proto(),
4419 text: content,
4420 })
4421 .await?;
4422 }
4423 }
4424 log::debug!(
4425 "finish updating index text for buffer {}",
4426 path.as_unix_str()
4427 );
4428
4429 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4430 let project_path = this
4431 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4432 .ok()
4433 .flatten();
4434 git_store.update(&mut cx, |git_store, cx| {
4435 let buffer_id = git_store
4436 .buffer_store
4437 .read(cx)
4438 .get_by_path(&project_path?)?
4439 .read(cx)
4440 .remote_id();
4441 let diff_state = git_store.diffs.get(&buffer_id)?;
4442 diff_state.update(cx, |diff_state, _| {
4443 diff_state.hunk_staging_operation_count_as_of_write =
4444 hunk_staging_operation_count;
4445 });
4446 Some(())
4447 })?;
4448 }
4449 Ok(())
4450 },
4451 )
4452 }
4453
4454 pub fn get_remotes(
4455 &mut self,
4456 branch_name: Option<String>,
4457 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4458 let id = self.id;
4459 self.send_job(None, move |repo, _cx| async move {
4460 match repo {
4461 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4462 RepositoryState::Remote { project_id, client } => {
4463 let response = client
4464 .request(proto::GetRemotes {
4465 project_id: project_id.0,
4466 repository_id: id.to_proto(),
4467 branch_name,
4468 })
4469 .await?;
4470
4471 let remotes = response
4472 .remotes
4473 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4477 .collect();
4478
4479 Ok(remotes)
4480 }
4481 }
4482 })
4483 }
4484
4485 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4486 let id = self.id;
4487 self.send_job(None, move |repo, _| async move {
4488 match repo {
4489 RepositoryState::Local { backend, .. } => backend.branches().await,
4490 RepositoryState::Remote { project_id, client } => {
4491 let response = client
4492 .request(proto::GitGetBranches {
4493 project_id: project_id.0,
4494 repository_id: id.to_proto(),
4495 })
4496 .await?;
4497
4498 let branches = response
4499 .branches
4500 .into_iter()
4501 .map(|branch| proto_to_branch(&branch))
4502 .collect();
4503
4504 Ok(branches)
4505 }
4506 }
4507 })
4508 }
4509
4510 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4511 let id = self.id;
4512 self.send_job(None, move |repo, _| async move {
4513 match repo {
4514 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4515 RepositoryState::Remote { project_id, client } => {
4516 let response = client
4517 .request(proto::GitGetWorktrees {
4518 project_id: project_id.0,
4519 repository_id: id.to_proto(),
4520 })
4521 .await?;
4522
4523 let worktrees = response
4524 .worktrees
4525 .into_iter()
4526 .map(|worktree| proto_to_worktree(&worktree))
4527 .collect();
4528
4529 Ok(worktrees)
4530 }
4531 }
4532 })
4533 }
4534
4535 pub fn create_worktree(
4536 &mut self,
4537 name: String,
4538 path: PathBuf,
4539 commit: Option<String>,
4540 ) -> oneshot::Receiver<Result<()>> {
4541 let id = self.id;
4542 self.send_job(
4543 Some("git worktree add".into()),
4544 move |repo, _cx| async move {
4545 match repo {
4546 RepositoryState::Local { backend, .. } => {
4547 backend.create_worktree(name, path, commit).await
4548 }
4549 RepositoryState::Remote { project_id, client } => {
4550 client
4551 .request(proto::GitCreateWorktree {
4552 project_id: project_id.0,
4553 repository_id: id.to_proto(),
4554 name,
4555 directory: path.to_string_lossy().to_string(),
4556 commit,
4557 })
4558 .await?;
4559
4560 Ok(())
4561 }
4562 }
4563 },
4564 )
4565 }
4566
4567 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4568 let id = self.id;
4569 self.send_job(None, move |repo, _| async move {
4570 match repo {
4571 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4572 RepositoryState::Remote { project_id, client } => {
4573 let response = client
4574 .request(proto::GetDefaultBranch {
4575 project_id: project_id.0,
4576 repository_id: id.to_proto(),
4577 })
4578 .await?;
4579
4580 anyhow::Ok(response.branch.map(SharedString::from))
4581 }
4582 }
4583 })
4584 }
4585
4586 pub fn diff_tree(
4587 &mut self,
4588 diff_type: DiffTreeType,
4589 _cx: &App,
4590 ) -> oneshot::Receiver<Result<TreeDiff>> {
4591 let repository_id = self.snapshot.id;
4592 self.send_job(None, move |repo, _cx| async move {
4593 match repo {
4594 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4595 RepositoryState::Remote { client, project_id } => {
4596 let response = client
4597 .request(proto::GetTreeDiff {
4598 project_id: project_id.0,
4599 repository_id: repository_id.0,
4600 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4601 base: diff_type.base().to_string(),
4602 head: diff_type.head().to_string(),
4603 })
4604 .await?;
4605
4606 let entries = response
4607 .entries
4608 .into_iter()
4609 .filter_map(|entry| {
4610 let status = match entry.status() {
4611 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4612 proto::tree_diff_status::Status::Modified => {
4613 TreeDiffStatus::Modified {
4614 old: git::Oid::from_str(
4615 &entry.oid.context("missing oid").log_err()?,
4616 )
4617 .log_err()?,
4618 }
4619 }
4620 proto::tree_diff_status::Status::Deleted => {
4621 TreeDiffStatus::Deleted {
4622 old: git::Oid::from_str(
4623 &entry.oid.context("missing oid").log_err()?,
4624 )
4625 .log_err()?,
4626 }
4627 }
4628 };
4629 Some((
4630 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4631 status,
4632 ))
4633 })
4634 .collect();
4635
4636 Ok(TreeDiff { entries })
4637 }
4638 }
4639 })
4640 }
4641
4642 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4643 let id = self.id;
4644 self.send_job(None, move |repo, _cx| async move {
4645 match repo {
4646 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4647 RepositoryState::Remote { project_id, client } => {
4648 let response = client
4649 .request(proto::GitDiff {
4650 project_id: project_id.0,
4651 repository_id: id.to_proto(),
4652 diff_type: match diff_type {
4653 DiffType::HeadToIndex => {
4654 proto::git_diff::DiffType::HeadToIndex.into()
4655 }
4656 DiffType::HeadToWorktree => {
4657 proto::git_diff::DiffType::HeadToWorktree.into()
4658 }
4659 },
4660 })
4661 .await?;
4662
4663 Ok(response.diff)
4664 }
4665 }
4666 })
4667 }
4668
4669 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4670 let id = self.id;
4671 self.send_job(
4672 Some(format!("git switch -c {branch_name}").into()),
4673 move |repo, _cx| async move {
4674 match repo {
4675 RepositoryState::Local { backend, .. } => {
4676 backend.create_branch(branch_name).await
4677 }
4678 RepositoryState::Remote { project_id, client } => {
4679 client
4680 .request(proto::GitCreateBranch {
4681 project_id: project_id.0,
4682 repository_id: id.to_proto(),
4683 branch_name,
4684 })
4685 .await?;
4686
4687 Ok(())
4688 }
4689 }
4690 },
4691 )
4692 }
4693
4694 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4695 let id = self.id;
4696 self.send_job(
4697 Some(format!("git switch {branch_name}").into()),
4698 move |repo, _cx| async move {
4699 match repo {
4700 RepositoryState::Local { backend, .. } => {
4701 backend.change_branch(branch_name).await
4702 }
4703 RepositoryState::Remote { project_id, client } => {
4704 client
4705 .request(proto::GitChangeBranch {
4706 project_id: project_id.0,
4707 repository_id: id.to_proto(),
4708 branch_name,
4709 })
4710 .await?;
4711
4712 Ok(())
4713 }
4714 }
4715 },
4716 )
4717 }
4718
4719 pub fn rename_branch(
4720 &mut self,
4721 branch: String,
4722 new_name: String,
4723 ) -> oneshot::Receiver<Result<()>> {
4724 let id = self.id;
4725 self.send_job(
4726 Some(format!("git branch -m {branch} {new_name}").into()),
4727 move |repo, _cx| async move {
4728 match repo {
4729 RepositoryState::Local { backend, .. } => {
4730 backend.rename_branch(branch, new_name).await
4731 }
4732 RepositoryState::Remote { project_id, client } => {
4733 client
4734 .request(proto::GitRenameBranch {
4735 project_id: project_id.0,
4736 repository_id: id.to_proto(),
4737 branch,
4738 new_name,
4739 })
4740 .await?;
4741
4742 Ok(())
4743 }
4744 }
4745 },
4746 )
4747 }
4748
4749 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4750 let id = self.id;
4751 self.send_job(None, move |repo, _cx| async move {
4752 match repo {
4753 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4754 RepositoryState::Remote { project_id, client } => {
4755 let response = client
4756 .request(proto::CheckForPushedCommits {
4757 project_id: project_id.0,
4758 repository_id: id.to_proto(),
4759 })
4760 .await?;
4761
4762 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4763
4764 Ok(branches)
4765 }
4766 }
4767 })
4768 }
4769
4770 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4771 self.send_job(None, |repo, _cx| async move {
4772 match repo {
4773 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4774 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4775 }
4776 })
4777 }
4778
4779 pub fn restore_checkpoint(
4780 &mut self,
4781 checkpoint: GitRepositoryCheckpoint,
4782 ) -> oneshot::Receiver<Result<()>> {
4783 self.send_job(None, move |repo, _cx| async move {
4784 match repo {
4785 RepositoryState::Local { backend, .. } => {
4786 backend.restore_checkpoint(checkpoint).await
4787 }
4788 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4789 }
4790 })
4791 }
4792
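    /// Applies an `UpdateRepository` message received from the host, updating
    /// the local snapshot and emitting events for anything that changed.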
4793 pub(crate) fn apply_remote_update(
4794 &mut self,
4795 update: proto::UpdateRepository,
4796 cx: &mut Context<Self>,
4797 ) -> Result<()> {
4798 let conflicted_paths = TreeSet::from_ordered_entries(
4799 update
4800 .current_merge_conflicts
4801 .into_iter()
4802 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4803 );
4804 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4805 let new_head_commit = update
4806 .head_commit_details
4807 .as_ref()
4808 .map(proto_to_commit_details);
4809 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4810 cx.emit(RepositoryEvent::BranchChanged)
4811 }
4812 self.snapshot.branch = new_branch;
4813 self.snapshot.head_commit = new_head_commit;
4814
4815 self.snapshot.merge.conflicted_paths = conflicted_paths;
4816 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4817 let new_stash_entries = GitStash {
4818 entries: update
4819 .stash_entries
4820 .iter()
4821 .filter_map(|entry| proto_to_stash(entry).ok())
4822 .collect(),
4823 };
4824 if self.snapshot.stash_entries != new_stash_entries {
4825 cx.emit(RepositoryEvent::StashEntriesChanged)
4826 }
4827 self.snapshot.stash_entries = new_stash_entries;
4828
4829 let edits = update
4830 .removed_statuses
4831 .into_iter()
4832 .filter_map(|path| {
4833 Some(sum_tree::Edit::Remove(PathKey(
4834 RelPath::from_proto(&path).log_err()?,
4835 )))
4836 })
4837 .chain(
4838 update
4839 .updated_statuses
4840 .into_iter()
4841 .filter_map(|updated_status| {
4842 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4843 }),
4844 )
4845 .collect::<Vec<_>>();
4846 if !edits.is_empty() {
4847 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4848 }
4849 self.snapshot.statuses_by_path.edit(edits, ());
4850 if update.is_last_update {
4851 self.snapshot.scan_id = update.scan_id;
4852 }
4853 Ok(())
4854 }
4855
4856 pub fn compare_checkpoints(
4857 &mut self,
4858 left: GitRepositoryCheckpoint,
4859 right: GitRepositoryCheckpoint,
4860 ) -> oneshot::Receiver<Result<bool>> {
4861 self.send_job(None, move |repo, _cx| async move {
4862 match repo {
4863 RepositoryState::Local { backend, .. } => {
4864 backend.compare_checkpoints(left, right).await
4865 }
4866 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4867 }
4868 })
4869 }
4870
4871 pub fn diff_checkpoints(
4872 &mut self,
4873 base_checkpoint: GitRepositoryCheckpoint,
4874 target_checkpoint: GitRepositoryCheckpoint,
4875 ) -> oneshot::Receiver<Result<String>> {
4876 self.send_job(None, move |repo, _cx| async move {
4877 match repo {
4878 RepositoryState::Local { backend, .. } => {
4879 backend
4880 .diff_checkpoints(base_checkpoint, target_checkpoint)
4881 .await
4882 }
4883 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4884 }
4885 })
4886 }
4887
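    /// Schedules a full git status scan. Scans queued behind a newer one with
    /// the same key are skipped, and the refreshed snapshot is sent downstream
    /// when a sender is provided.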
4888 fn schedule_scan(
4889 &mut self,
4890 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4891 cx: &mut Context<Self>,
4892 ) {
4893 let this = cx.weak_entity();
4894 let _ = self.send_keyed_job(
4895 Some(GitJobKey::ReloadGitState),
4896 None,
4897 |state, mut cx| async move {
4898 log::debug!("run scheduled git status scan");
4899
4900 let Some(this) = this.upgrade() else {
4901 return Ok(());
4902 };
4903 let RepositoryState::Local { backend, .. } = state else {
4904 bail!("not a local repository")
4905 };
4906 let (snapshot, events) = this
4907 .update(&mut cx, |this, _| {
4908 this.paths_needing_status_update.clear();
4909 compute_snapshot(
4910 this.id,
4911 this.work_directory_abs_path.clone(),
4912 this.snapshot.clone(),
4913 backend.clone(),
4914 )
4915 })?
4916 .await?;
4917 this.update(&mut cx, |this, cx| {
4918 this.snapshot = snapshot.clone();
4919 for event in events {
4920 cx.emit(event);
4921 }
4922 })?;
4923 if let Some(updates_tx) = updates_tx {
4924 updates_tx
4925 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4926 .ok();
4927 }
4928 Ok(())
4929 },
4930 );
4931 }
4932
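    /// Spawns the background worker that opens the local git repository,
    /// captures its shell environment, and serially processes queued
    /// [`GitJob`]s, skipping jobs that are superseded by a newer job with the
    /// same key.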
4933 fn spawn_local_git_worker(
4934 work_directory_abs_path: Arc<Path>,
4935 dot_git_abs_path: Arc<Path>,
4936 _repository_dir_abs_path: Arc<Path>,
4937 _common_dir_abs_path: Arc<Path>,
4938 project_environment: WeakEntity<ProjectEnvironment>,
4939 fs: Arc<dyn Fs>,
4940 cx: &mut Context<Self>,
4941 ) -> mpsc::UnboundedSender<GitJob> {
4942 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4943
4944 cx.spawn(async move |_, cx| {
4945 let environment = project_environment
4946 .upgrade()
4947 .context("missing project environment")?
4948 .update(cx, |project_environment, cx| {
4949 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4950 })?
4951 .await
4952 .unwrap_or_else(|| {
4953 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4954 HashMap::default()
4955 });
4956 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4957 let backend = cx
4958 .background_spawn(async move {
4959 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4960 .or_else(|| which::which("git").ok());
4961 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4962 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4963 })
4964 .await?;
4965
4966 if let Some(git_hosting_provider_registry) =
4967 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4968 {
4969 git_hosting_providers::register_additional_providers(
4970 git_hosting_provider_registry,
4971 backend.clone(),
4972 );
4973 }
4974
4975 let state = RepositoryState::Local {
4976 backend,
4977 environment: Arc::new(environment),
4978 };
4979 let mut jobs = VecDeque::new();
4980 loop {
4981 while let Ok(Some(next_job)) = job_rx.try_next() {
4982 jobs.push_back(next_job);
4983 }
4984
4985 if let Some(job) = jobs.pop_front() {
4986 if let Some(current_key) = &job.key
4987 && jobs
4988 .iter()
4989 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4990 {
4991 continue;
4992 }
4993 (job.job)(state.clone(), cx).await;
4994 } else if let Some(job) = job_rx.next().await {
4995 jobs.push_back(job);
4996 } else {
4997 break;
4998 }
4999 }
5000 anyhow::Ok(())
5001 })
5002 .detach_and_log_err(cx);
5003
5004 job_tx
5005 }
5006
5007 fn spawn_remote_git_worker(
5008 project_id: ProjectId,
5009 client: AnyProtoClient,
5010 cx: &mut Context<Self>,
5011 ) -> mpsc::UnboundedSender<GitJob> {
5012 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5013
5014 cx.spawn(async move |_, cx| {
5015 let state = RepositoryState::Remote { project_id, client };
5016 let mut jobs = VecDeque::new();
5017 loop {
5018 while let Ok(Some(next_job)) = job_rx.try_next() {
5019 jobs.push_back(next_job);
5020 }
5021
5022 if let Some(job) = jobs.pop_front() {
5023 if let Some(current_key) = &job.key
5024 && jobs
5025 .iter()
5026 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5027 {
5028 continue;
5029 }
5030 (job.job)(state.clone(), cx).await;
5031 } else if let Some(job) = job_rx.next().await {
5032 jobs.push_back(job);
5033 } else {
5034 break;
5035 }
5036 }
5037 anyhow::Ok(())
5038 })
5039 .detach_and_log_err(cx);
5040
5041 job_tx
5042 }
5043
5044 fn load_staged_text(
5045 &mut self,
5046 buffer_id: BufferId,
5047 repo_path: RepoPath,
5048 cx: &App,
5049 ) -> Task<Result<Option<String>>> {
5050 let rx = self.send_job(None, move |state, _| async move {
5051 match state {
5052 RepositoryState::Local { backend, .. } => {
5053 anyhow::Ok(backend.load_index_text(repo_path).await)
5054 }
5055 RepositoryState::Remote { project_id, client } => {
5056 let response = client
5057 .request(proto::OpenUnstagedDiff {
5058 project_id: project_id.to_proto(),
5059 buffer_id: buffer_id.to_proto(),
5060 })
5061 .await?;
5062 Ok(response.staged_text)
5063 }
5064 }
5065 });
5066 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5067 }
5068
5069 fn load_committed_text(
5070 &mut self,
5071 buffer_id: BufferId,
5072 repo_path: RepoPath,
5073 cx: &App,
5074 ) -> Task<Result<DiffBasesChange>> {
5075 let rx = self.send_job(None, move |state, _| async move {
5076 match state {
5077 RepositoryState::Local { backend, .. } => {
5078 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5079 let staged_text = backend.load_index_text(repo_path).await;
5080 let diff_bases_change = if committed_text == staged_text {
5081 DiffBasesChange::SetBoth(committed_text)
5082 } else {
5083 DiffBasesChange::SetEach {
5084 index: staged_text,
5085 head: committed_text,
5086 }
5087 };
5088 anyhow::Ok(diff_bases_change)
5089 }
5090 RepositoryState::Remote { project_id, client } => {
5091 use proto::open_uncommitted_diff_response::Mode;
5092
5093 let response = client
5094 .request(proto::OpenUncommittedDiff {
5095 project_id: project_id.to_proto(),
5096 buffer_id: buffer_id.to_proto(),
5097 })
5098 .await?;
5099 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5100 let bases = match mode {
5101 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5102 Mode::IndexAndHead => DiffBasesChange::SetEach {
5103 head: response.committed_text,
5104 index: response.staged_text,
5105 },
5106 };
5107 Ok(bases)
5108 }
5109 }
5110 });
5111
5112 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5113 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5115 let repository_id = self.snapshot.id;
5116 let rx = self.send_job(None, move |state, _| async move {
5117 match state {
5118 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5119 RepositoryState::Remote { client, project_id } => {
5120 let response = client
5121 .request(proto::GetBlobContent {
5122 project_id: project_id.to_proto(),
5123 repository_id: repository_id.0,
5124 oid: oid.to_string(),
5125 })
5126 .await?;
5127 Ok(response.content)
5128 }
5129 }
5130 });
5131 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5132 }
5133
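    /// Queues a status refresh for the given paths; any resulting status or
    /// stash changes are applied to the snapshot and forwarded downstream.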
5134 fn paths_changed(
5135 &mut self,
5136 paths: Vec<RepoPath>,
5137 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5138 cx: &mut Context<Self>,
5139 ) {
5140 self.paths_needing_status_update.extend(paths);
5141
5142 let this = cx.weak_entity();
5143 let _ = self.send_keyed_job(
5144 Some(GitJobKey::RefreshStatuses),
5145 None,
5146 |state, mut cx| async move {
5147 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5148 (
5149 this.snapshot.clone(),
5150 mem::take(&mut this.paths_needing_status_update),
5151 )
5152 })?;
5153 let RepositoryState::Local { backend, .. } = state else {
5154 bail!("not a local repository")
5155 };
5156
5157 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5158 if paths.is_empty() {
5159 return Ok(());
5160 }
5161 let statuses = backend.status(&paths).await?;
5162 let stash_entries = backend.stash_entries().await?;
5163
5164 let changed_path_statuses = cx
5165 .background_spawn(async move {
5166 let mut changed_path_statuses = Vec::new();
5167 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5168 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5169
5170 for (repo_path, status) in &*statuses.entries {
5171 changed_paths.remove(repo_path);
5172 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5173 && cursor.item().is_some_and(|entry| entry.status == *status)
5174 {
5175 continue;
5176 }
5177
5178 changed_path_statuses.push(Edit::Insert(StatusEntry {
5179 repo_path: repo_path.clone(),
5180 status: *status,
5181 }));
5182 }
5183 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5184 for path in changed_paths.into_iter() {
5185 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5186 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5187 }
5188 }
5189 changed_path_statuses
5190 })
5191 .await;
5192
5193 this.update(&mut cx, |this, cx| {
5194 if this.snapshot.stash_entries != stash_entries {
5195 cx.emit(RepositoryEvent::StashEntriesChanged);
5196 this.snapshot.stash_entries = stash_entries;
5197 }
5198
5199 if !changed_path_statuses.is_empty() {
5200 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5201 this.snapshot
5202 .statuses_by_path
5203 .edit(changed_path_statuses, ());
5204 this.snapshot.scan_id += 1;
5205 }
5206
5207 if let Some(updates_tx) = updates_tx {
5208 updates_tx
5209 .unbounded_send(DownstreamUpdate::UpdateRepository(
5210 this.snapshot.clone(),
5211 ))
5212 .ok();
5213 }
5214 })
5215 },
5216 );
5217 }
5218
    /// Returns the currently running git command, if any, along with the time it started.
5220 pub fn current_job(&self) -> Option<JobInfo> {
5221 self.active_jobs.values().next().cloned()
5222 }
5223
5224 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5225 self.send_job(None, |_, _| async {})
5226 }
5227
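    /// Records pending operations for `paths` with the given target status,
    /// runs `f`, and then marks those operations finished (or skipped, if the
    /// job was canceled).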
5228 fn spawn_job_with_tracking<AsyncFn>(
5229 &mut self,
5230 paths: Vec<RepoPath>,
5231 git_status: pending_op::GitStatus,
5232 cx: &mut Context<Self>,
5233 f: AsyncFn,
5234 ) -> Task<Result<()>>
5235 where
5236 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5237 {
5238 let ids = self.new_pending_ops_for_paths(paths, git_status);
5239
5240 cx.spawn(async move |this, cx| {
5241 let job_status = match f(this.clone(), cx).await {
5242 Ok(()) => pending_op::JobStatus::Finished,
5243 Err(err) if err.is::<Canceled>() => pending_op::JobStatus::Skipped,
5244 Err(err) => return Err(err),
5245 };
5246
5247 this.update(cx, |this, _| {
5248 let mut edits = Vec::with_capacity(ids.len());
5249 for (id, entry) in ids {
5250 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5251 if let Some(op) = ops.op_by_id_mut(id) {
5252 op.job_status = job_status;
5253 }
5254 edits.push(sum_tree::Edit::Insert(ops));
5255 }
5256 }
5257 this.snapshot.pending_ops_by_path.edit(edits, ());
5258 })?;
5259
5260 Ok(())
5261 })
5262 }
5263
5264 fn new_pending_ops_for_paths(
5265 &mut self,
5266 paths: Vec<RepoPath>,
5267 git_status: pending_op::GitStatus,
5268 ) -> Vec<(PendingOpId, RepoPath)> {
5269 let mut edits = Vec::with_capacity(paths.len());
5270 let mut ids = Vec::with_capacity(paths.len());
5271 for path in paths {
5272 let op = self.snapshot.new_pending_op(git_status);
5273 let mut ops = self
5274 .snapshot
5275 .pending_ops_for_path(&path)
5276 .unwrap_or_else(|| PendingOps::new(&path));
5277 ops.ops.push(op);
5278 edits.push(sum_tree::Edit::Insert(ops));
5279 ids.push((op.id, path));
5280 }
5281 self.snapshot.pending_ops_by_path.edit(edits, ());
5282 ids
5283 }
5284}
5285
5286fn get_permalink_in_rust_registry_src(
5287 provider_registry: Arc<GitHostingProviderRegistry>,
5288 path: PathBuf,
5289 selection: Range<u32>,
5290) -> Result<url::Url> {
5291 #[derive(Deserialize)]
5292 struct CargoVcsGit {
5293 sha1: String,
5294 }
5295
5296 #[derive(Deserialize)]
5297 struct CargoVcsInfo {
5298 git: CargoVcsGit,
5299 path_in_vcs: String,
5300 }
5301
5302 #[derive(Deserialize)]
5303 struct CargoPackage {
5304 repository: String,
5305 }
5306
5307 #[derive(Deserialize)]
5308 struct CargoToml {
5309 package: CargoPackage,
5310 }
5311
5312 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5313 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5314 Some((dir, json))
5315 }) else {
5316 bail!("No .cargo_vcs_info.json found in parent directories")
5317 };
5318 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5319 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5320 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5321 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5322 .context("parsing package.repository field of manifest")?;
5323 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5324 let permalink = provider.build_permalink(
5325 remote,
5326 BuildPermalinkParams::new(
5327 &cargo_vcs_info.git.sha1,
5328 &RepoPath(
5329 RelPath::new(&path, PathStyle::local())
5330 .context("invalid path")?
5331 .into_arc(),
5332 ),
5333 Some(selection),
5334 ),
5335 );
5336 Ok(permalink)
5337}
5338
5339fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5340 let Some(blame) = blame else {
5341 return proto::BlameBufferResponse {
5342 blame_response: None,
5343 };
5344 };
5345
5346 let entries = blame
5347 .entries
5348 .into_iter()
5349 .map(|entry| proto::BlameEntry {
5350 sha: entry.sha.as_bytes().into(),
5351 start_line: entry.range.start,
5352 end_line: entry.range.end,
5353 original_line_number: entry.original_line_number,
5354 author: entry.author,
5355 author_mail: entry.author_mail,
5356 author_time: entry.author_time,
5357 author_tz: entry.author_tz,
5358 committer: entry.committer_name,
5359 committer_mail: entry.committer_email,
5360 committer_time: entry.committer_time,
5361 committer_tz: entry.committer_tz,
5362 summary: entry.summary,
5363 previous: entry.previous,
5364 filename: entry.filename,
5365 })
5366 .collect::<Vec<_>>();
5367
5368 let messages = blame
5369 .messages
5370 .into_iter()
5371 .map(|(oid, message)| proto::CommitMessage {
5372 oid: oid.as_bytes().into(),
5373 message,
5374 })
5375 .collect::<Vec<_>>();
5376
5377 proto::BlameBufferResponse {
5378 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5379 entries,
5380 messages,
5381 remote_url: blame.remote_url,
5382 }),
5383 }
5384}
5385
5386fn deserialize_blame_buffer_response(
5387 response: proto::BlameBufferResponse,
5388) -> Option<git::blame::Blame> {
5389 let response = response.blame_response?;
5390 let entries = response
5391 .entries
5392 .into_iter()
5393 .filter_map(|entry| {
5394 Some(git::blame::BlameEntry {
5395 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5396 range: entry.start_line..entry.end_line,
5397 original_line_number: entry.original_line_number,
5398 committer_name: entry.committer,
5399 committer_time: entry.committer_time,
5400 committer_tz: entry.committer_tz,
5401 committer_email: entry.committer_mail,
5402 author: entry.author,
5403 author_mail: entry.author_mail,
5404 author_time: entry.author_time,
5405 author_tz: entry.author_tz,
5406 summary: entry.summary,
5407 previous: entry.previous,
5408 filename: entry.filename,
5409 })
5410 })
5411 .collect::<Vec<_>>();
5412
5413 let messages = response
5414 .messages
5415 .into_iter()
5416 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5417 .collect::<HashMap<_, _>>();
5418
5419 Some(Blame {
5420 entries,
5421 messages,
5422 remote_url: response.remote_url,
5423 })
5424}
5425
5426fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5427 proto::Branch {
5428 is_head: branch.is_head,
5429 ref_name: branch.ref_name.to_string(),
5430 unix_timestamp: branch
5431 .most_recent_commit
5432 .as_ref()
5433 .map(|commit| commit.commit_timestamp as u64),
5434 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5435 ref_name: upstream.ref_name.to_string(),
5436 tracking: upstream
5437 .tracking
5438 .status()
5439 .map(|upstream| proto::UpstreamTracking {
5440 ahead: upstream.ahead as u64,
5441 behind: upstream.behind as u64,
5442 }),
5443 }),
5444 most_recent_commit: branch
5445 .most_recent_commit
5446 .as_ref()
5447 .map(|commit| proto::CommitSummary {
5448 sha: commit.sha.to_string(),
5449 subject: commit.subject.to_string(),
5450 commit_timestamp: commit.commit_timestamp,
5451 author_name: commit.author_name.to_string(),
5452 }),
5453 }
5454}
5455
5456fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5457 proto::Worktree {
5458 path: worktree.path.to_string_lossy().to_string(),
5459 ref_name: worktree.ref_name.to_string(),
5460 sha: worktree.sha.to_string(),
5461 }
5462}
5463
5464fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5465 git::repository::Worktree {
5466 path: PathBuf::from(proto.path.clone()),
5467 ref_name: proto.ref_name.clone().into(),
5468 sha: proto.sha.clone().into(),
5469 }
5470}
5471
5472fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5473 git::repository::Branch {
5474 is_head: proto.is_head,
5475 ref_name: proto.ref_name.clone().into(),
5476 upstream: proto
5477 .upstream
5478 .as_ref()
5479 .map(|upstream| git::repository::Upstream {
5480 ref_name: upstream.ref_name.to_string().into(),
5481 tracking: upstream
5482 .tracking
5483 .as_ref()
5484 .map(|tracking| {
5485 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5486 ahead: tracking.ahead as u32,
5487 behind: tracking.behind as u32,
5488 })
5489 })
5490 .unwrap_or(git::repository::UpstreamTracking::Gone),
5491 }),
5492 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5493 git::repository::CommitSummary {
5494 sha: commit.sha.to_string().into(),
5495 subject: commit.subject.to_string().into(),
5496 commit_timestamp: commit.commit_timestamp,
5497 author_name: commit.author_name.to_string().into(),
5498 has_parent: true,
5499 }
5500 }),
5501 }
5502}
5503
5504fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5505 proto::GitCommitDetails {
5506 sha: commit.sha.to_string(),
5507 message: commit.message.to_string(),
5508 commit_timestamp: commit.commit_timestamp,
5509 author_email: commit.author_email.to_string(),
5510 author_name: commit.author_name.to_string(),
5511 }
5512}
5513
5514fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5515 CommitDetails {
5516 sha: proto.sha.clone().into(),
5517 message: proto.message.clone().into(),
5518 commit_timestamp: proto.commit_timestamp,
5519 author_email: proto.author_email.clone().into(),
5520 author_name: proto.author_name.clone().into(),
5521 }
5522}
5523
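/// Recomputes a [`RepositorySnapshot`] from the given backend, returning the
/// new snapshot along with the [`RepositoryEvent`]s that should be emitted
/// relative to `prev_snapshot`.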
5524async fn compute_snapshot(
5525 id: RepositoryId,
5526 work_directory_abs_path: Arc<Path>,
5527 prev_snapshot: RepositorySnapshot,
5528 backend: Arc<dyn GitRepository>,
5529) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5530 let mut events = Vec::new();
5531 let branches = backend.branches().await?;
5532 let branch = branches.into_iter().find(|branch| branch.is_head);
5533 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5534 let stash_entries = backend.stash_entries().await?;
5535 let statuses_by_path = SumTree::from_iter(
5536 statuses
5537 .entries
5538 .iter()
5539 .map(|(repo_path, status)| StatusEntry {
5540 repo_path: repo_path.clone(),
5541 status: *status,
5542 }),
5543 (),
5544 );
5545 let (merge_details, merge_heads_changed) =
5546 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5547 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5548
5549 let pending_ops_by_path = prev_snapshot.pending_ops_by_path.clone();
5550
5551 if merge_heads_changed {
5552 events.push(RepositoryEvent::MergeHeadsChanged);
5553 }
5554
5555 if statuses_by_path != prev_snapshot.statuses_by_path {
5556 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5557 }
5558
    // Load the HEAD commit directly, which is useful when `branch` is `None`,
    // e.g. in a detached HEAD state.
5560 let head_commit = match backend.head_sha().await {
5561 Some(head_sha) => backend.show(head_sha).await.log_err(),
5562 None => None,
5563 };
5564
5565 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5566 events.push(RepositoryEvent::BranchChanged);
5567 }
5568
5569 // Used by edit prediction data collection
5570 let remote_origin_url = backend.remote_url("origin");
5571 let remote_upstream_url = backend.remote_url("upstream");
5572
5573 let snapshot = RepositorySnapshot {
5574 id,
5575 statuses_by_path,
5576 pending_ops_by_path,
5577 work_directory_abs_path,
5578 path_style: prev_snapshot.path_style,
5579 scan_id: prev_snapshot.scan_id + 1,
5580 branch,
5581 head_commit,
5582 merge: merge_details,
5583 remote_origin_url,
5584 remote_upstream_url,
5585 stash_entries,
5586 };
5587
5588 Ok((snapshot, events))
5589}
5590
5591fn status_from_proto(
5592 simple_status: i32,
5593 status: Option<proto::GitFileStatus>,
5594) -> anyhow::Result<FileStatus> {
5595 use proto::git_file_status::Variant;
5596
5597 let Some(variant) = status.and_then(|status| status.variant) else {
5598 let code = proto::GitStatus::from_i32(simple_status)
5599 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5600 let result = match code {
5601 proto::GitStatus::Added => TrackedStatus {
5602 worktree_status: StatusCode::Added,
5603 index_status: StatusCode::Unmodified,
5604 }
5605 .into(),
5606 proto::GitStatus::Modified => TrackedStatus {
5607 worktree_status: StatusCode::Modified,
5608 index_status: StatusCode::Unmodified,
5609 }
5610 .into(),
5611 proto::GitStatus::Conflict => UnmergedStatus {
5612 first_head: UnmergedStatusCode::Updated,
5613 second_head: UnmergedStatusCode::Updated,
5614 }
5615 .into(),
5616 proto::GitStatus::Deleted => TrackedStatus {
5617 worktree_status: StatusCode::Deleted,
5618 index_status: StatusCode::Unmodified,
5619 }
5620 .into(),
5621 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5622 };
5623 return Ok(result);
5624 };
5625
5626 let result = match variant {
5627 Variant::Untracked(_) => FileStatus::Untracked,
5628 Variant::Ignored(_) => FileStatus::Ignored,
5629 Variant::Unmerged(unmerged) => {
5630 let [first_head, second_head] =
5631 [unmerged.first_head, unmerged.second_head].map(|head| {
5632 let code = proto::GitStatus::from_i32(head)
5633 .with_context(|| format!("Invalid git status code: {head}"))?;
5634 let result = match code {
5635 proto::GitStatus::Added => UnmergedStatusCode::Added,
5636 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5637 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5638 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5639 };
5640 Ok(result)
5641 });
5642 let [first_head, second_head] = [first_head?, second_head?];
5643 UnmergedStatus {
5644 first_head,
5645 second_head,
5646 }
5647 .into()
5648 }
5649 Variant::Tracked(tracked) => {
5650 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5651 .map(|status| {
5652 let code = proto::GitStatus::from_i32(status)
5653 .with_context(|| format!("Invalid git status code: {status}"))?;
5654 let result = match code {
5655 proto::GitStatus::Modified => StatusCode::Modified,
5656 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5657 proto::GitStatus::Added => StatusCode::Added,
5658 proto::GitStatus::Deleted => StatusCode::Deleted,
5659 proto::GitStatus::Renamed => StatusCode::Renamed,
5660 proto::GitStatus::Copied => StatusCode::Copied,
5661 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5662 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5663 };
5664 Ok(result)
5665 });
5666 let [index_status, worktree_status] = [index_status?, worktree_status?];
5667 TrackedStatus {
5668 index_status,
5669 worktree_status,
5670 }
5671 .into()
5672 }
5673 };
5674 Ok(result)
5675}
5676
5677fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5678 use proto::git_file_status::{Tracked, Unmerged, Variant};
5679
5680 let variant = match status {
5681 FileStatus::Untracked => Variant::Untracked(Default::default()),
5682 FileStatus::Ignored => Variant::Ignored(Default::default()),
5683 FileStatus::Unmerged(UnmergedStatus {
5684 first_head,
5685 second_head,
5686 }) => Variant::Unmerged(Unmerged {
5687 first_head: unmerged_status_to_proto(first_head),
5688 second_head: unmerged_status_to_proto(second_head),
5689 }),
5690 FileStatus::Tracked(TrackedStatus {
5691 index_status,
5692 worktree_status,
5693 }) => Variant::Tracked(Tracked {
5694 index_status: tracked_status_to_proto(index_status),
5695 worktree_status: tracked_status_to_proto(worktree_status),
5696 }),
5697 };
5698 proto::GitFileStatus {
5699 variant: Some(variant),
5700 }
5701}
5702
5703fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5704 match code {
5705 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5706 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5707 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5708 }
5709}
5710
5711fn tracked_status_to_proto(code: StatusCode) -> i32 {
5712 match code {
5713 StatusCode::Added => proto::GitStatus::Added as _,
5714 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5715 StatusCode::Modified => proto::GitStatus::Modified as _,
5716 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5717 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5718 StatusCode::Copied => proto::GitStatus::Copied as _,
5719 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5720 }
5721}