1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4
5use crate::{
6 ProjectEnvironment, ProjectItem, ProjectPath,
7 buffer_store::{BufferStore, BufferStoreEvent},
8 worktree_store::{WorktreeStore, WorktreeStoreEvent},
9};
10use anyhow::{Context as _, Result, anyhow, bail};
11use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
12use buffer_diff::{BufferDiff, BufferDiffEvent};
13use client::ProjectId;
14use collections::HashMap;
15pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
16use fs::Fs;
17use futures::{
18 FutureExt, StreamExt,
19 channel::{mpsc, oneshot},
20 future::{self, Shared},
21 stream::FuturesOrdered,
22};
23use git::{
24 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
25 blame::Blame,
26 parse_git_remote_url,
27 repository::{
28 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
29 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
30 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
31 },
32 stash::{GitStash, StashEntry},
33 status::{
34 DiffTreeType, FileStatus, GitSummary, StageStatus, StatusCode, TrackedStatus, TreeDiff,
35 TreeDiffStatus, UnmergedStatus, UnmergedStatusCode,
36 },
37};
38use gpui::{
39 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
40 WeakEntity,
41};
42use language::{
43 Buffer, BufferEvent, Language, LanguageRegistry,
44 proto::{deserialize_version, serialize_version},
45};
46use parking_lot::Mutex;
47use postage::stream::Stream as _;
48use rpc::{
49 AnyProtoClient, TypedEnvelope,
50 proto::{self, git_reset, split_repository_update},
51};
52use serde::Deserialize;
53use std::{
54 cmp::Ordering,
55 collections::{BTreeSet, VecDeque},
56 future::Future,
57 mem,
58 ops::Range,
59 path::{Path, PathBuf},
60 str::FromStr,
61 sync::{
62 Arc,
63 atomic::{self, AtomicU64},
64 },
65 time::Instant,
66};
67use sum_tree::{Edit, SumTree, TreeSet};
68use task::Shell;
69use text::{Bias, BufferId};
70use util::{
71 ResultExt, debug_panic,
72 paths::{PathStyle, SanitizedPath},
73 post_inc,
74 rel_path::RelPath,
75};
76use worktree::{
77 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
78 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
79};
80use zeroize::Zeroize;
81
82pub struct GitStore {
83 state: GitStoreState,
84 buffer_store: Entity<BufferStore>,
85 worktree_store: Entity<WorktreeStore>,
86 repositories: HashMap<RepositoryId, Entity<Repository>>,
87 active_repo_id: Option<RepositoryId>,
88 #[allow(clippy::type_complexity)]
89 loading_diffs:
90 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
91 diffs: HashMap<BufferId, Entity<BufferGitState>>,
92 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
93 _subscriptions: Vec<Subscription>,
94}
95
96#[derive(Default)]
97struct SharedDiffs {
98 unstaged: Option<Entity<BufferDiff>>,
99 uncommitted: Option<Entity<BufferDiff>>,
100}
101
102struct BufferGitState {
103 unstaged_diff: Option<WeakEntity<BufferDiff>>,
104 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
105 conflict_set: Option<WeakEntity<ConflictSet>>,
106 recalculate_diff_task: Option<Task<Result<()>>>,
107 reparse_conflict_markers_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 language_registry: Option<Arc<LanguageRegistry>>,
110 conflict_updated_futures: Vec<oneshot::Sender<()>>,
111 recalculating_tx: postage::watch::Sender<bool>,
112
113 /// These operation counts are used to ensure that head and index text
114 /// values read from the git repository are up-to-date with any hunk staging
115 /// operations that have been performed on the BufferDiff.
116 ///
117 /// The operation count is incremented immediately when the user initiates a
118 /// hunk stage/unstage operation. Then, upon finishing writing the new index
119 /// text do disk, the `operation count as of write` is updated to reflect
120 /// the operation count that prompted the write.
121 hunk_staging_operation_count: usize,
122 hunk_staging_operation_count_as_of_write: usize,
123
124 head_text: Option<Arc<String>>,
125 index_text: Option<Arc<String>>,
126 head_changed: bool,
127 index_changed: bool,
128 language_changed: bool,
129}
130
131#[derive(Clone, Debug)]
132enum DiffBasesChange {
133 SetIndex(Option<String>),
134 SetHead(Option<String>),
135 SetEach {
136 index: Option<String>,
137 head: Option<String>,
138 },
139 SetBoth(Option<String>),
140}
141
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
143enum DiffKind {
144 Unstaged,
145 Uncommitted,
146}
147
148enum GitStoreState {
149 Local {
150 next_repository_id: Arc<AtomicU64>,
151 downstream: Option<LocalDownstreamState>,
152 project_environment: Entity<ProjectEnvironment>,
153 fs: Arc<dyn Fs>,
154 },
155 Remote {
156 upstream_client: AnyProtoClient,
157 upstream_project_id: u64,
158 downstream: Option<(AnyProtoClient, ProjectId)>,
159 },
160}
161
162enum DownstreamUpdate {
163 UpdateRepository(RepositorySnapshot),
164 RemoveRepository(RepositoryId),
165}
166
167struct LocalDownstreamState {
168 client: AnyProtoClient,
169 project_id: ProjectId,
170 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
171 _task: Task<Result<()>>,
172}
173
174#[derive(Clone, Debug)]
175pub struct GitStoreCheckpoint {
176 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
177}
178
179#[derive(Clone, Debug, PartialEq, Eq)]
180pub struct StatusEntry {
181 pub repo_path: RepoPath,
182 pub status: FileStatus,
183}
184
185impl StatusEntry {
186 fn to_proto(&self) -> proto::StatusEntry {
187 let simple_status = match self.status {
188 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
189 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
190 FileStatus::Tracked(TrackedStatus {
191 index_status,
192 worktree_status,
193 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
194 worktree_status
195 } else {
196 index_status
197 }),
198 };
199
200 proto::StatusEntry {
201 repo_path: self.repo_path.to_proto(),
202 simple_status,
203 status: Some(status_to_proto(self.status)),
204 }
205 }
206}
207
208impl TryFrom<proto::StatusEntry> for StatusEntry {
209 type Error = anyhow::Error;
210
211 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
212 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
213 let status = status_from_proto(value.simple_status, value.status)?;
214 Ok(Self { repo_path, status })
215 }
216}
217
218impl sum_tree::Item for StatusEntry {
219 type Summary = PathSummary<GitSummary>;
220
221 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
222 PathSummary {
223 max_path: self.repo_path.0.clone(),
224 item_summary: self.status.summary(),
225 }
226 }
227}
228
229impl sum_tree::KeyedItem for StatusEntry {
230 type Key = PathKey;
231
232 fn key(&self) -> Self::Key {
233 PathKey(self.repo_path.0.clone())
234 }
235}
236
237#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
238pub struct RepositoryId(pub u64);
239
240#[derive(Clone, Debug, Default, PartialEq, Eq)]
241pub struct MergeDetails {
242 pub conflicted_paths: TreeSet<RepoPath>,
243 pub message: Option<SharedString>,
244 pub heads: Vec<Option<SharedString>>,
245}
246
247#[derive(Clone, Copy, Debug, PartialEq, Eq)]
248pub enum PendingOperationStatus {
249 Staged,
250 Unstaged,
251 Reverted,
252 Unchanged,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct PendingOperations {
257 repo_path: RepoPath,
258 // TODO: move this into StatusEntry
259 staging: StageStatus,
260 ops: Vec<PendingOperation>,
261}
262
263#[derive(Clone, Debug, PartialEq, Eq)]
264pub struct PendingOperation {
265 id: usize,
266 finished: bool,
267 status: PendingOperationStatus,
268}
269
270#[derive(Clone, Copy, Debug, PartialEq, Eq)]
271pub struct PendingOperationsSummary {
272 finished: bool,
273 status: PendingOperationStatus,
274 count: usize,
275}
276
277impl Default for PendingOperationsSummary {
278 fn default() -> Self {
279 Self {
280 finished: false,
281 status: PendingOperationStatus::Unstaged,
282 count: 0,
283 }
284 }
285}
286
287impl sum_tree::ContextLessSummary for PendingOperationsSummary {
288 fn zero() -> Self {
289 Default::default()
290 }
291
292 fn add_summary(&mut self, rhs: &Self) {
293 self.finished = self.finished || rhs.finished;
294 self.count += rhs.count;
295 self.status = rhs.status;
296 }
297}
298
299impl sum_tree::Item for PendingOperations {
300 type Summary = PathSummary<PendingOperationsSummary>;
301
302 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
303 let mut item_summary = PendingOperationsSummary {
304 count: self.ops.len(),
305 ..Default::default()
306 };
307 if let Some(op) = self.ops.last() {
308 item_summary.finished = op.finished;
309 item_summary.status = op.status;
310 }
311 PathSummary {
312 max_path: self.repo_path.0.clone(),
313 item_summary,
314 }
315 }
316}
317
318impl sum_tree::KeyedItem for PendingOperations {
319 type Key = PathKey;
320
321 fn key(&self) -> Self::Key {
322 PathKey(self.repo_path.0.clone())
323 }
324}
325
326#[derive(Clone, Debug, PartialEq, Eq)]
327pub struct RepositorySnapshot {
328 pub id: RepositoryId,
329 pub statuses_by_path: SumTree<StatusEntry>,
330 pub pending_ops_by_path: SumTree<PendingOperations>,
331 pub work_directory_abs_path: Arc<Path>,
332 pub path_style: PathStyle,
333 pub branch: Option<Branch>,
334 pub head_commit: Option<CommitDetails>,
335 pub scan_id: u64,
336 pub merge: MergeDetails,
337 pub remote_origin_url: Option<String>,
338 pub remote_upstream_url: Option<String>,
339 pub stash_entries: GitStash,
340}
341
342type JobId = u64;
343
344#[derive(Clone, Debug, PartialEq, Eq)]
345pub struct JobInfo {
346 pub start: Instant,
347 pub message: SharedString,
348}
349
350pub struct Repository {
351 this: WeakEntity<Self>,
352 snapshot: RepositorySnapshot,
353 commit_message_buffer: Option<Entity<Buffer>>,
354 git_store: WeakEntity<GitStore>,
355 // For a local repository, holds paths that have had worktree events since the last status scan completed,
356 // and that should be examined during the next status scan.
357 paths_needing_status_update: BTreeSet<RepoPath>,
358 job_sender: mpsc::UnboundedSender<GitJob>,
359 active_jobs: HashMap<JobId, JobInfo>,
360 job_id: JobId,
361 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
362 latest_askpass_id: u64,
363}
364
365impl std::ops::Deref for Repository {
366 type Target = RepositorySnapshot;
367
368 fn deref(&self) -> &Self::Target {
369 &self.snapshot
370 }
371}
372
373#[derive(Clone)]
374pub enum RepositoryState {
375 Local {
376 backend: Arc<dyn GitRepository>,
377 environment: Arc<HashMap<String, String>>,
378 },
379 Remote {
380 project_id: ProjectId,
381 client: AnyProtoClient,
382 },
383}
384
385#[derive(Clone, Debug, PartialEq, Eq)]
386pub enum RepositoryEvent {
387 StatusesChanged {
388 // TODO could report which statuses changed here
389 full_scan: bool,
390 },
391 MergeHeadsChanged,
392 BranchChanged,
393 StashEntriesChanged,
394}
395
396#[derive(Clone, Debug)]
397pub struct JobsUpdated;
398
399#[derive(Debug)]
400pub enum GitStoreEvent {
401 ActiveRepositoryChanged(Option<RepositoryId>),
402 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
403 RepositoryAdded,
404 RepositoryRemoved(RepositoryId),
405 IndexWriteError(anyhow::Error),
406 JobsUpdated,
407 ConflictsUpdated,
408}
409
410impl EventEmitter<RepositoryEvent> for Repository {}
411impl EventEmitter<JobsUpdated> for Repository {}
412impl EventEmitter<GitStoreEvent> for GitStore {}
413
414pub struct GitJob {
415 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
416 key: Option<GitJobKey>,
417}
418
419#[derive(PartialEq, Eq)]
420enum GitJobKey {
421 WriteIndex(RepoPath),
422 ReloadBufferDiffBases,
423 RefreshStatuses,
424 ReloadGitState,
425}
426
427impl GitStore {
428 pub fn local(
429 worktree_store: &Entity<WorktreeStore>,
430 buffer_store: Entity<BufferStore>,
431 environment: Entity<ProjectEnvironment>,
432 fs: Arc<dyn Fs>,
433 cx: &mut Context<Self>,
434 ) -> Self {
435 Self::new(
436 worktree_store.clone(),
437 buffer_store,
438 GitStoreState::Local {
439 next_repository_id: Arc::new(AtomicU64::new(1)),
440 downstream: None,
441 project_environment: environment,
442 fs,
443 },
444 cx,
445 )
446 }
447
448 pub fn remote(
449 worktree_store: &Entity<WorktreeStore>,
450 buffer_store: Entity<BufferStore>,
451 upstream_client: AnyProtoClient,
452 project_id: u64,
453 cx: &mut Context<Self>,
454 ) -> Self {
455 Self::new(
456 worktree_store.clone(),
457 buffer_store,
458 GitStoreState::Remote {
459 upstream_client,
460 upstream_project_id: project_id,
461 downstream: None,
462 },
463 cx,
464 )
465 }
466
467 fn new(
468 worktree_store: Entity<WorktreeStore>,
469 buffer_store: Entity<BufferStore>,
470 state: GitStoreState,
471 cx: &mut Context<Self>,
472 ) -> Self {
473 let _subscriptions = vec![
474 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
475 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
476 ];
477
478 GitStore {
479 state,
480 buffer_store,
481 worktree_store,
482 repositories: HashMap::default(),
483 active_repo_id: None,
484 _subscriptions,
485 loading_diffs: HashMap::default(),
486 shared_diffs: HashMap::default(),
487 diffs: HashMap::default(),
488 }
489 }
490
491 pub fn init(client: &AnyProtoClient) {
492 client.add_entity_request_handler(Self::handle_get_remotes);
493 client.add_entity_request_handler(Self::handle_get_branches);
494 client.add_entity_request_handler(Self::handle_get_default_branch);
495 client.add_entity_request_handler(Self::handle_change_branch);
496 client.add_entity_request_handler(Self::handle_create_branch);
497 client.add_entity_request_handler(Self::handle_rename_branch);
498 client.add_entity_request_handler(Self::handle_git_init);
499 client.add_entity_request_handler(Self::handle_push);
500 client.add_entity_request_handler(Self::handle_pull);
501 client.add_entity_request_handler(Self::handle_fetch);
502 client.add_entity_request_handler(Self::handle_stage);
503 client.add_entity_request_handler(Self::handle_unstage);
504 client.add_entity_request_handler(Self::handle_stash);
505 client.add_entity_request_handler(Self::handle_stash_pop);
506 client.add_entity_request_handler(Self::handle_stash_apply);
507 client.add_entity_request_handler(Self::handle_stash_drop);
508 client.add_entity_request_handler(Self::handle_commit);
509 client.add_entity_request_handler(Self::handle_reset);
510 client.add_entity_request_handler(Self::handle_show);
511 client.add_entity_request_handler(Self::handle_load_commit_diff);
512 client.add_entity_request_handler(Self::handle_checkout_files);
513 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
514 client.add_entity_request_handler(Self::handle_set_index_text);
515 client.add_entity_request_handler(Self::handle_askpass);
516 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
517 client.add_entity_request_handler(Self::handle_git_diff);
518 client.add_entity_request_handler(Self::handle_tree_diff);
519 client.add_entity_request_handler(Self::handle_get_blob_content);
520 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
521 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
522 client.add_entity_message_handler(Self::handle_update_diff_bases);
523 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
524 client.add_entity_request_handler(Self::handle_blame_buffer);
525 client.add_entity_message_handler(Self::handle_update_repository);
526 client.add_entity_message_handler(Self::handle_remove_repository);
527 client.add_entity_request_handler(Self::handle_git_clone);
528 client.add_entity_request_handler(Self::handle_get_worktrees);
529 client.add_entity_request_handler(Self::handle_create_worktree);
530 }
531
532 pub fn is_local(&self) -> bool {
533 matches!(self.state, GitStoreState::Local { .. })
534 }
535 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
536 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
537 let id = repo.read(cx).id;
538 if self.active_repo_id != Some(id) {
539 self.active_repo_id = Some(id);
540 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
541 }
542 }
543 }
544
545 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
546 match &mut self.state {
547 GitStoreState::Remote {
548 downstream: downstream_client,
549 ..
550 } => {
551 for repo in self.repositories.values() {
552 let update = repo.read(cx).snapshot.initial_update(project_id);
553 for update in split_repository_update(update) {
554 client.send(update).log_err();
555 }
556 }
557 *downstream_client = Some((client, ProjectId(project_id)));
558 }
559 GitStoreState::Local {
560 downstream: downstream_client,
561 ..
562 } => {
563 let mut snapshots = HashMap::default();
564 let (updates_tx, mut updates_rx) = mpsc::unbounded();
565 for repo in self.repositories.values() {
566 updates_tx
567 .unbounded_send(DownstreamUpdate::UpdateRepository(
568 repo.read(cx).snapshot.clone(),
569 ))
570 .ok();
571 }
572 *downstream_client = Some(LocalDownstreamState {
573 client: client.clone(),
574 project_id: ProjectId(project_id),
575 updates_tx,
576 _task: cx.spawn(async move |this, cx| {
577 cx.background_spawn(async move {
578 while let Some(update) = updates_rx.next().await {
579 match update {
580 DownstreamUpdate::UpdateRepository(snapshot) => {
581 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
582 {
583 let update =
584 snapshot.build_update(old_snapshot, project_id);
585 *old_snapshot = snapshot;
586 for update in split_repository_update(update) {
587 client.send(update)?;
588 }
589 } else {
590 let update = snapshot.initial_update(project_id);
591 for update in split_repository_update(update) {
592 client.send(update)?;
593 }
594 snapshots.insert(snapshot.id, snapshot);
595 }
596 }
597 DownstreamUpdate::RemoveRepository(id) => {
598 client.send(proto::RemoveRepository {
599 project_id,
600 id: id.to_proto(),
601 })?;
602 }
603 }
604 }
605 anyhow::Ok(())
606 })
607 .await
608 .ok();
609 this.update(cx, |this, _| {
610 if let GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } = &mut this.state
614 {
615 downstream_client.take();
616 } else {
617 unreachable!("unshared called on remote store");
618 }
619 })
620 }),
621 });
622 }
623 }
624 }
625
626 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
627 match &mut self.state {
628 GitStoreState::Local {
629 downstream: downstream_client,
630 ..
631 } => {
632 downstream_client.take();
633 }
634 GitStoreState::Remote {
635 downstream: downstream_client,
636 ..
637 } => {
638 downstream_client.take();
639 }
640 }
641 self.shared_diffs.clear();
642 }
643
644 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
645 self.shared_diffs.remove(peer_id);
646 }
647
648 pub fn active_repository(&self) -> Option<Entity<Repository>> {
649 self.active_repo_id
650 .as_ref()
651 .map(|id| self.repositories[id].clone())
652 }
653
654 pub fn open_unstaged_diff(
655 &mut self,
656 buffer: Entity<Buffer>,
657 cx: &mut Context<Self>,
658 ) -> Task<Result<Entity<BufferDiff>>> {
659 let buffer_id = buffer.read(cx).remote_id();
660 if let Some(diff_state) = self.diffs.get(&buffer_id)
661 && let Some(unstaged_diff) = diff_state
662 .read(cx)
663 .unstaged_diff
664 .as_ref()
665 .and_then(|weak| weak.upgrade())
666 {
667 if let Some(task) =
668 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
669 {
670 return cx.background_executor().spawn(async move {
671 task.await;
672 Ok(unstaged_diff)
673 });
674 }
675 return Task::ready(Ok(unstaged_diff));
676 }
677
678 let Some((repo, repo_path)) =
679 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
680 else {
681 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
682 };
683
684 let task = self
685 .loading_diffs
686 .entry((buffer_id, DiffKind::Unstaged))
687 .or_insert_with(|| {
688 let staged_text = repo.update(cx, |repo, cx| {
689 repo.load_staged_text(buffer_id, repo_path, cx)
690 });
691 cx.spawn(async move |this, cx| {
692 Self::open_diff_internal(
693 this,
694 DiffKind::Unstaged,
695 staged_text.await.map(DiffBasesChange::SetIndex),
696 buffer,
697 cx,
698 )
699 .await
700 .map_err(Arc::new)
701 })
702 .shared()
703 })
704 .clone();
705
706 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
707 }
708
709 pub fn open_diff_since(
710 &mut self,
711 oid: Option<git::Oid>,
712 buffer: Entity<Buffer>,
713 repo: Entity<Repository>,
714 languages: Arc<LanguageRegistry>,
715 cx: &mut Context<Self>,
716 ) -> Task<Result<Entity<BufferDiff>>> {
717 cx.spawn(async move |this, cx| {
718 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
719 let content = match oid {
720 None => None,
721 Some(oid) => Some(
722 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
723 .await?,
724 ),
725 };
726 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
727
728 buffer_diff
729 .update(cx, |buffer_diff, cx| {
730 buffer_diff.set_base_text(
731 content.map(Arc::new),
732 buffer_snapshot.language().cloned(),
733 Some(languages.clone()),
734 buffer_snapshot.text,
735 cx,
736 )
737 })?
738 .await?;
739 let unstaged_diff = this
740 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
741 .await?;
742 buffer_diff.update(cx, |buffer_diff, _| {
743 buffer_diff.set_secondary_diff(unstaged_diff);
744 })?;
745
746 this.update(cx, |_, cx| {
747 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
748 .detach();
749 })?;
750
751 Ok(buffer_diff)
752 })
753 }
754
755 pub fn open_uncommitted_diff(
756 &mut self,
757 buffer: Entity<Buffer>,
758 cx: &mut Context<Self>,
759 ) -> Task<Result<Entity<BufferDiff>>> {
760 let buffer_id = buffer.read(cx).remote_id();
761
762 if let Some(diff_state) = self.diffs.get(&buffer_id)
763 && let Some(uncommitted_diff) = diff_state
764 .read(cx)
765 .uncommitted_diff
766 .as_ref()
767 .and_then(|weak| weak.upgrade())
768 {
769 if let Some(task) =
770 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
771 {
772 return cx.background_executor().spawn(async move {
773 task.await;
774 Ok(uncommitted_diff)
775 });
776 }
777 return Task::ready(Ok(uncommitted_diff));
778 }
779
780 let Some((repo, repo_path)) =
781 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
782 else {
783 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
784 };
785
786 let task = self
787 .loading_diffs
788 .entry((buffer_id, DiffKind::Uncommitted))
789 .or_insert_with(|| {
790 let changes = repo.update(cx, |repo, cx| {
791 repo.load_committed_text(buffer_id, repo_path, cx)
792 });
793
794 // todo(lw): hot foreground spawn
795 cx.spawn(async move |this, cx| {
796 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
797 .await
798 .map_err(Arc::new)
799 })
800 .shared()
801 })
802 .clone();
803
804 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
805 }
806
807 async fn open_diff_internal(
808 this: WeakEntity<Self>,
809 kind: DiffKind,
810 texts: Result<DiffBasesChange>,
811 buffer_entity: Entity<Buffer>,
812 cx: &mut AsyncApp,
813 ) -> Result<Entity<BufferDiff>> {
814 let diff_bases_change = match texts {
815 Err(e) => {
816 this.update(cx, |this, cx| {
817 let buffer = buffer_entity.read(cx);
818 let buffer_id = buffer.remote_id();
819 this.loading_diffs.remove(&(buffer_id, kind));
820 })?;
821 return Err(e);
822 }
823 Ok(change) => change,
824 };
825
826 this.update(cx, |this, cx| {
827 let buffer = buffer_entity.read(cx);
828 let buffer_id = buffer.remote_id();
829 let language = buffer.language().cloned();
830 let language_registry = buffer.language_registry();
831 let text_snapshot = buffer.text_snapshot();
832 this.loading_diffs.remove(&(buffer_id, kind));
833
834 let git_store = cx.weak_entity();
835 let diff_state = this
836 .diffs
837 .entry(buffer_id)
838 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
839
840 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
841
842 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
843 diff_state.update(cx, |diff_state, cx| {
844 diff_state.language = language;
845 diff_state.language_registry = language_registry;
846
847 match kind {
848 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
849 DiffKind::Uncommitted => {
850 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
851 diff
852 } else {
853 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
854 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
855 unstaged_diff
856 };
857
858 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
859 diff_state.uncommitted_diff = Some(diff.downgrade())
860 }
861 }
862
863 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
864 let rx = diff_state.wait_for_recalculation();
865
866 anyhow::Ok(async move {
867 if let Some(rx) = rx {
868 rx.await;
869 }
870 Ok(diff)
871 })
872 })
873 })??
874 .await
875 }
876
877 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
878 let diff_state = self.diffs.get(&buffer_id)?;
879 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
880 }
881
882 pub fn get_uncommitted_diff(
883 &self,
884 buffer_id: BufferId,
885 cx: &App,
886 ) -> Option<Entity<BufferDiff>> {
887 let diff_state = self.diffs.get(&buffer_id)?;
888 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
889 }
890
891 pub fn open_conflict_set(
892 &mut self,
893 buffer: Entity<Buffer>,
894 cx: &mut Context<Self>,
895 ) -> Entity<ConflictSet> {
896 log::debug!("open conflict set");
897 let buffer_id = buffer.read(cx).remote_id();
898
899 if let Some(git_state) = self.diffs.get(&buffer_id)
900 && let Some(conflict_set) = git_state
901 .read(cx)
902 .conflict_set
903 .as_ref()
904 .and_then(|weak| weak.upgrade())
905 {
906 let conflict_set = conflict_set;
907 let buffer_snapshot = buffer.read(cx).text_snapshot();
908
909 git_state.update(cx, |state, cx| {
910 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
911 });
912
913 return conflict_set;
914 }
915
916 let is_unmerged = self
917 .repository_and_path_for_buffer_id(buffer_id, cx)
918 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
919 let git_store = cx.weak_entity();
920 let buffer_git_state = self
921 .diffs
922 .entry(buffer_id)
923 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
924 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
925
926 self._subscriptions
927 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
928 cx.emit(GitStoreEvent::ConflictsUpdated);
929 }));
930
931 buffer_git_state.update(cx, |state, cx| {
932 state.conflict_set = Some(conflict_set.downgrade());
933 let buffer_snapshot = buffer.read(cx).text_snapshot();
934 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
935 });
936
937 conflict_set
938 }
939
940 pub fn project_path_git_status(
941 &self,
942 project_path: &ProjectPath,
943 cx: &App,
944 ) -> Option<FileStatus> {
945 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
946 Some(repo.read(cx).status_for_path(&repo_path)?.status)
947 }
948
949 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
950 let mut work_directory_abs_paths = Vec::new();
951 let mut checkpoints = Vec::new();
952 for repository in self.repositories.values() {
953 repository.update(cx, |repository, _| {
954 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
955 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
956 });
957 }
958
959 cx.background_executor().spawn(async move {
960 let checkpoints = future::try_join_all(checkpoints).await?;
961 Ok(GitStoreCheckpoint {
962 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
963 .into_iter()
964 .zip(checkpoints)
965 .collect(),
966 })
967 })
968 }
969
970 pub fn restore_checkpoint(
971 &self,
972 checkpoint: GitStoreCheckpoint,
973 cx: &mut App,
974 ) -> Task<Result<()>> {
975 let repositories_by_work_dir_abs_path = self
976 .repositories
977 .values()
978 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
979 .collect::<HashMap<_, _>>();
980
981 let mut tasks = Vec::new();
982 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
983 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
984 let restore = repository.update(cx, |repository, _| {
985 repository.restore_checkpoint(checkpoint)
986 });
987 tasks.push(async move { restore.await? });
988 }
989 }
990 cx.background_spawn(async move {
991 future::try_join_all(tasks).await?;
992 Ok(())
993 })
994 }
995
996 /// Compares two checkpoints, returning true if they are equal.
997 pub fn compare_checkpoints(
998 &self,
999 left: GitStoreCheckpoint,
1000 mut right: GitStoreCheckpoint,
1001 cx: &mut App,
1002 ) -> Task<Result<bool>> {
1003 let repositories_by_work_dir_abs_path = self
1004 .repositories
1005 .values()
1006 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
1007 .collect::<HashMap<_, _>>();
1008
1009 let mut tasks = Vec::new();
1010 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1011 if let Some(right_checkpoint) = right
1012 .checkpoints_by_work_dir_abs_path
1013 .remove(&work_dir_abs_path)
1014 {
1015 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1016 {
1017 let compare = repository.update(cx, |repository, _| {
1018 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1019 });
1020
1021 tasks.push(async move { compare.await? });
1022 }
1023 } else {
1024 return Task::ready(Ok(false));
1025 }
1026 }
1027 cx.background_spawn(async move {
1028 Ok(future::try_join_all(tasks)
1029 .await?
1030 .into_iter()
1031 .all(|result| result))
1032 })
1033 }
1034
1035 /// Blames a buffer.
1036 pub fn blame_buffer(
1037 &self,
1038 buffer: &Entity<Buffer>,
1039 version: Option<clock::Global>,
1040 cx: &mut App,
1041 ) -> Task<Result<Option<Blame>>> {
1042 let buffer = buffer.read(cx);
1043 let Some((repo, repo_path)) =
1044 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1045 else {
1046 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1047 };
1048 let content = match &version {
1049 Some(version) => buffer.rope_for_version(version),
1050 None => buffer.as_rope().clone(),
1051 };
1052 let version = version.unwrap_or(buffer.version());
1053 let buffer_id = buffer.remote_id();
1054
1055 let rx = repo.update(cx, |repo, _| {
1056 repo.send_job(None, move |state, _| async move {
1057 match state {
1058 RepositoryState::Local { backend, .. } => backend
1059 .blame(repo_path.clone(), content)
1060 .await
1061 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
1062 .map(Some),
1063 RepositoryState::Remote { project_id, client } => {
1064 let response = client
1065 .request(proto::BlameBuffer {
1066 project_id: project_id.to_proto(),
1067 buffer_id: buffer_id.into(),
1068 version: serialize_version(&version),
1069 })
1070 .await?;
1071 Ok(deserialize_blame_buffer_response(response))
1072 }
1073 }
1074 })
1075 });
1076
1077 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1078 }
1079
1080 pub fn get_permalink_to_line(
1081 &self,
1082 buffer: &Entity<Buffer>,
1083 selection: Range<u32>,
1084 cx: &mut App,
1085 ) -> Task<Result<url::Url>> {
1086 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1087 return Task::ready(Err(anyhow!("buffer has no file")));
1088 };
1089
1090 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1091 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1092 cx,
1093 ) else {
1094 // If we're not in a Git repo, check whether this is a Rust source
1095 // file in the Cargo registry (presumably opened with go-to-definition
1096 // from a normal Rust file). If so, we can put together a permalink
1097 // using crate metadata.
1098 if buffer
1099 .read(cx)
1100 .language()
1101 .is_none_or(|lang| lang.name() != "Rust".into())
1102 {
1103 return Task::ready(Err(anyhow!("no permalink available")));
1104 }
1105 let file_path = file.worktree.read(cx).absolutize(&file.path);
1106 return cx.spawn(async move |cx| {
1107 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1108 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1109 .context("no permalink available")
1110 });
1111 };
1112
1113 let buffer_id = buffer.read(cx).remote_id();
1114 let branch = repo.read(cx).branch.clone();
1115 let remote = branch
1116 .as_ref()
1117 .and_then(|b| b.upstream.as_ref())
1118 .and_then(|b| b.remote_name())
1119 .unwrap_or("origin")
1120 .to_string();
1121
1122 let rx = repo.update(cx, |repo, _| {
1123 repo.send_job(None, move |state, cx| async move {
1124 match state {
1125 RepositoryState::Local { backend, .. } => {
1126 let origin_url = backend
1127 .remote_url(&remote)
1128 .with_context(|| format!("remote \"{remote}\" not found"))?;
1129
1130 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1131
1132 let provider_registry =
1133 cx.update(GitHostingProviderRegistry::default_global)?;
1134
1135 let (provider, remote) =
1136 parse_git_remote_url(provider_registry, &origin_url)
1137 .context("parsing Git remote URL")?;
1138
1139 Ok(provider.build_permalink(
1140 remote,
1141 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1142 ))
1143 }
1144 RepositoryState::Remote { project_id, client } => {
1145 let response = client
1146 .request(proto::GetPermalinkToLine {
1147 project_id: project_id.to_proto(),
1148 buffer_id: buffer_id.into(),
1149 selection: Some(proto::Range {
1150 start: selection.start as u64,
1151 end: selection.end as u64,
1152 }),
1153 })
1154 .await?;
1155
1156 url::Url::parse(&response.permalink).context("failed to parse permalink")
1157 }
1158 }
1159 })
1160 });
1161 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1162 }
1163
1164 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1165 match &self.state {
1166 GitStoreState::Local {
1167 downstream: downstream_client,
1168 ..
1169 } => downstream_client
1170 .as_ref()
1171 .map(|state| (state.client.clone(), state.project_id)),
1172 GitStoreState::Remote {
1173 downstream: downstream_client,
1174 ..
1175 } => downstream_client.clone(),
1176 }
1177 }
1178
1179 fn upstream_client(&self) -> Option<AnyProtoClient> {
1180 match &self.state {
1181 GitStoreState::Local { .. } => None,
1182 GitStoreState::Remote {
1183 upstream_client, ..
1184 } => Some(upstream_client.clone()),
1185 }
1186 }
1187
1188 fn on_worktree_store_event(
1189 &mut self,
1190 worktree_store: Entity<WorktreeStore>,
1191 event: &WorktreeStoreEvent,
1192 cx: &mut Context<Self>,
1193 ) {
1194 let GitStoreState::Local {
1195 project_environment,
1196 downstream,
1197 next_repository_id,
1198 fs,
1199 } = &self.state
1200 else {
1201 return;
1202 };
1203
1204 match event {
1205 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1206 if let Some(worktree) = self
1207 .worktree_store
1208 .read(cx)
1209 .worktree_for_id(*worktree_id, cx)
1210 {
1211 let paths_by_git_repo =
1212 self.process_updated_entries(&worktree, updated_entries, cx);
1213 let downstream = downstream
1214 .as_ref()
1215 .map(|downstream| downstream.updates_tx.clone());
1216 cx.spawn(async move |_, cx| {
1217 let paths_by_git_repo = paths_by_git_repo.await;
1218 for (repo, paths) in paths_by_git_repo {
1219 repo.update(cx, |repo, cx| {
1220 repo.paths_changed(paths, downstream.clone(), cx);
1221 })
1222 .ok();
1223 }
1224 })
1225 .detach();
1226 }
1227 }
1228 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1229 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1230 else {
1231 return;
1232 };
1233 if !worktree.read(cx).is_visible() {
1234 log::debug!(
1235 "not adding repositories for local worktree {:?} because it's not visible",
1236 worktree.read(cx).abs_path()
1237 );
1238 return;
1239 }
1240 self.update_repositories_from_worktree(
1241 project_environment.clone(),
1242 next_repository_id.clone(),
1243 downstream
1244 .as_ref()
1245 .map(|downstream| downstream.updates_tx.clone()),
1246 changed_repos.clone(),
1247 fs.clone(),
1248 cx,
1249 );
1250 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1251 }
1252 _ => {}
1253 }
1254 }
1255 fn on_repository_event(
1256 &mut self,
1257 repo: Entity<Repository>,
1258 event: &RepositoryEvent,
1259 cx: &mut Context<Self>,
1260 ) {
1261 let id = repo.read(cx).id;
1262 let repo_snapshot = repo.read(cx).snapshot.clone();
1263 for (buffer_id, diff) in self.diffs.iter() {
1264 if let Some((buffer_repo, repo_path)) =
1265 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1266 && buffer_repo == repo
1267 {
1268 diff.update(cx, |diff, cx| {
1269 if let Some(conflict_set) = &diff.conflict_set {
1270 let conflict_status_changed =
1271 conflict_set.update(cx, |conflict_set, cx| {
1272 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1273 conflict_set.set_has_conflict(has_conflict, cx)
1274 })?;
1275 if conflict_status_changed {
1276 let buffer_store = self.buffer_store.read(cx);
1277 if let Some(buffer) = buffer_store.get(*buffer_id) {
1278 let _ = diff
1279 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1280 }
1281 }
1282 }
1283 anyhow::Ok(())
1284 })
1285 .ok();
1286 }
1287 }
1288 cx.emit(GitStoreEvent::RepositoryUpdated(
1289 id,
1290 event.clone(),
1291 self.active_repo_id == Some(id),
1292 ))
1293 }
1294
1295 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1296 cx.emit(GitStoreEvent::JobsUpdated)
1297 }
1298
1299 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1300 fn update_repositories_from_worktree(
1301 &mut self,
1302 project_environment: Entity<ProjectEnvironment>,
1303 next_repository_id: Arc<AtomicU64>,
1304 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1305 updated_git_repositories: UpdatedGitRepositoriesSet,
1306 fs: Arc<dyn Fs>,
1307 cx: &mut Context<Self>,
1308 ) {
1309 let mut removed_ids = Vec::new();
1310 for update in updated_git_repositories.iter() {
1311 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1312 let existing_work_directory_abs_path =
1313 repo.read(cx).work_directory_abs_path.clone();
1314 Some(&existing_work_directory_abs_path)
1315 == update.old_work_directory_abs_path.as_ref()
1316 || Some(&existing_work_directory_abs_path)
1317 == update.new_work_directory_abs_path.as_ref()
1318 }) {
1319 if let Some(new_work_directory_abs_path) =
1320 update.new_work_directory_abs_path.clone()
1321 {
1322 existing.update(cx, |existing, cx| {
1323 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1324 existing.schedule_scan(updates_tx.clone(), cx);
1325 });
1326 } else {
1327 removed_ids.push(*id);
1328 }
1329 } else if let UpdatedGitRepository {
1330 new_work_directory_abs_path: Some(work_directory_abs_path),
1331 dot_git_abs_path: Some(dot_git_abs_path),
1332 repository_dir_abs_path: Some(repository_dir_abs_path),
1333 common_dir_abs_path: Some(common_dir_abs_path),
1334 ..
1335 } = update
1336 {
1337 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1338 let git_store = cx.weak_entity();
1339 let repo = cx.new(|cx| {
1340 let mut repo = Repository::local(
1341 id,
1342 work_directory_abs_path.clone(),
1343 dot_git_abs_path.clone(),
1344 repository_dir_abs_path.clone(),
1345 common_dir_abs_path.clone(),
1346 project_environment.downgrade(),
1347 fs.clone(),
1348 git_store,
1349 cx,
1350 );
1351 repo.schedule_scan(updates_tx.clone(), cx);
1352 repo
1353 });
1354 self._subscriptions
1355 .push(cx.subscribe(&repo, Self::on_repository_event));
1356 self._subscriptions
1357 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1358 self.repositories.insert(id, repo);
1359 cx.emit(GitStoreEvent::RepositoryAdded);
1360 self.active_repo_id.get_or_insert_with(|| {
1361 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1362 id
1363 });
1364 }
1365 }
1366
1367 for id in removed_ids {
1368 if self.active_repo_id == Some(id) {
1369 self.active_repo_id = None;
1370 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1371 }
1372 self.repositories.remove(&id);
1373 if let Some(updates_tx) = updates_tx.as_ref() {
1374 updates_tx
1375 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1376 .ok();
1377 }
1378 }
1379 }
1380
1381 fn on_buffer_store_event(
1382 &mut self,
1383 _: Entity<BufferStore>,
1384 event: &BufferStoreEvent,
1385 cx: &mut Context<Self>,
1386 ) {
1387 match event {
1388 BufferStoreEvent::BufferAdded(buffer) => {
1389 cx.subscribe(buffer, |this, buffer, event, cx| {
1390 if let BufferEvent::LanguageChanged = event {
1391 let buffer_id = buffer.read(cx).remote_id();
1392 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1393 diff_state.update(cx, |diff_state, cx| {
1394 diff_state.buffer_language_changed(buffer, cx);
1395 });
1396 }
1397 }
1398 })
1399 .detach();
1400 }
1401 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1402 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1403 diffs.remove(buffer_id);
1404 }
1405 }
1406 BufferStoreEvent::BufferDropped(buffer_id) => {
1407 self.diffs.remove(buffer_id);
1408 for diffs in self.shared_diffs.values_mut() {
1409 diffs.remove(buffer_id);
1410 }
1411 }
1412
1413 _ => {}
1414 }
1415 }
1416
1417 pub fn recalculate_buffer_diffs(
1418 &mut self,
1419 buffers: Vec<Entity<Buffer>>,
1420 cx: &mut Context<Self>,
1421 ) -> impl Future<Output = ()> + use<> {
1422 let mut futures = Vec::new();
1423 for buffer in buffers {
1424 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1425 let buffer = buffer.read(cx).text_snapshot();
1426 diff_state.update(cx, |diff_state, cx| {
1427 diff_state.recalculate_diffs(buffer.clone(), cx);
1428 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1429 });
1430 futures.push(diff_state.update(cx, |diff_state, cx| {
1431 diff_state
1432 .reparse_conflict_markers(buffer, cx)
1433 .map(|_| {})
1434 .boxed()
1435 }));
1436 }
1437 }
1438 async move {
1439 futures::future::join_all(futures).await;
1440 }
1441 }
1442
1443 fn on_buffer_diff_event(
1444 &mut self,
1445 diff: Entity<buffer_diff::BufferDiff>,
1446 event: &BufferDiffEvent,
1447 cx: &mut Context<Self>,
1448 ) {
1449 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1450 let buffer_id = diff.read(cx).buffer_id;
1451 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1452 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1453 diff_state.hunk_staging_operation_count += 1;
1454 diff_state.hunk_staging_operation_count
1455 });
1456 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1457 let recv = repo.update(cx, |repo, cx| {
1458 log::debug!("hunks changed for {}", path.as_unix_str());
1459 repo.spawn_set_index_text_job(
1460 path,
1461 new_index_text.as_ref().map(|rope| rope.to_string()),
1462 Some(hunk_staging_operation_count),
1463 cx,
1464 )
1465 });
1466 let diff = diff.downgrade();
1467 cx.spawn(async move |this, cx| {
1468 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1469 diff.update(cx, |diff, cx| {
1470 diff.clear_pending_hunks(cx);
1471 })
1472 .ok();
1473 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1474 .ok();
1475 }
1476 })
1477 .detach();
1478 }
1479 }
1480 }
1481 }
1482
1483 fn local_worktree_git_repos_changed(
1484 &mut self,
1485 worktree: Entity<Worktree>,
1486 changed_repos: &UpdatedGitRepositoriesSet,
1487 cx: &mut Context<Self>,
1488 ) {
1489 log::debug!("local worktree repos changed");
1490 debug_assert!(worktree.read(cx).is_local());
1491
1492 for repository in self.repositories.values() {
1493 repository.update(cx, |repository, cx| {
1494 let repo_abs_path = &repository.work_directory_abs_path;
1495 if changed_repos.iter().any(|update| {
1496 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1497 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1498 }) {
1499 repository.reload_buffer_diff_bases(cx);
1500 }
1501 });
1502 }
1503 }
1504
1505 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1506 &self.repositories
1507 }
1508
1509 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1510 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1511 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1512 Some(status.status)
1513 }
1514
1515 pub fn repository_and_path_for_buffer_id(
1516 &self,
1517 buffer_id: BufferId,
1518 cx: &App,
1519 ) -> Option<(Entity<Repository>, RepoPath)> {
1520 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1521 let project_path = buffer.read(cx).project_path(cx)?;
1522 self.repository_and_path_for_project_path(&project_path, cx)
1523 }
1524
1525 pub fn repository_and_path_for_project_path(
1526 &self,
1527 path: &ProjectPath,
1528 cx: &App,
1529 ) -> Option<(Entity<Repository>, RepoPath)> {
1530 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1531 self.repositories
1532 .values()
1533 .filter_map(|repo| {
1534 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1535 Some((repo.clone(), repo_path))
1536 })
1537 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1538 }
1539
1540 pub fn git_init(
1541 &self,
1542 path: Arc<Path>,
1543 fallback_branch_name: String,
1544 cx: &App,
1545 ) -> Task<Result<()>> {
1546 match &self.state {
1547 GitStoreState::Local { fs, .. } => {
1548 let fs = fs.clone();
1549 cx.background_executor()
1550 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1551 }
1552 GitStoreState::Remote {
1553 upstream_client,
1554 upstream_project_id: project_id,
1555 ..
1556 } => {
1557 let client = upstream_client.clone();
1558 let project_id = *project_id;
1559 cx.background_executor().spawn(async move {
1560 client
1561 .request(proto::GitInit {
1562 project_id: project_id,
1563 abs_path: path.to_string_lossy().into_owned(),
1564 fallback_branch_name,
1565 })
1566 .await?;
1567 Ok(())
1568 })
1569 }
1570 }
1571 }
1572
1573 pub fn git_clone(
1574 &self,
1575 repo: String,
1576 path: impl Into<Arc<std::path::Path>>,
1577 cx: &App,
1578 ) -> Task<Result<()>> {
1579 let path = path.into();
1580 match &self.state {
1581 GitStoreState::Local { fs, .. } => {
1582 let fs = fs.clone();
1583 cx.background_executor()
1584 .spawn(async move { fs.git_clone(&repo, &path).await })
1585 }
1586 GitStoreState::Remote {
1587 upstream_client,
1588 upstream_project_id,
1589 ..
1590 } => {
1591 if upstream_client.is_via_collab() {
1592 return Task::ready(Err(anyhow!(
1593 "Git Clone isn't supported for project guests"
1594 )));
1595 }
1596 let request = upstream_client.request(proto::GitClone {
1597 project_id: *upstream_project_id,
1598 abs_path: path.to_string_lossy().into_owned(),
1599 remote_repo: repo,
1600 });
1601
1602 cx.background_spawn(async move {
1603 let result = request.await?;
1604
1605 match result.success {
1606 true => Ok(()),
1607 false => Err(anyhow!("Git Clone failed")),
1608 }
1609 })
1610 }
1611 }
1612 }
1613
1614 async fn handle_update_repository(
1615 this: Entity<Self>,
1616 envelope: TypedEnvelope<proto::UpdateRepository>,
1617 mut cx: AsyncApp,
1618 ) -> Result<()> {
1619 this.update(&mut cx, |this, cx| {
1620 let path_style = this.worktree_store.read(cx).path_style();
1621 let mut update = envelope.payload;
1622
1623 let id = RepositoryId::from_proto(update.id);
1624 let client = this.upstream_client().context("no upstream client")?;
1625
1626 let mut repo_subscription = None;
1627 let repo = this.repositories.entry(id).or_insert_with(|| {
1628 let git_store = cx.weak_entity();
1629 let repo = cx.new(|cx| {
1630 Repository::remote(
1631 id,
1632 Path::new(&update.abs_path).into(),
1633 path_style,
1634 ProjectId(update.project_id),
1635 client,
1636 git_store,
1637 cx,
1638 )
1639 });
1640 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1641 cx.emit(GitStoreEvent::RepositoryAdded);
1642 repo
1643 });
1644 this._subscriptions.extend(repo_subscription);
1645
1646 repo.update(cx, {
1647 let update = update.clone();
1648 |repo, cx| repo.apply_remote_update(update, cx)
1649 })?;
1650
1651 this.active_repo_id.get_or_insert_with(|| {
1652 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1653 id
1654 });
1655
1656 if let Some((client, project_id)) = this.downstream_client() {
1657 update.project_id = project_id.to_proto();
1658 client.send(update).log_err();
1659 }
1660 Ok(())
1661 })?
1662 }
1663
1664 async fn handle_remove_repository(
1665 this: Entity<Self>,
1666 envelope: TypedEnvelope<proto::RemoveRepository>,
1667 mut cx: AsyncApp,
1668 ) -> Result<()> {
1669 this.update(&mut cx, |this, cx| {
1670 let mut update = envelope.payload;
1671 let id = RepositoryId::from_proto(update.id);
1672 this.repositories.remove(&id);
1673 if let Some((client, project_id)) = this.downstream_client() {
1674 update.project_id = project_id.to_proto();
1675 client.send(update).log_err();
1676 }
1677 if this.active_repo_id == Some(id) {
1678 this.active_repo_id = None;
1679 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1680 }
1681 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1682 })
1683 }
1684
1685 async fn handle_git_init(
1686 this: Entity<Self>,
1687 envelope: TypedEnvelope<proto::GitInit>,
1688 cx: AsyncApp,
1689 ) -> Result<proto::Ack> {
1690 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1691 let name = envelope.payload.fallback_branch_name;
1692 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1693 .await?;
1694
1695 Ok(proto::Ack {})
1696 }
1697
1698 async fn handle_git_clone(
1699 this: Entity<Self>,
1700 envelope: TypedEnvelope<proto::GitClone>,
1701 cx: AsyncApp,
1702 ) -> Result<proto::GitCloneResponse> {
1703 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1704 let repo_name = envelope.payload.remote_repo;
1705 let result = cx
1706 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1707 .await;
1708
1709 Ok(proto::GitCloneResponse {
1710 success: result.is_ok(),
1711 })
1712 }
1713
1714 async fn handle_fetch(
1715 this: Entity<Self>,
1716 envelope: TypedEnvelope<proto::Fetch>,
1717 mut cx: AsyncApp,
1718 ) -> Result<proto::RemoteMessageResponse> {
1719 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1720 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1721 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1722 let askpass_id = envelope.payload.askpass_id;
1723
1724 let askpass = make_remote_delegate(
1725 this,
1726 envelope.payload.project_id,
1727 repository_id,
1728 askpass_id,
1729 &mut cx,
1730 );
1731
1732 let remote_output = repository_handle
1733 .update(&mut cx, |repository_handle, cx| {
1734 repository_handle.fetch(fetch_options, askpass, cx)
1735 })?
1736 .await??;
1737
1738 Ok(proto::RemoteMessageResponse {
1739 stdout: remote_output.stdout,
1740 stderr: remote_output.stderr,
1741 })
1742 }
1743
1744 async fn handle_push(
1745 this: Entity<Self>,
1746 envelope: TypedEnvelope<proto::Push>,
1747 mut cx: AsyncApp,
1748 ) -> Result<proto::RemoteMessageResponse> {
1749 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1750 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1751
1752 let askpass_id = envelope.payload.askpass_id;
1753 let askpass = make_remote_delegate(
1754 this,
1755 envelope.payload.project_id,
1756 repository_id,
1757 askpass_id,
1758 &mut cx,
1759 );
1760
1761 let options = envelope
1762 .payload
1763 .options
1764 .as_ref()
1765 .map(|_| match envelope.payload.options() {
1766 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1767 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1768 });
1769
1770 let branch_name = envelope.payload.branch_name.into();
1771 let remote_name = envelope.payload.remote_name.into();
1772
1773 let remote_output = repository_handle
1774 .update(&mut cx, |repository_handle, cx| {
1775 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1776 })?
1777 .await??;
1778 Ok(proto::RemoteMessageResponse {
1779 stdout: remote_output.stdout,
1780 stderr: remote_output.stderr,
1781 })
1782 }
1783
1784 async fn handle_pull(
1785 this: Entity<Self>,
1786 envelope: TypedEnvelope<proto::Pull>,
1787 mut cx: AsyncApp,
1788 ) -> Result<proto::RemoteMessageResponse> {
1789 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1790 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1791 let askpass_id = envelope.payload.askpass_id;
1792 let askpass = make_remote_delegate(
1793 this,
1794 envelope.payload.project_id,
1795 repository_id,
1796 askpass_id,
1797 &mut cx,
1798 );
1799
1800 let branch_name = envelope.payload.branch_name.into();
1801 let remote_name = envelope.payload.remote_name.into();
1802
1803 let remote_message = repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.pull(branch_name, remote_name, askpass, cx)
1806 })?
1807 .await??;
1808
1809 Ok(proto::RemoteMessageResponse {
1810 stdout: remote_message.stdout,
1811 stderr: remote_message.stderr,
1812 })
1813 }
1814
1815 async fn handle_stage(
1816 this: Entity<Self>,
1817 envelope: TypedEnvelope<proto::Stage>,
1818 mut cx: AsyncApp,
1819 ) -> Result<proto::Ack> {
1820 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1821 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1822
1823 let entries = envelope
1824 .payload
1825 .paths
1826 .into_iter()
1827 .map(|path| RepoPath::new(&path))
1828 .collect::<Result<Vec<_>>>()?;
1829
1830 repository_handle
1831 .update(&mut cx, |repository_handle, cx| {
1832 repository_handle.stage_entries(entries, cx)
1833 })?
1834 .await?;
1835 Ok(proto::Ack {})
1836 }
1837
1838 async fn handle_unstage(
1839 this: Entity<Self>,
1840 envelope: TypedEnvelope<proto::Unstage>,
1841 mut cx: AsyncApp,
1842 ) -> Result<proto::Ack> {
1843 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1844 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1845
1846 let entries = envelope
1847 .payload
1848 .paths
1849 .into_iter()
1850 .map(|path| RepoPath::new(&path))
1851 .collect::<Result<Vec<_>>>()?;
1852
1853 repository_handle
1854 .update(&mut cx, |repository_handle, cx| {
1855 repository_handle.unstage_entries(entries, cx)
1856 })?
1857 .await?;
1858
1859 Ok(proto::Ack {})
1860 }
1861
1862 async fn handle_stash(
1863 this: Entity<Self>,
1864 envelope: TypedEnvelope<proto::Stash>,
1865 mut cx: AsyncApp,
1866 ) -> Result<proto::Ack> {
1867 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1868 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1869
1870 let entries = envelope
1871 .payload
1872 .paths
1873 .into_iter()
1874 .map(|path| RepoPath::new(&path))
1875 .collect::<Result<Vec<_>>>()?;
1876
1877 repository_handle
1878 .update(&mut cx, |repository_handle, cx| {
1879 repository_handle.stash_entries(entries, cx)
1880 })?
1881 .await?;
1882
1883 Ok(proto::Ack {})
1884 }
1885
1886 async fn handle_stash_pop(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::StashPop>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::Ack> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1894
1895 repository_handle
1896 .update(&mut cx, |repository_handle, cx| {
1897 repository_handle.stash_pop(stash_index, cx)
1898 })?
1899 .await?;
1900
1901 Ok(proto::Ack {})
1902 }
1903
1904 async fn handle_stash_apply(
1905 this: Entity<Self>,
1906 envelope: TypedEnvelope<proto::StashApply>,
1907 mut cx: AsyncApp,
1908 ) -> Result<proto::Ack> {
1909 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1910 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1911 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1912
1913 repository_handle
1914 .update(&mut cx, |repository_handle, cx| {
1915 repository_handle.stash_apply(stash_index, cx)
1916 })?
1917 .await?;
1918
1919 Ok(proto::Ack {})
1920 }
1921
1922 async fn handle_stash_drop(
1923 this: Entity<Self>,
1924 envelope: TypedEnvelope<proto::StashDrop>,
1925 mut cx: AsyncApp,
1926 ) -> Result<proto::Ack> {
1927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1928 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1929 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1930
1931 repository_handle
1932 .update(&mut cx, |repository_handle, cx| {
1933 repository_handle.stash_drop(stash_index, cx)
1934 })?
1935 .await??;
1936
1937 Ok(proto::Ack {})
1938 }
1939
1940 async fn handle_set_index_text(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::SetIndexText>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1948
1949 repository_handle
1950 .update(&mut cx, |repository_handle, cx| {
1951 repository_handle.spawn_set_index_text_job(
1952 repo_path,
1953 envelope.payload.text,
1954 None,
1955 cx,
1956 )
1957 })?
1958 .await??;
1959 Ok(proto::Ack {})
1960 }
1961
1962 async fn handle_commit(
1963 this: Entity<Self>,
1964 envelope: TypedEnvelope<proto::Commit>,
1965 mut cx: AsyncApp,
1966 ) -> Result<proto::Ack> {
1967 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1968 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1969
1970 let message = SharedString::from(envelope.payload.message);
1971 let name = envelope.payload.name.map(SharedString::from);
1972 let email = envelope.payload.email.map(SharedString::from);
1973 let options = envelope.payload.options.unwrap_or_default();
1974
1975 repository_handle
1976 .update(&mut cx, |repository_handle, cx| {
1977 repository_handle.commit(
1978 message,
1979 name.zip(email),
1980 CommitOptions {
1981 amend: options.amend,
1982 signoff: options.signoff,
1983 },
1984 cx,
1985 )
1986 })?
1987 .await??;
1988 Ok(proto::Ack {})
1989 }
1990
1991 async fn handle_get_remotes(
1992 this: Entity<Self>,
1993 envelope: TypedEnvelope<proto::GetRemotes>,
1994 mut cx: AsyncApp,
1995 ) -> Result<proto::GetRemotesResponse> {
1996 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1997 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1998
1999 let branch_name = envelope.payload.branch_name;
2000
2001 let remotes = repository_handle
2002 .update(&mut cx, |repository_handle, _| {
2003 repository_handle.get_remotes(branch_name)
2004 })?
2005 .await??;
2006
2007 Ok(proto::GetRemotesResponse {
2008 remotes: remotes
2009 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
                })
2013 .collect::<Vec<_>>(),
2014 })
2015 }
2016
2017 async fn handle_get_worktrees(
2018 this: Entity<Self>,
2019 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2020 mut cx: AsyncApp,
2021 ) -> Result<proto::GitWorktreesResponse> {
2022 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2023 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2024
2025 let worktrees = repository_handle
2026 .update(&mut cx, |repository_handle, _| {
2027 repository_handle.worktrees()
2028 })?
2029 .await??;
2030
2031 Ok(proto::GitWorktreesResponse {
2032 worktrees: worktrees
2033 .into_iter()
2034 .map(|worktree| worktree_to_proto(&worktree))
2035 .collect::<Vec<_>>(),
2036 })
2037 }
2038
2039 async fn handle_create_worktree(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let directory = PathBuf::from(envelope.payload.directory);
2047 let name = envelope.payload.name;
2048 let commit = envelope.payload.commit;
2049
2050 repository_handle
2051 .update(&mut cx, |repository_handle, _| {
2052 repository_handle.create_worktree(name, directory, commit)
2053 })?
2054 .await??;
2055
2056 Ok(proto::Ack {})
2057 }
2058
2059 async fn handle_get_branches(
2060 this: Entity<Self>,
2061 envelope: TypedEnvelope<proto::GitGetBranches>,
2062 mut cx: AsyncApp,
2063 ) -> Result<proto::GitBranchesResponse> {
2064 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2065 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2066
2067 let branches = repository_handle
2068 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2069 .await??;
2070
2071 Ok(proto::GitBranchesResponse {
2072 branches: branches
2073 .into_iter()
2074 .map(|branch| branch_to_proto(&branch))
2075 .collect::<Vec<_>>(),
2076 })
    }

    async fn handle_get_default_branch(
2079 this: Entity<Self>,
2080 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2081 mut cx: AsyncApp,
2082 ) -> Result<proto::GetDefaultBranchResponse> {
2083 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2084 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2085
2086 let branch = repository_handle
2087 .update(&mut cx, |repository_handle, _| {
2088 repository_handle.default_branch()
2089 })?
2090 .await??
2091 .map(Into::into);
2092
2093 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2096 this: Entity<Self>,
2097 envelope: TypedEnvelope<proto::GitCreateBranch>,
2098 mut cx: AsyncApp,
2099 ) -> Result<proto::Ack> {
2100 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2101 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2102 let branch_name = envelope.payload.branch_name;
2103
2104 repository_handle
2105 .update(&mut cx, |repository_handle, _| {
2106 repository_handle.create_branch(branch_name)
2107 })?
2108 .await??;
2109
2110 Ok(proto::Ack {})
2111 }
2112
2113 async fn handle_change_branch(
2114 this: Entity<Self>,
2115 envelope: TypedEnvelope<proto::GitChangeBranch>,
2116 mut cx: AsyncApp,
2117 ) -> Result<proto::Ack> {
2118 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2119 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2120 let branch_name = envelope.payload.branch_name;
2121
2122 repository_handle
2123 .update(&mut cx, |repository_handle, _| {
2124 repository_handle.change_branch(branch_name)
2125 })?
2126 .await??;
2127
2128 Ok(proto::Ack {})
2129 }
2130
2131 async fn handle_rename_branch(
2132 this: Entity<Self>,
2133 envelope: TypedEnvelope<proto::GitRenameBranch>,
2134 mut cx: AsyncApp,
2135 ) -> Result<proto::Ack> {
2136 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2137 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2138 let branch = envelope.payload.branch;
2139 let new_name = envelope.payload.new_name;
2140
2141 repository_handle
2142 .update(&mut cx, |repository_handle, _| {
2143 repository_handle.rename_branch(branch, new_name)
2144 })?
2145 .await??;
2146
2147 Ok(proto::Ack {})
2148 }
2149
2150 async fn handle_show(
2151 this: Entity<Self>,
2152 envelope: TypedEnvelope<proto::GitShow>,
2153 mut cx: AsyncApp,
2154 ) -> Result<proto::GitCommitDetails> {
2155 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2156 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2157
2158 let commit = repository_handle
2159 .update(&mut cx, |repository_handle, _| {
2160 repository_handle.show(envelope.payload.commit)
2161 })?
2162 .await??;
2163 Ok(proto::GitCommitDetails {
2164 sha: commit.sha.into(),
2165 message: commit.message.into(),
2166 commit_timestamp: commit.commit_timestamp,
2167 author_email: commit.author_email.into(),
2168 author_name: commit.author_name.into(),
2169 })
2170 }
2171
2172 async fn handle_load_commit_diff(
2173 this: Entity<Self>,
2174 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2175 mut cx: AsyncApp,
2176 ) -> Result<proto::LoadCommitDiffResponse> {
2177 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2178 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2179
2180 let commit_diff = repository_handle
2181 .update(&mut cx, |repository_handle, _| {
2182 repository_handle.load_commit_diff(envelope.payload.commit)
2183 })?
2184 .await??;
2185 Ok(proto::LoadCommitDiffResponse {
2186 files: commit_diff
2187 .files
2188 .into_iter()
2189 .map(|file| proto::CommitFile {
2190 path: file.path.to_proto(),
2191 old_text: file.old_text,
2192 new_text: file.new_text,
2193 })
2194 .collect(),
2195 })
2196 }
2197
2198 async fn handle_reset(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::GitReset>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::Ack> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205
2206 let mode = match envelope.payload.mode() {
2207 git_reset::ResetMode::Soft => ResetMode::Soft,
2208 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2209 };
2210
2211 repository_handle
2212 .update(&mut cx, |repository_handle, cx| {
2213 repository_handle.reset(envelope.payload.commit, mode, cx)
2214 })?
2215 .await??;
2216 Ok(proto::Ack {})
2217 }
2218
2219 async fn handle_checkout_files(
2220 this: Entity<Self>,
2221 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2222 mut cx: AsyncApp,
2223 ) -> Result<proto::Ack> {
2224 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2225 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2226 let paths = envelope
2227 .payload
2228 .paths
2229 .iter()
2230 .map(|s| RepoPath::from_proto(s))
2231 .collect::<Result<Vec<_>>>()?;
2232
2233 repository_handle
2234 .update(&mut cx, |repository_handle, cx| {
2235 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2236 })?
2237 .await??;
2238 Ok(proto::Ack {})
2239 }
2240
2241 async fn handle_open_commit_message_buffer(
2242 this: Entity<Self>,
2243 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2244 mut cx: AsyncApp,
2245 ) -> Result<proto::OpenBufferResponse> {
2246 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2247 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2248 let buffer = repository
2249 .update(&mut cx, |repository, cx| {
2250 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2251 })?
2252 .await?;
2253
2254 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2255 this.update(&mut cx, |this, cx| {
2256 this.buffer_store.update(cx, |buffer_store, cx| {
2257 buffer_store
2258 .create_buffer_for_peer(
2259 &buffer,
2260 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2261 cx,
2262 )
2263 .detach_and_log_err(cx);
2264 })
2265 })?;
2266
2267 Ok(proto::OpenBufferResponse {
2268 buffer_id: buffer_id.to_proto(),
2269 })
2270 }
2271
2272 async fn handle_askpass(
2273 this: Entity<Self>,
2274 envelope: TypedEnvelope<proto::AskPassRequest>,
2275 mut cx: AsyncApp,
2276 ) -> Result<proto::AskPassResponse> {
2277 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2278 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2279
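        // Take the delegate out of the shared map so we can await the prompt without
        // holding the lock; it is reinserted below once the prompt completes.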
2280 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2281 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2282 debug_panic!("no askpass found");
2283 anyhow::bail!("no askpass found");
2284 };
2285
2286 let response = askpass
2287 .ask_password(envelope.payload.prompt)
2288 .await
2289 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2290
2291 delegates
2292 .lock()
2293 .insert(envelope.payload.askpass_id, askpass);
2294
        // Note: the askpass password is knowingly decrypted and returned unencrypted in
        // this response, hence the explicit opt-in token below.
2296 Ok(proto::AskPassResponse {
2297 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2298 })
2299 }
2300
2301 async fn handle_check_for_pushed_commits(
2302 this: Entity<Self>,
2303 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2304 mut cx: AsyncApp,
2305 ) -> Result<proto::CheckForPushedCommitsResponse> {
2306 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2307 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2308
2309 let branches = repository_handle
2310 .update(&mut cx, |repository_handle, _| {
2311 repository_handle.check_for_pushed_commits()
2312 })?
2313 .await??;
2314 Ok(proto::CheckForPushedCommitsResponse {
2315 pushed_to: branches
2316 .into_iter()
2317 .map(|commit| commit.to_string())
2318 .collect(),
2319 })
2320 }
2321
2322 async fn handle_git_diff(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::GitDiff>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::GitDiffResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329 let diff_type = match envelope.payload.diff_type() {
2330 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2331 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2332 };
2333
2334 let mut diff = repository_handle
2335 .update(&mut cx, |repository_handle, cx| {
2336 repository_handle.diff(diff_type, cx)
2337 })?
2338 .await??;
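        // Cap the payload: truncate oversized diffs to roughly the first megabyte of
        // characters so the RPC response stays bounded.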
2339 const ONE_MB: usize = 1_000_000;
2340 if diff.len() > ONE_MB {
2341 diff = diff.chars().take(ONE_MB).collect()
2342 }
2343
2344 Ok(proto::GitDiffResponse { diff })
2345 }
2346
2347 async fn handle_tree_diff(
2348 this: Entity<Self>,
2349 request: TypedEnvelope<proto::GetTreeDiff>,
2350 mut cx: AsyncApp,
2351 ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
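        // `is_merge` maps to `DiffTreeType::MergeBase`, i.e. a diff against the merge
        // base of `base` and `head`; otherwise `DiffTreeType::Since` diffs the range
        // from `base` to `head`.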
2353 let diff_type = if request.payload.is_merge {
2354 DiffTreeType::MergeBase {
2355 base: request.payload.base.into(),
2356 head: request.payload.head.into(),
2357 }
2358 } else {
2359 DiffTreeType::Since {
2360 base: request.payload.base.into(),
2361 head: request.payload.head.into(),
2362 }
2363 };
2364
2365 let diff = this
2366 .update(&mut cx, |this, cx| {
2367 let repository = this.repositories().get(&repository_id)?;
2368 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2369 })?
2370 .context("missing repository")?
2371 .await??;
2372
2373 Ok(proto::GetTreeDiffResponse {
2374 entries: diff
2375 .entries
2376 .into_iter()
2377 .map(|(path, status)| proto::TreeDiffStatus {
2378 path: path.0.to_proto(),
2379 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2381 TreeDiffStatus::Modified { .. } => {
2382 proto::tree_diff_status::Status::Modified.into()
2383 }
2384 TreeDiffStatus::Deleted { .. } => {
2385 proto::tree_diff_status::Status::Deleted.into()
2386 }
2387 },
2388 oid: match status {
2389 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2390 Some(old.to_string())
2391 }
2392 TreeDiffStatus::Added => None,
2393 },
2394 })
2395 .collect(),
2396 })
2397 }
2398
2399 async fn handle_get_blob_content(
2400 this: Entity<Self>,
2401 request: TypedEnvelope<proto::GetBlobContent>,
2402 mut cx: AsyncApp,
2403 ) -> Result<proto::GetBlobContentResponse> {
2404 let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
2406 let content = this
2407 .update(&mut cx, |this, cx| {
2408 let repository = this.repositories().get(&repository_id)?;
2409 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2410 })?
2411 .context("missing repository")?
2412 .await?;
2413 Ok(proto::GetBlobContentResponse { content })
2414 }
2415
2416 async fn handle_open_unstaged_diff(
2417 this: Entity<Self>,
2418 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2419 mut cx: AsyncApp,
2420 ) -> Result<proto::OpenUnstagedDiffResponse> {
2421 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2422 let diff = this
2423 .update(&mut cx, |this, cx| {
2424 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2425 Some(this.open_unstaged_diff(buffer, cx))
2426 })?
2427 .context("missing buffer")?
2428 .await?;
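        // Retain the diff on behalf of the requesting peer (keyed by sender) so it
        // stays alive while it is shared.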
2429 this.update(&mut cx, |this, _| {
2430 let shared_diffs = this
2431 .shared_diffs
2432 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2433 .or_default();
2434 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2435 })?;
2436 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2437 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2438 }
2439
2440 async fn handle_open_uncommitted_diff(
2441 this: Entity<Self>,
2442 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2443 mut cx: AsyncApp,
2444 ) -> Result<proto::OpenUncommittedDiffResponse> {
2445 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2446 let diff = this
2447 .update(&mut cx, |this, cx| {
2448 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2449 Some(this.open_uncommitted_diff(buffer, cx))
2450 })?
2451 .context("missing buffer")?
2452 .await?;
2453 this.update(&mut cx, |this, _| {
2454 let shared_diffs = this
2455 .shared_diffs
2456 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2457 .or_default();
2458 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2459 })?;
2460 diff.read_with(&cx, |diff, cx| {
2461 use proto::open_uncommitted_diff_response::Mode;
2462
2463 let unstaged_diff = diff.secondary_diff();
2464 let index_snapshot = unstaged_diff.and_then(|diff| {
2465 let diff = diff.read(cx);
2466 diff.base_text_exists().then(|| diff.base_text())
2467 });
2468
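            // Decide which base texts to send back: when the index snapshot is the same
            // buffer as the committed (HEAD) snapshot, the staged text is omitted and
            // the mode is reported as `IndexMatchesHead`.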
2469 let mode;
2470 let staged_text;
2471 let committed_text;
2472 if diff.base_text_exists() {
2473 let committed_snapshot = diff.base_text();
2474 committed_text = Some(committed_snapshot.text());
2475 if let Some(index_text) = index_snapshot {
2476 if index_text.remote_id() == committed_snapshot.remote_id() {
2477 mode = Mode::IndexMatchesHead;
2478 staged_text = None;
2479 } else {
2480 mode = Mode::IndexAndHead;
2481 staged_text = Some(index_text.text());
2482 }
2483 } else {
2484 mode = Mode::IndexAndHead;
2485 staged_text = None;
2486 }
2487 } else {
2488 mode = Mode::IndexAndHead;
2489 committed_text = None;
2490 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2491 }
2492
2493 proto::OpenUncommittedDiffResponse {
2494 committed_text,
2495 staged_text,
2496 mode: mode.into(),
2497 }
2498 })
2499 }
2500
2501 async fn handle_update_diff_bases(
2502 this: Entity<Self>,
2503 request: TypedEnvelope<proto::UpdateDiffBases>,
2504 mut cx: AsyncApp,
2505 ) -> Result<()> {
2506 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2507 this.update(&mut cx, |this, cx| {
2508 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2509 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2510 {
2511 let buffer = buffer.read(cx).text_snapshot();
2512 diff_state.update(cx, |diff_state, cx| {
2513 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2514 })
2515 }
2516 })
2517 }
2518
2519 async fn handle_blame_buffer(
2520 this: Entity<Self>,
2521 envelope: TypedEnvelope<proto::BlameBuffer>,
2522 mut cx: AsyncApp,
2523 ) -> Result<proto::BlameBufferResponse> {
2524 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2525 let version = deserialize_version(&envelope.payload.version);
2526 let buffer = this.read_with(&cx, |this, cx| {
2527 this.buffer_store.read(cx).get_existing(buffer_id)
2528 })??;
2529 buffer
2530 .update(&mut cx, |buffer, _| {
2531 buffer.wait_for_version(version.clone())
2532 })?
2533 .await?;
2534 let blame = this
2535 .update(&mut cx, |this, cx| {
2536 this.blame_buffer(&buffer, Some(version), cx)
2537 })?
2538 .await?;
2539 Ok(serialize_blame_buffer_response(blame))
2540 }
2541
2542 async fn handle_get_permalink_to_line(
2543 this: Entity<Self>,
2544 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2545 mut cx: AsyncApp,
2546 ) -> Result<proto::GetPermalinkToLineResponse> {
2547 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2549 let selection = {
2550 let proto_selection = envelope
2551 .payload
2552 .selection
                .context("no selection provided to get permalink for")?;
2554 proto_selection.start as u32..proto_selection.end as u32
2555 };
2556 let buffer = this.read_with(&cx, |this, cx| {
2557 this.buffer_store.read(cx).get_existing(buffer_id)
2558 })??;
2559 let permalink = this
2560 .update(&mut cx, |this, cx| {
2561 this.get_permalink_to_line(&buffer, selection, cx)
2562 })?
2563 .await?;
2564 Ok(proto::GetPermalinkToLineResponse {
2565 permalink: permalink.to_string(),
2566 })
2567 }
2568
2569 fn repository_for_request(
2570 this: &Entity<Self>,
2571 id: RepositoryId,
2572 cx: &mut AsyncApp,
2573 ) -> Result<Entity<Repository>> {
2574 this.read_with(cx, |this, _| {
2575 this.repositories
2576 .get(&id)
2577 .context("missing repository handle")
2578 .cloned()
2579 })?
2580 }
2581
2582 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2583 self.repositories
2584 .iter()
2585 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2586 .collect()
2587 }
2588
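    /// Groups the given updated worktree entries by the repository whose working
    /// directory contains them. Both the repository list and the absolutized paths are
    /// sorted so that each repository's paths form a contiguous range, and nested
    /// repositories claim their paths before any enclosing repository does.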
2589 fn process_updated_entries(
2590 &self,
2591 worktree: &Entity<Worktree>,
2592 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2593 cx: &mut App,
2594 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2595 let path_style = worktree.read(cx).path_style();
2596 let mut repo_paths = self
2597 .repositories
2598 .values()
2599 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2600 .collect::<Vec<_>>();
2601 let mut entries: Vec<_> = updated_entries
2602 .iter()
2603 .map(|(path, _, _)| path.clone())
2604 .collect();
2605 entries.sort();
2606 let worktree = worktree.read(cx);
2607
2608 let entries = entries
2609 .into_iter()
2610 .map(|path| worktree.absolutize(&path))
2611 .collect::<Arc<[_]>>();
2612
2613 let executor = cx.background_executor().clone();
2614 cx.background_executor().spawn(async move {
2615 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2616 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2617 let mut tasks = FuturesOrdered::new();
2618 for (repo_path, repo) in repo_paths.into_iter().rev() {
2619 let entries = entries.clone();
2620 let task = executor.spawn(async move {
2621 // Find all repository paths that belong to this repo
2622 let mut ix = entries.partition_point(|path| path < &*repo_path);
2623 if ix == entries.len() {
2624 return None;
2625 };
2626
2627 let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a contiguous range.
2629 while let Some(path) = entries.get(ix)
2630 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2631 &repo_path, path, path_style,
2632 )
2633 {
2634 paths.push((repo_path, ix));
2635 ix += 1;
2636 }
2637 if paths.is_empty() {
2638 None
2639 } else {
2640 Some((repo, paths))
2641 }
2642 });
2643 tasks.push_back(task);
2644 }
2645
2646 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2647 let mut path_was_used = vec![false; entries.len()];
2648 let tasks = tasks.collect::<Vec<_>>().await;
            // Tasks were queued for repositories in reverse-sorted order, so more deeply
            // nested (more specific) repositories are seen first.
            // We always want to assign a path to its innermost repository.
2651 for t in tasks {
2652 let Some((repo, paths)) = t else {
2653 continue;
2654 };
2655 let entry = paths_by_git_repo.entry(repo).or_default();
2656 for (repo_path, ix) in paths {
2657 if path_was_used[ix] {
2658 continue;
2659 }
2660 path_was_used[ix] = true;
2661 entry.push(repo_path);
2662 }
2663 }
2664
2665 paths_by_git_repo
2666 })
2667 }
2668}
2669
2670impl BufferGitState {
2671 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2672 Self {
2673 unstaged_diff: Default::default(),
2674 uncommitted_diff: Default::default(),
2675 recalculate_diff_task: Default::default(),
2676 language: Default::default(),
2677 language_registry: Default::default(),
2678 recalculating_tx: postage::watch::channel_with(false).0,
2679 hunk_staging_operation_count: 0,
2680 hunk_staging_operation_count_as_of_write: 0,
2681 head_text: Default::default(),
2682 index_text: Default::default(),
2683 head_changed: Default::default(),
2684 index_changed: Default::default(),
2685 language_changed: Default::default(),
2686 conflict_updated_futures: Default::default(),
2687 conflict_set: Default::default(),
2688 reparse_conflict_markers_task: Default::default(),
2689 }
2690 }
2691
2692 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2693 self.language = buffer.read(cx).language().cloned();
2694 self.language_changed = true;
2695 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2696 }
2697
2698 fn reparse_conflict_markers(
2699 &mut self,
2700 buffer: text::BufferSnapshot,
2701 cx: &mut Context<Self>,
2702 ) -> oneshot::Receiver<()> {
2703 let (tx, rx) = oneshot::channel();
2704
2705 let Some(conflict_set) = self
2706 .conflict_set
2707 .as_ref()
2708 .and_then(|conflict_set| conflict_set.upgrade())
2709 else {
2710 return rx;
2711 };
2712
2713 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2714 if conflict_set.has_conflict {
2715 Some(conflict_set.snapshot())
2716 } else {
2717 None
2718 }
2719 });
2720
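        // Only schedule a reparse if the buffer previously contained conflicts;
        // otherwise the sender side of the channel is dropped and the returned receiver
        // resolves as cancelled.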
2721 if let Some(old_snapshot) = old_snapshot {
2722 self.conflict_updated_futures.push(tx);
2723 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2724 let (snapshot, changed_range) = cx
2725 .background_spawn(async move {
2726 let new_snapshot = ConflictSet::parse(&buffer);
2727 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2728 (new_snapshot, changed_range)
2729 })
2730 .await;
2731 this.update(cx, |this, cx| {
2732 if let Some(conflict_set) = &this.conflict_set {
2733 conflict_set
2734 .update(cx, |conflict_set, cx| {
2735 conflict_set.set_snapshot(snapshot, changed_range, cx);
2736 })
2737 .ok();
2738 }
2739 let futures = std::mem::take(&mut this.conflict_updated_futures);
2740 for tx in futures {
2741 tx.send(()).ok();
2742 }
2743 })
2744 }))
2745 }
2746
2747 rx
2748 }
2749
2750 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2751 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2752 }
2753
2754 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2755 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2756 }
2757
2758 fn handle_base_texts_updated(
2759 &mut self,
2760 buffer: text::BufferSnapshot,
2761 message: proto::UpdateDiffBases,
2762 cx: &mut Context<Self>,
2763 ) {
2764 use proto::update_diff_bases::Mode;
2765
2766 let Some(mode) = Mode::from_i32(message.mode) else {
2767 return;
2768 };
2769
2770 let diff_bases_change = match mode {
2771 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2772 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2773 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2774 Mode::IndexAndHead => DiffBasesChange::SetEach {
2775 index: message.staged_text,
2776 head: message.committed_text,
2777 },
2778 };
2779
2780 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2781 }
2782
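    /// Returns a future that resolves once any in-flight diff recalculation for this
    /// buffer has finished, or `None` if no recalculation is currently underway.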
2783 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2784 if *self.recalculating_tx.borrow() {
2785 let mut rx = self.recalculating_tx.subscribe();
2786 Some(async move {
2787 loop {
2788 let is_recalculating = rx.recv().await;
2789 if is_recalculating != Some(true) {
2790 break;
2791 }
2792 }
2793 })
2794 } else {
2795 None
2796 }
2797 }
2798
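    /// Applies a change to the cached index and/or HEAD base texts (normalizing line
    /// endings), records which of them changed, and kicks off a diff recalculation.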
2799 fn diff_bases_changed(
2800 &mut self,
2801 buffer: text::BufferSnapshot,
2802 diff_bases_change: Option<DiffBasesChange>,
2803 cx: &mut Context<Self>,
2804 ) {
2805 match diff_bases_change {
2806 Some(DiffBasesChange::SetIndex(index)) => {
2807 self.index_text = index.map(|mut index| {
2808 text::LineEnding::normalize(&mut index);
2809 Arc::new(index)
2810 });
2811 self.index_changed = true;
2812 }
2813 Some(DiffBasesChange::SetHead(head)) => {
2814 self.head_text = head.map(|mut head| {
2815 text::LineEnding::normalize(&mut head);
2816 Arc::new(head)
2817 });
2818 self.head_changed = true;
2819 }
2820 Some(DiffBasesChange::SetBoth(text)) => {
2821 let text = text.map(|mut text| {
2822 text::LineEnding::normalize(&mut text);
2823 Arc::new(text)
2824 });
2825 self.head_text = text.clone();
2826 self.index_text = text;
2827 self.head_changed = true;
2828 self.index_changed = true;
2829 }
2830 Some(DiffBasesChange::SetEach { index, head }) => {
2831 self.index_text = index.map(|mut index| {
2832 text::LineEnding::normalize(&mut index);
2833 Arc::new(index)
2834 });
2835 self.index_changed = true;
2836 self.head_text = head.map(|mut head| {
2837 text::LineEnding::normalize(&mut head);
2838 Arc::new(head)
2839 });
2840 self.head_changed = true;
2841 }
2842 None => {}
2843 }
2844
2845 self.recalculate_diffs(buffer, cx)
2846 }
2847
2848 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2849 *self.recalculating_tx.borrow_mut() = true;
2850
2851 let language = self.language.clone();
2852 let language_registry = self.language_registry.clone();
2853 let unstaged_diff = self.unstaged_diff();
2854 let uncommitted_diff = self.uncommitted_diff();
2855 let head = self.head_text.clone();
2856 let index = self.index_text.clone();
2857 let index_changed = self.index_changed;
2858 let head_changed = self.head_changed;
2859 let language_changed = self.language_changed;
2860 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
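        // When the index and HEAD share the same text, the unstaged diff can be reused
        // as the uncommitted diff instead of computing it twice (see below).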
2861 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2862 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2863 (None, None) => true,
2864 _ => false,
2865 };
2866 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2867 log::debug!(
2868 "start recalculating diffs for buffer {}",
2869 buffer.remote_id()
2870 );
2871
2872 let mut new_unstaged_diff = None;
2873 if let Some(unstaged_diff) = &unstaged_diff {
2874 new_unstaged_diff = Some(
2875 BufferDiff::update_diff(
2876 unstaged_diff.clone(),
2877 buffer.clone(),
2878 index,
2879 index_changed,
2880 language_changed,
2881 language.clone(),
2882 language_registry.clone(),
2883 cx,
2884 )
2885 .await?,
2886 );
2887 }
2888
2889 let mut new_uncommitted_diff = None;
2890 if let Some(uncommitted_diff) = &uncommitted_diff {
2891 new_uncommitted_diff = if index_matches_head {
2892 new_unstaged_diff.clone()
2893 } else {
2894 Some(
2895 BufferDiff::update_diff(
2896 uncommitted_diff.clone(),
2897 buffer.clone(),
2898 head,
2899 head_changed,
2900 language_changed,
2901 language.clone(),
2902 language_registry.clone(),
2903 cx,
2904 )
2905 .await?,
2906 )
2907 }
2908 }
2909
2910 let cancel = this.update(cx, |this, _| {
2911 // This checks whether all pending stage/unstage operations
2912 // have quiesced (i.e. both the corresponding write and the
2913 // read of that write have completed). If not, then we cancel
2914 // this recalculation attempt to avoid invalidating pending
2915 // state too quickly; another recalculation will come along
2916 // later and clear the pending state once the state of the index has settled.
2917 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2918 *this.recalculating_tx.borrow_mut() = false;
2919 true
2920 } else {
2921 false
2922 }
2923 })?;
2924 if cancel {
2925 log::debug!(
2926 concat!(
                        "aborting recalculating diffs for buffer {} ",
2928 "due to subsequent hunk operations",
2929 ),
2930 buffer.remote_id()
2931 );
2932 return Ok(());
2933 }
2934
2935 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2936 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2937 {
2938 unstaged_diff.update(cx, |diff, cx| {
2939 if language_changed {
2940 diff.language_changed(cx);
2941 }
2942 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2943 })?
2944 } else {
2945 None
2946 };
2947
2948 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2949 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2950 {
2951 uncommitted_diff.update(cx, |diff, cx| {
2952 if language_changed {
2953 diff.language_changed(cx);
2954 }
2955 diff.set_snapshot_with_secondary(
2956 new_uncommitted_diff,
2957 &buffer,
2958 unstaged_changed_range,
2959 true,
2960 cx,
2961 );
2962 })?;
2963 }
2964
2965 log::debug!(
2966 "finished recalculating diffs for buffer {}",
2967 buffer.remote_id()
2968 );
2969
2970 if let Some(this) = this.upgrade() {
2971 this.update(cx, |this, _| {
2972 this.index_changed = false;
2973 this.head_changed = false;
2974 this.language_changed = false;
2975 *this.recalculating_tx.borrow_mut() = false;
2976 })?;
2977 }
2978
2979 Ok(())
2980 }));
2981 }
2982}
2983
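/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream client
/// over the proto connection and zeroizes the plaintext response once it has been
/// handed off.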
2984fn make_remote_delegate(
2985 this: Entity<GitStore>,
2986 project_id: u64,
2987 repository_id: RepositoryId,
2988 askpass_id: u64,
2989 cx: &mut AsyncApp,
2990) -> AskPassDelegate {
2991 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2992 this.update(cx, |this, cx| {
2993 let Some((client, _)) = this.downstream_client() else {
2994 return;
2995 };
2996 let response = client.request(proto::AskPassRequest {
2997 project_id,
2998 repository_id: repository_id.to_proto(),
2999 askpass_id,
3000 prompt,
3001 });
3002 cx.spawn(async move |_, _| {
3003 let mut response = response.await?.response;
3004 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3005 .ok();
3006 response.zeroize();
3007 anyhow::Ok(())
3008 })
3009 .detach_and_log_err(cx);
3010 })
3011 .log_err();
3012 })
3013}
3014
3015impl RepositoryId {
3016 pub fn to_proto(self) -> u64 {
3017 self.0
3018 }
3019
3020 pub fn from_proto(id: u64) -> Self {
3021 RepositoryId(id)
3022 }
3023}
3024
3025impl RepositorySnapshot {
3026 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3027 Self {
3028 id,
3029 statuses_by_path: Default::default(),
3030 pending_ops_by_path: Default::default(),
3031 work_directory_abs_path,
3032 branch: None,
3033 head_commit: None,
3034 scan_id: 0,
3035 merge: Default::default(),
3036 remote_origin_url: None,
3037 remote_upstream_url: None,
3038 stash_entries: Default::default(),
3039 path_style,
3040 }
3041 }
3042
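    /// Builds the full `proto::UpdateRepository` message used when this repository is
    /// first shared: every status entry is included and nothing is marked as removed.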
3043 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3044 proto::UpdateRepository {
3045 branch_summary: self.branch.as_ref().map(branch_to_proto),
3046 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3047 updated_statuses: self
3048 .statuses_by_path
3049 .iter()
3050 .map(|entry| entry.to_proto())
3051 .collect(),
3052 removed_statuses: Default::default(),
3053 current_merge_conflicts: self
3054 .merge
3055 .conflicted_paths
3056 .iter()
3057 .map(|repo_path| repo_path.to_proto())
3058 .collect(),
3059 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3060 project_id,
3061 id: self.id.to_proto(),
3062 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3063 entry_ids: vec![self.id.to_proto()],
3064 scan_id: self.scan_id,
3065 is_last_update: true,
3066 stash_entries: self
3067 .stash_entries
3068 .entries
3069 .iter()
3070 .map(stash_to_proto)
3071 .collect(),
3072 }
3073 }
3074
3075 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3076 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3077 let mut removed_statuses: Vec<String> = Vec::new();
3078
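        // Merge-join the old and new status trees (both ordered by repo path) to find
        // entries that were added, modified, or removed since the previous snapshot.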
3079 let mut new_statuses = self.statuses_by_path.iter().peekable();
3080 let mut old_statuses = old.statuses_by_path.iter().peekable();
3081
3082 let mut current_new_entry = new_statuses.next();
3083 let mut current_old_entry = old_statuses.next();
3084 loop {
3085 match (current_new_entry, current_old_entry) {
3086 (Some(new_entry), Some(old_entry)) => {
3087 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3088 Ordering::Less => {
3089 updated_statuses.push(new_entry.to_proto());
3090 current_new_entry = new_statuses.next();
3091 }
3092 Ordering::Equal => {
3093 if new_entry.status != old_entry.status {
3094 updated_statuses.push(new_entry.to_proto());
3095 }
3096 current_old_entry = old_statuses.next();
3097 current_new_entry = new_statuses.next();
3098 }
3099 Ordering::Greater => {
3100 removed_statuses.push(old_entry.repo_path.to_proto());
3101 current_old_entry = old_statuses.next();
3102 }
3103 }
3104 }
3105 (None, Some(old_entry)) => {
3106 removed_statuses.push(old_entry.repo_path.to_proto());
3107 current_old_entry = old_statuses.next();
3108 }
3109 (Some(new_entry), None) => {
3110 updated_statuses.push(new_entry.to_proto());
3111 current_new_entry = new_statuses.next();
3112 }
3113 (None, None) => break,
3114 }
3115 }
3116
3117 proto::UpdateRepository {
3118 branch_summary: self.branch.as_ref().map(branch_to_proto),
3119 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3120 updated_statuses,
3121 removed_statuses,
3122 current_merge_conflicts: self
3123 .merge
3124 .conflicted_paths
3125 .iter()
3126 .map(|path| path.to_proto())
3127 .collect(),
3128 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3129 project_id,
3130 id: self.id.to_proto(),
3131 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3132 entry_ids: vec![],
3133 scan_id: self.scan_id,
3134 is_last_update: true,
3135 stash_entries: self
3136 .stash_entries
3137 .entries
3138 .iter()
3139 .map(stash_to_proto)
3140 .collect(),
3141 }
3142 }
3143
3144 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3145 self.statuses_by_path.iter().cloned()
3146 }
3147
3148 pub fn status_summary(&self) -> GitSummary {
3149 self.statuses_by_path.summary().item_summary
3150 }
3151
3152 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3153 self.statuses_by_path
3154 .get(&PathKey(path.0.clone()), ())
3155 .cloned()
3156 }
3157
3158 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOperations> + '_ {
3159 self.pending_ops_by_path.iter().cloned()
3160 }
3161
3162 pub fn pending_ops_summary(&self) -> PendingOperationsSummary {
3163 self.pending_ops_by_path.summary().item_summary
3164 }
3165
3166 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Vec<PendingOperation> {
3167 self.pending_ops_by_path
3168 .get(&PathKey(path.0.clone()), ())
3169 .map(|ops| ops.ops.clone())
3170 .unwrap_or(Vec::new())
3171 }
3172
3173 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3174 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3175 }
3176
3177 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3178 self.path_style
3179 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3180 .unwrap()
3181 .into()
3182 }
3183
3184 #[inline]
3185 fn abs_path_to_repo_path_inner(
3186 work_directory_abs_path: &Path,
3187 abs_path: &Path,
3188 path_style: PathStyle,
3189 ) -> Option<RepoPath> {
3190 abs_path
3191 .strip_prefix(&work_directory_abs_path)
3192 .ok()
3193 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3194 }
3195
3196 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3197 self.merge.conflicted_paths.contains(repo_path)
3198 }
3199
3200 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3201 let had_conflict_on_last_merge_head_change =
3202 self.merge.conflicted_paths.contains(repo_path);
3203 let has_conflict_currently = self
3204 .status_for_path(repo_path)
3205 .is_some_and(|entry| entry.status.is_conflicted());
3206 had_conflict_on_last_merge_head_change || has_conflict_currently
3207 }
3208
3209 /// This is the name that will be displayed in the repository selector for this repository.
3210 pub fn display_name(&self) -> SharedString {
3211 self.work_directory_abs_path
3212 .file_name()
3213 .unwrap_or_default()
3214 .to_string_lossy()
3215 .to_string()
3216 .into()
3217 }
3218}
3219
3220pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3221 proto::StashEntry {
3222 oid: entry.oid.as_bytes().to_vec(),
3223 message: entry.message.clone(),
3224 branch: entry.branch.clone(),
3225 index: entry.index as u64,
3226 timestamp: entry.timestamp,
3227 }
3228}
3229
3230pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3231 Ok(StashEntry {
3232 oid: Oid::from_bytes(&entry.oid)?,
3233 message: entry.message.clone(),
3234 index: entry.index as usize,
3235 branch: entry.branch.clone(),
3236 timestamp: entry.timestamp,
3237 })
3238}
3239
3240impl MergeDetails {
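    /// Re-reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD, etc.)
    /// and the merge message, returning the updated details along with a flag indicating
    /// whether the merge-head state should be treated as changed. That change is
    /// deliberately deferred while a merge is in progress but no conflicts have shown up
    /// in the status yet.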
3241 async fn load(
3242 backend: &Arc<dyn GitRepository>,
3243 status: &SumTree<StatusEntry>,
3244 prev_snapshot: &RepositorySnapshot,
3245 ) -> Result<(MergeDetails, bool)> {
3246 log::debug!("load merge details");
3247 let message = backend.merge_message().await;
3248 let heads = backend
3249 .revparse_batch(vec![
3250 "MERGE_HEAD".into(),
3251 "CHERRY_PICK_HEAD".into(),
3252 "REBASE_HEAD".into(),
3253 "REVERT_HEAD".into(),
3254 "APPLY_HEAD".into(),
3255 ])
3256 .await
3257 .log_err()
3258 .unwrap_or_default()
3259 .into_iter()
3260 .map(|opt| opt.map(SharedString::from))
3261 .collect::<Vec<_>>();
3262 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3263 let conflicted_paths = if merge_heads_changed {
3264 let current_conflicted_paths = TreeSet::from_ordered_entries(
3265 status
3266 .iter()
3267 .filter(|entry| entry.status.is_conflicted())
3268 .map(|entry| entry.repo_path.clone()),
3269 );
3270
            // A scan can run while a lengthy merge is in progress that will eventually
            // produce conflicts, but before `git status` reports them. Since, for now,
            // we only track the merge heads state for the purpose of detecting
            // conflicts, don't update that state until some conflicts actually appear.
3276 if heads.iter().any(Option::is_some)
3277 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3278 && current_conflicted_paths.is_empty()
3279 {
3280 log::debug!("not updating merge heads because no conflicts found");
3281 return Ok((
3282 MergeDetails {
3283 message: message.map(SharedString::from),
3284 ..prev_snapshot.merge.clone()
3285 },
3286 false,
3287 ));
3288 }
3289
3290 current_conflicted_paths
3291 } else {
3292 prev_snapshot.merge.conflicted_paths.clone()
3293 };
3294 let details = MergeDetails {
3295 conflicted_paths,
3296 message: message.map(SharedString::from),
3297 heads,
3298 };
3299 Ok((details, merge_heads_changed))
3300 }
3301}
3302
3303impl Repository {
3304 pub fn snapshot(&self) -> RepositorySnapshot {
3305 self.snapshot.clone()
3306 }
3307
3308 fn local(
3309 id: RepositoryId,
3310 work_directory_abs_path: Arc<Path>,
3311 dot_git_abs_path: Arc<Path>,
3312 repository_dir_abs_path: Arc<Path>,
3313 common_dir_abs_path: Arc<Path>,
3314 project_environment: WeakEntity<ProjectEnvironment>,
3315 fs: Arc<dyn Fs>,
3316 git_store: WeakEntity<GitStore>,
3317 cx: &mut Context<Self>,
3318 ) -> Self {
3319 let snapshot =
3320 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3321 Repository {
3322 this: cx.weak_entity(),
3323 git_store,
3324 snapshot,
3325 commit_message_buffer: None,
3326 askpass_delegates: Default::default(),
3327 paths_needing_status_update: Default::default(),
3328 latest_askpass_id: 0,
3329 job_sender: Repository::spawn_local_git_worker(
3330 work_directory_abs_path,
3331 dot_git_abs_path,
3332 repository_dir_abs_path,
3333 common_dir_abs_path,
3334 project_environment,
3335 fs,
3336 cx,
3337 ),
3338 job_id: 0,
3339 active_jobs: Default::default(),
3340 }
3341 }
3342
3343 fn remote(
3344 id: RepositoryId,
3345 work_directory_abs_path: Arc<Path>,
3346 path_style: PathStyle,
3347 project_id: ProjectId,
3348 client: AnyProtoClient,
3349 git_store: WeakEntity<GitStore>,
3350 cx: &mut Context<Self>,
3351 ) -> Self {
3352 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3353 Self {
3354 this: cx.weak_entity(),
3355 snapshot,
3356 commit_message_buffer: None,
3357 git_store,
3358 paths_needing_status_update: Default::default(),
3359 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3360 askpass_delegates: Default::default(),
3361 latest_askpass_id: 0,
3362 active_jobs: Default::default(),
3363 job_id: 0,
3364 }
3365 }
3366
3367 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3368 self.git_store.upgrade()
3369 }
3370
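    /// Reloads the index and HEAD base texts for every open buffer that belongs to this
    /// repository, forwards any changes to the downstream client, and triggers diff
    /// recalculation for the affected buffers. Only meaningful for local repositories.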
3371 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3372 let this = cx.weak_entity();
3373 let git_store = self.git_store.clone();
3374 let _ = self.send_keyed_job(
3375 Some(GitJobKey::ReloadBufferDiffBases),
3376 None,
3377 |state, mut cx| async move {
3378 let RepositoryState::Local { backend, .. } = state else {
3379 log::error!("tried to recompute diffs for a non-local repository");
3380 return Ok(());
3381 };
3382
3383 let Some(this) = this.upgrade() else {
3384 return Ok(());
3385 };
3386
3387 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3388 git_store.update(cx, |git_store, cx| {
3389 git_store
3390 .diffs
3391 .iter()
3392 .filter_map(|(buffer_id, diff_state)| {
3393 let buffer_store = git_store.buffer_store.read(cx);
3394 let buffer = buffer_store.get(*buffer_id)?;
3395 let file = File::from_dyn(buffer.read(cx).file())?;
3396 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3397 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3398 log::debug!(
3399 "start reload diff bases for repo path {}",
3400 repo_path.as_unix_str()
3401 );
3402 diff_state.update(cx, |diff_state, _| {
3403 let has_unstaged_diff = diff_state
3404 .unstaged_diff
3405 .as_ref()
3406 .is_some_and(|diff| diff.is_upgradable());
3407 let has_uncommitted_diff = diff_state
3408 .uncommitted_diff
3409 .as_ref()
3410 .is_some_and(|set| set.is_upgradable());
3411
3412 Some((
3413 buffer,
3414 repo_path,
3415 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3416 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3417 ))
3418 })
3419 })
3420 .collect::<Vec<_>>()
3421 })
3422 })??;
3423
3424 let buffer_diff_base_changes = cx
3425 .background_spawn(async move {
3426 let mut changes = Vec::new();
3427 for (buffer, repo_path, current_index_text, current_head_text) in
3428 &repo_diff_state_updates
3429 {
3430 let index_text = if current_index_text.is_some() {
3431 backend.load_index_text(repo_path.clone()).await
3432 } else {
3433 None
3434 };
3435 let head_text = if current_head_text.is_some() {
3436 backend.load_committed_text(repo_path.clone()).await
3437 } else {
3438 None
3439 };
3440
3441 let change =
3442 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3443 (Some(current_index), Some(current_head)) => {
3444 let index_changed =
3445 index_text.as_ref() != current_index.as_deref();
3446 let head_changed =
3447 head_text.as_ref() != current_head.as_deref();
3448 if index_changed && head_changed {
3449 if index_text == head_text {
3450 Some(DiffBasesChange::SetBoth(head_text))
3451 } else {
3452 Some(DiffBasesChange::SetEach {
3453 index: index_text,
3454 head: head_text,
3455 })
3456 }
3457 } else if index_changed {
3458 Some(DiffBasesChange::SetIndex(index_text))
3459 } else if head_changed {
3460 Some(DiffBasesChange::SetHead(head_text))
3461 } else {
3462 None
3463 }
3464 }
3465 (Some(current_index), None) => {
3466 let index_changed =
3467 index_text.as_ref() != current_index.as_deref();
3468 index_changed
3469 .then_some(DiffBasesChange::SetIndex(index_text))
3470 }
3471 (None, Some(current_head)) => {
3472 let head_changed =
3473 head_text.as_ref() != current_head.as_deref();
3474 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3475 }
3476 (None, None) => None,
3477 };
3478
3479 changes.push((buffer.clone(), change))
3480 }
3481 changes
3482 })
3483 .await;
3484
3485 git_store.update(&mut cx, |git_store, cx| {
3486 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3487 let buffer_snapshot = buffer.read(cx).text_snapshot();
3488 let buffer_id = buffer_snapshot.remote_id();
3489 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3490 continue;
3491 };
3492
3493 let downstream_client = git_store.downstream_client();
3494 diff_state.update(cx, |diff_state, cx| {
3495 use proto::update_diff_bases::Mode;
3496
3497 if let Some((diff_bases_change, (client, project_id))) =
3498 diff_bases_change.clone().zip(downstream_client)
3499 {
3500 let (staged_text, committed_text, mode) = match diff_bases_change {
3501 DiffBasesChange::SetIndex(index) => {
3502 (index, None, Mode::IndexOnly)
3503 }
3504 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3505 DiffBasesChange::SetEach { index, head } => {
3506 (index, head, Mode::IndexAndHead)
3507 }
3508 DiffBasesChange::SetBoth(text) => {
3509 (None, text, Mode::IndexMatchesHead)
3510 }
3511 };
3512 client
3513 .send(proto::UpdateDiffBases {
3514 project_id: project_id.to_proto(),
3515 buffer_id: buffer_id.to_proto(),
3516 staged_text,
3517 committed_text,
3518 mode: mode as i32,
3519 })
3520 .log_err();
3521 }
3522
3523 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3524 });
3525 }
3526 })
3527 },
3528 );
3529 }
3530
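    /// Enqueues a job on this repository's git worker and returns a receiver for its
    /// result. A rough usage sketch (illustrative only, not compiled as a doctest):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git show".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.show("HEAD".to_string()).await,
    ///         RepositoryState::Remote { .. } => anyhow::bail!("handled via proto request instead"),
    ///     }
    /// });
    /// ```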
3531 pub fn send_job<F, Fut, R>(
3532 &mut self,
3533 status: Option<SharedString>,
3534 job: F,
3535 ) -> oneshot::Receiver<R>
3536 where
3537 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3538 Fut: Future<Output = R> + 'static,
3539 R: Send + 'static,
3540 {
3541 self.send_keyed_job(None, status, job)
3542 }
3543
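    /// Like [`send_job`](Self::send_job), but also attaches an optional `key` to the
    /// queued `GitJob` so the worker can identify related jobs. When `status` is
    /// provided, the job is tracked in `active_jobs` for the duration of its run so its
    /// status message can be surfaced while it executes (with `cx.notify()` before and
    /// after).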
3544 fn send_keyed_job<F, Fut, R>(
3545 &mut self,
3546 key: Option<GitJobKey>,
3547 status: Option<SharedString>,
3548 job: F,
3549 ) -> oneshot::Receiver<R>
3550 where
3551 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3552 Fut: Future<Output = R> + 'static,
3553 R: Send + 'static,
3554 {
3555 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3556 let job_id = post_inc(&mut self.job_id);
3557 let this = self.this.clone();
3558 self.job_sender
3559 .unbounded_send(GitJob {
3560 key,
3561 job: Box::new(move |state, cx: &mut AsyncApp| {
3562 let job = job(state, cx.clone());
3563 cx.spawn(async move |cx| {
3564 if let Some(s) = status.clone() {
3565 this.update(cx, |this, cx| {
3566 this.active_jobs.insert(
3567 job_id,
3568 JobInfo {
3569 start: Instant::now(),
3570 message: s.clone(),
3571 },
3572 );
3573
3574 cx.notify();
3575 })
3576 .ok();
3577 }
3578 let result = job.await;
3579
3580 this.update(cx, |this, cx| {
3581 this.active_jobs.remove(&job_id);
3582 cx.notify();
3583 })
3584 .ok();
3585
3586 result_tx.send(result).ok();
3587 })
3588 }),
3589 })
3590 .ok();
3591 result_rx
3592 }
3593
3594 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3595 let Some(git_store) = self.git_store.upgrade() else {
3596 return;
3597 };
3598 let entity = cx.entity();
3599 git_store.update(cx, |git_store, cx| {
3600 let Some((&id, _)) = git_store
3601 .repositories
3602 .iter()
3603 .find(|(_, handle)| *handle == &entity)
3604 else {
3605 return;
3606 };
3607 git_store.active_repo_id = Some(id);
3608 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3609 });
3610 }
3611
3612 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3613 self.snapshot.status()
3614 }
3615
3616 pub fn cached_stash(&self) -> GitStash {
3617 self.snapshot.stash_entries.clone()
3618 }
3619
3620 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3621 let git_store = self.git_store.upgrade()?;
3622 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3623 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3624 let abs_path = SanitizedPath::new(&abs_path);
3625 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3626 Some(ProjectPath {
3627 worktree_id: worktree.read(cx).id(),
3628 path: relative_path,
3629 })
3630 }
3631
3632 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3633 let git_store = self.git_store.upgrade()?;
3634 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3635 let abs_path = worktree_store.absolutize(path, cx)?;
3636 self.snapshot.abs_path_to_repo_path(&abs_path)
3637 }
3638
3639 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3640 other
3641 .read(cx)
3642 .snapshot
3643 .work_directory_abs_path
3644 .starts_with(&self.snapshot.work_directory_abs_path)
3645 }
3646
3647 pub fn open_commit_buffer(
3648 &mut self,
3649 languages: Option<Arc<LanguageRegistry>>,
3650 buffer_store: Entity<BufferStore>,
3651 cx: &mut Context<Self>,
3652 ) -> Task<Result<Entity<Buffer>>> {
3653 let id = self.id;
3654 if let Some(buffer) = self.commit_message_buffer.clone() {
3655 return Task::ready(Ok(buffer));
3656 }
3657 let this = cx.weak_entity();
3658
3659 let rx = self.send_job(None, move |state, mut cx| async move {
3660 let Some(this) = this.upgrade() else {
3661 bail!("git store was dropped");
3662 };
3663 match state {
3664 RepositoryState::Local { .. } => {
3665 this.update(&mut cx, |_, cx| {
3666 Self::open_local_commit_buffer(languages, buffer_store, cx)
3667 })?
3668 .await
3669 }
3670 RepositoryState::Remote { project_id, client } => {
3671 let request = client.request(proto::OpenCommitMessageBuffer {
3672 project_id: project_id.0,
3673 repository_id: id.to_proto(),
3674 });
3675 let response = request.await.context("requesting to open commit buffer")?;
3676 let buffer_id = BufferId::new(response.buffer_id)?;
3677 let buffer = buffer_store
3678 .update(&mut cx, |buffer_store, cx| {
3679 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3680 })?
3681 .await?;
3682 if let Some(language_registry) = languages {
3683 let git_commit_language =
3684 language_registry.language_for_name("Git Commit").await?;
3685 buffer.update(&mut cx, |buffer, cx| {
3686 buffer.set_language(Some(git_commit_language), cx);
3687 })?;
3688 }
3689 this.update(&mut cx, |this, _| {
3690 this.commit_message_buffer = Some(buffer.clone());
3691 })?;
3692 Ok(buffer)
3693 }
3694 }
3695 });
3696
3697 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3698 }
3699
3700 fn open_local_commit_buffer(
3701 language_registry: Option<Arc<LanguageRegistry>>,
3702 buffer_store: Entity<BufferStore>,
3703 cx: &mut Context<Self>,
3704 ) -> Task<Result<Entity<Buffer>>> {
3705 cx.spawn(async move |repository, cx| {
3706 let buffer = buffer_store
3707 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3708 .await?;
3709
3710 if let Some(language_registry) = language_registry {
3711 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3712 buffer.update(cx, |buffer, cx| {
3713 buffer.set_language(Some(git_commit_language), cx);
3714 })?;
3715 }
3716
3717 repository.update(cx, |repository, _| {
3718 repository.commit_message_buffer = Some(buffer.clone());
3719 })?;
3720 Ok(buffer)
3721 })
3722 }
3723
3724 pub fn checkout_files(
3725 &mut self,
3726 commit: &str,
3727 paths: Vec<RepoPath>,
3728 _cx: &mut App,
3729 ) -> oneshot::Receiver<Result<()>> {
3730 let commit = commit.to_string();
3731 let id = self.id;
3732
3733 self.send_job(
3734 Some(format!("git checkout {}", commit).into()),
3735 move |git_repo, _| async move {
3736 match git_repo {
3737 RepositoryState::Local {
3738 backend,
3739 environment,
3740 ..
3741 } => {
3742 backend
3743 .checkout_files(commit, paths, environment.clone())
3744 .await
3745 }
3746 RepositoryState::Remote { project_id, client } => {
3747 client
3748 .request(proto::GitCheckoutFiles {
3749 project_id: project_id.0,
3750 repository_id: id.to_proto(),
3751 commit,
3752 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3753 })
3754 .await?;
3755
3756 Ok(())
3757 }
3758 }
3759 },
3760 )
3761 }
3762
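    /// Resets the repository to `commit` using the given mode (soft or mixed).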
3763 pub fn reset(
3764 &mut self,
3765 commit: String,
3766 reset_mode: ResetMode,
3767 _cx: &mut App,
3768 ) -> oneshot::Receiver<Result<()>> {
3769 let id = self.id;
3770
3771 self.send_job(None, move |git_repo, _| async move {
3772 match git_repo {
3773 RepositoryState::Local {
3774 backend,
3775 environment,
3776 ..
3777 } => backend.reset(commit, reset_mode, environment).await,
3778 RepositoryState::Remote { project_id, client } => {
3779 client
3780 .request(proto::GitReset {
3781 project_id: project_id.0,
3782 repository_id: id.to_proto(),
3783 commit,
3784 mode: match reset_mode {
3785 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3786 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3787 },
3788 })
3789 .await?;
3790
3791 Ok(())
3792 }
3793 }
3794 })
3795 }
3796
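    /// Loads the details (sha, message, timestamp, and author) of the given commit.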
3797 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3798 let id = self.id;
3799 self.send_job(None, move |git_repo, _cx| async move {
3800 match git_repo {
3801 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3802 RepositoryState::Remote { project_id, client } => {
3803 let resp = client
3804 .request(proto::GitShow {
3805 project_id: project_id.0,
3806 repository_id: id.to_proto(),
3807 commit,
3808 })
3809 .await?;
3810
3811 Ok(CommitDetails {
3812 sha: resp.sha.into(),
3813 message: resp.message.into(),
3814 commit_timestamp: resp.commit_timestamp,
3815 author_email: resp.author_email.into(),
3816 author_name: resp.author_name.into(),
3817 })
3818 }
3819 }
3820 })
3821 }
3822
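    /// Loads the diff introduced by the given commit, as a list of changed files
    /// with their old and new contents.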
3823 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3824 let id = self.id;
3825 self.send_job(None, move |git_repo, cx| async move {
3826 match git_repo {
3827 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3828 RepositoryState::Remote {
3829 client, project_id, ..
3830 } => {
3831 let response = client
3832 .request(proto::LoadCommitDiff {
3833 project_id: project_id.0,
3834 repository_id: id.to_proto(),
3835 commit,
3836 })
3837 .await?;
3838 Ok(CommitDiff {
3839 files: response
3840 .files
3841 .into_iter()
3842 .map(|file| {
3843 Ok(CommitFile {
3844 path: RepoPath::from_proto(&file.path)?,
3845 old_text: file.old_text,
3846 new_text: file.new_text,
3847 })
3848 })
3849 .collect::<Result<Vec<_>>>()?,
3850 })
3851 }
3852 }
3853 })
3854 }
3855
3856 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3857 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3858 }
3859
3860 fn save_buffers<'a>(
3861 &self,
3862 entries: impl IntoIterator<Item = &'a RepoPath>,
3863 cx: &mut Context<Self>,
3864 ) -> Vec<Task<anyhow::Result<()>>> {
3865 let mut save_futures = Vec::new();
3866 if let Some(buffer_store) = self.buffer_store(cx) {
3867 buffer_store.update(cx, |buffer_store, cx| {
3868 for path in entries {
3869 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3870 continue;
3871 };
3872 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3873 && buffer
3874 .read(cx)
3875 .file()
3876 .is_some_and(|file| file.disk_state().exists())
3877 && buffer.read(cx).has_unsaved_edits()
3878 {
3879 save_futures.push(buffer_store.save_buffer(buffer, cx));
3880 }
3881 }
3882 })
3883 }
3884 save_futures
3885 }
3886
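    /// Saves any unsaved buffers for the given paths, then stages the paths
    /// (`git add`). When a single path is staged, the job is keyed per path, so
    /// an older queued index write for that path is skipped in favor of the
    /// newest one.
    ///
    /// Illustrative sketch of a call site (names are hypothetical):
    ///
    /// ```ignore
    /// let task = repository.update(cx, |repo, cx| repo.stage_entries(paths, cx));
    /// task.await?;
    /// ```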
3887 pub fn stage_entries(
3888 &self,
3889 entries: Vec<RepoPath>,
3890 cx: &mut Context<Self>,
3891 ) -> Task<anyhow::Result<()>> {
3892 if entries.is_empty() {
3893 return Task::ready(Ok(()));
3894 }
3895 let id = self.id;
3896 let save_tasks = self.save_buffers(&entries, cx);
3897 let paths = entries
3898 .iter()
3899 .map(|p| p.as_unix_str())
3900 .collect::<Vec<_>>()
3901 .join(" ");
3902 let status = format!("git add {paths}");
3903 let job_key = match entries.len() {
3904 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3905 _ => None,
3906 };
3907
3908 cx.spawn(async move |this, cx| {
3909 for save_task in save_tasks {
3910 save_task.await?;
3911 }
3912
3913 this.update(cx, |this, _| {
3914 this.send_keyed_job(
3915 job_key,
3916 Some(status.into()),
3917 move |git_repo, _cx| async move {
3918 match git_repo {
3919 RepositoryState::Local {
3920 backend,
3921 environment,
3922 ..
3923 } => backend.stage_paths(entries, environment.clone()).await,
3924 RepositoryState::Remote { project_id, client } => {
3925 client
3926 .request(proto::Stage {
3927 project_id: project_id.0,
3928 repository_id: id.to_proto(),
3929 paths: entries
3930 .into_iter()
3931 .map(|repo_path| repo_path.to_proto())
3932 .collect(),
3933 })
3934 .await
3935 .context("sending stage request")?;
3936
3937 Ok(())
3938 }
3939 }
3940 },
3941 )
3942 })?
3943 .await??;
3944
3945 Ok(())
3946 })
3947 }
3948
3949 pub fn unstage_entries(
3950 &self,
3951 entries: Vec<RepoPath>,
3952 cx: &mut Context<Self>,
3953 ) -> Task<anyhow::Result<()>> {
3954 if entries.is_empty() {
3955 return Task::ready(Ok(()));
3956 }
3957 let id = self.id;
3958 let save_tasks = self.save_buffers(&entries, cx);
3959 let paths = entries
3960 .iter()
3961 .map(|p| p.as_unix_str())
3962 .collect::<Vec<_>>()
3963 .join(" ");
3964 let status = format!("git reset {paths}");
3965 let job_key = match entries.len() {
3966 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3967 _ => None,
3968 };
3969
3970 cx.spawn(async move |this, cx| {
3971 for save_task in save_tasks {
3972 save_task.await?;
3973 }
3974
3975 this.update(cx, |this, _| {
3976 this.send_keyed_job(
3977 job_key,
3978 Some(status.into()),
3979 move |git_repo, _cx| async move {
3980 match git_repo {
3981 RepositoryState::Local {
3982 backend,
3983 environment,
3984 ..
3985 } => backend.unstage_paths(entries, environment).await,
3986 RepositoryState::Remote { project_id, client } => {
3987 client
3988 .request(proto::Unstage {
3989 project_id: project_id.0,
3990 repository_id: id.to_proto(),
3991 paths: entries
3992 .into_iter()
3993 .map(|repo_path| repo_path.to_proto())
3994 .collect(),
3995 })
3996 .await
3997 .context("sending unstage request")?;
3998
3999 Ok(())
4000 }
4001 }
4002 },
4003 )
4004 })?
4005 .await??;
4006
4007 Ok(())
4008 })
4009 }
4010
4011 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4012 let to_stage = self
4013 .cached_status()
4014 .filter(|entry| !entry.status.staging().is_fully_staged())
4015 .map(|entry| entry.repo_path)
4016 .collect();
4017 self.stage_entries(to_stage, cx)
4018 }
4019
4020 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4021 let to_unstage = self
4022 .cached_status()
4023 .filter(|entry| entry.status.staging().has_staged())
4024 .map(|entry| entry.repo_path)
4025 .collect();
4026 self.unstage_entries(to_unstage, cx)
4027 }
4028
4029 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4030 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4031
4032 self.stash_entries(to_stash, cx)
4033 }
4034
4035 pub fn stash_entries(
4036 &mut self,
4037 entries: Vec<RepoPath>,
4038 cx: &mut Context<Self>,
4039 ) -> Task<anyhow::Result<()>> {
4040 let id = self.id;
4041
4042 cx.spawn(async move |this, cx| {
4043 this.update(cx, |this, _| {
4044 this.send_job(None, move |git_repo, _cx| async move {
4045 match git_repo {
4046 RepositoryState::Local {
4047 backend,
4048 environment,
4049 ..
4050 } => backend.stash_paths(entries, environment).await,
4051 RepositoryState::Remote { project_id, client } => {
4052 client
4053 .request(proto::Stash {
4054 project_id: project_id.0,
4055 repository_id: id.to_proto(),
4056 paths: entries
4057 .into_iter()
4058 .map(|repo_path| repo_path.to_proto())
4059 .collect(),
4060 })
4061 .await
4062 .context("sending stash request")?;
4063 Ok(())
4064 }
4065 }
4066 })
4067 })?
4068 .await??;
4069 Ok(())
4070 })
4071 }
4072
4073 pub fn stash_pop(
4074 &mut self,
4075 index: Option<usize>,
4076 cx: &mut Context<Self>,
4077 ) -> Task<anyhow::Result<()>> {
4078 let id = self.id;
4079 cx.spawn(async move |this, cx| {
4080 this.update(cx, |this, _| {
4081 this.send_job(None, move |git_repo, _cx| async move {
4082 match git_repo {
4083 RepositoryState::Local {
4084 backend,
4085 environment,
4086 ..
4087 } => backend.stash_pop(index, environment).await,
4088 RepositoryState::Remote { project_id, client } => {
4089 client
4090 .request(proto::StashPop {
4091 project_id: project_id.0,
4092 repository_id: id.to_proto(),
4093 stash_index: index.map(|i| i as u64),
4094 })
4095 .await
4096 .context("sending stash pop request")?;
4097 Ok(())
4098 }
4099 }
4100 })
4101 })?
4102 .await??;
4103 Ok(())
4104 })
4105 }
4106
4107 pub fn stash_apply(
4108 &mut self,
4109 index: Option<usize>,
4110 cx: &mut Context<Self>,
4111 ) -> Task<anyhow::Result<()>> {
4112 let id = self.id;
4113 cx.spawn(async move |this, cx| {
4114 this.update(cx, |this, _| {
4115 this.send_job(None, move |git_repo, _cx| async move {
4116 match git_repo {
4117 RepositoryState::Local {
4118 backend,
4119 environment,
4120 ..
4121 } => backend.stash_apply(index, environment).await,
4122 RepositoryState::Remote { project_id, client } => {
4123 client
4124 .request(proto::StashApply {
4125 project_id: project_id.0,
4126 repository_id: id.to_proto(),
4127 stash_index: index.map(|i| i as u64),
4128 })
4129 .await
4130 .context("sending stash apply request")?;
4131 Ok(())
4132 }
4133 }
4134 })
4135 })?
4136 .await??;
4137 Ok(())
4138 })
4139 }
4140
4141 pub fn stash_drop(
4142 &mut self,
4143 index: Option<usize>,
4144 cx: &mut Context<Self>,
4145 ) -> oneshot::Receiver<anyhow::Result<()>> {
4146 let id = self.id;
4147 let updates_tx = self
4148 .git_store()
4149 .and_then(|git_store| match &git_store.read(cx).state {
4150 GitStoreState::Local { downstream, .. } => downstream
4151 .as_ref()
4152 .map(|downstream| downstream.updates_tx.clone()),
4153 _ => None,
4154 });
4155 let this = cx.weak_entity();
4156 self.send_job(None, move |git_repo, mut cx| async move {
4157 match git_repo {
4158 RepositoryState::Local {
4159 backend,
4160 environment,
4161 ..
4162 } => {
4163 // TODO would be nice to not have to do this manually
4164 let result = backend.stash_drop(index, environment).await;
4165 if result.is_ok()
4166 && let Ok(stash_entries) = backend.stash_entries().await
4167 {
4168 let snapshot = this.update(&mut cx, |this, cx| {
4169 this.snapshot.stash_entries = stash_entries;
4170 cx.emit(RepositoryEvent::StashEntriesChanged);
4171 this.snapshot.clone()
4172 })?;
4173 if let Some(updates_tx) = updates_tx {
4174 updates_tx
4175 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4176 .ok();
4177 }
4178 }
4179
4180 result
4181 }
4182 RepositoryState::Remote { project_id, client } => {
4183 client
4184 .request(proto::StashDrop {
4185 project_id: project_id.0,
4186 repository_id: id.to_proto(),
4187 stash_index: index.map(|i| i as u64),
4188 })
4189 .await
                        .context("sending stash drop request")?;
4191 Ok(())
4192 }
4193 }
4194 })
4195 }
4196
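    /// Creates a commit with the given message, optionally overriding the author
    /// name and email, and honoring `CommitOptions` such as amend and signoff.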
4197 pub fn commit(
4198 &mut self,
4199 message: SharedString,
4200 name_and_email: Option<(SharedString, SharedString)>,
4201 options: CommitOptions,
4202 _cx: &mut App,
4203 ) -> oneshot::Receiver<Result<()>> {
4204 let id = self.id;
4205
4206 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4207 match git_repo {
4208 RepositoryState::Local {
4209 backend,
4210 environment,
4211 ..
4212 } => {
4213 backend
4214 .commit(message, name_and_email, options, environment)
4215 .await
4216 }
4217 RepositoryState::Remote { project_id, client } => {
4218 let (name, email) = name_and_email.unzip();
4219 client
4220 .request(proto::Commit {
4221 project_id: project_id.0,
4222 repository_id: id.to_proto(),
4223 message: String::from(message),
4224 name: name.map(String::from),
4225 email: email.map(String::from),
4226 options: Some(proto::commit::CommitOptions {
4227 amend: options.amend,
4228 signoff: options.signoff,
4229 }),
4230 })
4231 .await
4232 .context("sending commit request")?;
4233
4234 Ok(())
4235 }
4236 }
4237 })
4238 }
4239
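    /// Runs `git fetch` with the given options, using the askpass delegate to
    /// answer any credential prompts.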
4240 pub fn fetch(
4241 &mut self,
4242 fetch_options: FetchOptions,
4243 askpass: AskPassDelegate,
4244 _cx: &mut App,
4245 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4246 let askpass_delegates = self.askpass_delegates.clone();
4247 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4248 let id = self.id;
4249
4250 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4251 match git_repo {
4252 RepositoryState::Local {
4253 backend,
4254 environment,
4255 ..
4256 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4257 RepositoryState::Remote { project_id, client } => {
4258 askpass_delegates.lock().insert(askpass_id, askpass);
4259 let _defer = util::defer(|| {
4260 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4261 debug_assert!(askpass_delegate.is_some());
4262 });
4263
4264 let response = client
4265 .request(proto::Fetch {
4266 project_id: project_id.0,
4267 repository_id: id.to_proto(),
4268 askpass_id,
4269 remote: fetch_options.to_proto(),
4270 })
4271 .await
4272 .context("sending fetch request")?;
4273
4274 Ok(RemoteCommandOutput {
4275 stdout: response.stdout,
4276 stderr: response.stderr,
4277 })
4278 }
4279 }
4280 })
4281 }
4282
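    /// Pushes `branch` to `remote`. After a successful local push, the head
    /// branch is re-read and the refreshed snapshot is forwarded to any
    /// downstream clients.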
4283 pub fn push(
4284 &mut self,
4285 branch: SharedString,
4286 remote: SharedString,
4287 options: Option<PushOptions>,
4288 askpass: AskPassDelegate,
4289 cx: &mut Context<Self>,
4290 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4291 let askpass_delegates = self.askpass_delegates.clone();
4292 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4293 let id = self.id;
4294
4295 let args = options
4296 .map(|option| match option {
4297 PushOptions::SetUpstream => " --set-upstream",
4298 PushOptions::Force => " --force-with-lease",
4299 })
4300 .unwrap_or("");
4301
4302 let updates_tx = self
4303 .git_store()
4304 .and_then(|git_store| match &git_store.read(cx).state {
4305 GitStoreState::Local { downstream, .. } => downstream
4306 .as_ref()
4307 .map(|downstream| downstream.updates_tx.clone()),
4308 _ => None,
4309 });
4310
4311 let this = cx.weak_entity();
4312 self.send_job(
            Some(format!("git push{args} {remote} {branch}").into()),
4314 move |git_repo, mut cx| async move {
4315 match git_repo {
4316 RepositoryState::Local {
4317 backend,
4318 environment,
4319 ..
4320 } => {
4321 let result = backend
4322 .push(
4323 branch.to_string(),
4324 remote.to_string(),
4325 options,
4326 askpass,
4327 environment.clone(),
4328 cx.clone(),
4329 )
4330 .await;
4331 // TODO would be nice to not have to do this manually
4332 if result.is_ok() {
4333 let branches = backend.branches().await?;
4334 let branch = branches.into_iter().find(|branch| branch.is_head);
4335 log::info!("head branch after scan is {branch:?}");
4336 let snapshot = this.update(&mut cx, |this, cx| {
4337 this.snapshot.branch = branch;
4338 cx.emit(RepositoryEvent::BranchChanged);
4339 this.snapshot.clone()
4340 })?;
4341 if let Some(updates_tx) = updates_tx {
4342 updates_tx
4343 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4344 .ok();
4345 }
4346 }
4347 result
4348 }
4349 RepositoryState::Remote { project_id, client } => {
4350 askpass_delegates.lock().insert(askpass_id, askpass);
4351 let _defer = util::defer(|| {
4352 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4353 debug_assert!(askpass_delegate.is_some());
4354 });
4355 let response = client
4356 .request(proto::Push {
4357 project_id: project_id.0,
4358 repository_id: id.to_proto(),
4359 askpass_id,
4360 branch_name: branch.to_string(),
4361 remote_name: remote.to_string(),
4362 options: options.map(|options| match options {
4363 PushOptions::Force => proto::push::PushOptions::Force,
4364 PushOptions::SetUpstream => {
4365 proto::push::PushOptions::SetUpstream
4366 }
4367 }
4368 as i32),
4369 })
4370 .await
4371 .context("sending push request")?;
4372
4373 Ok(RemoteCommandOutput {
4374 stdout: response.stdout,
4375 stderr: response.stderr,
4376 })
4377 }
4378 }
4379 },
4380 )
4381 }
4382
4383 pub fn pull(
4384 &mut self,
4385 branch: SharedString,
4386 remote: SharedString,
4387 askpass: AskPassDelegate,
4388 _cx: &mut App,
4389 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4390 let askpass_delegates = self.askpass_delegates.clone();
4391 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4392 let id = self.id;
4393
4394 self.send_job(
4395 Some(format!("git pull {} {}", remote, branch).into()),
4396 move |git_repo, cx| async move {
4397 match git_repo {
4398 RepositoryState::Local {
4399 backend,
4400 environment,
4401 ..
4402 } => {
4403 backend
4404 .pull(
4405 branch.to_string(),
4406 remote.to_string(),
4407 askpass,
4408 environment.clone(),
4409 cx,
4410 )
4411 .await
4412 }
4413 RepositoryState::Remote { project_id, client } => {
4414 askpass_delegates.lock().insert(askpass_id, askpass);
4415 let _defer = util::defer(|| {
4416 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4417 debug_assert!(askpass_delegate.is_some());
4418 });
4419 let response = client
4420 .request(proto::Pull {
4421 project_id: project_id.0,
4422 repository_id: id.to_proto(),
4423 askpass_id,
4424 branch_name: branch.to_string(),
4425 remote_name: remote.to_string(),
4426 })
4427 .await
4428 .context("sending pull request")?;
4429
4430 Ok(RemoteCommandOutput {
4431 stdout: response.stdout,
4432 stderr: response.stderr,
4433 })
4434 }
4435 }
4436 },
4437 )
4438 }
4439
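    /// Queues a keyed job that writes `content` to the index entry for `path`.
    /// When a hunk-staging operation count is supplied, it is recorded on the
    /// buffer's diff state once the write completes.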
4440 fn spawn_set_index_text_job(
4441 &mut self,
4442 path: RepoPath,
4443 content: Option<String>,
4444 hunk_staging_operation_count: Option<usize>,
4445 cx: &mut Context<Self>,
4446 ) -> oneshot::Receiver<anyhow::Result<()>> {
4447 let id = self.id;
4448 let this = cx.weak_entity();
4449 let git_store = self.git_store.clone();
4450 self.send_keyed_job(
4451 Some(GitJobKey::WriteIndex(path.clone())),
4452 None,
4453 move |git_repo, mut cx| async move {
4454 log::debug!(
4455 "start updating index text for buffer {}",
4456 path.as_unix_str()
4457 );
4458 match git_repo {
4459 RepositoryState::Local {
4460 backend,
4461 environment,
4462 ..
4463 } => {
4464 backend
4465 .set_index_text(path.clone(), content, environment.clone())
4466 .await?;
4467 }
4468 RepositoryState::Remote { project_id, client } => {
4469 client
4470 .request(proto::SetIndexText {
4471 project_id: project_id.0,
4472 repository_id: id.to_proto(),
4473 path: path.to_proto(),
4474 text: content,
4475 })
4476 .await?;
4477 }
4478 }
4479 log::debug!(
4480 "finish updating index text for buffer {}",
4481 path.as_unix_str()
4482 );
4483
4484 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4485 let project_path = this
4486 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4487 .ok()
4488 .flatten();
4489 git_store.update(&mut cx, |git_store, cx| {
4490 let buffer_id = git_store
4491 .buffer_store
4492 .read(cx)
4493 .get_by_path(&project_path?)?
4494 .read(cx)
4495 .remote_id();
4496 let diff_state = git_store.diffs.get(&buffer_id)?;
4497 diff_state.update(cx, |diff_state, _| {
4498 diff_state.hunk_staging_operation_count_as_of_write =
4499 hunk_staging_operation_count;
4500 });
4501 Some(())
4502 })?;
4503 }
4504 Ok(())
4505 },
4506 )
4507 }
4508
4509 pub fn get_remotes(
4510 &mut self,
4511 branch_name: Option<String>,
4512 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4513 let id = self.id;
4514 self.send_job(None, move |repo, _cx| async move {
4515 match repo {
4516 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4517 RepositoryState::Remote { project_id, client } => {
4518 let response = client
4519 .request(proto::GetRemotes {
4520 project_id: project_id.0,
4521 repository_id: id.to_proto(),
4522 branch_name,
4523 })
4524 .await?;
4525
4526 let remotes = response
4527 .remotes
4528 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4532 .collect();
4533
4534 Ok(remotes)
4535 }
4536 }
4537 })
4538 }
4539
4540 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4541 let id = self.id;
4542 self.send_job(None, move |repo, _| async move {
4543 match repo {
4544 RepositoryState::Local { backend, .. } => backend.branches().await,
4545 RepositoryState::Remote { project_id, client } => {
4546 let response = client
4547 .request(proto::GitGetBranches {
4548 project_id: project_id.0,
4549 repository_id: id.to_proto(),
4550 })
4551 .await?;
4552
4553 let branches = response
4554 .branches
4555 .into_iter()
4556 .map(|branch| proto_to_branch(&branch))
4557 .collect();
4558
4559 Ok(branches)
4560 }
4561 }
4562 })
4563 }
4564
4565 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4566 let id = self.id;
4567 self.send_job(None, move |repo, _| async move {
4568 match repo {
4569 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4570 RepositoryState::Remote { project_id, client } => {
4571 let response = client
4572 .request(proto::GitGetWorktrees {
4573 project_id: project_id.0,
4574 repository_id: id.to_proto(),
4575 })
4576 .await?;
4577
4578 let worktrees = response
4579 .worktrees
4580 .into_iter()
4581 .map(|worktree| proto_to_worktree(&worktree))
4582 .collect();
4583
4584 Ok(worktrees)
4585 }
4586 }
4587 })
4588 }
4589
4590 pub fn create_worktree(
4591 &mut self,
4592 name: String,
4593 path: PathBuf,
4594 commit: Option<String>,
4595 ) -> oneshot::Receiver<Result<()>> {
4596 let id = self.id;
4597 self.send_job(
4598 Some("git worktree add".into()),
4599 move |repo, _cx| async move {
4600 match repo {
4601 RepositoryState::Local { backend, .. } => {
4602 backend.create_worktree(name, path, commit).await
4603 }
4604 RepositoryState::Remote { project_id, client } => {
4605 client
4606 .request(proto::GitCreateWorktree {
4607 project_id: project_id.0,
4608 repository_id: id.to_proto(),
4609 name,
4610 directory: path.to_string_lossy().to_string(),
4611 commit,
4612 })
4613 .await?;
4614
4615 Ok(())
4616 }
4617 }
4618 },
4619 )
4620 }
4621
4622 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4623 let id = self.id;
4624 self.send_job(None, move |repo, _| async move {
4625 match repo {
4626 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4627 RepositoryState::Remote { project_id, client } => {
4628 let response = client
4629 .request(proto::GetDefaultBranch {
4630 project_id: project_id.0,
4631 repository_id: id.to_proto(),
4632 })
4633 .await?;
4634
4635 anyhow::Ok(response.branch.map(SharedString::from))
4636 }
4637 }
4638 })
4639 }
4640
4641 pub fn diff_tree(
4642 &mut self,
4643 diff_type: DiffTreeType,
4644 _cx: &App,
4645 ) -> oneshot::Receiver<Result<TreeDiff>> {
4646 let repository_id = self.snapshot.id;
4647 self.send_job(None, move |repo, _cx| async move {
4648 match repo {
4649 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4650 RepositoryState::Remote { client, project_id } => {
4651 let response = client
4652 .request(proto::GetTreeDiff {
4653 project_id: project_id.0,
4654 repository_id: repository_id.0,
4655 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4656 base: diff_type.base().to_string(),
4657 head: diff_type.head().to_string(),
4658 })
4659 .await?;
4660
4661 let entries = response
4662 .entries
4663 .into_iter()
4664 .filter_map(|entry| {
4665 let status = match entry.status() {
4666 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4667 proto::tree_diff_status::Status::Modified => {
4668 TreeDiffStatus::Modified {
4669 old: git::Oid::from_str(
4670 &entry.oid.context("missing oid").log_err()?,
4671 )
4672 .log_err()?,
4673 }
4674 }
4675 proto::tree_diff_status::Status::Deleted => {
4676 TreeDiffStatus::Deleted {
4677 old: git::Oid::from_str(
4678 &entry.oid.context("missing oid").log_err()?,
4679 )
4680 .log_err()?,
4681 }
4682 }
4683 };
4684 Some((
4685 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4686 status,
4687 ))
4688 })
4689 .collect();
4690
4691 Ok(TreeDiff { entries })
4692 }
4693 }
4694 })
4695 }
4696
4697 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4698 let id = self.id;
4699 self.send_job(None, move |repo, _cx| async move {
4700 match repo {
4701 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4702 RepositoryState::Remote { project_id, client } => {
4703 let response = client
4704 .request(proto::GitDiff {
4705 project_id: project_id.0,
4706 repository_id: id.to_proto(),
4707 diff_type: match diff_type {
4708 DiffType::HeadToIndex => {
4709 proto::git_diff::DiffType::HeadToIndex.into()
4710 }
4711 DiffType::HeadToWorktree => {
4712 proto::git_diff::DiffType::HeadToWorktree.into()
4713 }
4714 },
4715 })
4716 .await?;
4717
4718 Ok(response.diff)
4719 }
4720 }
4721 })
4722 }
4723
4724 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4725 let id = self.id;
4726 self.send_job(
4727 Some(format!("git switch -c {branch_name}").into()),
4728 move |repo, _cx| async move {
4729 match repo {
4730 RepositoryState::Local { backend, .. } => {
4731 backend.create_branch(branch_name).await
4732 }
4733 RepositoryState::Remote { project_id, client } => {
4734 client
4735 .request(proto::GitCreateBranch {
4736 project_id: project_id.0,
4737 repository_id: id.to_proto(),
4738 branch_name,
4739 })
4740 .await?;
4741
4742 Ok(())
4743 }
4744 }
4745 },
4746 )
4747 }
4748
4749 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4750 let id = self.id;
4751 self.send_job(
4752 Some(format!("git switch {branch_name}").into()),
4753 move |repo, _cx| async move {
4754 match repo {
4755 RepositoryState::Local { backend, .. } => {
4756 backend.change_branch(branch_name).await
4757 }
4758 RepositoryState::Remote { project_id, client } => {
4759 client
4760 .request(proto::GitChangeBranch {
4761 project_id: project_id.0,
4762 repository_id: id.to_proto(),
4763 branch_name,
4764 })
4765 .await?;
4766
4767 Ok(())
4768 }
4769 }
4770 },
4771 )
4772 }
4773
4774 pub fn rename_branch(
4775 &mut self,
4776 branch: String,
4777 new_name: String,
4778 ) -> oneshot::Receiver<Result<()>> {
4779 let id = self.id;
4780 self.send_job(
4781 Some(format!("git branch -m {branch} {new_name}").into()),
4782 move |repo, _cx| async move {
4783 match repo {
4784 RepositoryState::Local { backend, .. } => {
4785 backend.rename_branch(branch, new_name).await
4786 }
4787 RepositoryState::Remote { project_id, client } => {
4788 client
4789 .request(proto::GitRenameBranch {
4790 project_id: project_id.0,
4791 repository_id: id.to_proto(),
4792 branch,
4793 new_name,
4794 })
4795 .await?;
4796
4797 Ok(())
4798 }
4799 }
4800 },
4801 )
4802 }
4803
4804 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4805 let id = self.id;
4806 self.send_job(None, move |repo, _cx| async move {
4807 match repo {
4808 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4809 RepositoryState::Remote { project_id, client } => {
4810 let response = client
4811 .request(proto::CheckForPushedCommits {
4812 project_id: project_id.0,
4813 repository_id: id.to_proto(),
4814 })
4815 .await?;
4816
4817 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4818
4819 Ok(branches)
4820 }
4821 }
4822 })
4823 }
4824
4825 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4826 self.send_job(None, |repo, _cx| async move {
4827 match repo {
4828 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4829 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4830 }
4831 })
4832 }
4833
4834 pub fn restore_checkpoint(
4835 &mut self,
4836 checkpoint: GitRepositoryCheckpoint,
4837 ) -> oneshot::Receiver<Result<()>> {
4838 self.send_job(None, move |repo, _cx| async move {
4839 match repo {
4840 RepositoryState::Local { backend, .. } => {
4841 backend.restore_checkpoint(checkpoint).await
4842 }
4843 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4844 }
4845 })
4846 }
4847
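    /// Applies a `proto::UpdateRepository` message received over RPC to this
    /// repository's snapshot, emitting `RepositoryEvent`s for branch, stash,
    /// and status changes.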
4848 pub(crate) fn apply_remote_update(
4849 &mut self,
4850 update: proto::UpdateRepository,
4851 cx: &mut Context<Self>,
4852 ) -> Result<()> {
4853 let conflicted_paths = TreeSet::from_ordered_entries(
4854 update
4855 .current_merge_conflicts
4856 .into_iter()
4857 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4858 );
4859 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4860 let new_head_commit = update
4861 .head_commit_details
4862 .as_ref()
4863 .map(proto_to_commit_details);
4864 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4865 cx.emit(RepositoryEvent::BranchChanged)
4866 }
4867 self.snapshot.branch = new_branch;
4868 self.snapshot.head_commit = new_head_commit;
4869
4870 self.snapshot.merge.conflicted_paths = conflicted_paths;
4871 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4872 let new_stash_entries = GitStash {
4873 entries: update
4874 .stash_entries
4875 .iter()
4876 .filter_map(|entry| proto_to_stash(entry).ok())
4877 .collect(),
4878 };
4879 if self.snapshot.stash_entries != new_stash_entries {
4880 cx.emit(RepositoryEvent::StashEntriesChanged)
4881 }
4882 self.snapshot.stash_entries = new_stash_entries;
4883
4884 let edits = update
4885 .removed_statuses
4886 .into_iter()
4887 .filter_map(|path| {
4888 Some(sum_tree::Edit::Remove(PathKey(
4889 RelPath::from_proto(&path).log_err()?,
4890 )))
4891 })
4892 .chain(
4893 update
4894 .updated_statuses
4895 .into_iter()
4896 .filter_map(|updated_status| {
4897 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4898 }),
4899 )
4900 .collect::<Vec<_>>();
4901 if !edits.is_empty() {
4902 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4903 }
4904 self.snapshot.statuses_by_path.edit(edits, ());
4905 if update.is_last_update {
4906 self.snapshot.scan_id = update.scan_id;
4907 }
4908 Ok(())
4909 }
4910
4911 pub fn compare_checkpoints(
4912 &mut self,
4913 left: GitRepositoryCheckpoint,
4914 right: GitRepositoryCheckpoint,
4915 ) -> oneshot::Receiver<Result<bool>> {
4916 self.send_job(None, move |repo, _cx| async move {
4917 match repo {
4918 RepositoryState::Local { backend, .. } => {
4919 backend.compare_checkpoints(left, right).await
4920 }
4921 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4922 }
4923 })
4924 }
4925
4926 pub fn diff_checkpoints(
4927 &mut self,
4928 base_checkpoint: GitRepositoryCheckpoint,
4929 target_checkpoint: GitRepositoryCheckpoint,
4930 ) -> oneshot::Receiver<Result<String>> {
4931 self.send_job(None, move |repo, _cx| async move {
4932 match repo {
4933 RepositoryState::Local { backend, .. } => {
4934 backend
4935 .diff_checkpoints(base_checkpoint, target_checkpoint)
4936 .await
4937 }
4938 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4939 }
4940 })
4941 }
4942
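    /// Enqueues a keyed job that recomputes the full repository snapshot
    /// (branch, head commit, statuses, merge state, and stash) and forwards the
    /// result to any downstream clients.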
4943 fn schedule_scan(
4944 &mut self,
4945 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4946 cx: &mut Context<Self>,
4947 ) {
4948 let this = cx.weak_entity();
4949 let _ = self.send_keyed_job(
4950 Some(GitJobKey::ReloadGitState),
4951 None,
4952 |state, mut cx| async move {
4953 log::debug!("run scheduled git status scan");
4954
4955 let Some(this) = this.upgrade() else {
4956 return Ok(());
4957 };
4958 let RepositoryState::Local { backend, .. } = state else {
4959 bail!("not a local repository")
4960 };
4961 let (snapshot, events) = this
4962 .update(&mut cx, |this, _| {
4963 this.paths_needing_status_update.clear();
4964 compute_snapshot(
4965 this.id,
4966 this.work_directory_abs_path.clone(),
4967 this.snapshot.clone(),
4968 backend.clone(),
4969 )
4970 })?
4971 .await?;
4972 this.update(&mut cx, |this, cx| {
4973 this.snapshot = snapshot.clone();
4974 for event in events {
4975 cx.emit(event);
4976 }
4977 })?;
4978 if let Some(updates_tx) = updates_tx {
4979 updates_tx
4980 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4981 .ok();
4982 }
4983 Ok(())
4984 },
4985 );
4986 }
4987
4988 fn spawn_local_git_worker(
4989 work_directory_abs_path: Arc<Path>,
4990 dot_git_abs_path: Arc<Path>,
4991 _repository_dir_abs_path: Arc<Path>,
4992 _common_dir_abs_path: Arc<Path>,
4993 project_environment: WeakEntity<ProjectEnvironment>,
4994 fs: Arc<dyn Fs>,
4995 cx: &mut Context<Self>,
4996 ) -> mpsc::UnboundedSender<GitJob> {
4997 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4998
4999 cx.spawn(async move |_, cx| {
5000 let environment = project_environment
5001 .upgrade()
5002 .context("missing project environment")?
                .update(cx, |project_environment, cx| {
                    project_environment.local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
                })?
5006 .await
5007 .unwrap_or_else(|| {
5008 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5009 HashMap::default()
5010 });
5011 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5012 let backend = cx
5013 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
5016 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5017 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5018 })
5019 .await?;
5020
5021 if let Some(git_hosting_provider_registry) =
5022 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5023 {
5024 git_hosting_providers::register_additional_providers(
5025 git_hosting_provider_registry,
5026 backend.clone(),
5027 );
5028 }
5029
5030 let state = RepositoryState::Local {
5031 backend,
5032 environment: Arc::new(environment),
5033 };
5034 let mut jobs = VecDeque::new();
5035 loop {
5036 while let Ok(Some(next_job)) = job_rx.try_next() {
5037 jobs.push_back(next_job);
5038 }
5039
5040 if let Some(job) = jobs.pop_front() {
5041 if let Some(current_key) = &job.key
5042 && jobs
5043 .iter()
5044 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5045 {
5046 continue;
5047 }
5048 (job.job)(state.clone(), cx).await;
5049 } else if let Some(job) = job_rx.next().await {
5050 jobs.push_back(job);
5051 } else {
5052 break;
5053 }
5054 }
5055 anyhow::Ok(())
5056 })
5057 .detach_and_log_err(cx);
5058
5059 job_tx
5060 }
5061
5062 fn spawn_remote_git_worker(
5063 project_id: ProjectId,
5064 client: AnyProtoClient,
5065 cx: &mut Context<Self>,
5066 ) -> mpsc::UnboundedSender<GitJob> {
5067 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5068
5069 cx.spawn(async move |_, cx| {
5070 let state = RepositoryState::Remote { project_id, client };
5071 let mut jobs = VecDeque::new();
5072 loop {
5073 while let Ok(Some(next_job)) = job_rx.try_next() {
5074 jobs.push_back(next_job);
5075 }
5076
5077 if let Some(job) = jobs.pop_front() {
5078 if let Some(current_key) = &job.key
5079 && jobs
5080 .iter()
5081 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5082 {
5083 continue;
5084 }
5085 (job.job)(state.clone(), cx).await;
5086 } else if let Some(job) = job_rx.next().await {
5087 jobs.push_back(job);
5088 } else {
5089 break;
5090 }
5091 }
5092 anyhow::Ok(())
5093 })
5094 .detach_and_log_err(cx);
5095
5096 job_tx
5097 }
5098
5099 fn load_staged_text(
5100 &mut self,
5101 buffer_id: BufferId,
5102 repo_path: RepoPath,
5103 cx: &App,
5104 ) -> Task<Result<Option<String>>> {
5105 let rx = self.send_job(None, move |state, _| async move {
5106 match state {
5107 RepositoryState::Local { backend, .. } => {
5108 anyhow::Ok(backend.load_index_text(repo_path).await)
5109 }
5110 RepositoryState::Remote { project_id, client } => {
5111 let response = client
5112 .request(proto::OpenUnstagedDiff {
5113 project_id: project_id.to_proto(),
5114 buffer_id: buffer_id.to_proto(),
5115 })
5116 .await?;
5117 Ok(response.staged_text)
5118 }
5119 }
5120 });
5121 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5122 }
5123
5124 fn load_committed_text(
5125 &mut self,
5126 buffer_id: BufferId,
5127 repo_path: RepoPath,
5128 cx: &App,
5129 ) -> Task<Result<DiffBasesChange>> {
5130 let rx = self.send_job(None, move |state, _| async move {
5131 match state {
5132 RepositoryState::Local { backend, .. } => {
5133 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5134 let staged_text = backend.load_index_text(repo_path).await;
5135 let diff_bases_change = if committed_text == staged_text {
5136 DiffBasesChange::SetBoth(committed_text)
5137 } else {
5138 DiffBasesChange::SetEach {
5139 index: staged_text,
5140 head: committed_text,
5141 }
5142 };
5143 anyhow::Ok(diff_bases_change)
5144 }
5145 RepositoryState::Remote { project_id, client } => {
5146 use proto::open_uncommitted_diff_response::Mode;
5147
5148 let response = client
5149 .request(proto::OpenUncommittedDiff {
5150 project_id: project_id.to_proto(),
5151 buffer_id: buffer_id.to_proto(),
5152 })
5153 .await?;
5154 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5155 let bases = match mode {
5156 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5157 Mode::IndexAndHead => DiffBasesChange::SetEach {
5158 head: response.committed_text,
5159 index: response.staged_text,
5160 },
5161 };
5162 Ok(bases)
5163 }
5164 }
5165 });
5166
5167 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5168 }
5169 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5170 let repository_id = self.snapshot.id;
5171 let rx = self.send_job(None, move |state, _| async move {
5172 match state {
5173 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5174 RepositoryState::Remote { client, project_id } => {
5175 let response = client
5176 .request(proto::GetBlobContent {
5177 project_id: project_id.to_proto(),
5178 repository_id: repository_id.0,
5179 oid: oid.to_string(),
5180 })
5181 .await?;
5182 Ok(response.content)
5183 }
5184 }
5185 });
5186 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5187 }
5188
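    /// Records paths whose git status may have changed and enqueues a keyed job
    /// that re-queries their status, applies incremental edits to the snapshot,
    /// and notifies downstream clients.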
5189 fn paths_changed(
5190 &mut self,
5191 paths: Vec<RepoPath>,
5192 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5193 cx: &mut Context<Self>,
5194 ) {
5195 self.paths_needing_status_update.extend(paths);
5196
5197 let this = cx.weak_entity();
5198 let _ = self.send_keyed_job(
5199 Some(GitJobKey::RefreshStatuses),
5200 None,
5201 |state, mut cx| async move {
5202 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5203 (
5204 this.snapshot.clone(),
5205 mem::take(&mut this.paths_needing_status_update),
5206 )
5207 })?;
5208 let RepositoryState::Local { backend, .. } = state else {
5209 bail!("not a local repository")
5210 };
5211
5212 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5213 if paths.is_empty() {
5214 return Ok(());
5215 }
5216 let statuses = backend.status(&paths).await?;
5217 let stash_entries = backend.stash_entries().await?;
5218
5219 let changed_path_statuses = cx
5220 .background_spawn(async move {
5221 let mut changed_path_statuses = Vec::new();
5222 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5223 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5224
5225 for (repo_path, status) in &*statuses.entries {
5226 changed_paths.remove(repo_path);
5227 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5228 && cursor.item().is_some_and(|entry| entry.status == *status)
5229 {
5230 continue;
5231 }
5232
5233 changed_path_statuses.push(Edit::Insert(StatusEntry {
5234 repo_path: repo_path.clone(),
5235 status: *status,
5236 }));
5237 }
5238 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5239 for path in changed_paths.into_iter() {
5240 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5241 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5242 }
5243 }
5244 changed_path_statuses
5245 })
5246 .await;
5247
5248 this.update(&mut cx, |this, cx| {
5249 if this.snapshot.stash_entries != stash_entries {
5250 cx.emit(RepositoryEvent::StashEntriesChanged);
5251 this.snapshot.stash_entries = stash_entries;
5252 }
5253
5254 if !changed_path_statuses.is_empty() {
5255 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5256 this.snapshot
5257 .statuses_by_path
5258 .edit(changed_path_statuses, ());
5259 this.snapshot.scan_id += 1;
5260 }
5261
5262 if let Some(updates_tx) = updates_tx {
5263 updates_tx
5264 .unbounded_send(DownstreamUpdate::UpdateRepository(
5265 this.snapshot.clone(),
5266 ))
5267 .ok();
5268 }
5269 })
5270 },
5271 );
5272 }
5273
    /// Returns information about a git command currently running against this
    /// repository, if any, including when it started.
5275 pub fn current_job(&self) -> Option<JobInfo> {
5276 self.active_jobs.values().next().cloned()
5277 }
5278
5279 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5280 self.send_job(None, |_, _| async {})
5281 }
5282}
5283
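/// Builds a permalink for a file vendored in the Cargo registry's `src`
/// directory by reading the crate's `.cargo_vcs_info.json` and `Cargo.toml`
/// to recover the upstream repository URL and commit sha.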
5284fn get_permalink_in_rust_registry_src(
5285 provider_registry: Arc<GitHostingProviderRegistry>,
5286 path: PathBuf,
5287 selection: Range<u32>,
5288) -> Result<url::Url> {
5289 #[derive(Deserialize)]
5290 struct CargoVcsGit {
5291 sha1: String,
5292 }
5293
5294 #[derive(Deserialize)]
5295 struct CargoVcsInfo {
5296 git: CargoVcsGit,
5297 path_in_vcs: String,
5298 }
5299
5300 #[derive(Deserialize)]
5301 struct CargoPackage {
5302 repository: String,
5303 }
5304
5305 #[derive(Deserialize)]
5306 struct CargoToml {
5307 package: CargoPackage,
5308 }
5309
5310 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5311 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5312 Some((dir, json))
5313 }) else {
5314 bail!("No .cargo_vcs_info.json found in parent directories")
5315 };
5316 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5317 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5318 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5319 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5320 .context("parsing package.repository field of manifest")?;
5321 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5322 let permalink = provider.build_permalink(
5323 remote,
5324 BuildPermalinkParams::new(
5325 &cargo_vcs_info.git.sha1,
5326 &RepoPath(
5327 RelPath::new(&path, PathStyle::local())
5328 .context("invalid path")?
5329 .into_arc(),
5330 ),
5331 Some(selection),
5332 ),
5333 );
5334 Ok(permalink)
5335}
5336
5337fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5338 let Some(blame) = blame else {
5339 return proto::BlameBufferResponse {
5340 blame_response: None,
5341 };
5342 };
5343
5344 let entries = blame
5345 .entries
5346 .into_iter()
5347 .map(|entry| proto::BlameEntry {
5348 sha: entry.sha.as_bytes().into(),
5349 start_line: entry.range.start,
5350 end_line: entry.range.end,
5351 original_line_number: entry.original_line_number,
5352 author: entry.author,
5353 author_mail: entry.author_mail,
5354 author_time: entry.author_time,
5355 author_tz: entry.author_tz,
5356 committer: entry.committer_name,
5357 committer_mail: entry.committer_email,
5358 committer_time: entry.committer_time,
5359 committer_tz: entry.committer_tz,
5360 summary: entry.summary,
5361 previous: entry.previous,
5362 filename: entry.filename,
5363 })
5364 .collect::<Vec<_>>();
5365
5366 let messages = blame
5367 .messages
5368 .into_iter()
5369 .map(|(oid, message)| proto::CommitMessage {
5370 oid: oid.as_bytes().into(),
5371 message,
5372 })
5373 .collect::<Vec<_>>();
5374
5375 proto::BlameBufferResponse {
5376 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5377 entries,
5378 messages,
5379 remote_url: blame.remote_url,
5380 }),
5381 }
5382}
5383
5384fn deserialize_blame_buffer_response(
5385 response: proto::BlameBufferResponse,
5386) -> Option<git::blame::Blame> {
5387 let response = response.blame_response?;
5388 let entries = response
5389 .entries
5390 .into_iter()
5391 .filter_map(|entry| {
5392 Some(git::blame::BlameEntry {
5393 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5394 range: entry.start_line..entry.end_line,
5395 original_line_number: entry.original_line_number,
5396 committer_name: entry.committer,
5397 committer_time: entry.committer_time,
5398 committer_tz: entry.committer_tz,
5399 committer_email: entry.committer_mail,
5400 author: entry.author,
5401 author_mail: entry.author_mail,
5402 author_time: entry.author_time,
5403 author_tz: entry.author_tz,
5404 summary: entry.summary,
5405 previous: entry.previous,
5406 filename: entry.filename,
5407 })
5408 })
5409 .collect::<Vec<_>>();
5410
5411 let messages = response
5412 .messages
5413 .into_iter()
5414 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5415 .collect::<HashMap<_, _>>();
5416
5417 Some(Blame {
5418 entries,
5419 messages,
5420 remote_url: response.remote_url,
5421 })
5422}
5423
5424fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5425 proto::Branch {
5426 is_head: branch.is_head,
5427 ref_name: branch.ref_name.to_string(),
5428 unix_timestamp: branch
5429 .most_recent_commit
5430 .as_ref()
5431 .map(|commit| commit.commit_timestamp as u64),
5432 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5433 ref_name: upstream.ref_name.to_string(),
5434 tracking: upstream
5435 .tracking
5436 .status()
5437 .map(|upstream| proto::UpstreamTracking {
5438 ahead: upstream.ahead as u64,
5439 behind: upstream.behind as u64,
5440 }),
5441 }),
5442 most_recent_commit: branch
5443 .most_recent_commit
5444 .as_ref()
5445 .map(|commit| proto::CommitSummary {
5446 sha: commit.sha.to_string(),
5447 subject: commit.subject.to_string(),
5448 commit_timestamp: commit.commit_timestamp,
5449 author_name: commit.author_name.to_string(),
5450 }),
5451 }
5452}
5453
5454fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5455 proto::Worktree {
5456 path: worktree.path.to_string_lossy().to_string(),
5457 ref_name: worktree.ref_name.to_string(),
5458 sha: worktree.sha.to_string(),
5459 }
5460}
5461
5462fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5463 git::repository::Worktree {
5464 path: PathBuf::from(proto.path.clone()),
5465 ref_name: proto.ref_name.clone().into(),
5466 sha: proto.sha.clone().into(),
5467 }
5468}
5469
5470fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5471 git::repository::Branch {
5472 is_head: proto.is_head,
5473 ref_name: proto.ref_name.clone().into(),
5474 upstream: proto
5475 .upstream
5476 .as_ref()
5477 .map(|upstream| git::repository::Upstream {
5478 ref_name: upstream.ref_name.to_string().into(),
5479 tracking: upstream
5480 .tracking
5481 .as_ref()
5482 .map(|tracking| {
5483 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5484 ahead: tracking.ahead as u32,
5485 behind: tracking.behind as u32,
5486 })
5487 })
5488 .unwrap_or(git::repository::UpstreamTracking::Gone),
5489 }),
5490 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5491 git::repository::CommitSummary {
5492 sha: commit.sha.to_string().into(),
5493 subject: commit.subject.to_string().into(),
5494 commit_timestamp: commit.commit_timestamp,
5495 author_name: commit.author_name.to_string().into(),
5496 has_parent: true,
5497 }
5498 }),
5499 }
5500}
5501
5502fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5503 proto::GitCommitDetails {
5504 sha: commit.sha.to_string(),
5505 message: commit.message.to_string(),
5506 commit_timestamp: commit.commit_timestamp,
5507 author_email: commit.author_email.to_string(),
5508 author_name: commit.author_name.to_string(),
5509 }
5510}
5511
5512fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5513 CommitDetails {
5514 sha: proto.sha.clone().into(),
5515 message: proto.message.clone().into(),
5516 commit_timestamp: proto.commit_timestamp,
5517 author_email: proto.author_email.clone().into(),
5518 author_name: proto.author_name.clone().into(),
5519 }
5520}
5521
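/// Computes a fresh `RepositorySnapshot` from the git backend, returning it
/// along with the `RepositoryEvent`s describing how it differs from
/// `prev_snapshot`.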
5522async fn compute_snapshot(
5523 id: RepositoryId,
5524 work_directory_abs_path: Arc<Path>,
5525 prev_snapshot: RepositorySnapshot,
5526 backend: Arc<dyn GitRepository>,
5527) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5528 let mut events = Vec::new();
5529 let branches = backend.branches().await?;
5530 let branch = branches.into_iter().find(|branch| branch.is_head);
5531 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5532 let stash_entries = backend.stash_entries().await?;
5533 let statuses_by_path = SumTree::from_iter(
5534 statuses
5535 .entries
5536 .iter()
5537 .map(|(repo_path, status)| StatusEntry {
5538 repo_path: repo_path.clone(),
5539 status: *status,
5540 }),
5541 (),
5542 );
5543 let (merge_details, merge_heads_changed) =
5544 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5545 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5546
5547 let pending_ops_by_path = prev_snapshot.pending_ops_by_path.clone();
5548
5549 if merge_heads_changed {
5550 events.push(RepositoryEvent::MergeHeadsChanged);
5551 }
5552
5553 if statuses_by_path != prev_snapshot.statuses_by_path {
5554 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5555 }
5556
    // Load the head commit directly; `branch` is `None` when HEAD is detached.
5558 let head_commit = match backend.head_sha().await {
5559 Some(head_sha) => backend.show(head_sha).await.log_err(),
5560 None => None,
5561 };
5562
5563 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5564 events.push(RepositoryEvent::BranchChanged);
5565 }
5566
5567 // Used by edit prediction data collection
5568 let remote_origin_url = backend.remote_url("origin");
5569 let remote_upstream_url = backend.remote_url("upstream");
5570
5571 let snapshot = RepositorySnapshot {
5572 id,
5573 statuses_by_path,
5574 pending_ops_by_path,
5575 work_directory_abs_path,
5576 path_style: prev_snapshot.path_style,
5577 scan_id: prev_snapshot.scan_id + 1,
5578 branch,
5579 head_commit,
5580 merge: merge_details,
5581 remote_origin_url,
5582 remote_upstream_url,
5583 stash_entries,
5584 };
5585
5586 Ok((snapshot, events))
5587}
5588
5589fn status_from_proto(
5590 simple_status: i32,
5591 status: Option<proto::GitFileStatus>,
5592) -> anyhow::Result<FileStatus> {
5593 use proto::git_file_status::Variant;
5594
5595 let Some(variant) = status.and_then(|status| status.variant) else {
5596 let code = proto::GitStatus::from_i32(simple_status)
5597 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5598 let result = match code {
5599 proto::GitStatus::Added => TrackedStatus {
5600 worktree_status: StatusCode::Added,
5601 index_status: StatusCode::Unmodified,
5602 }
5603 .into(),
5604 proto::GitStatus::Modified => TrackedStatus {
5605 worktree_status: StatusCode::Modified,
5606 index_status: StatusCode::Unmodified,
5607 }
5608 .into(),
5609 proto::GitStatus::Conflict => UnmergedStatus {
5610 first_head: UnmergedStatusCode::Updated,
5611 second_head: UnmergedStatusCode::Updated,
5612 }
5613 .into(),
5614 proto::GitStatus::Deleted => TrackedStatus {
5615 worktree_status: StatusCode::Deleted,
5616 index_status: StatusCode::Unmodified,
5617 }
5618 .into(),
5619 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5620 };
5621 return Ok(result);
5622 };
5623
5624 let result = match variant {
5625 Variant::Untracked(_) => FileStatus::Untracked,
5626 Variant::Ignored(_) => FileStatus::Ignored,
5627 Variant::Unmerged(unmerged) => {
5628 let [first_head, second_head] =
5629 [unmerged.first_head, unmerged.second_head].map(|head| {
5630 let code = proto::GitStatus::from_i32(head)
5631 .with_context(|| format!("Invalid git status code: {head}"))?;
5632 let result = match code {
5633 proto::GitStatus::Added => UnmergedStatusCode::Added,
5634 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5635 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5636 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5637 };
5638 Ok(result)
5639 });
5640 let [first_head, second_head] = [first_head?, second_head?];
5641 UnmergedStatus {
5642 first_head,
5643 second_head,
5644 }
5645 .into()
5646 }
5647 Variant::Tracked(tracked) => {
5648 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5649 .map(|status| {
5650 let code = proto::GitStatus::from_i32(status)
5651 .with_context(|| format!("Invalid git status code: {status}"))?;
5652 let result = match code {
5653 proto::GitStatus::Modified => StatusCode::Modified,
5654 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5655 proto::GitStatus::Added => StatusCode::Added,
5656 proto::GitStatus::Deleted => StatusCode::Deleted,
5657 proto::GitStatus::Renamed => StatusCode::Renamed,
5658 proto::GitStatus::Copied => StatusCode::Copied,
5659 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5660 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5661 };
5662 Ok(result)
5663 });
5664 let [index_status, worktree_status] = [index_status?, worktree_status?];
5665 TrackedStatus {
5666 index_status,
5667 worktree_status,
5668 }
5669 .into()
5670 }
5671 };
5672 Ok(result)
5673}
5674
5675fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5676 use proto::git_file_status::{Tracked, Unmerged, Variant};
5677
5678 let variant = match status {
5679 FileStatus::Untracked => Variant::Untracked(Default::default()),
5680 FileStatus::Ignored => Variant::Ignored(Default::default()),
5681 FileStatus::Unmerged(UnmergedStatus {
5682 first_head,
5683 second_head,
5684 }) => Variant::Unmerged(Unmerged {
5685 first_head: unmerged_status_to_proto(first_head),
5686 second_head: unmerged_status_to_proto(second_head),
5687 }),
5688 FileStatus::Tracked(TrackedStatus {
5689 index_status,
5690 worktree_status,
5691 }) => Variant::Tracked(Tracked {
5692 index_status: tracked_status_to_proto(index_status),
5693 worktree_status: tracked_status_to_proto(worktree_status),
5694 }),
5695 };
5696 proto::GitFileStatus {
5697 variant: Some(variant),
5698 }
5699}
5700
5701fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5702 match code {
5703 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5704 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5705 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5706 }
5707}
5708
5709fn tracked_status_to_proto(code: StatusCode) -> i32 {
5710 match code {
5711 StatusCode::Added => proto::GitStatus::Added as _,
5712 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5713 StatusCode::Modified => proto::GitStatus::Modified as _,
5714 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5715 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5716 StatusCode::Copied => proto::GitStatus::Copied as _,
5717 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5718 }
5719}