1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use std::{
59 cmp::Ordering,
60 collections::{BTreeSet, VecDeque},
61 future::Future,
62 mem,
63 ops::Range,
64 path::{Path, PathBuf},
65 str::FromStr,
66 sync::{
67 Arc,
68 atomic::{self, AtomicU64},
69 },
70 time::Instant,
71};
72use sum_tree::{Edit, SumTree, TreeSet};
73use task::Shell;
74use text::{Bias, BufferId};
75use util::{
76 ResultExt, debug_panic,
77 paths::{PathStyle, SanitizedPath},
78 post_inc,
79 rel_path::RelPath,
80};
81use worktree::{
82 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
83 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
84};
85use zeroize::Zeroize;
86
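/// Tracks the Git repositories discovered in a project's worktrees, along with
/// per-buffer diff and conflict-marker state. A `GitStore` is either local,
/// operating directly on repositories on disk, or remote, proxying requests to
/// an upstream collaborator.
///
/// A minimal construction sketch (illustrative only; the `worktree_store`,
/// `buffer_store`, `environment`, and `fs` handles are assumed to already exist
/// in the caller's context):
///
/// ```ignore
/// let git_store = cx.new(|cx| {
///     GitStore::local(&worktree_store, buffer_store.clone(), environment.clone(), fs.clone(), cx)
/// });
/// ```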
87pub struct GitStore {
88 state: GitStoreState,
89 buffer_store: Entity<BufferStore>,
90 worktree_store: Entity<WorktreeStore>,
91 repositories: HashMap<RepositoryId, Entity<Repository>>,
92 active_repo_id: Option<RepositoryId>,
93 #[allow(clippy::type_complexity)]
94 loading_diffs:
95 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
96 diffs: HashMap<BufferId, Entity<BufferGitState>>,
97 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
98 _subscriptions: Vec<Subscription>,
99}
100
101#[derive(Default)]
102struct SharedDiffs {
103 unstaged: Option<Entity<BufferDiff>>,
104 uncommitted: Option<Entity<BufferDiff>>,
105}
106
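/// Per-buffer Git state: weak handles to the buffer's unstaged and uncommitted
/// diffs and to its conflict set, plus the bookkeeping needed to recalculate
/// them when the index text, HEAD text, or buffer language changes.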
107struct BufferGitState {
108 unstaged_diff: Option<WeakEntity<BufferDiff>>,
109 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
110 conflict_set: Option<WeakEntity<ConflictSet>>,
111 recalculate_diff_task: Option<Task<Result<()>>>,
112 reparse_conflict_markers_task: Option<Task<Result<()>>>,
113 language: Option<Arc<Language>>,
114 language_registry: Option<Arc<LanguageRegistry>>,
115 conflict_updated_futures: Vec<oneshot::Sender<()>>,
116 recalculating_tx: postage::watch::Sender<bool>,
117
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
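    ///
    /// For example (illustrative): if the user stages one hunk (count becomes 1)
    /// and then another (count becomes 2), and the index write triggered by the
    /// first operation completes now, `hunk_staging_operation_count_as_of_write`
    /// becomes 1. Since that is less than the current count of 2, index text
    /// read back from the repository is still considered stale.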
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
128
129 head_text: Option<Arc<String>>,
130 index_text: Option<Arc<String>>,
131 head_changed: bool,
132 index_changed: bool,
133 language_changed: bool,
134}
135
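/// Describes an update to a buffer's diff base texts: the index contents, the
/// HEAD contents, each separately, or both set to the same text.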
136#[derive(Clone, Debug)]
137enum DiffBasesChange {
138 SetIndex(Option<String>),
139 SetHead(Option<String>),
140 SetEach {
141 index: Option<String>,
142 head: Option<String>,
143 },
144 SetBoth(Option<String>),
145}
146
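/// The kind of diff tracked for a buffer: against the index (unstaged changes)
/// or against HEAD (uncommitted changes).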
147#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
148enum DiffKind {
149 Unstaged,
150 Uncommitted,
151}
152
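/// Whether this store operates on local repositories on disk or proxies to an
/// upstream client, along with the downstream client (if any) that updates are
/// forwarded to while the project is shared.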
153enum GitStoreState {
154 Local {
155 next_repository_id: Arc<AtomicU64>,
156 downstream: Option<LocalDownstreamState>,
157 project_environment: Entity<ProjectEnvironment>,
158 fs: Arc<dyn Fs>,
159 },
160 Remote {
161 upstream_client: AnyProtoClient,
162 upstream_project_id: u64,
163 downstream: Option<(AnyProtoClient, ProjectId)>,
164 },
165}
166
167enum DownstreamUpdate {
168 UpdateRepository(RepositorySnapshot),
169 RemoveRepository(RepositoryId),
170}
171
172struct LocalDownstreamState {
173 client: AnyProtoClient,
174 project_id: ProjectId,
175 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
176 _task: Task<Result<()>>,
177}
178
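/// A checkpoint of every repository in the store, keyed by working-directory
/// path, that can later be restored or compared against another checkpoint.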
179#[derive(Clone, Debug)]
180pub struct GitStoreCheckpoint {
181 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
182}
183
184#[derive(Clone, Debug, PartialEq, Eq)]
185pub struct StatusEntry {
186 pub repo_path: RepoPath,
187 pub status: FileStatus,
188}
189
190impl StatusEntry {
191 fn to_proto(&self) -> proto::StatusEntry {
192 let simple_status = match self.status {
193 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
194 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
195 FileStatus::Tracked(TrackedStatus {
196 index_status,
197 worktree_status,
198 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
199 worktree_status
200 } else {
201 index_status
202 }),
203 };
204
205 proto::StatusEntry {
206 repo_path: self.repo_path.to_proto(),
207 simple_status,
208 status: Some(status_to_proto(self.status)),
209 }
210 }
211}
212
213impl TryFrom<proto::StatusEntry> for StatusEntry {
214 type Error = anyhow::Error;
215
216 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
217 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
218 let status = status_from_proto(value.simple_status, value.status)?;
219 Ok(Self { repo_path, status })
220 }
221}
222
223impl sum_tree::Item for StatusEntry {
224 type Summary = PathSummary<GitSummary>;
225
226 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
227 PathSummary {
228 max_path: self.repo_path.0.clone(),
229 item_summary: self.status.summary(),
230 }
231 }
232}
233
234impl sum_tree::KeyedItem for StatusEntry {
235 type Key = PathKey;
236
237 fn key(&self) -> Self::Key {
238 PathKey(self.repo_path.0.clone())
239 }
240}
241
242#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
243pub struct RepositoryId(pub u64);
244
245#[derive(Clone, Debug, Default, PartialEq, Eq)]
246pub struct MergeDetails {
247 pub conflicted_paths: TreeSet<RepoPath>,
248 pub message: Option<SharedString>,
249 pub heads: Vec<Option<SharedString>>,
250}
251
252#[derive(Clone, Debug, PartialEq, Eq)]
253pub struct RepositorySnapshot {
254 pub id: RepositoryId,
255 pub statuses_by_path: SumTree<StatusEntry>,
256 pub pending_ops_by_path: SumTree<PendingOps>,
257 pub work_directory_abs_path: Arc<Path>,
258 pub path_style: PathStyle,
259 pub branch: Option<Branch>,
260 pub head_commit: Option<CommitDetails>,
261 pub scan_id: u64,
262 pub merge: MergeDetails,
263 pub remote_origin_url: Option<String>,
264 pub remote_upstream_url: Option<String>,
265 pub stash_entries: GitStash,
266}
267
268type JobId = u64;
269
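/// Describes a Git job that is currently running: when it started and the
/// message describing it.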
270#[derive(Clone, Debug, PartialEq, Eq)]
271pub struct JobInfo {
272 pub start: Instant,
273 pub message: SharedString,
274}
275
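/// A handle to a single Git repository, pairing its most recent snapshot with
/// the queue used to serialize jobs that run against it.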
276pub struct Repository {
277 this: WeakEntity<Self>,
278 snapshot: RepositorySnapshot,
279 commit_message_buffer: Option<Entity<Buffer>>,
280 git_store: WeakEntity<GitStore>,
281 // For a local repository, holds paths that have had worktree events since the last status scan completed,
282 // and that should be examined during the next status scan.
283 paths_needing_status_update: BTreeSet<RepoPath>,
284 job_sender: mpsc::UnboundedSender<GitJob>,
285 active_jobs: HashMap<JobId, JobInfo>,
286 job_id: JobId,
287 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
288 latest_askpass_id: u64,
289}
290
291impl std::ops::Deref for Repository {
292 type Target = RepositorySnapshot;
293
294 fn deref(&self) -> &Self::Target {
295 &self.snapshot
296 }
297}
298
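/// How a repository executes its operations: directly against a local backend
/// with a captured environment, or over RPC via the project's upstream client.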
299#[derive(Clone)]
300pub enum RepositoryState {
301 Local {
302 backend: Arc<dyn GitRepository>,
303 environment: Arc<HashMap<String, String>>,
304 },
305 Remote {
306 project_id: ProjectId,
307 client: AnyProtoClient,
308 },
309}
310
311#[derive(Clone, Debug, PartialEq, Eq)]
312pub enum RepositoryEvent {
313 StatusesChanged {
314 // TODO could report which statuses changed here
315 full_scan: bool,
316 },
317 MergeHeadsChanged,
318 BranchChanged,
319 StashEntriesChanged,
320}
321
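/// Event emitted by a [`Repository`] to signal that its set of active jobs has
/// changed.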
322#[derive(Clone, Debug)]
323pub struct JobsUpdated;
324
325#[derive(Debug)]
326pub enum GitStoreEvent {
327 ActiveRepositoryChanged(Option<RepositoryId>),
328 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
329 RepositoryAdded,
330 RepositoryRemoved(RepositoryId),
331 IndexWriteError(anyhow::Error),
332 JobsUpdated,
333 ConflictsUpdated,
334}
335
336impl EventEmitter<RepositoryEvent> for Repository {}
337impl EventEmitter<JobsUpdated> for Repository {}
338impl EventEmitter<GitStoreEvent> for GitStore {}
339
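/// A unit of work to run against a repository's state, optionally tagged with a
/// key identifying the kind of job.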
340pub struct GitJob {
341 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
342 key: Option<GitJobKey>,
343}
344
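/// Identifies the kind of a queued Git job (an index write for a path, a reload
/// of buffer diff bases, a status refresh, or a full git state reload), which
/// presumably lets the job queue coalesce redundant work of the same kind.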
345#[derive(PartialEq, Eq)]
346enum GitJobKey {
347 WriteIndex(RepoPath),
348 ReloadBufferDiffBases,
349 RefreshStatuses,
350 ReloadGitState,
351}
352
353impl GitStore {
354 pub fn local(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 environment: Entity<ProjectEnvironment>,
358 fs: Arc<dyn Fs>,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Local {
365 next_repository_id: Arc::new(AtomicU64::new(1)),
366 downstream: None,
367 project_environment: environment,
368 fs,
369 },
370 cx,
371 )
372 }
373
374 pub fn remote(
375 worktree_store: &Entity<WorktreeStore>,
376 buffer_store: Entity<BufferStore>,
377 upstream_client: AnyProtoClient,
378 project_id: u64,
379 cx: &mut Context<Self>,
380 ) -> Self {
381 Self::new(
382 worktree_store.clone(),
383 buffer_store,
384 GitStoreState::Remote {
385 upstream_client,
386 upstream_project_id: project_id,
387 downstream: None,
388 },
389 cx,
390 )
391 }
392
393 fn new(
394 worktree_store: Entity<WorktreeStore>,
395 buffer_store: Entity<BufferStore>,
396 state: GitStoreState,
397 cx: &mut Context<Self>,
398 ) -> Self {
399 let _subscriptions = vec![
400 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
401 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
402 ];
403
404 GitStore {
405 state,
406 buffer_store,
407 worktree_store,
408 repositories: HashMap::default(),
409 active_repo_id: None,
410 _subscriptions,
411 loading_diffs: HashMap::default(),
412 shared_diffs: HashMap::default(),
413 diffs: HashMap::default(),
414 }
415 }
416
417 pub fn init(client: &AnyProtoClient) {
418 client.add_entity_request_handler(Self::handle_get_remotes);
419 client.add_entity_request_handler(Self::handle_get_branches);
420 client.add_entity_request_handler(Self::handle_get_default_branch);
421 client.add_entity_request_handler(Self::handle_change_branch);
422 client.add_entity_request_handler(Self::handle_create_branch);
423 client.add_entity_request_handler(Self::handle_rename_branch);
424 client.add_entity_request_handler(Self::handle_git_init);
425 client.add_entity_request_handler(Self::handle_push);
426 client.add_entity_request_handler(Self::handle_pull);
427 client.add_entity_request_handler(Self::handle_fetch);
428 client.add_entity_request_handler(Self::handle_stage);
429 client.add_entity_request_handler(Self::handle_unstage);
430 client.add_entity_request_handler(Self::handle_stash);
431 client.add_entity_request_handler(Self::handle_stash_pop);
432 client.add_entity_request_handler(Self::handle_stash_apply);
433 client.add_entity_request_handler(Self::handle_stash_drop);
434 client.add_entity_request_handler(Self::handle_commit);
435 client.add_entity_request_handler(Self::handle_reset);
436 client.add_entity_request_handler(Self::handle_show);
437 client.add_entity_request_handler(Self::handle_load_commit_diff);
438 client.add_entity_request_handler(Self::handle_checkout_files);
439 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
440 client.add_entity_request_handler(Self::handle_set_index_text);
441 client.add_entity_request_handler(Self::handle_askpass);
442 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
443 client.add_entity_request_handler(Self::handle_git_diff);
444 client.add_entity_request_handler(Self::handle_tree_diff);
445 client.add_entity_request_handler(Self::handle_get_blob_content);
446 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
447 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
448 client.add_entity_message_handler(Self::handle_update_diff_bases);
449 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
450 client.add_entity_request_handler(Self::handle_blame_buffer);
451 client.add_entity_message_handler(Self::handle_update_repository);
452 client.add_entity_message_handler(Self::handle_remove_repository);
453 client.add_entity_request_handler(Self::handle_git_clone);
454 client.add_entity_request_handler(Self::handle_get_worktrees);
455 client.add_entity_request_handler(Self::handle_create_worktree);
456 }
457
458 pub fn is_local(&self) -> bool {
459 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
462 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
463 let id = repo.read(cx).id;
464 if self.active_repo_id != Some(id) {
465 self.active_repo_id = Some(id);
466 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
467 }
468 }
469 }
470
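    /// Begins forwarding repository state to the given downstream client,
    /// starting with an initial snapshot of every repository.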
471 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
472 match &mut self.state {
473 GitStoreState::Remote {
474 downstream: downstream_client,
475 ..
476 } => {
477 for repo in self.repositories.values() {
478 let update = repo.read(cx).snapshot.initial_update(project_id);
479 for update in split_repository_update(update) {
480 client.send(update).log_err();
481 }
482 }
483 *downstream_client = Some((client, ProjectId(project_id)));
484 }
485 GitStoreState::Local {
486 downstream: downstream_client,
487 ..
488 } => {
489 let mut snapshots = HashMap::default();
490 let (updates_tx, mut updates_rx) = mpsc::unbounded();
491 for repo in self.repositories.values() {
492 updates_tx
493 .unbounded_send(DownstreamUpdate::UpdateRepository(
494 repo.read(cx).snapshot.clone(),
495 ))
496 .ok();
497 }
498 *downstream_client = Some(LocalDownstreamState {
499 client: client.clone(),
500 project_id: ProjectId(project_id),
501 updates_tx,
502 _task: cx.spawn(async move |this, cx| {
503 cx.background_spawn(async move {
504 while let Some(update) = updates_rx.next().await {
505 match update {
506 DownstreamUpdate::UpdateRepository(snapshot) => {
507 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
508 {
509 let update =
510 snapshot.build_update(old_snapshot, project_id);
511 *old_snapshot = snapshot;
512 for update in split_repository_update(update) {
513 client.send(update)?;
514 }
515 } else {
516 let update = snapshot.initial_update(project_id);
517 for update in split_repository_update(update) {
518 client.send(update)?;
519 }
520 snapshots.insert(snapshot.id, snapshot);
521 }
522 }
523 DownstreamUpdate::RemoveRepository(id) => {
524 client.send(proto::RemoveRepository {
525 project_id,
526 id: id.to_proto(),
527 })?;
528 }
529 }
530 }
531 anyhow::Ok(())
532 })
533 .await
534 .ok();
535 this.update(cx, |this, _| {
536 if let GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } = &mut this.state
540 {
541 downstream_client.take();
542 } else {
543 unreachable!("unshared called on remote store");
544 }
545 })
546 }),
547 });
548 }
549 }
550 }
551
552 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
553 match &mut self.state {
554 GitStoreState::Local {
555 downstream: downstream_client,
556 ..
557 } => {
558 downstream_client.take();
559 }
560 GitStoreState::Remote {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 }
567 self.shared_diffs.clear();
568 }
569
570 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
571 self.shared_diffs.remove(peer_id);
572 }
573
574 pub fn active_repository(&self) -> Option<Entity<Repository>> {
575 self.active_repo_id
576 .as_ref()
577 .map(|id| self.repositories[id].clone())
578 }
579
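    /// Returns the diff of the buffer's contents against the Git index, creating
    /// and caching it if necessary. If a recalculation is already in flight, the
    /// returned task resolves once it completes.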
580 pub fn open_unstaged_diff(
581 &mut self,
582 buffer: Entity<Buffer>,
583 cx: &mut Context<Self>,
584 ) -> Task<Result<Entity<BufferDiff>>> {
585 let buffer_id = buffer.read(cx).remote_id();
586 if let Some(diff_state) = self.diffs.get(&buffer_id)
587 && let Some(unstaged_diff) = diff_state
588 .read(cx)
589 .unstaged_diff
590 .as_ref()
591 .and_then(|weak| weak.upgrade())
592 {
593 if let Some(task) =
594 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
595 {
596 return cx.background_executor().spawn(async move {
597 task.await;
598 Ok(unstaged_diff)
599 });
600 }
601 return Task::ready(Ok(unstaged_diff));
602 }
603
604 let Some((repo, repo_path)) =
605 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
606 else {
607 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
608 };
609
610 let task = self
611 .loading_diffs
612 .entry((buffer_id, DiffKind::Unstaged))
613 .or_insert_with(|| {
614 let staged_text = repo.update(cx, |repo, cx| {
615 repo.load_staged_text(buffer_id, repo_path, cx)
616 });
617 cx.spawn(async move |this, cx| {
618 Self::open_diff_internal(
619 this,
620 DiffKind::Unstaged,
621 staged_text.await.map(DiffBasesChange::SetIndex),
622 buffer,
623 cx,
624 )
625 .await
626 .map_err(Arc::new)
627 })
628 .shared()
629 })
630 .clone();
631
632 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
633 }
634
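    /// Builds a diff of the buffer against the content of the given blob (or
    /// with no base text when `oid` is `None`), attaching the buffer's unstaged
    /// diff as the secondary diff.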
635 pub fn open_diff_since(
636 &mut self,
637 oid: Option<git::Oid>,
638 buffer: Entity<Buffer>,
639 repo: Entity<Repository>,
640 languages: Arc<LanguageRegistry>,
641 cx: &mut Context<Self>,
642 ) -> Task<Result<Entity<BufferDiff>>> {
643 cx.spawn(async move |this, cx| {
644 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
645 let content = match oid {
646 None => None,
647 Some(oid) => Some(
648 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
649 .await?,
650 ),
651 };
652 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
653
654 buffer_diff
655 .update(cx, |buffer_diff, cx| {
656 buffer_diff.set_base_text(
657 content.map(Arc::new),
658 buffer_snapshot.language().cloned(),
659 Some(languages.clone()),
660 buffer_snapshot.text,
661 cx,
662 )
663 })?
664 .await?;
665 let unstaged_diff = this
666 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
667 .await?;
668 buffer_diff.update(cx, |buffer_diff, _| {
669 buffer_diff.set_secondary_diff(unstaged_diff);
670 })?;
671
672 this.update(cx, |_, cx| {
673 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
674 .detach();
675 })?;
676
677 Ok(buffer_diff)
678 })
679 }
680
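    /// Returns the diff of the buffer's contents against HEAD, creating and
    /// caching it if necessary; the buffer's unstaged diff is attached as its
    /// secondary diff.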
681 pub fn open_uncommitted_diff(
682 &mut self,
683 buffer: Entity<Buffer>,
684 cx: &mut Context<Self>,
685 ) -> Task<Result<Entity<BufferDiff>>> {
686 let buffer_id = buffer.read(cx).remote_id();
687
688 if let Some(diff_state) = self.diffs.get(&buffer_id)
689 && let Some(uncommitted_diff) = diff_state
690 .read(cx)
691 .uncommitted_diff
692 .as_ref()
693 .and_then(|weak| weak.upgrade())
694 {
695 if let Some(task) =
696 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
697 {
698 return cx.background_executor().spawn(async move {
699 task.await;
700 Ok(uncommitted_diff)
701 });
702 }
703 return Task::ready(Ok(uncommitted_diff));
704 }
705
706 let Some((repo, repo_path)) =
707 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
708 else {
709 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
710 };
711
712 let task = self
713 .loading_diffs
714 .entry((buffer_id, DiffKind::Uncommitted))
715 .or_insert_with(|| {
716 let changes = repo.update(cx, |repo, cx| {
717 repo.load_committed_text(buffer_id, repo_path, cx)
718 });
719
720 // todo(lw): hot foreground spawn
721 cx.spawn(async move |this, cx| {
722 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
723 .await
724 .map_err(Arc::new)
725 })
726 .shared()
727 })
728 .clone();
729
730 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
731 }
732
733 async fn open_diff_internal(
734 this: WeakEntity<Self>,
735 kind: DiffKind,
736 texts: Result<DiffBasesChange>,
737 buffer_entity: Entity<Buffer>,
738 cx: &mut AsyncApp,
739 ) -> Result<Entity<BufferDiff>> {
740 let diff_bases_change = match texts {
741 Err(e) => {
742 this.update(cx, |this, cx| {
743 let buffer = buffer_entity.read(cx);
744 let buffer_id = buffer.remote_id();
745 this.loading_diffs.remove(&(buffer_id, kind));
746 })?;
747 return Err(e);
748 }
749 Ok(change) => change,
750 };
751
752 this.update(cx, |this, cx| {
753 let buffer = buffer_entity.read(cx);
754 let buffer_id = buffer.remote_id();
755 let language = buffer.language().cloned();
756 let language_registry = buffer.language_registry();
757 let text_snapshot = buffer.text_snapshot();
758 this.loading_diffs.remove(&(buffer_id, kind));
759
760 let git_store = cx.weak_entity();
761 let diff_state = this
762 .diffs
763 .entry(buffer_id)
764 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
765
766 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
767
768 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
769 diff_state.update(cx, |diff_state, cx| {
770 diff_state.language = language;
771 diff_state.language_registry = language_registry;
772
773 match kind {
774 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
775 DiffKind::Uncommitted => {
776 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
777 diff
778 } else {
779 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
780 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
781 unstaged_diff
782 };
783
784 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
785 diff_state.uncommitted_diff = Some(diff.downgrade())
786 }
787 }
788
789 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
790 let rx = diff_state.wait_for_recalculation();
791
792 anyhow::Ok(async move {
793 if let Some(rx) = rx {
794 rx.await;
795 }
796 Ok(diff)
797 })
798 })
799 })??
800 .await
801 }
802
803 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
804 let diff_state = self.diffs.get(&buffer_id)?;
805 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
806 }
807
808 pub fn get_uncommitted_diff(
809 &self,
810 buffer_id: BufferId,
811 cx: &App,
812 ) -> Option<Entity<BufferDiff>> {
813 let diff_state = self.diffs.get(&buffer_id)?;
814 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
815 }
816
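    /// Returns the conflict set for the buffer, creating it if necessary, and
    /// schedules a reparse of the buffer's conflict markers.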
817 pub fn open_conflict_set(
818 &mut self,
819 buffer: Entity<Buffer>,
820 cx: &mut Context<Self>,
821 ) -> Entity<ConflictSet> {
822 log::debug!("open conflict set");
823 let buffer_id = buffer.read(cx).remote_id();
824
825 if let Some(git_state) = self.diffs.get(&buffer_id)
826 && let Some(conflict_set) = git_state
827 .read(cx)
828 .conflict_set
829 .as_ref()
830 .and_then(|weak| weak.upgrade())
831 {
832 let conflict_set = conflict_set;
833 let buffer_snapshot = buffer.read(cx).text_snapshot();
834
835 git_state.update(cx, |state, cx| {
836 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
837 });
838
839 return conflict_set;
840 }
841
842 let is_unmerged = self
843 .repository_and_path_for_buffer_id(buffer_id, cx)
844 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
845 let git_store = cx.weak_entity();
846 let buffer_git_state = self
847 .diffs
848 .entry(buffer_id)
849 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
850 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
851
852 self._subscriptions
853 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
854 cx.emit(GitStoreEvent::ConflictsUpdated);
855 }));
856
857 buffer_git_state.update(cx, |state, cx| {
858 state.conflict_set = Some(conflict_set.downgrade());
859 let buffer_snapshot = buffer.read(cx).text_snapshot();
860 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
861 });
862
863 conflict_set
864 }
865
866 pub fn project_path_git_status(
867 &self,
868 project_path: &ProjectPath,
869 cx: &App,
870 ) -> Option<FileStatus> {
871 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
872 Some(repo.read(cx).status_for_path(&repo_path)?.status)
873 }
874
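    /// Captures a checkpoint of every repository in the store, keyed by
    /// working-directory path.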
875 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
876 let mut work_directory_abs_paths = Vec::new();
877 let mut checkpoints = Vec::new();
878 for repository in self.repositories.values() {
879 repository.update(cx, |repository, _| {
880 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
881 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
882 });
883 }
884
885 cx.background_executor().spawn(async move {
886 let checkpoints = future::try_join_all(checkpoints).await?;
887 Ok(GitStoreCheckpoint {
888 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
889 .into_iter()
890 .zip(checkpoints)
891 .collect(),
892 })
893 })
894 }
895
896 pub fn restore_checkpoint(
897 &self,
898 checkpoint: GitStoreCheckpoint,
899 cx: &mut App,
900 ) -> Task<Result<()>> {
901 let repositories_by_work_dir_abs_path = self
902 .repositories
903 .values()
904 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
905 .collect::<HashMap<_, _>>();
906
907 let mut tasks = Vec::new();
908 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
909 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
910 let restore = repository.update(cx, |repository, _| {
911 repository.restore_checkpoint(checkpoint)
912 });
913 tasks.push(async move { restore.await? });
914 }
915 }
916 cx.background_spawn(async move {
917 future::try_join_all(tasks).await?;
918 Ok(())
919 })
920 }
921
922 /// Compares two checkpoints, returning true if they are equal.
923 pub fn compare_checkpoints(
924 &self,
925 left: GitStoreCheckpoint,
926 mut right: GitStoreCheckpoint,
927 cx: &mut App,
928 ) -> Task<Result<bool>> {
929 let repositories_by_work_dir_abs_path = self
930 .repositories
931 .values()
932 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
933 .collect::<HashMap<_, _>>();
934
935 let mut tasks = Vec::new();
936 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
937 if let Some(right_checkpoint) = right
938 .checkpoints_by_work_dir_abs_path
939 .remove(&work_dir_abs_path)
940 {
941 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
942 {
943 let compare = repository.update(cx, |repository, _| {
944 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
945 });
946
947 tasks.push(async move { compare.await? });
948 }
949 } else {
950 return Task::ready(Ok(false));
951 }
952 }
953 cx.background_spawn(async move {
954 Ok(future::try_join_all(tasks)
955 .await?
956 .into_iter()
957 .all(|result| result))
958 })
959 }
960
961 /// Blames a buffer.
962 pub fn blame_buffer(
963 &self,
964 buffer: &Entity<Buffer>,
965 version: Option<clock::Global>,
966 cx: &mut App,
967 ) -> Task<Result<Option<Blame>>> {
968 let buffer = buffer.read(cx);
969 let Some((repo, repo_path)) =
970 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
971 else {
972 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
973 };
974 let content = match &version {
975 Some(version) => buffer.rope_for_version(version),
976 None => buffer.as_rope().clone(),
977 };
978 let version = version.unwrap_or(buffer.version());
979 let buffer_id = buffer.remote_id();
980
981 let rx = repo.update(cx, |repo, _| {
982 repo.send_job(None, move |state, _| async move {
983 match state {
984 RepositoryState::Local { backend, .. } => backend
985 .blame(repo_path.clone(), content)
986 .await
987 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
988 .map(Some),
989 RepositoryState::Remote { project_id, client } => {
990 let response = client
991 .request(proto::BlameBuffer {
992 project_id: project_id.to_proto(),
993 buffer_id: buffer_id.into(),
994 version: serialize_version(&version),
995 })
996 .await?;
997 Ok(deserialize_blame_buffer_response(response))
998 }
999 }
1000 })
1001 });
1002
1003 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1004 }
1005
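    /// Builds a permalink URL to the selected lines of the buffer on its Git
    /// hosting provider, falling back to crate metadata for Rust files opened
    /// from the Cargo registry when the buffer is not in a repository.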
1006 pub fn get_permalink_to_line(
1007 &self,
1008 buffer: &Entity<Buffer>,
1009 selection: Range<u32>,
1010 cx: &mut App,
1011 ) -> Task<Result<url::Url>> {
1012 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1013 return Task::ready(Err(anyhow!("buffer has no file")));
1014 };
1015
1016 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1017 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1018 cx,
1019 ) else {
1020 // If we're not in a Git repo, check whether this is a Rust source
1021 // file in the Cargo registry (presumably opened with go-to-definition
1022 // from a normal Rust file). If so, we can put together a permalink
1023 // using crate metadata.
1024 if buffer
1025 .read(cx)
1026 .language()
1027 .is_none_or(|lang| lang.name() != "Rust".into())
1028 {
1029 return Task::ready(Err(anyhow!("no permalink available")));
1030 }
1031 let file_path = file.worktree.read(cx).absolutize(&file.path);
1032 return cx.spawn(async move |cx| {
1033 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1034 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1035 .context("no permalink available")
1036 });
1037 };
1038
1039 let buffer_id = buffer.read(cx).remote_id();
1040 let branch = repo.read(cx).branch.clone();
1041 let remote = branch
1042 .as_ref()
1043 .and_then(|b| b.upstream.as_ref())
1044 .and_then(|b| b.remote_name())
1045 .unwrap_or("origin")
1046 .to_string();
1047
1048 let rx = repo.update(cx, |repo, _| {
1049 repo.send_job(None, move |state, cx| async move {
1050 match state {
1051 RepositoryState::Local { backend, .. } => {
1052 let origin_url = backend
1053 .remote_url(&remote)
1054 .with_context(|| format!("remote \"{remote}\" not found"))?;
1055
1056 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1057
1058 let provider_registry =
1059 cx.update(GitHostingProviderRegistry::default_global)?;
1060
1061 let (provider, remote) =
1062 parse_git_remote_url(provider_registry, &origin_url)
1063 .context("parsing Git remote URL")?;
1064
1065 Ok(provider.build_permalink(
1066 remote,
1067 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1068 ))
1069 }
1070 RepositoryState::Remote { project_id, client } => {
1071 let response = client
1072 .request(proto::GetPermalinkToLine {
1073 project_id: project_id.to_proto(),
1074 buffer_id: buffer_id.into(),
1075 selection: Some(proto::Range {
1076 start: selection.start as u64,
1077 end: selection.end as u64,
1078 }),
1079 })
1080 .await?;
1081
1082 url::Url::parse(&response.permalink).context("failed to parse permalink")
1083 }
1084 }
1085 })
1086 });
1087 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1088 }
1089
1090 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1091 match &self.state {
1092 GitStoreState::Local {
1093 downstream: downstream_client,
1094 ..
1095 } => downstream_client
1096 .as_ref()
1097 .map(|state| (state.client.clone(), state.project_id)),
1098 GitStoreState::Remote {
1099 downstream: downstream_client,
1100 ..
1101 } => downstream_client.clone(),
1102 }
1103 }
1104
1105 fn upstream_client(&self) -> Option<AnyProtoClient> {
1106 match &self.state {
1107 GitStoreState::Local { .. } => None,
1108 GitStoreState::Remote {
1109 upstream_client, ..
1110 } => Some(upstream_client.clone()),
1111 }
1112 }
1113
1114 fn on_worktree_store_event(
1115 &mut self,
1116 worktree_store: Entity<WorktreeStore>,
1117 event: &WorktreeStoreEvent,
1118 cx: &mut Context<Self>,
1119 ) {
1120 let GitStoreState::Local {
1121 project_environment,
1122 downstream,
1123 next_repository_id,
1124 fs,
1125 } = &self.state
1126 else {
1127 return;
1128 };
1129
1130 match event {
1131 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1132 if let Some(worktree) = self
1133 .worktree_store
1134 .read(cx)
1135 .worktree_for_id(*worktree_id, cx)
1136 {
1137 let paths_by_git_repo =
1138 self.process_updated_entries(&worktree, updated_entries, cx);
1139 let downstream = downstream
1140 .as_ref()
1141 .map(|downstream| downstream.updates_tx.clone());
1142 cx.spawn(async move |_, cx| {
1143 let paths_by_git_repo = paths_by_git_repo.await;
1144 for (repo, paths) in paths_by_git_repo {
1145 repo.update(cx, |repo, cx| {
1146 repo.paths_changed(paths, downstream.clone(), cx);
1147 })
1148 .ok();
1149 }
1150 })
1151 .detach();
1152 }
1153 }
1154 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1155 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1156 else {
1157 return;
1158 };
1159 if !worktree.read(cx).is_visible() {
1160 log::debug!(
1161 "not adding repositories for local worktree {:?} because it's not visible",
1162 worktree.read(cx).abs_path()
1163 );
1164 return;
1165 }
1166 self.update_repositories_from_worktree(
1167 project_environment.clone(),
1168 next_repository_id.clone(),
1169 downstream
1170 .as_ref()
1171 .map(|downstream| downstream.updates_tx.clone()),
1172 changed_repos.clone(),
1173 fs.clone(),
1174 cx,
1175 );
1176 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1177 }
1178 _ => {}
1179 }
    }

    fn on_repository_event(
1182 &mut self,
1183 repo: Entity<Repository>,
1184 event: &RepositoryEvent,
1185 cx: &mut Context<Self>,
1186 ) {
1187 let id = repo.read(cx).id;
1188 let repo_snapshot = repo.read(cx).snapshot.clone();
1189 for (buffer_id, diff) in self.diffs.iter() {
1190 if let Some((buffer_repo, repo_path)) =
1191 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1192 && buffer_repo == repo
1193 {
1194 diff.update(cx, |diff, cx| {
1195 if let Some(conflict_set) = &diff.conflict_set {
1196 let conflict_status_changed =
1197 conflict_set.update(cx, |conflict_set, cx| {
1198 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1199 conflict_set.set_has_conflict(has_conflict, cx)
1200 })?;
1201 if conflict_status_changed {
1202 let buffer_store = self.buffer_store.read(cx);
1203 if let Some(buffer) = buffer_store.get(*buffer_id) {
1204 let _ = diff
1205 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1206 }
1207 }
1208 }
1209 anyhow::Ok(())
1210 })
1211 .ok();
1212 }
1213 }
1214 cx.emit(GitStoreEvent::RepositoryUpdated(
1215 id,
1216 event.clone(),
1217 self.active_repo_id == Some(id),
1218 ))
1219 }
1220
1221 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1222 cx.emit(GitStoreEvent::JobsUpdated)
1223 }
1224
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1226 fn update_repositories_from_worktree(
1227 &mut self,
1228 project_environment: Entity<ProjectEnvironment>,
1229 next_repository_id: Arc<AtomicU64>,
1230 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1231 updated_git_repositories: UpdatedGitRepositoriesSet,
1232 fs: Arc<dyn Fs>,
1233 cx: &mut Context<Self>,
1234 ) {
1235 let mut removed_ids = Vec::new();
1236 for update in updated_git_repositories.iter() {
1237 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1238 let existing_work_directory_abs_path =
1239 repo.read(cx).work_directory_abs_path.clone();
1240 Some(&existing_work_directory_abs_path)
1241 == update.old_work_directory_abs_path.as_ref()
1242 || Some(&existing_work_directory_abs_path)
1243 == update.new_work_directory_abs_path.as_ref()
1244 }) {
1245 if let Some(new_work_directory_abs_path) =
1246 update.new_work_directory_abs_path.clone()
1247 {
1248 existing.update(cx, |existing, cx| {
1249 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1250 existing.schedule_scan(updates_tx.clone(), cx);
1251 });
1252 } else {
1253 removed_ids.push(*id);
1254 }
1255 } else if let UpdatedGitRepository {
1256 new_work_directory_abs_path: Some(work_directory_abs_path),
1257 dot_git_abs_path: Some(dot_git_abs_path),
1258 repository_dir_abs_path: Some(repository_dir_abs_path),
1259 common_dir_abs_path: Some(common_dir_abs_path),
1260 ..
1261 } = update
1262 {
1263 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1264 let git_store = cx.weak_entity();
1265 let repo = cx.new(|cx| {
1266 let mut repo = Repository::local(
1267 id,
1268 work_directory_abs_path.clone(),
1269 dot_git_abs_path.clone(),
1270 repository_dir_abs_path.clone(),
1271 common_dir_abs_path.clone(),
1272 project_environment.downgrade(),
1273 fs.clone(),
1274 git_store,
1275 cx,
1276 );
1277 repo.schedule_scan(updates_tx.clone(), cx);
1278 repo
1279 });
1280 self._subscriptions
1281 .push(cx.subscribe(&repo, Self::on_repository_event));
1282 self._subscriptions
1283 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1284 self.repositories.insert(id, repo);
1285 cx.emit(GitStoreEvent::RepositoryAdded);
1286 self.active_repo_id.get_or_insert_with(|| {
1287 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1288 id
1289 });
1290 }
1291 }
1292
1293 for id in removed_ids {
1294 if self.active_repo_id == Some(id) {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 self.repositories.remove(&id);
1299 if let Some(updates_tx) = updates_tx.as_ref() {
1300 updates_tx
1301 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1302 .ok();
1303 }
1304 }
1305 }
1306
1307 fn on_buffer_store_event(
1308 &mut self,
1309 _: Entity<BufferStore>,
1310 event: &BufferStoreEvent,
1311 cx: &mut Context<Self>,
1312 ) {
1313 match event {
1314 BufferStoreEvent::BufferAdded(buffer) => {
1315 cx.subscribe(buffer, |this, buffer, event, cx| {
1316 if let BufferEvent::LanguageChanged = event {
1317 let buffer_id = buffer.read(cx).remote_id();
1318 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1319 diff_state.update(cx, |diff_state, cx| {
1320 diff_state.buffer_language_changed(buffer, cx);
1321 });
1322 }
1323 }
1324 })
1325 .detach();
1326 }
1327 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1328 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1329 diffs.remove(buffer_id);
1330 }
1331 }
1332 BufferStoreEvent::BufferDropped(buffer_id) => {
1333 self.diffs.remove(buffer_id);
1334 for diffs in self.shared_diffs.values_mut() {
1335 diffs.remove(buffer_id);
1336 }
1337 }
1338
1339 _ => {}
1340 }
1341 }
1342
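    /// Schedules diff recalculation and conflict-marker reparsing for the given
    /// buffers, returning a future that resolves once all of them have finished.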
1343 pub fn recalculate_buffer_diffs(
1344 &mut self,
1345 buffers: Vec<Entity<Buffer>>,
1346 cx: &mut Context<Self>,
1347 ) -> impl Future<Output = ()> + use<> {
1348 let mut futures = Vec::new();
1349 for buffer in buffers {
1350 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1351 let buffer = buffer.read(cx).text_snapshot();
1352 diff_state.update(cx, |diff_state, cx| {
1353 diff_state.recalculate_diffs(buffer.clone(), cx);
1354 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1355 });
1356 futures.push(diff_state.update(cx, |diff_state, cx| {
1357 diff_state
1358 .reparse_conflict_markers(buffer, cx)
1359 .map(|_| {})
1360 .boxed()
1361 }));
1362 }
1363 }
1364 async move {
1365 futures::future::join_all(futures).await;
1366 }
1367 }
1368
1369 fn on_buffer_diff_event(
1370 &mut self,
1371 diff: Entity<buffer_diff::BufferDiff>,
1372 event: &BufferDiffEvent,
1373 cx: &mut Context<Self>,
1374 ) {
1375 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1376 let buffer_id = diff.read(cx).buffer_id;
1377 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1378 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1379 diff_state.hunk_staging_operation_count += 1;
1380 diff_state.hunk_staging_operation_count
1381 });
1382 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1383 let recv = repo.update(cx, |repo, cx| {
1384 log::debug!("hunks changed for {}", path.as_unix_str());
1385 repo.spawn_set_index_text_job(
1386 path,
1387 new_index_text.as_ref().map(|rope| rope.to_string()),
1388 Some(hunk_staging_operation_count),
1389 cx,
1390 )
1391 });
1392 let diff = diff.downgrade();
1393 cx.spawn(async move |this, cx| {
1394 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1395 diff.update(cx, |diff, cx| {
1396 diff.clear_pending_hunks(cx);
1397 })
1398 .ok();
1399 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1400 .ok();
1401 }
1402 })
1403 .detach();
1404 }
1405 }
1406 }
1407 }
1408
1409 fn local_worktree_git_repos_changed(
1410 &mut self,
1411 worktree: Entity<Worktree>,
1412 changed_repos: &UpdatedGitRepositoriesSet,
1413 cx: &mut Context<Self>,
1414 ) {
1415 log::debug!("local worktree repos changed");
1416 debug_assert!(worktree.read(cx).is_local());
1417
1418 for repository in self.repositories.values() {
1419 repository.update(cx, |repository, cx| {
1420 let repo_abs_path = &repository.work_directory_abs_path;
1421 if changed_repos.iter().any(|update| {
1422 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1423 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1424 }) {
1425 repository.reload_buffer_diff_bases(cx);
1426 }
1427 });
1428 }
1429 }
1430
1431 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1432 &self.repositories
1433 }
1434
1435 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1436 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1437 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1438 Some(status.status)
1439 }
1440
1441 pub fn repository_and_path_for_buffer_id(
1442 &self,
1443 buffer_id: BufferId,
1444 cx: &App,
1445 ) -> Option<(Entity<Repository>, RepoPath)> {
1446 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1447 let project_path = buffer.read(cx).project_path(cx)?;
1448 self.repository_and_path_for_project_path(&project_path, cx)
1449 }
1450
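    /// Resolves a project path to the repository containing it and the
    /// corresponding path relative to that repository's working directory. When
    /// repositories are nested, the innermost one wins.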
1451 pub fn repository_and_path_for_project_path(
1452 &self,
1453 path: &ProjectPath,
1454 cx: &App,
1455 ) -> Option<(Entity<Repository>, RepoPath)> {
1456 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1457 self.repositories
1458 .values()
1459 .filter_map(|repo| {
1460 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1461 Some((repo.clone(), repo_path))
1462 })
1463 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1464 }
1465
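    /// Initializes a new Git repository at the given path, either directly via
    /// the local filesystem or by forwarding the request to the upstream client.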
1466 pub fn git_init(
1467 &self,
1468 path: Arc<Path>,
1469 fallback_branch_name: String,
1470 cx: &App,
1471 ) -> Task<Result<()>> {
1472 match &self.state {
1473 GitStoreState::Local { fs, .. } => {
1474 let fs = fs.clone();
1475 cx.background_executor()
1476 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1477 }
1478 GitStoreState::Remote {
1479 upstream_client,
1480 upstream_project_id: project_id,
1481 ..
1482 } => {
1483 let client = upstream_client.clone();
1484 let project_id = *project_id;
1485 cx.background_executor().spawn(async move {
1486 client
1487 .request(proto::GitInit {
1488 project_id: project_id,
1489 abs_path: path.to_string_lossy().into_owned(),
1490 fallback_branch_name,
1491 })
1492 .await?;
1493 Ok(())
1494 })
1495 }
1496 }
1497 }
1498
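    /// Clones `repo` into `path`. Locally this delegates to the `Fs`
    /// implementation; remotely the request is forwarded upstream, which is not
    /// supported for collab guests.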
1499 pub fn git_clone(
1500 &self,
1501 repo: String,
1502 path: impl Into<Arc<std::path::Path>>,
1503 cx: &App,
1504 ) -> Task<Result<()>> {
1505 let path = path.into();
1506 match &self.state {
1507 GitStoreState::Local { fs, .. } => {
1508 let fs = fs.clone();
1509 cx.background_executor()
1510 .spawn(async move { fs.git_clone(&repo, &path).await })
1511 }
1512 GitStoreState::Remote {
1513 upstream_client,
1514 upstream_project_id,
1515 ..
1516 } => {
1517 if upstream_client.is_via_collab() {
1518 return Task::ready(Err(anyhow!(
1519 "Git Clone isn't supported for project guests"
1520 )));
1521 }
1522 let request = upstream_client.request(proto::GitClone {
1523 project_id: *upstream_project_id,
1524 abs_path: path.to_string_lossy().into_owned(),
1525 remote_repo: repo,
1526 });
1527
1528 cx.background_spawn(async move {
1529 let result = request.await?;
1530
1531 match result.success {
1532 true => Ok(()),
1533 false => Err(anyhow!("Git Clone failed")),
1534 }
1535 })
1536 }
1537 }
1538 }
1539
1540 async fn handle_update_repository(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::UpdateRepository>,
1543 mut cx: AsyncApp,
1544 ) -> Result<()> {
1545 this.update(&mut cx, |this, cx| {
1546 let path_style = this.worktree_store.read(cx).path_style();
1547 let mut update = envelope.payload;
1548
1549 let id = RepositoryId::from_proto(update.id);
1550 let client = this.upstream_client().context("no upstream client")?;
1551
1552 let mut repo_subscription = None;
1553 let repo = this.repositories.entry(id).or_insert_with(|| {
1554 let git_store = cx.weak_entity();
1555 let repo = cx.new(|cx| {
1556 Repository::remote(
1557 id,
1558 Path::new(&update.abs_path).into(),
1559 path_style,
1560 ProjectId(update.project_id),
1561 client,
1562 git_store,
1563 cx,
1564 )
1565 });
1566 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1567 cx.emit(GitStoreEvent::RepositoryAdded);
1568 repo
1569 });
1570 this._subscriptions.extend(repo_subscription);
1571
1572 repo.update(cx, {
1573 let update = update.clone();
1574 |repo, cx| repo.apply_remote_update(update, cx)
1575 })?;
1576
1577 this.active_repo_id.get_or_insert_with(|| {
1578 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1579 id
1580 });
1581
1582 if let Some((client, project_id)) = this.downstream_client() {
1583 update.project_id = project_id.to_proto();
1584 client.send(update).log_err();
1585 }
1586 Ok(())
1587 })?
1588 }
1589
1590 async fn handle_remove_repository(
1591 this: Entity<Self>,
1592 envelope: TypedEnvelope<proto::RemoveRepository>,
1593 mut cx: AsyncApp,
1594 ) -> Result<()> {
1595 this.update(&mut cx, |this, cx| {
1596 let mut update = envelope.payload;
1597 let id = RepositoryId::from_proto(update.id);
1598 this.repositories.remove(&id);
1599 if let Some((client, project_id)) = this.downstream_client() {
1600 update.project_id = project_id.to_proto();
1601 client.send(update).log_err();
1602 }
1603 if this.active_repo_id == Some(id) {
1604 this.active_repo_id = None;
1605 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1606 }
1607 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1608 })
1609 }
1610
1611 async fn handle_git_init(
1612 this: Entity<Self>,
1613 envelope: TypedEnvelope<proto::GitInit>,
1614 cx: AsyncApp,
1615 ) -> Result<proto::Ack> {
1616 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1617 let name = envelope.payload.fallback_branch_name;
1618 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1619 .await?;
1620
1621 Ok(proto::Ack {})
1622 }
1623
1624 async fn handle_git_clone(
1625 this: Entity<Self>,
1626 envelope: TypedEnvelope<proto::GitClone>,
1627 cx: AsyncApp,
1628 ) -> Result<proto::GitCloneResponse> {
1629 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1630 let repo_name = envelope.payload.remote_repo;
1631 let result = cx
1632 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1633 .await;
1634
1635 Ok(proto::GitCloneResponse {
1636 success: result.is_ok(),
1637 })
1638 }
1639
1640 async fn handle_fetch(
1641 this: Entity<Self>,
1642 envelope: TypedEnvelope<proto::Fetch>,
1643 mut cx: AsyncApp,
1644 ) -> Result<proto::RemoteMessageResponse> {
1645 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1646 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1647 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1648 let askpass_id = envelope.payload.askpass_id;
1649
1650 let askpass = make_remote_delegate(
1651 this,
1652 envelope.payload.project_id,
1653 repository_id,
1654 askpass_id,
1655 &mut cx,
1656 );
1657
1658 let remote_output = repository_handle
1659 .update(&mut cx, |repository_handle, cx| {
1660 repository_handle.fetch(fetch_options, askpass, cx)
1661 })?
1662 .await??;
1663
1664 Ok(proto::RemoteMessageResponse {
1665 stdout: remote_output.stdout,
1666 stderr: remote_output.stderr,
1667 })
1668 }
1669
1670 async fn handle_push(
1671 this: Entity<Self>,
1672 envelope: TypedEnvelope<proto::Push>,
1673 mut cx: AsyncApp,
1674 ) -> Result<proto::RemoteMessageResponse> {
1675 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1676 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1677
1678 let askpass_id = envelope.payload.askpass_id;
1679 let askpass = make_remote_delegate(
1680 this,
1681 envelope.payload.project_id,
1682 repository_id,
1683 askpass_id,
1684 &mut cx,
1685 );
1686
1687 let options = envelope
1688 .payload
1689 .options
1690 .as_ref()
1691 .map(|_| match envelope.payload.options() {
1692 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1693 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1694 });
1695
1696 let branch_name = envelope.payload.branch_name.into();
1697 let remote_name = envelope.payload.remote_name.into();
1698
1699 let remote_output = repository_handle
1700 .update(&mut cx, |repository_handle, cx| {
1701 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1702 })?
1703 .await??;
1704 Ok(proto::RemoteMessageResponse {
1705 stdout: remote_output.stdout,
1706 stderr: remote_output.stderr,
1707 })
1708 }
1709
1710 async fn handle_pull(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::Pull>,
1713 mut cx: AsyncApp,
1714 ) -> Result<proto::RemoteMessageResponse> {
1715 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1716 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1717 let askpass_id = envelope.payload.askpass_id;
1718 let askpass = make_remote_delegate(
1719 this,
1720 envelope.payload.project_id,
1721 repository_id,
1722 askpass_id,
1723 &mut cx,
1724 );
1725
1726 let branch_name = envelope.payload.branch_name.into();
1727 let remote_name = envelope.payload.remote_name.into();
1728
1729 let remote_message = repository_handle
1730 .update(&mut cx, |repository_handle, cx| {
1731 repository_handle.pull(branch_name, remote_name, askpass, cx)
1732 })?
1733 .await??;
1734
1735 Ok(proto::RemoteMessageResponse {
1736 stdout: remote_message.stdout,
1737 stderr: remote_message.stderr,
1738 })
1739 }
1740
1741 async fn handle_stage(
1742 this: Entity<Self>,
1743 envelope: TypedEnvelope<proto::Stage>,
1744 mut cx: AsyncApp,
1745 ) -> Result<proto::Ack> {
1746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1748
1749 let entries = envelope
1750 .payload
1751 .paths
1752 .into_iter()
1753 .map(|path| RepoPath::new(&path))
1754 .collect::<Result<Vec<_>>>()?;
1755
1756 repository_handle
1757 .update(&mut cx, |repository_handle, cx| {
1758 repository_handle.stage_entries(entries, cx)
1759 })?
1760 .await?;
1761 Ok(proto::Ack {})
1762 }
1763
1764 async fn handle_unstage(
1765 this: Entity<Self>,
1766 envelope: TypedEnvelope<proto::Unstage>,
1767 mut cx: AsyncApp,
1768 ) -> Result<proto::Ack> {
1769 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1770 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1771
1772 let entries = envelope
1773 .payload
1774 .paths
1775 .into_iter()
1776 .map(|path| RepoPath::new(&path))
1777 .collect::<Result<Vec<_>>>()?;
1778
1779 repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.unstage_entries(entries, cx)
1782 })?
1783 .await?;
1784
1785 Ok(proto::Ack {})
1786 }
1787
1788 async fn handle_stash(
1789 this: Entity<Self>,
1790 envelope: TypedEnvelope<proto::Stash>,
1791 mut cx: AsyncApp,
1792 ) -> Result<proto::Ack> {
1793 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1794 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1795
1796 let entries = envelope
1797 .payload
1798 .paths
1799 .into_iter()
1800 .map(|path| RepoPath::new(&path))
1801 .collect::<Result<Vec<_>>>()?;
1802
1803 repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.stash_entries(entries, cx)
1806 })?
1807 .await?;
1808
1809 Ok(proto::Ack {})
1810 }
1811
1812 async fn handle_stash_pop(
1813 this: Entity<Self>,
1814 envelope: TypedEnvelope<proto::StashPop>,
1815 mut cx: AsyncApp,
1816 ) -> Result<proto::Ack> {
1817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1819 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1820
1821 repository_handle
1822 .update(&mut cx, |repository_handle, cx| {
1823 repository_handle.stash_pop(stash_index, cx)
1824 })?
1825 .await?;
1826
1827 Ok(proto::Ack {})
1828 }
1829
1830 async fn handle_stash_apply(
1831 this: Entity<Self>,
1832 envelope: TypedEnvelope<proto::StashApply>,
1833 mut cx: AsyncApp,
1834 ) -> Result<proto::Ack> {
1835 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1836 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1837 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1838
1839 repository_handle
1840 .update(&mut cx, |repository_handle, cx| {
1841 repository_handle.stash_apply(stash_index, cx)
1842 })?
1843 .await?;
1844
1845 Ok(proto::Ack {})
1846 }
1847
1848 async fn handle_stash_drop(
1849 this: Entity<Self>,
1850 envelope: TypedEnvelope<proto::StashDrop>,
1851 mut cx: AsyncApp,
1852 ) -> Result<proto::Ack> {
1853 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1854 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1855 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1856
1857 repository_handle
1858 .update(&mut cx, |repository_handle, cx| {
1859 repository_handle.stash_drop(stash_index, cx)
1860 })?
1861 .await??;
1862
1863 Ok(proto::Ack {})
1864 }
1865
1866 async fn handle_set_index_text(
1867 this: Entity<Self>,
1868 envelope: TypedEnvelope<proto::SetIndexText>,
1869 mut cx: AsyncApp,
1870 ) -> Result<proto::Ack> {
1871 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1872 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1873 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1874
1875 repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.spawn_set_index_text_job(
1878 repo_path,
1879 envelope.payload.text,
1880 None,
1881 cx,
1882 )
1883 })?
1884 .await??;
1885 Ok(proto::Ack {})
1886 }
1887
1888 async fn handle_commit(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::Commit>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let message = SharedString::from(envelope.payload.message);
1897 let name = envelope.payload.name.map(SharedString::from);
1898 let email = envelope.payload.email.map(SharedString::from);
1899 let options = envelope.payload.options.unwrap_or_default();
1900
1901 repository_handle
1902 .update(&mut cx, |repository_handle, cx| {
1903 repository_handle.commit(
1904 message,
1905 name.zip(email),
1906 CommitOptions {
1907 amend: options.amend,
1908 signoff: options.signoff,
1909 },
1910 cx,
1911 )
1912 })?
1913 .await??;
1914 Ok(proto::Ack {})
1915 }
1916
1917 async fn handle_get_remotes(
1918 this: Entity<Self>,
1919 envelope: TypedEnvelope<proto::GetRemotes>,
1920 mut cx: AsyncApp,
1921 ) -> Result<proto::GetRemotesResponse> {
1922 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1923 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1924
1925 let branch_name = envelope.payload.branch_name;
1926
1927 let remotes = repository_handle
1928 .update(&mut cx, |repository_handle, _| {
1929 repository_handle.get_remotes(branch_name)
1930 })?
1931 .await??;
1932
1933 Ok(proto::GetRemotesResponse {
1934 remotes: remotes
1935 .into_iter()
1936 .map(|remote| proto::get_remotes_response::Remote {
1937 name: remote.name.to_string(),
1938 })
1939 .collect::<Vec<_>>(),
1940 })
1941 }
1942
1943 async fn handle_get_worktrees(
1944 this: Entity<Self>,
1945 envelope: TypedEnvelope<proto::GitGetWorktrees>,
1946 mut cx: AsyncApp,
1947 ) -> Result<proto::GitWorktreesResponse> {
1948 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1949 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1950
1951 let worktrees = repository_handle
1952 .update(&mut cx, |repository_handle, _| {
1953 repository_handle.worktrees()
1954 })?
1955 .await??;
1956
1957 Ok(proto::GitWorktreesResponse {
1958 worktrees: worktrees
1959 .into_iter()
1960 .map(|worktree| worktree_to_proto(&worktree))
1961 .collect::<Vec<_>>(),
1962 })
1963 }
1964
1965 async fn handle_create_worktree(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::GitCreateWorktree>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::Ack> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972 let directory = PathBuf::from(envelope.payload.directory);
1973 let name = envelope.payload.name;
1974 let commit = envelope.payload.commit;
1975
1976 repository_handle
1977 .update(&mut cx, |repository_handle, _| {
1978 repository_handle.create_worktree(name, directory, commit)
1979 })?
1980 .await??;
1981
1982 Ok(proto::Ack {})
1983 }
1984
1985 async fn handle_get_branches(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::GitGetBranches>,
1988 mut cx: AsyncApp,
1989 ) -> Result<proto::GitBranchesResponse> {
1990 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1991 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1992
1993 let branches = repository_handle
1994 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1995 .await??;
1996
1997 Ok(proto::GitBranchesResponse {
1998 branches: branches
1999 .into_iter()
2000 .map(|branch| branch_to_proto(&branch))
2001 .collect::<Vec<_>>(),
2002 })
2003 }

2004 async fn handle_get_default_branch(
2005 this: Entity<Self>,
2006 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2007 mut cx: AsyncApp,
2008 ) -> Result<proto::GetDefaultBranchResponse> {
2009 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2010 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2011
2012 let branch = repository_handle
2013 .update(&mut cx, |repository_handle, _| {
2014 repository_handle.default_branch()
2015 })?
2016 .await??
2017 .map(Into::into);
2018
2019 Ok(proto::GetDefaultBranchResponse { branch })
2020 }

2021 async fn handle_create_branch(
2022 this: Entity<Self>,
2023 envelope: TypedEnvelope<proto::GitCreateBranch>,
2024 mut cx: AsyncApp,
2025 ) -> Result<proto::Ack> {
2026 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2027 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2028 let branch_name = envelope.payload.branch_name;
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, _| {
2032 repository_handle.create_branch(branch_name)
2033 })?
2034 .await??;
2035
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_change_branch(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::GitChangeBranch>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let branch_name = envelope.payload.branch_name;
2047
2048 repository_handle
2049 .update(&mut cx, |repository_handle, _| {
2050 repository_handle.change_branch(branch_name)
2051 })?
2052 .await??;
2053
2054 Ok(proto::Ack {})
2055 }
2056
2057 async fn handle_rename_branch(
2058 this: Entity<Self>,
2059 envelope: TypedEnvelope<proto::GitRenameBranch>,
2060 mut cx: AsyncApp,
2061 ) -> Result<proto::Ack> {
2062 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2063 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2064 let branch = envelope.payload.branch;
2065 let new_name = envelope.payload.new_name;
2066
2067 repository_handle
2068 .update(&mut cx, |repository_handle, _| {
2069 repository_handle.rename_branch(branch, new_name)
2070 })?
2071 .await??;
2072
2073 Ok(proto::Ack {})
2074 }
2075
2076 async fn handle_show(
2077 this: Entity<Self>,
2078 envelope: TypedEnvelope<proto::GitShow>,
2079 mut cx: AsyncApp,
2080 ) -> Result<proto::GitCommitDetails> {
2081 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2082 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2083
2084 let commit = repository_handle
2085 .update(&mut cx, |repository_handle, _| {
2086 repository_handle.show(envelope.payload.commit)
2087 })?
2088 .await??;
2089 Ok(proto::GitCommitDetails {
2090 sha: commit.sha.into(),
2091 message: commit.message.into(),
2092 commit_timestamp: commit.commit_timestamp,
2093 author_email: commit.author_email.into(),
2094 author_name: commit.author_name.into(),
2095 })
2096 }
2097
2098 async fn handle_load_commit_diff(
2099 this: Entity<Self>,
2100 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2101 mut cx: AsyncApp,
2102 ) -> Result<proto::LoadCommitDiffResponse> {
2103 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2104 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2105
2106 let commit_diff = repository_handle
2107 .update(&mut cx, |repository_handle, _| {
2108 repository_handle.load_commit_diff(envelope.payload.commit)
2109 })?
2110 .await??;
2111 Ok(proto::LoadCommitDiffResponse {
2112 files: commit_diff
2113 .files
2114 .into_iter()
2115 .map(|file| proto::CommitFile {
2116 path: file.path.to_proto(),
2117 old_text: file.old_text,
2118 new_text: file.new_text,
2119 })
2120 .collect(),
2121 })
2122 }
2123
2124 async fn handle_reset(
2125 this: Entity<Self>,
2126 envelope: TypedEnvelope<proto::GitReset>,
2127 mut cx: AsyncApp,
2128 ) -> Result<proto::Ack> {
2129 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2130 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2131
2132 let mode = match envelope.payload.mode() {
2133 git_reset::ResetMode::Soft => ResetMode::Soft,
2134 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2135 };
2136
2137 repository_handle
2138 .update(&mut cx, |repository_handle, cx| {
2139 repository_handle.reset(envelope.payload.commit, mode, cx)
2140 })?
2141 .await??;
2142 Ok(proto::Ack {})
2143 }
2144
2145 async fn handle_checkout_files(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152 let paths = envelope
2153 .payload
2154 .paths
2155 .iter()
2156 .map(|s| RepoPath::from_proto(s))
2157 .collect::<Result<Vec<_>>>()?;
2158
2159 repository_handle
2160 .update(&mut cx, |repository_handle, cx| {
2161 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2162 })?
2163 .await??;
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_open_commit_message_buffer(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::OpenBufferResponse> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174 let buffer = repository
2175 .update(&mut cx, |repository, cx| {
2176 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2177 })?
2178 .await?;
2179
2180 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2181 this.update(&mut cx, |this, cx| {
2182 this.buffer_store.update(cx, |buffer_store, cx| {
2183 buffer_store
2184 .create_buffer_for_peer(
2185 &buffer,
2186 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2187 cx,
2188 )
2189 .detach_and_log_err(cx);
2190 })
2191 })?;
2192
2193 Ok(proto::OpenBufferResponse {
2194 buffer_id: buffer_id.to_proto(),
2195 })
2196 }
2197
2198 async fn handle_askpass(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::AskPassRequest>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::AskPassResponse> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205
2206 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2207 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2208 debug_panic!("no askpass found");
2209 anyhow::bail!("no askpass found");
2210 };
2211
2212 let response = askpass
2213 .ask_password(envelope.payload.prompt)
2214 .await
2215 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2216
2217 delegates
2218 .lock()
2219 .insert(envelope.payload.askpass_id, askpass);
2220
2221 // Note: the askpass response is sent back over the protocol unencrypted here; the marker type below records that this is deliberate.
2222 Ok(proto::AskPassResponse {
2223 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2224 })
2225 }
2226
2227 async fn handle_check_for_pushed_commits(
2228 this: Entity<Self>,
2229 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2230 mut cx: AsyncApp,
2231 ) -> Result<proto::CheckForPushedCommitsResponse> {
2232 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2233 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2234
2235 let branches = repository_handle
2236 .update(&mut cx, |repository_handle, _| {
2237 repository_handle.check_for_pushed_commits()
2238 })?
2239 .await??;
2240 Ok(proto::CheckForPushedCommitsResponse {
2241 pushed_to: branches
2242 .into_iter()
2243 .map(|commit| commit.to_string())
2244 .collect(),
2245 })
2246 }
2247
2248 async fn handle_git_diff(
2249 this: Entity<Self>,
2250 envelope: TypedEnvelope<proto::GitDiff>,
2251 mut cx: AsyncApp,
2252 ) -> Result<proto::GitDiffResponse> {
2253 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2254 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2255 let diff_type = match envelope.payload.diff_type() {
2256 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2257 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2258 };
2259
2260 let mut diff = repository_handle
2261 .update(&mut cx, |repository_handle, cx| {
2262 repository_handle.diff(diff_type, cx)
2263 })?
2264 .await??;
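// Cap the diff returned over RPC at roughly one megabyte. Note that the truncation
// below counts chars, so multi-byte text may end up slightly over ONE_MB bytes.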
2265 const ONE_MB: usize = 1_000_000;
2266 if diff.len() > ONE_MB {
2267 diff = diff.chars().take(ONE_MB).collect()
2268 }
2269
2270 Ok(proto::GitDiffResponse { diff })
2271 }
2272
2273 async fn handle_tree_diff(
2274 this: Entity<Self>,
2275 request: TypedEnvelope<proto::GetTreeDiff>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::GetTreeDiffResponse> {
2278 let repository_id = RepositoryId(request.payload.repository_id);
2279 let diff_type = if request.payload.is_merge {
2280 DiffTreeType::MergeBase {
2281 base: request.payload.base.into(),
2282 head: request.payload.head.into(),
2283 }
2284 } else {
2285 DiffTreeType::Since {
2286 base: request.payload.base.into(),
2287 head: request.payload.head.into(),
2288 }
2289 };
2290
2291 let diff = this
2292 .update(&mut cx, |this, cx| {
2293 let repository = this.repositories().get(&repository_id)?;
2294 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2295 })?
2296 .context("missing repository")?
2297 .await??;
2298
2299 Ok(proto::GetTreeDiffResponse {
2300 entries: diff
2301 .entries
2302 .into_iter()
2303 .map(|(path, status)| proto::TreeDiffStatus {
2304 path: path.0.to_proto(),
2305 status: match status {
2306 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2307 TreeDiffStatus::Modified { .. } => {
2308 proto::tree_diff_status::Status::Modified.into()
2309 }
2310 TreeDiffStatus::Deleted { .. } => {
2311 proto::tree_diff_status::Status::Deleted.into()
2312 }
2313 },
2314 oid: match status {
2315 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2316 Some(old.to_string())
2317 }
2318 TreeDiffStatus::Added => None,
2319 },
2320 })
2321 .collect(),
2322 })
2323 }
2324
2325 async fn handle_get_blob_content(
2326 this: Entity<Self>,
2327 request: TypedEnvelope<proto::GetBlobContent>,
2328 mut cx: AsyncApp,
2329 ) -> Result<proto::GetBlobContentResponse> {
2330 let oid = git::Oid::from_str(&request.payload.oid)?;
2331 let repository_id = RepositoryId(request.payload.repository_id);
2332 let content = this
2333 .update(&mut cx, |this, cx| {
2334 let repository = this.repositories().get(&repository_id)?;
2335 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2336 })?
2337 .context("missing repository")?
2338 .await?;
2339 Ok(proto::GetBlobContentResponse { content })
2340 }
2341
2342 async fn handle_open_unstaged_diff(
2343 this: Entity<Self>,
2344 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2345 mut cx: AsyncApp,
2346 ) -> Result<proto::OpenUnstagedDiffResponse> {
2347 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2348 let diff = this
2349 .update(&mut cx, |this, cx| {
2350 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2351 Some(this.open_unstaged_diff(buffer, cx))
2352 })?
2353 .context("missing buffer")?
2354 .await?;
2355 this.update(&mut cx, |this, _| {
2356 let shared_diffs = this
2357 .shared_diffs
2358 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2359 .or_default();
2360 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2361 })?;
2362 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2363 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2364 }
2365
2366 async fn handle_open_uncommitted_diff(
2367 this: Entity<Self>,
2368 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2369 mut cx: AsyncApp,
2370 ) -> Result<proto::OpenUncommittedDiffResponse> {
2371 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2372 let diff = this
2373 .update(&mut cx, |this, cx| {
2374 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2375 Some(this.open_uncommitted_diff(buffer, cx))
2376 })?
2377 .context("missing buffer")?
2378 .await?;
2379 this.update(&mut cx, |this, _| {
2380 let shared_diffs = this
2381 .shared_diffs
2382 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2383 .or_default();
2384 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2385 })?;
2386 diff.read_with(&cx, |diff, cx| {
2387 use proto::open_uncommitted_diff_response::Mode;
2388
2389 let unstaged_diff = diff.secondary_diff();
2390 let index_snapshot = unstaged_diff.and_then(|diff| {
2391 let diff = diff.read(cx);
2392 diff.base_text_exists().then(|| diff.base_text())
2393 });
2394
2395 let mode;
2396 let staged_text;
2397 let committed_text;
2398 if diff.base_text_exists() {
2399 let committed_snapshot = diff.base_text();
2400 committed_text = Some(committed_snapshot.text());
2401 if let Some(index_text) = index_snapshot {
2402 if index_text.remote_id() == committed_snapshot.remote_id() {
2403 mode = Mode::IndexMatchesHead;
2404 staged_text = None;
2405 } else {
2406 mode = Mode::IndexAndHead;
2407 staged_text = Some(index_text.text());
2408 }
2409 } else {
2410 mode = Mode::IndexAndHead;
2411 staged_text = None;
2412 }
2413 } else {
2414 mode = Mode::IndexAndHead;
2415 committed_text = None;
2416 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2417 }
2418
2419 proto::OpenUncommittedDiffResponse {
2420 committed_text,
2421 staged_text,
2422 mode: mode.into(),
2423 }
2424 })
2425 }
2426
2427 async fn handle_update_diff_bases(
2428 this: Entity<Self>,
2429 request: TypedEnvelope<proto::UpdateDiffBases>,
2430 mut cx: AsyncApp,
2431 ) -> Result<()> {
2432 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2433 this.update(&mut cx, |this, cx| {
2434 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2435 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2436 {
2437 let buffer = buffer.read(cx).text_snapshot();
2438 diff_state.update(cx, |diff_state, cx| {
2439 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2440 })
2441 }
2442 })
2443 }
2444
2445 async fn handle_blame_buffer(
2446 this: Entity<Self>,
2447 envelope: TypedEnvelope<proto::BlameBuffer>,
2448 mut cx: AsyncApp,
2449 ) -> Result<proto::BlameBufferResponse> {
2450 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2451 let version = deserialize_version(&envelope.payload.version);
2452 let buffer = this.read_with(&cx, |this, cx| {
2453 this.buffer_store.read(cx).get_existing(buffer_id)
2454 })??;
2455 buffer
2456 .update(&mut cx, |buffer, _| {
2457 buffer.wait_for_version(version.clone())
2458 })?
2459 .await?;
2460 let blame = this
2461 .update(&mut cx, |this, cx| {
2462 this.blame_buffer(&buffer, Some(version), cx)
2463 })?
2464 .await?;
2465 Ok(serialize_blame_buffer_response(blame))
2466 }
2467
2468 async fn handle_get_permalink_to_line(
2469 this: Entity<Self>,
2470 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2471 mut cx: AsyncApp,
2472 ) -> Result<proto::GetPermalinkToLineResponse> {
2473 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2475 let selection = {
2476 let proto_selection = envelope
2477 .payload
2478 .selection
2479 .context("no selection provided to get permalink for")?;
2480 proto_selection.start as u32..proto_selection.end as u32
2481 };
2482 let buffer = this.read_with(&cx, |this, cx| {
2483 this.buffer_store.read(cx).get_existing(buffer_id)
2484 })??;
2485 let permalink = this
2486 .update(&mut cx, |this, cx| {
2487 this.get_permalink_to_line(&buffer, selection, cx)
2488 })?
2489 .await?;
2490 Ok(proto::GetPermalinkToLineResponse {
2491 permalink: permalink.to_string(),
2492 })
2493 }
2494
2495 fn repository_for_request(
2496 this: &Entity<Self>,
2497 id: RepositoryId,
2498 cx: &mut AsyncApp,
2499 ) -> Result<Entity<Repository>> {
2500 this.read_with(cx, |this, _| {
2501 this.repositories
2502 .get(&id)
2503 .context("missing repository handle")
2504 .cloned()
2505 })?
2506 }
2507
2508 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2509 self.repositories
2510 .iter()
2511 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2512 .collect()
2513 }
2514
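/// Maps a batch of updated worktree entries to the repositories that contain them,
/// returning, per repository, the updated paths expressed as [`RepoPath`]s. Each path
/// is assigned to its innermost containing repository, and the matching work runs on
/// the background executor.
///
/// Illustrative sketch (hypothetical paths): given repositories rooted at `/work/app`
/// and `/work/app/vendor/lib`, an updated entry at `/work/app/vendor/lib/src/main.rs`
/// is attributed only to the inner `vendor/lib` repository, as `src/main.rs`.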
2515 fn process_updated_entries(
2516 &self,
2517 worktree: &Entity<Worktree>,
2518 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2519 cx: &mut App,
2520 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2521 let path_style = worktree.read(cx).path_style();
2522 let mut repo_paths = self
2523 .repositories
2524 .values()
2525 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2526 .collect::<Vec<_>>();
2527 let mut entries: Vec<_> = updated_entries
2528 .iter()
2529 .map(|(path, _, _)| path.clone())
2530 .collect();
2531 entries.sort();
2532 let worktree = worktree.read(cx);
2533
2534 let entries = entries
2535 .into_iter()
2536 .map(|path| worktree.absolutize(&path))
2537 .collect::<Arc<[_]>>();
2538
2539 let executor = cx.background_executor().clone();
2540 cx.background_executor().spawn(async move {
2541 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2542 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2543 let mut tasks = FuturesOrdered::new();
2544 for (repo_path, repo) in repo_paths.into_iter().rev() {
2545 let entries = entries.clone();
2546 let task = executor.spawn(async move {
2547 // Find all updated paths that fall under this repo's work directory.
2548 let mut ix = entries.partition_point(|path| path < &*repo_path);
2549 if ix == entries.len() {
2550 return None;
2551 };
2552
2553 let mut paths = Vec::new();
2554 // All paths prefixed by a given repo will form a contiguous range.
2555 while let Some(path) = entries.get(ix)
2556 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2557 &repo_path, path, path_style,
2558 )
2559 {
2560 paths.push((repo_path, ix));
2561 ix += 1;
2562 }
2563 if paths.is_empty() {
2564 None
2565 } else {
2566 Some((repo, paths))
2567 }
2568 });
2569 tasks.push_back(task);
2570 }
2571
2572 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2573 let mut path_was_used = vec![false; entries.len()];
2574 let tasks = tasks.collect::<Vec<_>>().await;
2575 // The repositories were queued in reverse path order, so more deeply nested (more specific) repositories are processed first.
2576 // We always want to assign a path to its innermost repository.
2577 for t in tasks {
2578 let Some((repo, paths)) = t else {
2579 continue;
2580 };
2581 let entry = paths_by_git_repo.entry(repo).or_default();
2582 for (repo_path, ix) in paths {
2583 if path_was_used[ix] {
2584 continue;
2585 }
2586 path_was_used[ix] = true;
2587 entry.push(repo_path);
2588 }
2589 }
2590
2591 paths_by_git_repo
2592 })
2593 }
2594}
2595
2596impl BufferGitState {
2597 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2598 Self {
2599 unstaged_diff: Default::default(),
2600 uncommitted_diff: Default::default(),
2601 recalculate_diff_task: Default::default(),
2602 language: Default::default(),
2603 language_registry: Default::default(),
2604 recalculating_tx: postage::watch::channel_with(false).0,
2605 hunk_staging_operation_count: 0,
2606 hunk_staging_operation_count_as_of_write: 0,
2607 head_text: Default::default(),
2608 index_text: Default::default(),
2609 head_changed: Default::default(),
2610 index_changed: Default::default(),
2611 language_changed: Default::default(),
2612 conflict_updated_futures: Default::default(),
2613 conflict_set: Default::default(),
2614 reparse_conflict_markers_task: Default::default(),
2615 }
2616 }
2617
2618 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2619 self.language = buffer.read(cx).language().cloned();
2620 self.language_changed = true;
2621 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2622 }
2623
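/// Re-parses conflict markers in `buffer` on the background executor and, when this
/// buffer's [`ConflictSet`] previously had a conflict, replaces its snapshot with the
/// newly parsed regions. The returned receiver fires once the update has been applied;
/// if there is no live conflict set (or it had no conflict), the sender is simply dropped.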
2624 fn reparse_conflict_markers(
2625 &mut self,
2626 buffer: text::BufferSnapshot,
2627 cx: &mut Context<Self>,
2628 ) -> oneshot::Receiver<()> {
2629 let (tx, rx) = oneshot::channel();
2630
2631 let Some(conflict_set) = self
2632 .conflict_set
2633 .as_ref()
2634 .and_then(|conflict_set| conflict_set.upgrade())
2635 else {
2636 return rx;
2637 };
2638
2639 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2640 if conflict_set.has_conflict {
2641 Some(conflict_set.snapshot())
2642 } else {
2643 None
2644 }
2645 });
2646
2647 if let Some(old_snapshot) = old_snapshot {
2648 self.conflict_updated_futures.push(tx);
2649 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2650 let (snapshot, changed_range) = cx
2651 .background_spawn(async move {
2652 let new_snapshot = ConflictSet::parse(&buffer);
2653 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2654 (new_snapshot, changed_range)
2655 })
2656 .await;
2657 this.update(cx, |this, cx| {
2658 if let Some(conflict_set) = &this.conflict_set {
2659 conflict_set
2660 .update(cx, |conflict_set, cx| {
2661 conflict_set.set_snapshot(snapshot, changed_range, cx);
2662 })
2663 .ok();
2664 }
2665 let futures = std::mem::take(&mut this.conflict_updated_futures);
2666 for tx in futures {
2667 tx.send(()).ok();
2668 }
2669 })
2670 }))
2671 }
2672
2673 rx
2674 }
2675
2676 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2677 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2678 }
2679
2680 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2681 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2682 }
2683
2684 fn handle_base_texts_updated(
2685 &mut self,
2686 buffer: text::BufferSnapshot,
2687 message: proto::UpdateDiffBases,
2688 cx: &mut Context<Self>,
2689 ) {
2690 use proto::update_diff_bases::Mode;
2691
2692 let Some(mode) = Mode::from_i32(message.mode) else {
2693 return;
2694 };
2695
2696 let diff_bases_change = match mode {
2697 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2698 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2699 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2700 Mode::IndexAndHead => DiffBasesChange::SetEach {
2701 index: message.staged_text,
2702 head: message.committed_text,
2703 },
2704 };
2705
2706 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2707 }
2708
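/// If a diff recalculation is currently in progress, returns a future that resolves
/// once `recalculating_tx` flips back to `false`; otherwise returns `None`.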
2709 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2710 if *self.recalculating_tx.borrow() {
2711 let mut rx = self.recalculating_tx.subscribe();
2712 Some(async move {
2713 loop {
2714 let is_recalculating = rx.recv().await;
2715 if is_recalculating != Some(true) {
2716 break;
2717 }
2718 }
2719 })
2720 } else {
2721 None
2722 }
2723 }
2724
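/// Applies a `DiffBasesChange` to the cached index and HEAD base texts, normalizing
/// line endings, and then triggers a diff recalculation. `None` leaves the base texts
/// untouched but still recalculates against the current buffer contents.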
2725 fn diff_bases_changed(
2726 &mut self,
2727 buffer: text::BufferSnapshot,
2728 diff_bases_change: Option<DiffBasesChange>,
2729 cx: &mut Context<Self>,
2730 ) {
2731 match diff_bases_change {
2732 Some(DiffBasesChange::SetIndex(index)) => {
2733 self.index_text = index.map(|mut index| {
2734 text::LineEnding::normalize(&mut index);
2735 Arc::new(index)
2736 });
2737 self.index_changed = true;
2738 }
2739 Some(DiffBasesChange::SetHead(head)) => {
2740 self.head_text = head.map(|mut head| {
2741 text::LineEnding::normalize(&mut head);
2742 Arc::new(head)
2743 });
2744 self.head_changed = true;
2745 }
2746 Some(DiffBasesChange::SetBoth(text)) => {
2747 let text = text.map(|mut text| {
2748 text::LineEnding::normalize(&mut text);
2749 Arc::new(text)
2750 });
2751 self.head_text = text.clone();
2752 self.index_text = text;
2753 self.head_changed = true;
2754 self.index_changed = true;
2755 }
2756 Some(DiffBasesChange::SetEach { index, head }) => {
2757 self.index_text = index.map(|mut index| {
2758 text::LineEnding::normalize(&mut index);
2759 Arc::new(index)
2760 });
2761 self.index_changed = true;
2762 self.head_text = head.map(|mut head| {
2763 text::LineEnding::normalize(&mut head);
2764 Arc::new(head)
2765 });
2766 self.head_changed = true;
2767 }
2768 None => {}
2769 }
2770
2771 self.recalculate_diffs(buffer, cx)
2772 }
2773
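/// Recomputes this buffer's diffs on a spawned task: the unstaged diff is updated
/// against the cached index text and the uncommitted diff against the cached HEAD
/// text (reusing the unstaged result when the two base texts are the same `Arc`).
/// If additional hunk staging operations started after this recalculation was kicked
/// off, the results are discarded; a later recalculation will run once the index has
/// settled.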
2774 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2775 *self.recalculating_tx.borrow_mut() = true;
2776
2777 let language = self.language.clone();
2778 let language_registry = self.language_registry.clone();
2779 let unstaged_diff = self.unstaged_diff();
2780 let uncommitted_diff = self.uncommitted_diff();
2781 let head = self.head_text.clone();
2782 let index = self.index_text.clone();
2783 let index_changed = self.index_changed;
2784 let head_changed = self.head_changed;
2785 let language_changed = self.language_changed;
2786 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2787 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2788 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2789 (None, None) => true,
2790 _ => false,
2791 };
2792 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2793 log::debug!(
2794 "start recalculating diffs for buffer {}",
2795 buffer.remote_id()
2796 );
2797
2798 let mut new_unstaged_diff = None;
2799 if let Some(unstaged_diff) = &unstaged_diff {
2800 new_unstaged_diff = Some(
2801 BufferDiff::update_diff(
2802 unstaged_diff.clone(),
2803 buffer.clone(),
2804 index,
2805 index_changed,
2806 language_changed,
2807 language.clone(),
2808 language_registry.clone(),
2809 cx,
2810 )
2811 .await?,
2812 );
2813 }
2814
2815 let mut new_uncommitted_diff = None;
2816 if let Some(uncommitted_diff) = &uncommitted_diff {
2817 new_uncommitted_diff = if index_matches_head {
2818 new_unstaged_diff.clone()
2819 } else {
2820 Some(
2821 BufferDiff::update_diff(
2822 uncommitted_diff.clone(),
2823 buffer.clone(),
2824 head,
2825 head_changed,
2826 language_changed,
2827 language.clone(),
2828 language_registry.clone(),
2829 cx,
2830 )
2831 .await?,
2832 )
2833 }
2834 }
2835
2836 let cancel = this.update(cx, |this, _| {
2837 // This checks whether all pending stage/unstage operations
2838 // have quiesced (i.e. both the corresponding write and the
2839 // read of that write have completed). If not, then we cancel
2840 // this recalculation attempt to avoid invalidating pending
2841 // state too quickly; another recalculation will come along
2842 // later and clear the pending state once the state of the index has settled.
2843 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2844 *this.recalculating_tx.borrow_mut() = false;
2845 true
2846 } else {
2847 false
2848 }
2849 })?;
2850 if cancel {
2851 log::debug!(
2852 concat!(
2853 "aborting diff recalculation for buffer {} ",
2854 "due to subsequent hunk operations",
2855 ),
2856 buffer.remote_id()
2857 );
2858 return Ok(());
2859 }
2860
2861 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2862 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2863 {
2864 unstaged_diff.update(cx, |diff, cx| {
2865 if language_changed {
2866 diff.language_changed(cx);
2867 }
2868 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2869 })?
2870 } else {
2871 None
2872 };
2873
2874 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2875 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2876 {
2877 uncommitted_diff.update(cx, |diff, cx| {
2878 if language_changed {
2879 diff.language_changed(cx);
2880 }
2881 diff.set_snapshot_with_secondary(
2882 new_uncommitted_diff,
2883 &buffer,
2884 unstaged_changed_range,
2885 true,
2886 cx,
2887 );
2888 })?;
2889 }
2890
2891 log::debug!(
2892 "finished recalculating diffs for buffer {}",
2893 buffer.remote_id()
2894 );
2895
2896 if let Some(this) = this.upgrade() {
2897 this.update(cx, |this, _| {
2898 this.index_changed = false;
2899 this.head_changed = false;
2900 this.language_changed = false;
2901 *this.recalculating_tx.borrow_mut() = false;
2902 })?;
2903 }
2904
2905 Ok(())
2906 }));
2907 }
2908}
2909
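/// Builds an [`AskPassDelegate`] that forwards askpass prompts to the downstream
/// client via `proto::AskPassRequest`, converting the reply into an
/// [`EncryptedPassword`] for the waiting git operation and zeroizing the raw response
/// afterwards.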
2910fn make_remote_delegate(
2911 this: Entity<GitStore>,
2912 project_id: u64,
2913 repository_id: RepositoryId,
2914 askpass_id: u64,
2915 cx: &mut AsyncApp,
2916) -> AskPassDelegate {
2917 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2918 this.update(cx, |this, cx| {
2919 let Some((client, _)) = this.downstream_client() else {
2920 return;
2921 };
2922 let response = client.request(proto::AskPassRequest {
2923 project_id,
2924 repository_id: repository_id.to_proto(),
2925 askpass_id,
2926 prompt,
2927 });
2928 cx.spawn(async move |_, _| {
2929 let mut response = response.await?.response;
2930 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2931 .ok();
2932 response.zeroize();
2933 anyhow::Ok(())
2934 })
2935 .detach_and_log_err(cx);
2936 })
2937 .log_err();
2938 })
2939}
2940
2941impl RepositoryId {
2942 pub fn to_proto(self) -> u64 {
2943 self.0
2944 }
2945
2946 pub fn from_proto(id: u64) -> Self {
2947 RepositoryId(id)
2948 }
2949}
2950
2951impl RepositorySnapshot {
2952 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2953 Self {
2954 id,
2955 statuses_by_path: Default::default(),
2956 pending_ops_by_path: Default::default(),
2957 work_directory_abs_path,
2958 branch: None,
2959 head_commit: None,
2960 scan_id: 0,
2961 merge: Default::default(),
2962 remote_origin_url: None,
2963 remote_upstream_url: None,
2964 stash_entries: Default::default(),
2965 path_style,
2966 }
2967 }
2968
2969 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2970 proto::UpdateRepository {
2971 branch_summary: self.branch.as_ref().map(branch_to_proto),
2972 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2973 updated_statuses: self
2974 .statuses_by_path
2975 .iter()
2976 .map(|entry| entry.to_proto())
2977 .collect(),
2978 removed_statuses: Default::default(),
2979 current_merge_conflicts: self
2980 .merge
2981 .conflicted_paths
2982 .iter()
2983 .map(|repo_path| repo_path.to_proto())
2984 .collect(),
2985 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2986 project_id,
2987 id: self.id.to_proto(),
2988 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2989 entry_ids: vec![self.id.to_proto()],
2990 scan_id: self.scan_id,
2991 is_last_update: true,
2992 stash_entries: self
2993 .stash_entries
2994 .entries
2995 .iter()
2996 .map(stash_to_proto)
2997 .collect(),
2998 }
2999 }
3000
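/// Builds the incremental `proto::UpdateRepository` describing how this snapshot
/// differs from `old`, using a two-pointer walk over both path-ordered status trees:
/// new or changed entries go into `updated_statuses`, and paths present only in `old`
/// go into `removed_statuses`.
///
/// For example (hypothetical paths): if `old` contains `a.rs` and `b.rs` while `self`
/// contains an unchanged `a.rs` and a new `c.rs`, the update lists `c.rs` as updated
/// and `b.rs` as removed.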
3001 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3002 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3003 let mut removed_statuses: Vec<String> = Vec::new();
3004
3005 let mut new_statuses = self.statuses_by_path.iter().peekable();
3006 let mut old_statuses = old.statuses_by_path.iter().peekable();
3007
3008 let mut current_new_entry = new_statuses.next();
3009 let mut current_old_entry = old_statuses.next();
3010 loop {
3011 match (current_new_entry, current_old_entry) {
3012 (Some(new_entry), Some(old_entry)) => {
3013 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3014 Ordering::Less => {
3015 updated_statuses.push(new_entry.to_proto());
3016 current_new_entry = new_statuses.next();
3017 }
3018 Ordering::Equal => {
3019 if new_entry.status != old_entry.status {
3020 updated_statuses.push(new_entry.to_proto());
3021 }
3022 current_old_entry = old_statuses.next();
3023 current_new_entry = new_statuses.next();
3024 }
3025 Ordering::Greater => {
3026 removed_statuses.push(old_entry.repo_path.to_proto());
3027 current_old_entry = old_statuses.next();
3028 }
3029 }
3030 }
3031 (None, Some(old_entry)) => {
3032 removed_statuses.push(old_entry.repo_path.to_proto());
3033 current_old_entry = old_statuses.next();
3034 }
3035 (Some(new_entry), None) => {
3036 updated_statuses.push(new_entry.to_proto());
3037 current_new_entry = new_statuses.next();
3038 }
3039 (None, None) => break,
3040 }
3041 }
3042
3043 proto::UpdateRepository {
3044 branch_summary: self.branch.as_ref().map(branch_to_proto),
3045 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3046 updated_statuses,
3047 removed_statuses,
3048 current_merge_conflicts: self
3049 .merge
3050 .conflicted_paths
3051 .iter()
3052 .map(|path| path.to_proto())
3053 .collect(),
3054 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3055 project_id,
3056 id: self.id.to_proto(),
3057 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3058 entry_ids: vec![],
3059 scan_id: self.scan_id,
3060 is_last_update: true,
3061 stash_entries: self
3062 .stash_entries
3063 .entries
3064 .iter()
3065 .map(stash_to_proto)
3066 .collect(),
3067 }
3068 }
3069
3070 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3071 self.statuses_by_path.iter().cloned()
3072 }
3073
3074 pub fn status_summary(&self) -> GitSummary {
3075 self.statuses_by_path.summary().item_summary
3076 }
3077
3078 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3079 self.statuses_by_path
3080 .get(&PathKey(path.0.clone()), ())
3081 .cloned()
3082 }
3083
3084 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3085 self.pending_ops_by_path
3086 .get(&PathKey(path.0.clone()), ())
3087 .cloned()
3088 }
3089
3090 pub fn new_pending_op(&self, git_status: pending_op::GitStatus) -> PendingOp {
3091 let id = self.pending_ops_by_path.summary().item_summary.max_id + 1;
3092 PendingOp {
3093 id,
3094 git_status,
3095 job_status: pending_op::JobStatus::Started,
3096 }
3097 }
3098
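/// Converts an absolute path into a path relative to this repository's work directory,
/// returning `None` if the path lies outside of it. For example (hypothetical), with a
/// work directory of `/work/app`, `/work/app/src/lib.rs` maps to the repo path
/// `src/lib.rs`.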
3099 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3100 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3101 }
3102
3103 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3104 self.path_style
3105 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3106 .unwrap()
3107 .into()
3108 }
3109
3110 #[inline]
3111 fn abs_path_to_repo_path_inner(
3112 work_directory_abs_path: &Path,
3113 abs_path: &Path,
3114 path_style: PathStyle,
3115 ) -> Option<RepoPath> {
3116 abs_path
3117 .strip_prefix(&work_directory_abs_path)
3118 .ok()
3119 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3120 }
3121
3122 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3123 self.merge.conflicted_paths.contains(repo_path)
3124 }
3125
3126 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3127 let had_conflict_on_last_merge_head_change =
3128 self.merge.conflicted_paths.contains(repo_path);
3129 let has_conflict_currently = self
3130 .status_for_path(repo_path)
3131 .is_some_and(|entry| entry.status.is_conflicted());
3132 had_conflict_on_last_merge_head_change || has_conflict_currently
3133 }
3134
3135 /// This is the name that will be displayed in the repository selector for this repository.
3136 pub fn display_name(&self) -> SharedString {
3137 self.work_directory_abs_path
3138 .file_name()
3139 .unwrap_or_default()
3140 .to_string_lossy()
3141 .to_string()
3142 .into()
3143 }
3144}
3145
3146pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3147 proto::StashEntry {
3148 oid: entry.oid.as_bytes().to_vec(),
3149 message: entry.message.clone(),
3150 branch: entry.branch.clone(),
3151 index: entry.index as u64,
3152 timestamp: entry.timestamp,
3153 }
3154}
3155
3156pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3157 Ok(StashEntry {
3158 oid: Oid::from_bytes(&entry.oid)?,
3159 message: entry.message.clone(),
3160 index: entry.index as usize,
3161 branch: entry.branch.clone(),
3162 timestamp: entry.timestamp,
3163 })
3164}
3165
3166impl MergeDetails {
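/// Reloads merge state from the repository: fetches the merge message, batch-resolves
/// MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD, REVERT_HEAD and APPLY_HEAD, and
/// recomputes the conflicted path set only when those heads differ from the previous
/// snapshot. Returns the new details along with a flag indicating whether the merge
/// heads are considered to have changed.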
3167 async fn load(
3168 backend: &Arc<dyn GitRepository>,
3169 status: &SumTree<StatusEntry>,
3170 prev_snapshot: &RepositorySnapshot,
3171 ) -> Result<(MergeDetails, bool)> {
3172 log::debug!("load merge details");
3173 let message = backend.merge_message().await;
3174 let heads = backend
3175 .revparse_batch(vec![
3176 "MERGE_HEAD".into(),
3177 "CHERRY_PICK_HEAD".into(),
3178 "REBASE_HEAD".into(),
3179 "REVERT_HEAD".into(),
3180 "APPLY_HEAD".into(),
3181 ])
3182 .await
3183 .log_err()
3184 .unwrap_or_default()
3185 .into_iter()
3186 .map(|opt| opt.map(SharedString::from))
3187 .collect::<Vec<_>>();
3188 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3189 let conflicted_paths = if merge_heads_changed {
3190 let current_conflicted_paths = TreeSet::from_ordered_entries(
3191 status
3192 .iter()
3193 .filter(|entry| entry.status.is_conflicted())
3194 .map(|entry| entry.repo_path.clone()),
3195 );
3196
3197 // It can happen that we run a scan while a lengthy merge is in progress
3198 // that will eventually produce conflicts, but before `git status` reports
3199 // them. Since, for the moment, we only track the merge heads for the
3200 // purpose of detecting conflicts, don't update this state until some
3201 // conflicts actually appear.
3202 if heads.iter().any(Option::is_some)
3203 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3204 && current_conflicted_paths.is_empty()
3205 {
3206 log::debug!("not updating merge heads because no conflicts found");
3207 return Ok((
3208 MergeDetails {
3209 message: message.map(SharedString::from),
3210 ..prev_snapshot.merge.clone()
3211 },
3212 false,
3213 ));
3214 }
3215
3216 current_conflicted_paths
3217 } else {
3218 prev_snapshot.merge.conflicted_paths.clone()
3219 };
3220 let details = MergeDetails {
3221 conflicted_paths,
3222 message: message.map(SharedString::from),
3223 heads,
3224 };
3225 Ok((details, merge_heads_changed))
3226 }
3227}
3228
3229impl Repository {
3230 pub fn snapshot(&self) -> RepositorySnapshot {
3231 self.snapshot.clone()
3232 }
3233
3234 fn local(
3235 id: RepositoryId,
3236 work_directory_abs_path: Arc<Path>,
3237 dot_git_abs_path: Arc<Path>,
3238 repository_dir_abs_path: Arc<Path>,
3239 common_dir_abs_path: Arc<Path>,
3240 project_environment: WeakEntity<ProjectEnvironment>,
3241 fs: Arc<dyn Fs>,
3242 git_store: WeakEntity<GitStore>,
3243 cx: &mut Context<Self>,
3244 ) -> Self {
3245 let snapshot =
3246 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3247 Repository {
3248 this: cx.weak_entity(),
3249 git_store,
3250 snapshot,
3251 commit_message_buffer: None,
3252 askpass_delegates: Default::default(),
3253 paths_needing_status_update: Default::default(),
3254 latest_askpass_id: 0,
3255 job_sender: Repository::spawn_local_git_worker(
3256 work_directory_abs_path,
3257 dot_git_abs_path,
3258 repository_dir_abs_path,
3259 common_dir_abs_path,
3260 project_environment,
3261 fs,
3262 cx,
3263 ),
3264 job_id: 0,
3265 active_jobs: Default::default(),
3266 }
3267 }
3268
3269 fn remote(
3270 id: RepositoryId,
3271 work_directory_abs_path: Arc<Path>,
3272 path_style: PathStyle,
3273 project_id: ProjectId,
3274 client: AnyProtoClient,
3275 git_store: WeakEntity<GitStore>,
3276 cx: &mut Context<Self>,
3277 ) -> Self {
3278 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3279 Self {
3280 this: cx.weak_entity(),
3281 snapshot,
3282 commit_message_buffer: None,
3283 git_store,
3284 paths_needing_status_update: Default::default(),
3285 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3286 askpass_delegates: Default::default(),
3287 latest_askpass_id: 0,
3288 active_jobs: Default::default(),
3289 job_id: 0,
3290 }
3291 }
3292
3293 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3294 self.git_store.upgrade()
3295 }
3296
3297 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3298 let this = cx.weak_entity();
3299 let git_store = self.git_store.clone();
3300 let _ = self.send_keyed_job(
3301 Some(GitJobKey::ReloadBufferDiffBases),
3302 None,
3303 |state, mut cx| async move {
3304 let RepositoryState::Local { backend, .. } = state else {
3305 log::error!("tried to recompute diffs for a non-local repository");
3306 return Ok(());
3307 };
3308
3309 let Some(this) = this.upgrade() else {
3310 return Ok(());
3311 };
3312
3313 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3314 git_store.update(cx, |git_store, cx| {
3315 git_store
3316 .diffs
3317 .iter()
3318 .filter_map(|(buffer_id, diff_state)| {
3319 let buffer_store = git_store.buffer_store.read(cx);
3320 let buffer = buffer_store.get(*buffer_id)?;
3321 let file = File::from_dyn(buffer.read(cx).file())?;
3322 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3323 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3324 log::debug!(
3325 "start reload diff bases for repo path {}",
3326 repo_path.as_unix_str()
3327 );
3328 diff_state.update(cx, |diff_state, _| {
3329 let has_unstaged_diff = diff_state
3330 .unstaged_diff
3331 .as_ref()
3332 .is_some_and(|diff| diff.is_upgradable());
3333 let has_uncommitted_diff = diff_state
3334 .uncommitted_diff
3335 .as_ref()
3336 .is_some_and(|set| set.is_upgradable());
3337
3338 Some((
3339 buffer,
3340 repo_path,
3341 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3342 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3343 ))
3344 })
3345 })
3346 .collect::<Vec<_>>()
3347 })
3348 })??;
3349
3350 let buffer_diff_base_changes = cx
3351 .background_spawn(async move {
3352 let mut changes = Vec::new();
3353 for (buffer, repo_path, current_index_text, current_head_text) in
3354 &repo_diff_state_updates
3355 {
3356 let index_text = if current_index_text.is_some() {
3357 backend.load_index_text(repo_path.clone()).await
3358 } else {
3359 None
3360 };
3361 let head_text = if current_head_text.is_some() {
3362 backend.load_committed_text(repo_path.clone()).await
3363 } else {
3364 None
3365 };
3366
3367 let change =
3368 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3369 (Some(current_index), Some(current_head)) => {
3370 let index_changed =
3371 index_text.as_ref() != current_index.as_deref();
3372 let head_changed =
3373 head_text.as_ref() != current_head.as_deref();
3374 if index_changed && head_changed {
3375 if index_text == head_text {
3376 Some(DiffBasesChange::SetBoth(head_text))
3377 } else {
3378 Some(DiffBasesChange::SetEach {
3379 index: index_text,
3380 head: head_text,
3381 })
3382 }
3383 } else if index_changed {
3384 Some(DiffBasesChange::SetIndex(index_text))
3385 } else if head_changed {
3386 Some(DiffBasesChange::SetHead(head_text))
3387 } else {
3388 None
3389 }
3390 }
3391 (Some(current_index), None) => {
3392 let index_changed =
3393 index_text.as_ref() != current_index.as_deref();
3394 index_changed
3395 .then_some(DiffBasesChange::SetIndex(index_text))
3396 }
3397 (None, Some(current_head)) => {
3398 let head_changed =
3399 head_text.as_ref() != current_head.as_deref();
3400 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3401 }
3402 (None, None) => None,
3403 };
3404
3405 changes.push((buffer.clone(), change))
3406 }
3407 changes
3408 })
3409 .await;
3410
3411 git_store.update(&mut cx, |git_store, cx| {
3412 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3413 let buffer_snapshot = buffer.read(cx).text_snapshot();
3414 let buffer_id = buffer_snapshot.remote_id();
3415 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3416 continue;
3417 };
3418
3419 let downstream_client = git_store.downstream_client();
3420 diff_state.update(cx, |diff_state, cx| {
3421 use proto::update_diff_bases::Mode;
3422
3423 if let Some((diff_bases_change, (client, project_id))) =
3424 diff_bases_change.clone().zip(downstream_client)
3425 {
3426 let (staged_text, committed_text, mode) = match diff_bases_change {
3427 DiffBasesChange::SetIndex(index) => {
3428 (index, None, Mode::IndexOnly)
3429 }
3430 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3431 DiffBasesChange::SetEach { index, head } => {
3432 (index, head, Mode::IndexAndHead)
3433 }
3434 DiffBasesChange::SetBoth(text) => {
3435 (None, text, Mode::IndexMatchesHead)
3436 }
3437 };
3438 client
3439 .send(proto::UpdateDiffBases {
3440 project_id: project_id.to_proto(),
3441 buffer_id: buffer_id.to_proto(),
3442 staged_text,
3443 committed_text,
3444 mode: mode as i32,
3445 })
3446 .log_err();
3447 }
3448
3449 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3450 });
3451 }
3452 })
3453 },
3454 );
3455 }
3456
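/// Enqueues a job on this repository's git worker and returns a receiver for its
/// result. When `status` is provided it is recorded as an active-job message (with a
/// start time) for the duration of the job. A minimal sketch of the call shape used
/// throughout this file (illustrative only):
///
/// ```ignore
/// let rx = repository.send_job(Some("git fetch".into()), move |state, _cx| async move {
///     match state {
///         RepositoryState::Local { backend, .. } => { /* call into the local backend */ }
///         RepositoryState::Remote { project_id, client } => { /* forward an RPC request */ }
///     }
/// });
/// ```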
3457 pub fn send_job<F, Fut, R>(
3458 &mut self,
3459 status: Option<SharedString>,
3460 job: F,
3461 ) -> oneshot::Receiver<R>
3462 where
3463 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3464 Fut: Future<Output = R> + 'static,
3465 R: Send + 'static,
3466 {
3467 self.send_keyed_job(None, status, job)
3468 }
3469
3470 fn send_keyed_job<F, Fut, R>(
3471 &mut self,
3472 key: Option<GitJobKey>,
3473 status: Option<SharedString>,
3474 job: F,
3475 ) -> oneshot::Receiver<R>
3476 where
3477 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3478 Fut: Future<Output = R> + 'static,
3479 R: Send + 'static,
3480 {
3481 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3482 let job_id = post_inc(&mut self.job_id);
3483 let this = self.this.clone();
3484 self.job_sender
3485 .unbounded_send(GitJob {
3486 key,
3487 job: Box::new(move |state, cx: &mut AsyncApp| {
3488 let job = job(state, cx.clone());
3489 cx.spawn(async move |cx| {
3490 if let Some(s) = status.clone() {
3491 this.update(cx, |this, cx| {
3492 this.active_jobs.insert(
3493 job_id,
3494 JobInfo {
3495 start: Instant::now(),
3496 message: s.clone(),
3497 },
3498 );
3499
3500 cx.notify();
3501 })
3502 .ok();
3503 }
3504 let result = job.await;
3505
3506 this.update(cx, |this, cx| {
3507 this.active_jobs.remove(&job_id);
3508 cx.notify();
3509 })
3510 .ok();
3511
3512 result_tx.send(result).ok();
3513 })
3514 }),
3515 })
3516 .ok();
3517 result_rx
3518 }
3519
3520 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3521 let Some(git_store) = self.git_store.upgrade() else {
3522 return;
3523 };
3524 let entity = cx.entity();
3525 git_store.update(cx, |git_store, cx| {
3526 let Some((&id, _)) = git_store
3527 .repositories
3528 .iter()
3529 .find(|(_, handle)| *handle == &entity)
3530 else {
3531 return;
3532 };
3533 git_store.active_repo_id = Some(id);
3534 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3535 });
3536 }
3537
3538 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3539 self.snapshot.status()
3540 }
3541
3542 pub fn cached_stash(&self) -> GitStash {
3543 self.snapshot.stash_entries.clone()
3544 }
3545
3546 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3547 let git_store = self.git_store.upgrade()?;
3548 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3549 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3550 let abs_path = SanitizedPath::new(&abs_path);
3551 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3552 Some(ProjectPath {
3553 worktree_id: worktree.read(cx).id(),
3554 path: relative_path,
3555 })
3556 }
3557
3558 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3559 let git_store = self.git_store.upgrade()?;
3560 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3561 let abs_path = worktree_store.absolutize(path, cx)?;
3562 self.snapshot.abs_path_to_repo_path(&abs_path)
3563 }
3564
3565 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3566 other
3567 .read(cx)
3568 .snapshot
3569 .work_directory_abs_path
3570 .starts_with(&self.snapshot.work_directory_abs_path)
3571 }
3572
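/// Returns this repository's commit message buffer, creating and caching it on first
/// use. Local repositories create the buffer through the `BufferStore`; remote
/// repositories request it over RPC, wait for the remote buffer, and then apply the
/// "Git Commit" language if a language registry was provided.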
3573 pub fn open_commit_buffer(
3574 &mut self,
3575 languages: Option<Arc<LanguageRegistry>>,
3576 buffer_store: Entity<BufferStore>,
3577 cx: &mut Context<Self>,
3578 ) -> Task<Result<Entity<Buffer>>> {
3579 let id = self.id;
3580 if let Some(buffer) = self.commit_message_buffer.clone() {
3581 return Task::ready(Ok(buffer));
3582 }
3583 let this = cx.weak_entity();
3584
3585 let rx = self.send_job(None, move |state, mut cx| async move {
3586 let Some(this) = this.upgrade() else {
3587 bail!("git store was dropped");
3588 };
3589 match state {
3590 RepositoryState::Local { .. } => {
3591 this.update(&mut cx, |_, cx| {
3592 Self::open_local_commit_buffer(languages, buffer_store, cx)
3593 })?
3594 .await
3595 }
3596 RepositoryState::Remote { project_id, client } => {
3597 let request = client.request(proto::OpenCommitMessageBuffer {
3598 project_id: project_id.0,
3599 repository_id: id.to_proto(),
3600 });
3601 let response = request.await.context("requesting to open commit buffer")?;
3602 let buffer_id = BufferId::new(response.buffer_id)?;
3603 let buffer = buffer_store
3604 .update(&mut cx, |buffer_store, cx| {
3605 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3606 })?
3607 .await?;
3608 if let Some(language_registry) = languages {
3609 let git_commit_language =
3610 language_registry.language_for_name("Git Commit").await?;
3611 buffer.update(&mut cx, |buffer, cx| {
3612 buffer.set_language(Some(git_commit_language), cx);
3613 })?;
3614 }
3615 this.update(&mut cx, |this, _| {
3616 this.commit_message_buffer = Some(buffer.clone());
3617 })?;
3618 Ok(buffer)
3619 }
3620 }
3621 });
3622
3623 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3624 }
3625
3626 fn open_local_commit_buffer(
3627 language_registry: Option<Arc<LanguageRegistry>>,
3628 buffer_store: Entity<BufferStore>,
3629 cx: &mut Context<Self>,
3630 ) -> Task<Result<Entity<Buffer>>> {
3631 cx.spawn(async move |repository, cx| {
3632 let buffer = buffer_store
3633 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3634 .await?;
3635
3636 if let Some(language_registry) = language_registry {
3637 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3638 buffer.update(cx, |buffer, cx| {
3639 buffer.set_language(Some(git_commit_language), cx);
3640 })?;
3641 }
3642
3643 repository.update(cx, |repository, _| {
3644 repository.commit_message_buffer = Some(buffer.clone());
3645 })?;
3646 Ok(buffer)
3647 })
3648 }
3649
3650 pub fn checkout_files(
3651 &mut self,
3652 commit: &str,
3653 paths: Vec<RepoPath>,
3654 _cx: &mut App,
3655 ) -> oneshot::Receiver<Result<()>> {
3656 let commit = commit.to_string();
3657 let id = self.id;
3658
3666 self.send_job(
3667 Some(format!("git checkout {}", commit).into()),
3668 move |git_repo, _| async move {
3669 match git_repo {
3670 RepositoryState::Local {
3671 backend,
3672 environment,
3673 ..
3674 } => {
3675 backend
3676 .checkout_files(commit, paths, environment.clone())
3677 .await
3678 }
3679 RepositoryState::Remote { project_id, client } => {
3680 client
3681 .request(proto::GitCheckoutFiles {
3682 project_id: project_id.0,
3683 repository_id: id.to_proto(),
3684 commit,
3685 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3686 })
3687 .await?;
3688
3689 Ok(())
3690 }
3691 }
3692 },
3693 )
3694 }
3695
3696 pub fn reset(
3697 &mut self,
3698 commit: String,
3699 reset_mode: ResetMode,
3700 _cx: &mut App,
3701 ) -> oneshot::Receiver<Result<()>> {
3702 let id = self.id;
3703
3704 self.send_job(None, move |git_repo, _| async move {
3705 match git_repo {
3706 RepositoryState::Local {
3707 backend,
3708 environment,
3709 ..
3710 } => backend.reset(commit, reset_mode, environment).await,
3711 RepositoryState::Remote { project_id, client } => {
3712 client
3713 .request(proto::GitReset {
3714 project_id: project_id.0,
3715 repository_id: id.to_proto(),
3716 commit,
3717 mode: match reset_mode {
3718 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3719 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3720 },
3721 })
3722 .await?;
3723
3724 Ok(())
3725 }
3726 }
3727 })
3728 }
3729
3730 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3731 let id = self.id;
3732 self.send_job(None, move |git_repo, _cx| async move {
3733 match git_repo {
3734 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3735 RepositoryState::Remote { project_id, client } => {
3736 let resp = client
3737 .request(proto::GitShow {
3738 project_id: project_id.0,
3739 repository_id: id.to_proto(),
3740 commit,
3741 })
3742 .await?;
3743
3744 Ok(CommitDetails {
3745 sha: resp.sha.into(),
3746 message: resp.message.into(),
3747 commit_timestamp: resp.commit_timestamp,
3748 author_email: resp.author_email.into(),
3749 author_name: resp.author_name.into(),
3750 })
3751 }
3752 }
3753 })
3754 }
3755
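/// Loads the diff introduced by `commit` as a list of changed files, each with its old
/// and new text.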
3756 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3757 let id = self.id;
3758 self.send_job(None, move |git_repo, cx| async move {
3759 match git_repo {
3760 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3761 RepositoryState::Remote {
3762 client, project_id, ..
3763 } => {
3764 let response = client
3765 .request(proto::LoadCommitDiff {
3766 project_id: project_id.0,
3767 repository_id: id.to_proto(),
3768 commit,
3769 })
3770 .await?;
3771 Ok(CommitDiff {
3772 files: response
3773 .files
3774 .into_iter()
3775 .map(|file| {
3776 Ok(CommitFile {
3777 path: RepoPath::from_proto(&file.path)?,
3778 old_text: file.old_text,
3779 new_text: file.new_text,
3780 })
3781 })
3782 .collect::<Result<Vec<_>>>()?,
3783 })
3784 }
3785 }
3786 })
3787 }
3788
3789 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3790 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3791 }
3792
3793 fn save_buffers<'a>(
3794 &self,
3795 entries: impl IntoIterator<Item = &'a RepoPath>,
3796 cx: &mut Context<Self>,
3797 ) -> Vec<Task<anyhow::Result<()>>> {
3798 let mut save_futures = Vec::new();
3799 if let Some(buffer_store) = self.buffer_store(cx) {
3800 buffer_store.update(cx, |buffer_store, cx| {
3801 for path in entries {
3802 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3803 continue;
3804 };
3805 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3806 && buffer
3807 .read(cx)
3808 .file()
3809 .is_some_and(|file| file.disk_state().exists())
3810 && buffer.read(cx).has_unsaved_edits()
3811 {
3812 save_futures.push(buffer_store.save_buffer(buffer, cx));
3813 }
3814 }
3815 })
3816 }
3817 save_futures
3818 }
3819
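/// Stages the given paths, first saving any open buffers with unsaved edits at those
/// paths. The operation is recorded as a pending staging op in the snapshot, and is keyed
/// per path when a single entry is staged so redundant index writes can be coalesced.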
3820 pub fn stage_entries(
3821 &mut self,
3822 entries: Vec<RepoPath>,
3823 cx: &mut Context<Self>,
3824 ) -> Task<anyhow::Result<()>> {
3825 if entries.is_empty() {
3826 return Task::ready(Ok(()));
3827 }
3828 let id = self.id;
3829 let save_tasks = self.save_buffers(&entries, cx);
3830 let paths = entries
3831 .iter()
3832 .map(|p| p.as_unix_str())
3833 .collect::<Vec<_>>()
3834 .join(" ");
3835 let status = format!("git add {paths}");
3836 let job_key = match entries.len() {
3837 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3838 _ => None,
3839 };
3840
3841 self.spawn_job_with_tracking(
3842 entries.clone(),
3843 pending_op::GitStatus::Staged,
3844 cx,
3845 async move |this, cx| {
3846 for save_task in save_tasks {
3847 save_task.await?;
3848 }
3849
3850 this.update(cx, |this, _| {
3851 this.send_keyed_job(
3852 job_key,
3853 Some(status.into()),
3854 move |git_repo, _cx| async move {
3855 match git_repo {
3856 RepositoryState::Local {
3857 backend,
3858 environment,
3859 ..
3860 } => backend.stage_paths(entries, environment.clone()).await,
3861 RepositoryState::Remote { project_id, client } => {
3862 client
3863 .request(proto::Stage {
3864 project_id: project_id.0,
3865 repository_id: id.to_proto(),
3866 paths: entries
3867 .into_iter()
3868 .map(|repo_path| repo_path.to_proto())
3869 .collect(),
3870 })
3871 .await
3872 .context("sending stage request")?;
3873
3874 Ok(())
3875 }
3876 }
3877 },
3878 )
3879 })?
3880 .await?
3881 },
3882 )
3883 }
3884
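/// Unstages the given paths, saving any dirty open buffers for them first; like staging,
/// the operation is tracked as a pending op and keyed per path when only one entry is
/// involved.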
3885 pub fn unstage_entries(
3886 &mut self,
3887 entries: Vec<RepoPath>,
3888 cx: &mut Context<Self>,
3889 ) -> Task<anyhow::Result<()>> {
3890 if entries.is_empty() {
3891 return Task::ready(Ok(()));
3892 }
3893 let id = self.id;
3894 let save_tasks = self.save_buffers(&entries, cx);
3895 let paths = entries
3896 .iter()
3897 .map(|p| p.as_unix_str())
3898 .collect::<Vec<_>>()
3899 .join(" ");
3900 let status = format!("git reset {paths}");
3901 let job_key = match entries.len() {
3902 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3903 _ => None,
3904 };
3905
3906 self.spawn_job_with_tracking(
3907 entries.clone(),
3908 pending_op::GitStatus::Unstaged,
3909 cx,
3910 async move |this, cx| {
3911 for save_task in save_tasks {
3912 save_task.await?;
3913 }
3914
3915 this.update(cx, |this, _| {
3916 this.send_keyed_job(
3917 job_key,
3918 Some(status.into()),
3919 move |git_repo, _cx| async move {
3920 match git_repo {
3921 RepositoryState::Local {
3922 backend,
3923 environment,
3924 ..
3925 } => backend.unstage_paths(entries, environment).await,
3926 RepositoryState::Remote { project_id, client } => {
3927 client
3928 .request(proto::Unstage {
3929 project_id: project_id.0,
3930 repository_id: id.to_proto(),
3931 paths: entries
3932 .into_iter()
3933 .map(|repo_path| repo_path.to_proto())
3934 .collect(),
3935 })
3936 .await
3937 .context("sending unstage request")?;
3938
3939 Ok(())
3940 }
3941 }
3942 },
3943 )
3944 })?
3945 .await?
3946 },
3947 )
3948 }
3949
3950 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3951 let to_stage = self
3952 .cached_status()
3953 .filter(|entry| !entry.status.staging().is_fully_staged())
3954 .map(|entry| entry.repo_path)
3955 .collect();
3956 self.stage_entries(to_stage, cx)
3957 }
3958
3959 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3960 let to_unstage = self
3961 .cached_status()
3962 .filter(|entry| entry.status.staging().has_staged())
3963 .map(|entry| entry.repo_path)
3964 .collect();
3965 self.unstage_entries(to_unstage, cx)
3966 }
3967
3968 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3969 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3970
3971 self.stash_entries(to_stash, cx)
3972 }
3973
3974 pub fn stash_entries(
3975 &mut self,
3976 entries: Vec<RepoPath>,
3977 cx: &mut Context<Self>,
3978 ) -> Task<anyhow::Result<()>> {
3979 let id = self.id;
3980
3981 cx.spawn(async move |this, cx| {
3982 this.update(cx, |this, _| {
3983 this.send_job(None, move |git_repo, _cx| async move {
3984 match git_repo {
3985 RepositoryState::Local {
3986 backend,
3987 environment,
3988 ..
3989 } => backend.stash_paths(entries, environment).await,
3990 RepositoryState::Remote { project_id, client } => {
3991 client
3992 .request(proto::Stash {
3993 project_id: project_id.0,
3994 repository_id: id.to_proto(),
3995 paths: entries
3996 .into_iter()
3997 .map(|repo_path| repo_path.to_proto())
3998 .collect(),
3999 })
4000 .await
4001 .context("sending stash request")?;
4002 Ok(())
4003 }
4004 }
4005 })
4006 })?
4007 .await??;
4008 Ok(())
4009 })
4010 }
4011
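/// Applies and removes the stash entry at `index`, or the most recent entry when `index`
/// is None.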
4012 pub fn stash_pop(
4013 &mut self,
4014 index: Option<usize>,
4015 cx: &mut Context<Self>,
4016 ) -> Task<anyhow::Result<()>> {
4017 let id = self.id;
4018 cx.spawn(async move |this, cx| {
4019 this.update(cx, |this, _| {
4020 this.send_job(None, move |git_repo, _cx| async move {
4021 match git_repo {
4022 RepositoryState::Local {
4023 backend,
4024 environment,
4025 ..
4026 } => backend.stash_pop(index, environment).await,
4027 RepositoryState::Remote { project_id, client } => {
4028 client
4029 .request(proto::StashPop {
4030 project_id: project_id.0,
4031 repository_id: id.to_proto(),
4032 stash_index: index.map(|i| i as u64),
4033 })
4034 .await
4035 .context("sending stash pop request")?;
4036 Ok(())
4037 }
4038 }
4039 })
4040 })?
4041 .await??;
4042 Ok(())
4043 })
4044 }
4045
4046 pub fn stash_apply(
4047 &mut self,
4048 index: Option<usize>,
4049 cx: &mut Context<Self>,
4050 ) -> Task<anyhow::Result<()>> {
4051 let id = self.id;
4052 cx.spawn(async move |this, cx| {
4053 this.update(cx, |this, _| {
4054 this.send_job(None, move |git_repo, _cx| async move {
4055 match git_repo {
4056 RepositoryState::Local {
4057 backend,
4058 environment,
4059 ..
4060 } => backend.stash_apply(index, environment).await,
4061 RepositoryState::Remote { project_id, client } => {
4062 client
4063 .request(proto::StashApply {
4064 project_id: project_id.0,
4065 repository_id: id.to_proto(),
4066 stash_index: index.map(|i| i as u64),
4067 })
4068 .await
4069 .context("sending stash apply request")?;
4070 Ok(())
4071 }
4072 }
4073 })
4074 })?
4075 .await??;
4076 Ok(())
4077 })
4078 }
4079
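/// Drops the stash entry at `index` (the most recent when None). For local repositories
/// the stash list is reloaded afterwards and the updated snapshot is forwarded to any
/// downstream clients.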
4080 pub fn stash_drop(
4081 &mut self,
4082 index: Option<usize>,
4083 cx: &mut Context<Self>,
4084 ) -> oneshot::Receiver<anyhow::Result<()>> {
4085 let id = self.id;
4086 let updates_tx = self
4087 .git_store()
4088 .and_then(|git_store| match &git_store.read(cx).state {
4089 GitStoreState::Local { downstream, .. } => downstream
4090 .as_ref()
4091 .map(|downstream| downstream.updates_tx.clone()),
4092 _ => None,
4093 });
4094 let this = cx.weak_entity();
4095 self.send_job(None, move |git_repo, mut cx| async move {
4096 match git_repo {
4097 RepositoryState::Local {
4098 backend,
4099 environment,
4100 ..
4101 } => {
4102 // TODO: it would be nice not to have to refresh the stash list manually here.
4103 let result = backend.stash_drop(index, environment).await;
4104 if result.is_ok()
4105 && let Ok(stash_entries) = backend.stash_entries().await
4106 {
4107 let snapshot = this.update(&mut cx, |this, cx| {
4108 this.snapshot.stash_entries = stash_entries;
4109 cx.emit(RepositoryEvent::StashEntriesChanged);
4110 this.snapshot.clone()
4111 })?;
4112 if let Some(updates_tx) = updates_tx {
4113 updates_tx
4114 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4115 .ok();
4116 }
4117 }
4118
4119 result
4120 }
4121 RepositoryState::Remote { project_id, client } => {
4122 client
4123 .request(proto::StashDrop {
4124 project_id: project_id.0,
4125 repository_id: id.to_proto(),
4126 stash_index: index.map(|i| i as u64),
4127 })
4128 .await
4129 .context("sending stash drop request")?;
4130 Ok(())
4131 }
4132 }
4133 })
4134 }
4135
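/// Creates a commit with the given message, optionally overriding the author name and
/// email and honoring the amend/signoff options.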
4136 pub fn commit(
4137 &mut self,
4138 message: SharedString,
4139 name_and_email: Option<(SharedString, SharedString)>,
4140 options: CommitOptions,
4141 _cx: &mut App,
4142 ) -> oneshot::Receiver<Result<()>> {
4143 let id = self.id;
4144
4145 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4146 match git_repo {
4147 RepositoryState::Local {
4148 backend,
4149 environment,
4150 ..
4151 } => {
4152 backend
4153 .commit(message, name_and_email, options, environment)
4154 .await
4155 }
4156 RepositoryState::Remote { project_id, client } => {
4157 let (name, email) = name_and_email.unzip();
4158 client
4159 .request(proto::Commit {
4160 project_id: project_id.0,
4161 repository_id: id.to_proto(),
4162 message: String::from(message),
4163 name: name.map(String::from),
4164 email: email.map(String::from),
4165 options: Some(proto::commit::CommitOptions {
4166 amend: options.amend,
4167 signoff: options.signoff,
4168 }),
4169 })
4170 .await
4171 .context("sending commit request")?;
4172
4173 Ok(())
4174 }
4175 }
4176 })
4177 }
4178
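/// Runs `git fetch` with the given options, using `askpass` to answer credential
/// prompts; for remote projects the delegate is registered under a fresh id for the
/// duration of the request.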
4179 pub fn fetch(
4180 &mut self,
4181 fetch_options: FetchOptions,
4182 askpass: AskPassDelegate,
4183 _cx: &mut App,
4184 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4185 let askpass_delegates = self.askpass_delegates.clone();
4186 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4187 let id = self.id;
4188
4189 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4190 match git_repo {
4191 RepositoryState::Local {
4192 backend,
4193 environment,
4194 ..
4195 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4196 RepositoryState::Remote { project_id, client } => {
4197 askpass_delegates.lock().insert(askpass_id, askpass);
4198 let _defer = util::defer(|| {
4199 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4200 debug_assert!(askpass_delegate.is_some());
4201 });
4202
4203 let response = client
4204 .request(proto::Fetch {
4205 project_id: project_id.0,
4206 repository_id: id.to_proto(),
4207 askpass_id,
4208 remote: fetch_options.to_proto(),
4209 })
4210 .await
4211 .context("sending fetch request")?;
4212
4213 Ok(RemoteCommandOutput {
4214 stdout: response.stdout,
4215 stderr: response.stderr,
4216 })
4217 }
4218 }
4219 })
4220 }
4221
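/// Pushes `branch` to `remote`, optionally with `--set-upstream` or `--force-with-lease`,
/// using `askpass` for credential prompts. After a successful local push the head branch
/// is re-read and the updated snapshot is sent to any downstream clients.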
4222 pub fn push(
4223 &mut self,
4224 branch: SharedString,
4225 remote: SharedString,
4226 options: Option<PushOptions>,
4227 askpass: AskPassDelegate,
4228 cx: &mut Context<Self>,
4229 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4230 let askpass_delegates = self.askpass_delegates.clone();
4231 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4232 let id = self.id;
4233
4234 let args = options
4235 .map(|option| match option {
4236 PushOptions::SetUpstream => " --set-upstream",
4237 PushOptions::Force => " --force-with-lease",
4238 })
4239 .unwrap_or("");
4240
4241 let updates_tx = self
4242 .git_store()
4243 .and_then(|git_store| match &git_store.read(cx).state {
4244 GitStoreState::Local { downstream, .. } => downstream
4245 .as_ref()
4246 .map(|downstream| downstream.updates_tx.clone()),
4247 _ => None,
4248 });
4249
4250 let this = cx.weak_entity();
4251 self.send_job(
4252 Some(format!("git push{} {} {}", args, remote, branch).into()),
4253 move |git_repo, mut cx| async move {
4254 match git_repo {
4255 RepositoryState::Local {
4256 backend,
4257 environment,
4258 ..
4259 } => {
4260 let result = backend
4261 .push(
4262 branch.to_string(),
4263 remote.to_string(),
4264 options,
4265 askpass,
4266 environment.clone(),
4267 cx.clone(),
4268 )
4269 .await;
4270 // TODO: it would be nice not to have to refresh the branch state manually after pushing.
4271 if result.is_ok() {
4272 let branches = backend.branches().await?;
4273 let branch = branches.into_iter().find(|branch| branch.is_head);
4274 log::info!("head branch after scan is {branch:?}");
4275 let snapshot = this.update(&mut cx, |this, cx| {
4276 this.snapshot.branch = branch;
4277 cx.emit(RepositoryEvent::BranchChanged);
4278 this.snapshot.clone()
4279 })?;
4280 if let Some(updates_tx) = updates_tx {
4281 updates_tx
4282 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4283 .ok();
4284 }
4285 }
4286 result
4287 }
4288 RepositoryState::Remote { project_id, client } => {
4289 askpass_delegates.lock().insert(askpass_id, askpass);
4290 let _defer = util::defer(|| {
4291 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4292 debug_assert!(askpass_delegate.is_some());
4293 });
4294 let response = client
4295 .request(proto::Push {
4296 project_id: project_id.0,
4297 repository_id: id.to_proto(),
4298 askpass_id,
4299 branch_name: branch.to_string(),
4300 remote_name: remote.to_string(),
4301 options: options.map(|options| match options {
4302 PushOptions::Force => proto::push::PushOptions::Force,
4303 PushOptions::SetUpstream => {
4304 proto::push::PushOptions::SetUpstream
4305 }
4306 }
4307 as i32),
4308 })
4309 .await
4310 .context("sending push request")?;
4311
4312 Ok(RemoteCommandOutput {
4313 stdout: response.stdout,
4314 stderr: response.stderr,
4315 })
4316 }
4317 }
4318 },
4319 )
4320 }
4321
4322 pub fn pull(
4323 &mut self,
4324 branch: SharedString,
4325 remote: SharedString,
4326 askpass: AskPassDelegate,
4327 _cx: &mut App,
4328 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4329 let askpass_delegates = self.askpass_delegates.clone();
4330 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4331 let id = self.id;
4332
4333 self.send_job(
4334 Some(format!("git pull {} {}", remote, branch).into()),
4335 move |git_repo, cx| async move {
4336 match git_repo {
4337 RepositoryState::Local {
4338 backend,
4339 environment,
4340 ..
4341 } => {
4342 backend
4343 .pull(
4344 branch.to_string(),
4345 remote.to_string(),
4346 askpass,
4347 environment.clone(),
4348 cx,
4349 )
4350 .await
4351 }
4352 RepositoryState::Remote { project_id, client } => {
4353 askpass_delegates.lock().insert(askpass_id, askpass);
4354 let _defer = util::defer(|| {
4355 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4356 debug_assert!(askpass_delegate.is_some());
4357 });
4358 let response = client
4359 .request(proto::Pull {
4360 project_id: project_id.0,
4361 repository_id: id.to_proto(),
4362 askpass_id,
4363 branch_name: branch.to_string(),
4364 remote_name: remote.to_string(),
4365 })
4366 .await
4367 .context("sending pull request")?;
4368
4369 Ok(RemoteCommandOutput {
4370 stdout: response.stdout,
4371 stderr: response.stderr,
4372 })
4373 }
4374 }
4375 },
4376 )
4377 }
4378
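/// Queues a keyed job that writes `content`, if any, to the index entry for `path`.
/// When `hunk_staging_operation_count` is provided, it is recorded on the buffer's diff
/// state once the write completes.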
4379 fn spawn_set_index_text_job(
4380 &mut self,
4381 path: RepoPath,
4382 content: Option<String>,
4383 hunk_staging_operation_count: Option<usize>,
4384 cx: &mut Context<Self>,
4385 ) -> oneshot::Receiver<anyhow::Result<()>> {
4386 let id = self.id;
4387 let this = cx.weak_entity();
4388 let git_store = self.git_store.clone();
4389 self.send_keyed_job(
4390 Some(GitJobKey::WriteIndex(path.clone())),
4391 None,
4392 move |git_repo, mut cx| async move {
4393 log::debug!(
4394 "start updating index text for buffer {}",
4395 path.as_unix_str()
4396 );
4397 match git_repo {
4398 RepositoryState::Local {
4399 backend,
4400 environment,
4401 ..
4402 } => {
4403 backend
4404 .set_index_text(path.clone(), content, environment.clone())
4405 .await?;
4406 }
4407 RepositoryState::Remote { project_id, client } => {
4408 client
4409 .request(proto::SetIndexText {
4410 project_id: project_id.0,
4411 repository_id: id.to_proto(),
4412 path: path.to_proto(),
4413 text: content,
4414 })
4415 .await?;
4416 }
4417 }
4418 log::debug!(
4419 "finish updating index text for buffer {}",
4420 path.as_unix_str()
4421 );
4422
4423 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4424 let project_path = this
4425 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4426 .ok()
4427 .flatten();
4428 git_store.update(&mut cx, |git_store, cx| {
4429 let buffer_id = git_store
4430 .buffer_store
4431 .read(cx)
4432 .get_by_path(&project_path?)?
4433 .read(cx)
4434 .remote_id();
4435 let diff_state = git_store.diffs.get(&buffer_id)?;
4436 diff_state.update(cx, |diff_state, _| {
4437 diff_state.hunk_staging_operation_count_as_of_write =
4438 hunk_staging_operation_count;
4439 });
4440 Some(())
4441 })?;
4442 }
4443 Ok(())
4444 },
4445 )
4446 }
4447
4448 pub fn get_remotes(
4449 &mut self,
4450 branch_name: Option<String>,
4451 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4452 let id = self.id;
4453 self.send_job(None, move |repo, _cx| async move {
4454 match repo {
4455 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4456 RepositoryState::Remote { project_id, client } => {
4457 let response = client
4458 .request(proto::GetRemotes {
4459 project_id: project_id.0,
4460 repository_id: id.to_proto(),
4461 branch_name,
4462 })
4463 .await?;
4464
4465 let remotes = response
4466 .remotes
4467 .into_iter()
4468 .map(|remote| git::repository::Remote {
4469 name: remote.name.into(),
4470 })
4471 .collect();
4472
4473 Ok(remotes)
4474 }
4475 }
4476 })
4477 }
4478
4479 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4480 let id = self.id;
4481 self.send_job(None, move |repo, _| async move {
4482 match repo {
4483 RepositoryState::Local { backend, .. } => backend.branches().await,
4484 RepositoryState::Remote { project_id, client } => {
4485 let response = client
4486 .request(proto::GitGetBranches {
4487 project_id: project_id.0,
4488 repository_id: id.to_proto(),
4489 })
4490 .await?;
4491
4492 let branches = response
4493 .branches
4494 .into_iter()
4495 .map(|branch| proto_to_branch(&branch))
4496 .collect();
4497
4498 Ok(branches)
4499 }
4500 }
4501 })
4502 }
4503
4504 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4505 let id = self.id;
4506 self.send_job(None, move |repo, _| async move {
4507 match repo {
4508 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4509 RepositoryState::Remote { project_id, client } => {
4510 let response = client
4511 .request(proto::GitGetWorktrees {
4512 project_id: project_id.0,
4513 repository_id: id.to_proto(),
4514 })
4515 .await?;
4516
4517 let worktrees = response
4518 .worktrees
4519 .into_iter()
4520 .map(|worktree| proto_to_worktree(&worktree))
4521 .collect();
4522
4523 Ok(worktrees)
4524 }
4525 }
4526 })
4527 }
4528
4529 pub fn create_worktree(
4530 &mut self,
4531 name: String,
4532 path: PathBuf,
4533 commit: Option<String>,
4534 ) -> oneshot::Receiver<Result<()>> {
4535 let id = self.id;
4536 self.send_job(
4537 Some("git worktree add".into()),
4538 move |repo, _cx| async move {
4539 match repo {
4540 RepositoryState::Local { backend, .. } => {
4541 backend.create_worktree(name, path, commit).await
4542 }
4543 RepositoryState::Remote { project_id, client } => {
4544 client
4545 .request(proto::GitCreateWorktree {
4546 project_id: project_id.0,
4547 repository_id: id.to_proto(),
4548 name,
4549 directory: path.to_string_lossy().to_string(),
4550 commit,
4551 })
4552 .await?;
4553
4554 Ok(())
4555 }
4556 }
4557 },
4558 )
4559 }
4560
4561 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4562 let id = self.id;
4563 self.send_job(None, move |repo, _| async move {
4564 match repo {
4565 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4566 RepositoryState::Remote { project_id, client } => {
4567 let response = client
4568 .request(proto::GetDefaultBranch {
4569 project_id: project_id.0,
4570 repository_id: id.to_proto(),
4571 })
4572 .await?;
4573
4574 anyhow::Ok(response.branch.map(SharedString::from))
4575 }
4576 }
4577 })
4578 }
4579
4580 pub fn diff_tree(
4581 &mut self,
4582 diff_type: DiffTreeType,
4583 _cx: &App,
4584 ) -> oneshot::Receiver<Result<TreeDiff>> {
4585 let repository_id = self.snapshot.id;
4586 self.send_job(None, move |repo, _cx| async move {
4587 match repo {
4588 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4589 RepositoryState::Remote { client, project_id } => {
4590 let response = client
4591 .request(proto::GetTreeDiff {
4592 project_id: project_id.0,
4593 repository_id: repository_id.0,
4594 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4595 base: diff_type.base().to_string(),
4596 head: diff_type.head().to_string(),
4597 })
4598 .await?;
4599
4600 let entries = response
4601 .entries
4602 .into_iter()
4603 .filter_map(|entry| {
4604 let status = match entry.status() {
4605 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4606 proto::tree_diff_status::Status::Modified => {
4607 TreeDiffStatus::Modified {
4608 old: git::Oid::from_str(
4609 &entry.oid.context("missing oid").log_err()?,
4610 )
4611 .log_err()?,
4612 }
4613 }
4614 proto::tree_diff_status::Status::Deleted => {
4615 TreeDiffStatus::Deleted {
4616 old: git::Oid::from_str(
4617 &entry.oid.context("missing oid").log_err()?,
4618 )
4619 .log_err()?,
4620 }
4621 }
4622 };
4623 Some((
4624 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4625 status,
4626 ))
4627 })
4628 .collect();
4629
4630 Ok(TreeDiff { entries })
4631 }
4632 }
4633 })
4634 }
4635
4636 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4637 let id = self.id;
4638 self.send_job(None, move |repo, _cx| async move {
4639 match repo {
4640 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4641 RepositoryState::Remote { project_id, client } => {
4642 let response = client
4643 .request(proto::GitDiff {
4644 project_id: project_id.0,
4645 repository_id: id.to_proto(),
4646 diff_type: match diff_type {
4647 DiffType::HeadToIndex => {
4648 proto::git_diff::DiffType::HeadToIndex.into()
4649 }
4650 DiffType::HeadToWorktree => {
4651 proto::git_diff::DiffType::HeadToWorktree.into()
4652 }
4653 },
4654 })
4655 .await?;
4656
4657 Ok(response.diff)
4658 }
4659 }
4660 })
4661 }
4662
4663 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4664 let id = self.id;
4665 self.send_job(
4666 Some(format!("git switch -c {branch_name}").into()),
4667 move |repo, _cx| async move {
4668 match repo {
4669 RepositoryState::Local { backend, .. } => {
4670 backend.create_branch(branch_name).await
4671 }
4672 RepositoryState::Remote { project_id, client } => {
4673 client
4674 .request(proto::GitCreateBranch {
4675 project_id: project_id.0,
4676 repository_id: id.to_proto(),
4677 branch_name,
4678 })
4679 .await?;
4680
4681 Ok(())
4682 }
4683 }
4684 },
4685 )
4686 }
4687
4688 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4689 let id = self.id;
4690 self.send_job(
4691 Some(format!("git switch {branch_name}").into()),
4692 move |repo, _cx| async move {
4693 match repo {
4694 RepositoryState::Local { backend, .. } => {
4695 backend.change_branch(branch_name).await
4696 }
4697 RepositoryState::Remote { project_id, client } => {
4698 client
4699 .request(proto::GitChangeBranch {
4700 project_id: project_id.0,
4701 repository_id: id.to_proto(),
4702 branch_name,
4703 })
4704 .await?;
4705
4706 Ok(())
4707 }
4708 }
4709 },
4710 )
4711 }
4712
4713 pub fn rename_branch(
4714 &mut self,
4715 branch: String,
4716 new_name: String,
4717 ) -> oneshot::Receiver<Result<()>> {
4718 let id = self.id;
4719 self.send_job(
4720 Some(format!("git branch -m {branch} {new_name}").into()),
4721 move |repo, _cx| async move {
4722 match repo {
4723 RepositoryState::Local { backend, .. } => {
4724 backend.rename_branch(branch, new_name).await
4725 }
4726 RepositoryState::Remote { project_id, client } => {
4727 client
4728 .request(proto::GitRenameBranch {
4729 project_id: project_id.0,
4730 repository_id: id.to_proto(),
4731 branch,
4732 new_name,
4733 })
4734 .await?;
4735
4736 Ok(())
4737 }
4738 }
4739 },
4740 )
4741 }
4742
4743 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4744 let id = self.id;
4745 self.send_job(None, move |repo, _cx| async move {
4746 match repo {
4747 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4748 RepositoryState::Remote { project_id, client } => {
4749 let response = client
4750 .request(proto::CheckForPushedCommits {
4751 project_id: project_id.0,
4752 repository_id: id.to_proto(),
4753 })
4754 .await?;
4755
4756 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4757
4758 Ok(branches)
4759 }
4760 }
4761 })
4762 }
4763
4764 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4765 self.send_job(None, |repo, _cx| async move {
4766 match repo {
4767 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4768 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4769 }
4770 })
4771 }
4772
4773 pub fn restore_checkpoint(
4774 &mut self,
4775 checkpoint: GitRepositoryCheckpoint,
4776 ) -> oneshot::Receiver<Result<()>> {
4777 self.send_job(None, move |repo, _cx| async move {
4778 match repo {
4779 RepositoryState::Local { backend, .. } => {
4780 backend.restore_checkpoint(checkpoint).await
4781 }
4782 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4783 }
4784 })
4785 }
4786
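/// Applies an `UpdateRepository` message from the host to this replica's snapshot
/// (branch, head commit, merge state, stash entries, and per-path statuses), emitting
/// change events for the branch, stash, and status updates.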
4787 pub(crate) fn apply_remote_update(
4788 &mut self,
4789 update: proto::UpdateRepository,
4790 cx: &mut Context<Self>,
4791 ) -> Result<()> {
4792 let conflicted_paths = TreeSet::from_ordered_entries(
4793 update
4794 .current_merge_conflicts
4795 .into_iter()
4796 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4797 );
4798 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4799 let new_head_commit = update
4800 .head_commit_details
4801 .as_ref()
4802 .map(proto_to_commit_details);
4803 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4804 cx.emit(RepositoryEvent::BranchChanged)
4805 }
4806 self.snapshot.branch = new_branch;
4807 self.snapshot.head_commit = new_head_commit;
4808
4809 self.snapshot.merge.conflicted_paths = conflicted_paths;
4810 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4811 let new_stash_entries = GitStash {
4812 entries: update
4813 .stash_entries
4814 .iter()
4815 .filter_map(|entry| proto_to_stash(entry).ok())
4816 .collect(),
4817 };
4818 if self.snapshot.stash_entries != new_stash_entries {
4819 cx.emit(RepositoryEvent::StashEntriesChanged)
4820 }
4821 self.snapshot.stash_entries = new_stash_entries;
4822
4823 let edits = update
4824 .removed_statuses
4825 .into_iter()
4826 .filter_map(|path| {
4827 Some(sum_tree::Edit::Remove(PathKey(
4828 RelPath::from_proto(&path).log_err()?,
4829 )))
4830 })
4831 .chain(
4832 update
4833 .updated_statuses
4834 .into_iter()
4835 .filter_map(|updated_status| {
4836 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4837 }),
4838 )
4839 .collect::<Vec<_>>();
4840 if !edits.is_empty() {
4841 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4842 }
4843 self.snapshot.statuses_by_path.edit(edits, ());
4844 if update.is_last_update {
4845 self.snapshot.scan_id = update.scan_id;
4846 }
4847 Ok(())
4848 }
4849
4850 pub fn compare_checkpoints(
4851 &mut self,
4852 left: GitRepositoryCheckpoint,
4853 right: GitRepositoryCheckpoint,
4854 ) -> oneshot::Receiver<Result<bool>> {
4855 self.send_job(None, move |repo, _cx| async move {
4856 match repo {
4857 RepositoryState::Local { backend, .. } => {
4858 backend.compare_checkpoints(left, right).await
4859 }
4860 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4861 }
4862 })
4863 }
4864
4865 pub fn diff_checkpoints(
4866 &mut self,
4867 base_checkpoint: GitRepositoryCheckpoint,
4868 target_checkpoint: GitRepositoryCheckpoint,
4869 ) -> oneshot::Receiver<Result<String>> {
4870 self.send_job(None, move |repo, _cx| async move {
4871 match repo {
4872 RepositoryState::Local { backend, .. } => {
4873 backend
4874 .diff_checkpoints(base_checkpoint, target_checkpoint)
4875 .await
4876 }
4877 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4878 }
4879 })
4880 }
4881
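/// Queues a full git status rescan (keyed so redundant rescans are coalesced). The
/// resulting snapshot replaces the current one, the computed events are emitted, and the
/// snapshot is forwarded downstream when `updates_tx` is present.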
4882 fn schedule_scan(
4883 &mut self,
4884 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4885 cx: &mut Context<Self>,
4886 ) {
4887 let this = cx.weak_entity();
4888 let _ = self.send_keyed_job(
4889 Some(GitJobKey::ReloadGitState),
4890 None,
4891 |state, mut cx| async move {
4892 log::debug!("run scheduled git status scan");
4893
4894 let Some(this) = this.upgrade() else {
4895 return Ok(());
4896 };
4897 let RepositoryState::Local { backend, .. } = state else {
4898 bail!("not a local repository")
4899 };
4900 let (snapshot, events) = this
4901 .update(&mut cx, |this, _| {
4902 this.paths_needing_status_update.clear();
4903 compute_snapshot(
4904 this.id,
4905 this.work_directory_abs_path.clone(),
4906 this.snapshot.clone(),
4907 backend.clone(),
4908 )
4909 })?
4910 .await?;
4911 this.update(&mut cx, |this, cx| {
4912 this.snapshot = snapshot.clone();
4913 for event in events {
4914 cx.emit(event);
4915 }
4916 })?;
4917 if let Some(updates_tx) = updates_tx {
4918 updates_tx
4919 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4920 .ok();
4921 }
4922 Ok(())
4923 },
4924 );
4925 }
4926
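/// Spawns the background worker that services this repository's git jobs against the
/// local filesystem: it resolves the project environment, locates a system `git` binary
/// via that environment's `PATH` (falling back to the default lookup), opens the
/// repository backend, and then drains queued jobs in order.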
4927 fn spawn_local_git_worker(
4928 work_directory_abs_path: Arc<Path>,
4929 dot_git_abs_path: Arc<Path>,
4930 _repository_dir_abs_path: Arc<Path>,
4931 _common_dir_abs_path: Arc<Path>,
4932 project_environment: WeakEntity<ProjectEnvironment>,
4933 fs: Arc<dyn Fs>,
4934 cx: &mut Context<Self>,
4935 ) -> mpsc::UnboundedSender<GitJob> {
4936 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4937
4938 cx.spawn(async move |_, cx| {
4939 let environment = project_environment
4940 .upgrade()
4941 .context("missing project environment")?
4942 .update(cx, |project_environment, cx| {
4943 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4944 })?
4945 .await
4946 .unwrap_or_else(|| {
4947 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4948 HashMap::default()
4949 });
4950 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4951 let backend = cx
4952 .background_spawn(async move {
4953 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4954 .or_else(|| which::which("git").ok());
4955 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4956 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4957 })
4958 .await?;
4959
4960 if let Some(git_hosting_provider_registry) =
4961 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4962 {
4963 git_hosting_providers::register_additional_providers(
4964 git_hosting_provider_registry,
4965 backend.clone(),
4966 );
4967 }
4968
4969 let state = RepositoryState::Local {
4970 backend,
4971 environment: Arc::new(environment),
4972 };
4973 let mut jobs = VecDeque::new();
4974 loop {
4975 while let Ok(Some(next_job)) = job_rx.try_next() {
4976 jobs.push_back(next_job);
4977 }
4978
4979 if let Some(job) = jobs.pop_front() {
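// Coalesce keyed jobs: if a newer job with the same key is already queued, skip this one and let the later job run instead.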
4980 if let Some(current_key) = &job.key
4981 && jobs
4982 .iter()
4983 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4984 {
4985 continue;
4986 }
4987 (job.job)(state.clone(), cx).await;
4988 } else if let Some(job) = job_rx.next().await {
4989 jobs.push_back(job);
4990 } else {
4991 break;
4992 }
4993 }
4994 anyhow::Ok(())
4995 })
4996 .detach_and_log_err(cx);
4997
4998 job_tx
4999 }
5000
5001 fn spawn_remote_git_worker(
5002 project_id: ProjectId,
5003 client: AnyProtoClient,
5004 cx: &mut Context<Self>,
5005 ) -> mpsc::UnboundedSender<GitJob> {
5006 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5007
5008 cx.spawn(async move |_, cx| {
5009 let state = RepositoryState::Remote { project_id, client };
5010 let mut jobs = VecDeque::new();
5011 loop {
5012 while let Ok(Some(next_job)) = job_rx.try_next() {
5013 jobs.push_back(next_job);
5014 }
5015
5016 if let Some(job) = jobs.pop_front() {
5017 if let Some(current_key) = &job.key
5018 && jobs
5019 .iter()
5020 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5021 {
5022 continue;
5023 }
5024 (job.job)(state.clone(), cx).await;
5025 } else if let Some(job) = job_rx.next().await {
5026 jobs.push_back(job);
5027 } else {
5028 break;
5029 }
5030 }
5031 anyhow::Ok(())
5032 })
5033 .detach_and_log_err(cx);
5034
5035 job_tx
5036 }
5037
5038 fn load_staged_text(
5039 &mut self,
5040 buffer_id: BufferId,
5041 repo_path: RepoPath,
5042 cx: &App,
5043 ) -> Task<Result<Option<String>>> {
5044 let rx = self.send_job(None, move |state, _| async move {
5045 match state {
5046 RepositoryState::Local { backend, .. } => {
5047 anyhow::Ok(backend.load_index_text(repo_path).await)
5048 }
5049 RepositoryState::Remote { project_id, client } => {
5050 let response = client
5051 .request(proto::OpenUnstagedDiff {
5052 project_id: project_id.to_proto(),
5053 buffer_id: buffer_id.to_proto(),
5054 })
5055 .await?;
5056 Ok(response.staged_text)
5057 }
5058 }
5059 });
5060 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5061 }
5062
5063 fn load_committed_text(
5064 &mut self,
5065 buffer_id: BufferId,
5066 repo_path: RepoPath,
5067 cx: &App,
5068 ) -> Task<Result<DiffBasesChange>> {
5069 let rx = self.send_job(None, move |state, _| async move {
5070 match state {
5071 RepositoryState::Local { backend, .. } => {
5072 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5073 let staged_text = backend.load_index_text(repo_path).await;
5074 let diff_bases_change = if committed_text == staged_text {
5075 DiffBasesChange::SetBoth(committed_text)
5076 } else {
5077 DiffBasesChange::SetEach {
5078 index: staged_text,
5079 head: committed_text,
5080 }
5081 };
5082 anyhow::Ok(diff_bases_change)
5083 }
5084 RepositoryState::Remote { project_id, client } => {
5085 use proto::open_uncommitted_diff_response::Mode;
5086
5087 let response = client
5088 .request(proto::OpenUncommittedDiff {
5089 project_id: project_id.to_proto(),
5090 buffer_id: buffer_id.to_proto(),
5091 })
5092 .await?;
5093 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5094 let bases = match mode {
5095 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5096 Mode::IndexAndHead => DiffBasesChange::SetEach {
5097 head: response.committed_text,
5098 index: response.staged_text,
5099 },
5100 };
5101 Ok(bases)
5102 }
5103 }
5104 });
5105
5106 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5107 }
5108 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5109 let repository_id = self.snapshot.id;
5110 let rx = self.send_job(None, move |state, _| async move {
5111 match state {
5112 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5113 RepositoryState::Remote { client, project_id } => {
5114 let response = client
5115 .request(proto::GetBlobContent {
5116 project_id: project_id.to_proto(),
5117 repository_id: repository_id.0,
5118 oid: oid.to_string(),
5119 })
5120 .await?;
5121 Ok(response.content)
5122 }
5123 }
5124 });
5125 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5126 }
5127
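/// Records that the given paths may have changed on disk and queues a keyed job that
/// re-runs `git status` for just those paths, applying only the entries that actually
/// differ from the previous snapshot and forwarding the updated snapshot downstream.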
5128 fn paths_changed(
5129 &mut self,
5130 paths: Vec<RepoPath>,
5131 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5132 cx: &mut Context<Self>,
5133 ) {
5134 self.paths_needing_status_update.extend(paths);
5135
5136 let this = cx.weak_entity();
5137 let _ = self.send_keyed_job(
5138 Some(GitJobKey::RefreshStatuses),
5139 None,
5140 |state, mut cx| async move {
5141 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5142 (
5143 this.snapshot.clone(),
5144 mem::take(&mut this.paths_needing_status_update),
5145 )
5146 })?;
5147 let RepositoryState::Local { backend, .. } = state else {
5148 bail!("not a local repository")
5149 };
5150
5151 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5152 if paths.is_empty() {
5153 return Ok(());
5154 }
5155 let statuses = backend.status(&paths).await?;
5156 let stash_entries = backend.stash_entries().await?;
5157
5158 let changed_path_statuses = cx
5159 .background_spawn(async move {
5160 let mut changed_path_statuses = Vec::new();
5161 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5162 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5163
5164 for (repo_path, status) in &*statuses.entries {
5165 changed_paths.remove(repo_path);
5166 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5167 && cursor.item().is_some_and(|entry| entry.status == *status)
5168 {
5169 continue;
5170 }
5171
5172 changed_path_statuses.push(Edit::Insert(StatusEntry {
5173 repo_path: repo_path.clone(),
5174 status: *status,
5175 }));
5176 }
5177 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5178 for path in changed_paths.into_iter() {
5179 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5180 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5181 }
5182 }
5183 changed_path_statuses
5184 })
5185 .await;
5186
5187 this.update(&mut cx, |this, cx| {
5188 if this.snapshot.stash_entries != stash_entries {
5189 cx.emit(RepositoryEvent::StashEntriesChanged);
5190 this.snapshot.stash_entries = stash_entries;
5191 }
5192
5193 if !changed_path_statuses.is_empty() {
5194 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5195 this.snapshot
5196 .statuses_by_path
5197 .edit(changed_path_statuses, ());
5198 this.snapshot.scan_id += 1;
5199 }
5200
5201 if let Some(updates_tx) = updates_tx {
5202 updates_tx
5203 .unbounded_send(DownstreamUpdate::UpdateRepository(
5204 this.snapshot.clone(),
5205 ))
5206 .ok();
5207 }
5208 })
5209 },
5210 );
5211 }
5212
5213 /// Returns info about a currently running git command, if any, including when it started.
5214 pub fn current_job(&self) -> Option<JobInfo> {
5215 self.active_jobs.values().next().cloned()
5216 }
5217
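/// Enqueues a no-op job, returning a receiver that resolves once the jobs queued ahead
/// of it have been handled. Useful for sequencing work after pending git operations; a
/// caller simply awaits the receiver (sketch, assuming `repository: Entity<Repository>`):
///
/// ```ignore
/// let barrier = repository.update(cx, |repo, _| repo.barrier());
/// barrier.await.ok();
/// ```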
5218 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5219 self.send_job(None, |_, _| async {})
5220 }
5221
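/// Runs `f` while recording a pending op (with the intended `git_status`) for each path;
/// when the future resolves, those ops are marked finished or canceled so the snapshot
/// reflects the outcome. Other errors are propagated to the caller unchanged.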
5222 fn spawn_job_with_tracking<AsyncFn>(
5223 &mut self,
5224 paths: Vec<RepoPath>,
5225 git_status: pending_op::GitStatus,
5226 cx: &mut Context<Self>,
5227 f: AsyncFn,
5228 ) -> Task<anyhow::Result<()>>
5229 where
5230 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> anyhow::Result<()> + 'static,
5231 {
5232 let ids = self.new_pending_ops_for_paths(paths, git_status);
5233
5234 cx.spawn(async move |this, cx| {
5235 let job_status = match f(this.clone(), cx).await {
5236 Ok(()) => pending_op::JobStatus::Finished,
5237 Err(err) if err.is::<Canceled>() => pending_op::JobStatus::Canceled,
5238 Err(err) => return Err(err),
5239 };
5240
5241 this.update(cx, |this, _| {
5242 let mut edits = Vec::with_capacity(ids.len());
5243 for (id, entry) in ids {
5244 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5245 if let Some(op) = ops.op_by_id_mut(id) {
5246 op.job_status = job_status;
5247 }
5248 edits.push(sum_tree::Edit::Insert(ops));
5249 }
5250 }
5251 this.snapshot.pending_ops_by_path.edit(edits, ());
5252 })?;
5253
5254 Ok(())
5255 })
5256 }
5257
5258 fn new_pending_ops_for_paths(
5259 &mut self,
5260 paths: Vec<RepoPath>,
5261 git_status: pending_op::GitStatus,
5262 ) -> Vec<(PendingOpId, RepoPath)> {
5263 let mut edits = Vec::with_capacity(paths.len());
5264 let mut ids = Vec::with_capacity(paths.len());
5265 for path in paths {
5266 let op = self.snapshot.new_pending_op(git_status);
5267 let mut ops = self
5268 .snapshot
5269 .pending_ops_for_path(&path)
5270 .unwrap_or_else(|| PendingOps::new(&path));
5271 ops.ops.push(op);
5272 edits.push(sum_tree::Edit::Insert(ops));
5273 ids.push((op.id, path));
5274 }
5275 self.snapshot.pending_ops_by_path.edit(edits, ());
5276 ids
5277 }
5278}
5279
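/// Builds a permalink to a file inside a crate vendored into the Cargo registry `src`
/// directory, reading the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the
/// upstream repository URL and commit sha.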
5280fn get_permalink_in_rust_registry_src(
5281 provider_registry: Arc<GitHostingProviderRegistry>,
5282 path: PathBuf,
5283 selection: Range<u32>,
5284) -> Result<url::Url> {
5285 #[derive(Deserialize)]
5286 struct CargoVcsGit {
5287 sha1: String,
5288 }
5289
5290 #[derive(Deserialize)]
5291 struct CargoVcsInfo {
5292 git: CargoVcsGit,
5293 path_in_vcs: String,
5294 }
5295
5296 #[derive(Deserialize)]
5297 struct CargoPackage {
5298 repository: String,
5299 }
5300
5301 #[derive(Deserialize)]
5302 struct CargoToml {
5303 package: CargoPackage,
5304 }
5305
5306 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5307 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5308 Some((dir, json))
5309 }) else {
5310 bail!("No .cargo_vcs_info.json found in parent directories")
5311 };
5312 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5313 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5314 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5315 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5316 .context("parsing package.repository field of manifest")?;
5317 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5318 let permalink = provider.build_permalink(
5319 remote,
5320 BuildPermalinkParams::new(
5321 &cargo_vcs_info.git.sha1,
5322 &RepoPath(
5323 RelPath::new(&path, PathStyle::local())
5324 .context("invalid path")?
5325 .into_arc(),
5326 ),
5327 Some(selection),
5328 ),
5329 );
5330 Ok(permalink)
5331}
5332
5333fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5334 let Some(blame) = blame else {
5335 return proto::BlameBufferResponse {
5336 blame_response: None,
5337 };
5338 };
5339
5340 let entries = blame
5341 .entries
5342 .into_iter()
5343 .map(|entry| proto::BlameEntry {
5344 sha: entry.sha.as_bytes().into(),
5345 start_line: entry.range.start,
5346 end_line: entry.range.end,
5347 original_line_number: entry.original_line_number,
5348 author: entry.author,
5349 author_mail: entry.author_mail,
5350 author_time: entry.author_time,
5351 author_tz: entry.author_tz,
5352 committer: entry.committer_name,
5353 committer_mail: entry.committer_email,
5354 committer_time: entry.committer_time,
5355 committer_tz: entry.committer_tz,
5356 summary: entry.summary,
5357 previous: entry.previous,
5358 filename: entry.filename,
5359 })
5360 .collect::<Vec<_>>();
5361
5362 let messages = blame
5363 .messages
5364 .into_iter()
5365 .map(|(oid, message)| proto::CommitMessage {
5366 oid: oid.as_bytes().into(),
5367 message,
5368 })
5369 .collect::<Vec<_>>();
5370
5371 proto::BlameBufferResponse {
5372 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5373 entries,
5374 messages,
5375 remote_url: blame.remote_url,
5376 }),
5377 }
5378}
5379
5380fn deserialize_blame_buffer_response(
5381 response: proto::BlameBufferResponse,
5382) -> Option<git::blame::Blame> {
5383 let response = response.blame_response?;
5384 let entries = response
5385 .entries
5386 .into_iter()
5387 .filter_map(|entry| {
5388 Some(git::blame::BlameEntry {
5389 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5390 range: entry.start_line..entry.end_line,
5391 original_line_number: entry.original_line_number,
5392 committer_name: entry.committer,
5393 committer_time: entry.committer_time,
5394 committer_tz: entry.committer_tz,
5395 committer_email: entry.committer_mail,
5396 author: entry.author,
5397 author_mail: entry.author_mail,
5398 author_time: entry.author_time,
5399 author_tz: entry.author_tz,
5400 summary: entry.summary,
5401 previous: entry.previous,
5402 filename: entry.filename,
5403 })
5404 })
5405 .collect::<Vec<_>>();
5406
5407 let messages = response
5408 .messages
5409 .into_iter()
5410 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5411 .collect::<HashMap<_, _>>();
5412
5413 Some(Blame {
5414 entries,
5415 messages,
5416 remote_url: response.remote_url,
5417 })
5418}
5419
5420fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5421 proto::Branch {
5422 is_head: branch.is_head,
5423 ref_name: branch.ref_name.to_string(),
5424 unix_timestamp: branch
5425 .most_recent_commit
5426 .as_ref()
5427 .map(|commit| commit.commit_timestamp as u64),
5428 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5429 ref_name: upstream.ref_name.to_string(),
5430 tracking: upstream
5431 .tracking
5432 .status()
5433 .map(|upstream| proto::UpstreamTracking {
5434 ahead: upstream.ahead as u64,
5435 behind: upstream.behind as u64,
5436 }),
5437 }),
5438 most_recent_commit: branch
5439 .most_recent_commit
5440 .as_ref()
5441 .map(|commit| proto::CommitSummary {
5442 sha: commit.sha.to_string(),
5443 subject: commit.subject.to_string(),
5444 commit_timestamp: commit.commit_timestamp,
5445 author_name: commit.author_name.to_string(),
5446 }),
5447 }
5448}
5449
5450fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5451 proto::Worktree {
5452 path: worktree.path.to_string_lossy().to_string(),
5453 ref_name: worktree.ref_name.to_string(),
5454 sha: worktree.sha.to_string(),
5455 }
5456}
5457
5458fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5459 git::repository::Worktree {
5460 path: PathBuf::from(proto.path.clone()),
5461 ref_name: proto.ref_name.clone().into(),
5462 sha: proto.sha.clone().into(),
5463 }
5464}
5465
5466fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5467 git::repository::Branch {
5468 is_head: proto.is_head,
5469 ref_name: proto.ref_name.clone().into(),
5470 upstream: proto
5471 .upstream
5472 .as_ref()
5473 .map(|upstream| git::repository::Upstream {
5474 ref_name: upstream.ref_name.to_string().into(),
5475 tracking: upstream
5476 .tracking
5477 .as_ref()
5478 .map(|tracking| {
5479 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5480 ahead: tracking.ahead as u32,
5481 behind: tracking.behind as u32,
5482 })
5483 })
5484 .unwrap_or(git::repository::UpstreamTracking::Gone),
5485 }),
5486 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5487 git::repository::CommitSummary {
5488 sha: commit.sha.to_string().into(),
5489 subject: commit.subject.to_string().into(),
5490 commit_timestamp: commit.commit_timestamp,
5491 author_name: commit.author_name.to_string().into(),
5492 has_parent: true,
5493 }
5494 }),
5495 }
5496}
5497
5498fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5499 proto::GitCommitDetails {
5500 sha: commit.sha.to_string(),
5501 message: commit.message.to_string(),
5502 commit_timestamp: commit.commit_timestamp,
5503 author_email: commit.author_email.to_string(),
5504 author_name: commit.author_name.to_string(),
5505 }
5506}
5507
5508fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5509 CommitDetails {
5510 sha: proto.sha.clone().into(),
5511 message: proto.message.clone().into(),
5512 commit_timestamp: proto.commit_timestamp,
5513 author_email: proto.author_email.clone().into(),
5514 author_name: proto.author_name.clone().into(),
5515 }
5516}
5517
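/// Rebuilds a repository snapshot from scratch (branch, head commit, full status, stash
/// entries, merge details, and remote URLs) and returns it along with the events
/// describing what changed relative to `prev_snapshot`.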
5518async fn compute_snapshot(
5519 id: RepositoryId,
5520 work_directory_abs_path: Arc<Path>,
5521 prev_snapshot: RepositorySnapshot,
5522 backend: Arc<dyn GitRepository>,
5523) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5524 let mut events = Vec::new();
5525 let branches = backend.branches().await?;
5526 let branch = branches.into_iter().find(|branch| branch.is_head);
5527 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5528 let stash_entries = backend.stash_entries().await?;
5529 let statuses_by_path = SumTree::from_iter(
5530 statuses
5531 .entries
5532 .iter()
5533 .map(|(repo_path, status)| StatusEntry {
5534 repo_path: repo_path.clone(),
5535 status: *status,
5536 }),
5537 (),
5538 );
5539 let (merge_details, merge_heads_changed) =
5540 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5541 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5542
5543 let pending_ops_by_path = prev_snapshot.pending_ops_by_path.clone();
5544
5545 if merge_heads_changed {
5546 events.push(RepositoryEvent::MergeHeadsChanged);
5547 }
5548
5549 if statuses_by_path != prev_snapshot.statuses_by_path {
5550 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5551 }
5552
5553 // Load the head commit directly; this matters when `branch` is None (detached HEAD).
5554 let head_commit = match backend.head_sha().await {
5555 Some(head_sha) => backend.show(head_sha).await.log_err(),
5556 None => None,
5557 };
5558
5559 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5560 events.push(RepositoryEvent::BranchChanged);
5561 }
5562
5563 // Used by edit prediction data collection
5564 let remote_origin_url = backend.remote_url("origin");
5565 let remote_upstream_url = backend.remote_url("upstream");
5566
5567 let snapshot = RepositorySnapshot {
5568 id,
5569 statuses_by_path,
5570 pending_ops_by_path,
5571 work_directory_abs_path,
5572 path_style: prev_snapshot.path_style,
5573 scan_id: prev_snapshot.scan_id + 1,
5574 branch,
5575 head_commit,
5576 merge: merge_details,
5577 remote_origin_url,
5578 remote_upstream_url,
5579 stash_entries,
5580 };
5581
5582 Ok((snapshot, events))
5583}
5584
5585fn status_from_proto(
5586 simple_status: i32,
5587 status: Option<proto::GitFileStatus>,
5588) -> anyhow::Result<FileStatus> {
5589 use proto::git_file_status::Variant;
5590
5591 let Some(variant) = status.and_then(|status| status.variant) else {
5592 let code = proto::GitStatus::from_i32(simple_status)
5593 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5594 let result = match code {
5595 proto::GitStatus::Added => TrackedStatus {
5596 worktree_status: StatusCode::Added,
5597 index_status: StatusCode::Unmodified,
5598 }
5599 .into(),
5600 proto::GitStatus::Modified => TrackedStatus {
5601 worktree_status: StatusCode::Modified,
5602 index_status: StatusCode::Unmodified,
5603 }
5604 .into(),
5605 proto::GitStatus::Conflict => UnmergedStatus {
5606 first_head: UnmergedStatusCode::Updated,
5607 second_head: UnmergedStatusCode::Updated,
5608 }
5609 .into(),
5610 proto::GitStatus::Deleted => TrackedStatus {
5611 worktree_status: StatusCode::Deleted,
5612 index_status: StatusCode::Unmodified,
5613 }
5614 .into(),
5615 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5616 };
5617 return Ok(result);
5618 };
5619
5620 let result = match variant {
5621 Variant::Untracked(_) => FileStatus::Untracked,
5622 Variant::Ignored(_) => FileStatus::Ignored,
5623 Variant::Unmerged(unmerged) => {
5624 let [first_head, second_head] =
5625 [unmerged.first_head, unmerged.second_head].map(|head| {
5626 let code = proto::GitStatus::from_i32(head)
5627 .with_context(|| format!("Invalid git status code: {head}"))?;
5628 let result = match code {
5629 proto::GitStatus::Added => UnmergedStatusCode::Added,
5630 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5631 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5632 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5633 };
5634 Ok(result)
5635 });
5636 let [first_head, second_head] = [first_head?, second_head?];
5637 UnmergedStatus {
5638 first_head,
5639 second_head,
5640 }
5641 .into()
5642 }
5643 Variant::Tracked(tracked) => {
5644 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5645 .map(|status| {
5646 let code = proto::GitStatus::from_i32(status)
5647 .with_context(|| format!("Invalid git status code: {status}"))?;
5648 let result = match code {
5649 proto::GitStatus::Modified => StatusCode::Modified,
5650 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5651 proto::GitStatus::Added => StatusCode::Added,
5652 proto::GitStatus::Deleted => StatusCode::Deleted,
5653 proto::GitStatus::Renamed => StatusCode::Renamed,
5654 proto::GitStatus::Copied => StatusCode::Copied,
5655 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5656 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5657 };
5658 Ok(result)
5659 });
5660 let [index_status, worktree_status] = [index_status?, worktree_status?];
5661 TrackedStatus {
5662 index_status,
5663 worktree_status,
5664 }
5665 .into()
5666 }
5667 };
5668 Ok(result)
5669}
5670
5671fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5672 use proto::git_file_status::{Tracked, Unmerged, Variant};
5673
5674 let variant = match status {
5675 FileStatus::Untracked => Variant::Untracked(Default::default()),
5676 FileStatus::Ignored => Variant::Ignored(Default::default()),
5677 FileStatus::Unmerged(UnmergedStatus {
5678 first_head,
5679 second_head,
5680 }) => Variant::Unmerged(Unmerged {
5681 first_head: unmerged_status_to_proto(first_head),
5682 second_head: unmerged_status_to_proto(second_head),
5683 }),
5684 FileStatus::Tracked(TrackedStatus {
5685 index_status,
5686 worktree_status,
5687 }) => Variant::Tracked(Tracked {
5688 index_status: tracked_status_to_proto(index_status),
5689 worktree_status: tracked_status_to_proto(worktree_status),
5690 }),
5691 };
5692 proto::GitFileStatus {
5693 variant: Some(variant),
5694 }
5695}
5696
5697fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5698 match code {
5699 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5700 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5701 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5702 }
5703}
5704
5705fn tracked_status_to_proto(code: StatusCode) -> i32 {
5706 match code {
5707 StatusCode::Added => proto::GitStatus::Added as _,
5708 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5709 StatusCode::Modified => proto::GitStatus::Modified as _,
5710 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5711 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5712 StatusCode::Copied => proto::GitStatus::Copied as _,
5713 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5714 }
5715}