1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
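    ///
    /// A minimal illustrative sketch of the staleness check these counters enable
    /// (the surrounding code is hypothetical, not a real call site):
    ///
    /// ```ignore
    /// // Index text read back from disk is only trusted once every staging
    /// // operation that was initiated has also been written out.
    /// let index_read_is_fresh = state.hunk_staging_operation_count_as_of_write
    ///     >= state.hunk_staging_operation_count;
    /// ```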
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
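/// A change to the base texts that buffer diffs are computed against: the index
/// text, the HEAD text, or both.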
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
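/// Which base text a buffer diff is computed against: `Unstaged` compares the
/// buffer with the index (staged text), `Uncommitted` compares it with HEAD.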
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
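        // Collapse the two-column git status into a single "simple" status: report the
        // worktree column unless it is unmodified, in which case fall back to the index column.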
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug, PartialEq, Eq)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305 PathsChanged,
306}
307
308#[derive(Clone, Debug)]
309pub struct JobsUpdated;
310
311#[derive(Debug)]
312pub enum GitStoreEvent {
313 ActiveRepositoryChanged(Option<RepositoryId>),
314 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
315 RepositoryAdded(RepositoryId),
316 RepositoryRemoved(RepositoryId),
317 IndexWriteError(anyhow::Error),
318 JobsUpdated,
319 ConflictsUpdated,
320}
321
322impl EventEmitter<RepositoryEvent> for Repository {}
323impl EventEmitter<JobsUpdated> for Repository {}
324impl EventEmitter<GitStoreEvent> for GitStore {}
325
326pub struct GitJob {
327 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
328 key: Option<GitJobKey>,
329}
330
331#[derive(PartialEq, Eq)]
332enum GitJobKey {
333 WriteIndex(RepoPath),
334 ReloadBufferDiffBases,
335 RefreshStatuses,
336 ReloadGitState,
337}
338
339impl GitStore {
340 pub fn local(
341 worktree_store: &Entity<WorktreeStore>,
342 buffer_store: Entity<BufferStore>,
343 environment: Entity<ProjectEnvironment>,
344 fs: Arc<dyn Fs>,
345 cx: &mut Context<Self>,
346 ) -> Self {
347 Self::new(
348 worktree_store.clone(),
349 buffer_store,
350 GitStoreState::Local {
351 next_repository_id: Arc::new(AtomicU64::new(1)),
352 downstream: None,
353 project_environment: environment,
354 fs,
355 },
356 cx,
357 )
358 }
359
360 pub fn remote(
361 worktree_store: &Entity<WorktreeStore>,
362 buffer_store: Entity<BufferStore>,
363 upstream_client: AnyProtoClient,
364 project_id: u64,
365 cx: &mut Context<Self>,
366 ) -> Self {
367 Self::new(
368 worktree_store.clone(),
369 buffer_store,
370 GitStoreState::Remote {
371 upstream_client,
372 upstream_project_id: project_id,
373 downstream: None,
374 },
375 cx,
376 )
377 }
378
379 fn new(
380 worktree_store: Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 state: GitStoreState,
383 cx: &mut Context<Self>,
384 ) -> Self {
385 let _subscriptions = vec![
386 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
387 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
388 ];
389
390 GitStore {
391 state,
392 buffer_store,
393 worktree_store,
394 repositories: HashMap::default(),
395 active_repo_id: None,
396 _subscriptions,
397 loading_diffs: HashMap::default(),
398 shared_diffs: HashMap::default(),
399 diffs: HashMap::default(),
400 }
401 }
402
403 pub fn init(client: &AnyProtoClient) {
404 client.add_entity_request_handler(Self::handle_get_remotes);
405 client.add_entity_request_handler(Self::handle_get_branches);
406 client.add_entity_request_handler(Self::handle_get_default_branch);
407 client.add_entity_request_handler(Self::handle_change_branch);
408 client.add_entity_request_handler(Self::handle_create_branch);
409 client.add_entity_request_handler(Self::handle_rename_branch);
410 client.add_entity_request_handler(Self::handle_git_init);
411 client.add_entity_request_handler(Self::handle_push);
412 client.add_entity_request_handler(Self::handle_pull);
413 client.add_entity_request_handler(Self::handle_fetch);
414 client.add_entity_request_handler(Self::handle_stage);
415 client.add_entity_request_handler(Self::handle_unstage);
416 client.add_entity_request_handler(Self::handle_stash);
417 client.add_entity_request_handler(Self::handle_stash_pop);
418 client.add_entity_request_handler(Self::handle_stash_apply);
419 client.add_entity_request_handler(Self::handle_stash_drop);
420 client.add_entity_request_handler(Self::handle_commit);
421 client.add_entity_request_handler(Self::handle_reset);
422 client.add_entity_request_handler(Self::handle_show);
423 client.add_entity_request_handler(Self::handle_load_commit_diff);
424 client.add_entity_request_handler(Self::handle_checkout_files);
425 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
426 client.add_entity_request_handler(Self::handle_set_index_text);
427 client.add_entity_request_handler(Self::handle_askpass);
428 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
429 client.add_entity_request_handler(Self::handle_git_diff);
430 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
431 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
432 client.add_entity_message_handler(Self::handle_update_diff_bases);
433 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
434 client.add_entity_request_handler(Self::handle_blame_buffer);
435 client.add_entity_message_handler(Self::handle_update_repository);
436 client.add_entity_message_handler(Self::handle_remove_repository);
437 client.add_entity_request_handler(Self::handle_git_clone);
438 }
439
440 pub fn is_local(&self) -> bool {
441 matches!(self.state, GitStoreState::Local { .. })
442 }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
444 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
445 let id = repo.read(cx).id;
446 if self.active_repo_id != Some(id) {
447 self.active_repo_id = Some(id);
448 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
449 }
450 }
451 }
452
453 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
454 match &mut self.state {
455 GitStoreState::Remote {
456 downstream: downstream_client,
457 ..
458 } => {
459 for repo in self.repositories.values() {
460 let update = repo.read(cx).snapshot.initial_update(project_id);
461 for update in split_repository_update(update) {
462 client.send(update).log_err();
463 }
464 }
465 *downstream_client = Some((client, ProjectId(project_id)));
466 }
467 GitStoreState::Local {
468 downstream: downstream_client,
469 ..
470 } => {
471 let mut snapshots = HashMap::default();
472 let (updates_tx, mut updates_rx) = mpsc::unbounded();
473 for repo in self.repositories.values() {
474 updates_tx
475 .unbounded_send(DownstreamUpdate::UpdateRepository(
476 repo.read(cx).snapshot.clone(),
477 ))
478 .ok();
479 }
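                // Relay repository updates downstream on a background task: repositories we
                // haven't sent yet get a full initial update, while known ones get incremental
                // updates computed against the last snapshot that was sent.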
480 *downstream_client = Some(LocalDownstreamState {
481 client: client.clone(),
482 project_id: ProjectId(project_id),
483 updates_tx,
484 _task: cx.spawn(async move |this, cx| {
485 cx.background_spawn(async move {
486 while let Some(update) = updates_rx.next().await {
487 match update {
488 DownstreamUpdate::UpdateRepository(snapshot) => {
489 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
490 {
491 let update =
492 snapshot.build_update(old_snapshot, project_id);
493 *old_snapshot = snapshot;
494 for update in split_repository_update(update) {
495 client.send(update)?;
496 }
497 } else {
498 let update = snapshot.initial_update(project_id);
499 for update in split_repository_update(update) {
500 client.send(update)?;
501 }
502 snapshots.insert(snapshot.id, snapshot);
503 }
504 }
505 DownstreamUpdate::RemoveRepository(id) => {
506 client.send(proto::RemoveRepository {
507 project_id,
508 id: id.to_proto(),
509 })?;
510 }
511 }
512 }
513 anyhow::Ok(())
514 })
515 .await
516 .ok();
517 this.update(cx, |this, _| {
518 if let GitStoreState::Local {
519 downstream: downstream_client,
520 ..
521 } = &mut this.state
522 {
523 downstream_client.take();
524 } else {
525 unreachable!("unshared called on remote store");
526 }
527 })
528 }),
529 });
530 }
531 }
532 }
533
534 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
535 match &mut self.state {
536 GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } => {
540 downstream_client.take();
541 }
542 GitStoreState::Remote {
543 downstream: downstream_client,
544 ..
545 } => {
546 downstream_client.take();
547 }
548 }
549 self.shared_diffs.clear();
550 }
551
552 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
553 self.shared_diffs.remove(peer_id);
554 }
555
556 pub fn active_repository(&self) -> Option<Entity<Repository>> {
557 self.active_repo_id
558 .as_ref()
559 .map(|id| self.repositories[id].clone())
560 }
561
562 pub fn open_unstaged_diff(
563 &mut self,
564 buffer: Entity<Buffer>,
565 cx: &mut Context<Self>,
566 ) -> Task<Result<Entity<BufferDiff>>> {
567 let buffer_id = buffer.read(cx).remote_id();
568 if let Some(diff_state) = self.diffs.get(&buffer_id)
569 && let Some(unstaged_diff) = diff_state
570 .read(cx)
571 .unstaged_diff
572 .as_ref()
573 .and_then(|weak| weak.upgrade())
574 {
575 if let Some(task) =
576 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
577 {
578 return cx.background_executor().spawn(async move {
579 task.await;
580 Ok(unstaged_diff)
581 });
582 }
583 return Task::ready(Ok(unstaged_diff));
584 }
585
586 let Some((repo, repo_path)) =
587 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
588 else {
589 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
590 };
591
592 let task = self
593 .loading_diffs
594 .entry((buffer_id, DiffKind::Unstaged))
595 .or_insert_with(|| {
596 let staged_text = repo.update(cx, |repo, cx| {
597 repo.load_staged_text(buffer_id, repo_path, cx)
598 });
599 cx.spawn(async move |this, cx| {
600 Self::open_diff_internal(
601 this,
602 DiffKind::Unstaged,
603 staged_text.await.map(DiffBasesChange::SetIndex),
604 buffer,
605 cx,
606 )
607 .await
608 .map_err(Arc::new)
609 })
610 .shared()
611 })
612 .clone();
613
614 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
615 }
616
617 pub fn open_uncommitted_diff(
618 &mut self,
619 buffer: Entity<Buffer>,
620 cx: &mut Context<Self>,
621 ) -> Task<Result<Entity<BufferDiff>>> {
622 let buffer_id = buffer.read(cx).remote_id();
623
624 if let Some(diff_state) = self.diffs.get(&buffer_id)
625 && let Some(uncommitted_diff) = diff_state
626 .read(cx)
627 .uncommitted_diff
628 .as_ref()
629 .and_then(|weak| weak.upgrade())
630 {
631 if let Some(task) =
632 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
633 {
634 return cx.background_executor().spawn(async move {
635 task.await;
636 Ok(uncommitted_diff)
637 });
638 }
639 return Task::ready(Ok(uncommitted_diff));
640 }
641
642 let Some((repo, repo_path)) =
643 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
644 else {
645 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
646 };
647
648 let task = self
649 .loading_diffs
650 .entry((buffer_id, DiffKind::Uncommitted))
651 .or_insert_with(|| {
652 let changes = repo.update(cx, |repo, cx| {
653 repo.load_committed_text(buffer_id, repo_path, cx)
654 });
655
656 cx.spawn(async move |this, cx| {
657 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
658 .await
659 .map_err(Arc::new)
660 })
661 .shared()
662 })
663 .clone();
664
665 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
666 }
667
668 async fn open_diff_internal(
669 this: WeakEntity<Self>,
670 kind: DiffKind,
671 texts: Result<DiffBasesChange>,
672 buffer_entity: Entity<Buffer>,
673 cx: &mut AsyncApp,
674 ) -> Result<Entity<BufferDiff>> {
675 let diff_bases_change = match texts {
676 Err(e) => {
677 this.update(cx, |this, cx| {
678 let buffer = buffer_entity.read(cx);
679 let buffer_id = buffer.remote_id();
680 this.loading_diffs.remove(&(buffer_id, kind));
681 })?;
682 return Err(e);
683 }
684 Ok(change) => change,
685 };
686
687 this.update(cx, |this, cx| {
688 let buffer = buffer_entity.read(cx);
689 let buffer_id = buffer.remote_id();
690 let language = buffer.language().cloned();
691 let language_registry = buffer.language_registry();
692 let text_snapshot = buffer.text_snapshot();
693 this.loading_diffs.remove(&(buffer_id, kind));
694
695 let git_store = cx.weak_entity();
696 let diff_state = this
697 .diffs
698 .entry(buffer_id)
699 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
700
701 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
702
703 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
704 diff_state.update(cx, |diff_state, cx| {
705 diff_state.language = language;
706 diff_state.language_registry = language_registry;
707
708 match kind {
709 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
710 DiffKind::Uncommitted => {
711 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
712 diff
713 } else {
714 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
715 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
716 unstaged_diff
717 };
718
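                        // The unstaged diff is wired in as the uncommitted diff's secondary diff.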
719 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
720 diff_state.uncommitted_diff = Some(diff.downgrade())
721 }
722 }
723
724 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
725 let rx = diff_state.wait_for_recalculation();
726
727 anyhow::Ok(async move {
728 if let Some(rx) = rx {
729 rx.await;
730 }
731 Ok(diff)
732 })
733 })
734 })??
735 .await
736 }
737
738 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
739 let diff_state = self.diffs.get(&buffer_id)?;
740 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
741 }
742
743 pub fn get_uncommitted_diff(
744 &self,
745 buffer_id: BufferId,
746 cx: &App,
747 ) -> Option<Entity<BufferDiff>> {
748 let diff_state = self.diffs.get(&buffer_id)?;
749 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
750 }
751
752 pub fn open_conflict_set(
753 &mut self,
754 buffer: Entity<Buffer>,
755 cx: &mut Context<Self>,
756 ) -> Entity<ConflictSet> {
757 log::debug!("open conflict set");
758 let buffer_id = buffer.read(cx).remote_id();
759
760 if let Some(git_state) = self.diffs.get(&buffer_id)
761 && let Some(conflict_set) = git_state
762 .read(cx)
763 .conflict_set
764 .as_ref()
765 .and_then(|weak| weak.upgrade())
766 {
767 let conflict_set = conflict_set;
768 let buffer_snapshot = buffer.read(cx).text_snapshot();
769
770 git_state.update(cx, |state, cx| {
771 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
772 });
773
774 return conflict_set;
775 }
776
777 let is_unmerged = self
778 .repository_and_path_for_buffer_id(buffer_id, cx)
779 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
780 let git_store = cx.weak_entity();
781 let buffer_git_state = self
782 .diffs
783 .entry(buffer_id)
784 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
785 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
786
787 self._subscriptions
788 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
789 cx.emit(GitStoreEvent::ConflictsUpdated);
790 }));
791
792 buffer_git_state.update(cx, |state, cx| {
793 state.conflict_set = Some(conflict_set.downgrade());
794 let buffer_snapshot = buffer.read(cx).text_snapshot();
795 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
796 });
797
798 conflict_set
799 }
800
801 pub fn project_path_git_status(
802 &self,
803 project_path: &ProjectPath,
804 cx: &App,
805 ) -> Option<FileStatus> {
806 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
807 Some(repo.read(cx).status_for_path(&repo_path)?.status)
808 }
809
810 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
811 let mut work_directory_abs_paths = Vec::new();
812 let mut checkpoints = Vec::new();
813 for repository in self.repositories.values() {
814 repository.update(cx, |repository, _| {
815 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
816 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
817 });
818 }
819
820 cx.background_executor().spawn(async move {
821 let checkpoints = future::try_join_all(checkpoints).await?;
822 Ok(GitStoreCheckpoint {
823 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
824 .into_iter()
825 .zip(checkpoints)
826 .collect(),
827 })
828 })
829 }
830
831 pub fn restore_checkpoint(
832 &self,
833 checkpoint: GitStoreCheckpoint,
834 cx: &mut App,
835 ) -> Task<Result<()>> {
836 let repositories_by_work_dir_abs_path = self
837 .repositories
838 .values()
839 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
840 .collect::<HashMap<_, _>>();
841
842 let mut tasks = Vec::new();
843 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
844 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
845 let restore = repository.update(cx, |repository, _| {
846 repository.restore_checkpoint(checkpoint)
847 });
848 tasks.push(async move { restore.await? });
849 }
850 }
851 cx.background_spawn(async move {
852 future::try_join_all(tasks).await?;
853 Ok(())
854 })
855 }
856
857 /// Compares two checkpoints, returning true if they are equal.
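    ///
    /// A hedged usage sketch (assumes a `git_store: &GitStore` and `cx: &mut App`
    /// from surrounding code; this is not a real call site):
    ///
    /// ```ignore
    /// // `before` and `after` are `GitStoreCheckpoint`s previously produced by
    /// // `GitStore::checkpoint` and awaited elsewhere.
    /// let are_equal: Task<Result<bool>> = git_store.compare_checkpoints(before, after, cx);
    /// ```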
858 pub fn compare_checkpoints(
859 &self,
860 left: GitStoreCheckpoint,
861 mut right: GitStoreCheckpoint,
862 cx: &mut App,
863 ) -> Task<Result<bool>> {
864 let repositories_by_work_dir_abs_path = self
865 .repositories
866 .values()
867 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
868 .collect::<HashMap<_, _>>();
869
870 let mut tasks = Vec::new();
871 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
872 if let Some(right_checkpoint) = right
873 .checkpoints_by_work_dir_abs_path
874 .remove(&work_dir_abs_path)
875 {
876 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
877 {
878 let compare = repository.update(cx, |repository, _| {
879 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
880 });
881
882 tasks.push(async move { compare.await? });
883 }
884 } else {
885 return Task::ready(Ok(false));
886 }
887 }
888 cx.background_spawn(async move {
889 Ok(future::try_join_all(tasks)
890 .await?
891 .into_iter()
892 .all(|result| result))
893 })
894 }
895
896 /// Blames a buffer.
897 pub fn blame_buffer(
898 &self,
899 buffer: &Entity<Buffer>,
900 version: Option<clock::Global>,
901 cx: &mut App,
902 ) -> Task<Result<Option<Blame>>> {
903 let buffer = buffer.read(cx);
904 let Some((repo, repo_path)) =
905 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
906 else {
907 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
908 };
909 let content = match &version {
910 Some(version) => buffer.rope_for_version(version),
911 None => buffer.as_rope().clone(),
912 };
913 let version = version.unwrap_or(buffer.version());
914 let buffer_id = buffer.remote_id();
915
916 let rx = repo.update(cx, |repo, _| {
917 repo.send_job(None, move |state, _| async move {
918 match state {
919 RepositoryState::Local { backend, .. } => backend
920 .blame(repo_path.clone(), content)
921 .await
922 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
923 .map(Some),
924 RepositoryState::Remote { project_id, client } => {
925 let response = client
926 .request(proto::BlameBuffer {
927 project_id: project_id.to_proto(),
928 buffer_id: buffer_id.into(),
929 version: serialize_version(&version),
930 })
931 .await?;
932 Ok(deserialize_blame_buffer_response(response))
933 }
934 }
935 })
936 });
937
938 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
939 }
940
941 pub fn get_permalink_to_line(
942 &self,
943 buffer: &Entity<Buffer>,
944 selection: Range<u32>,
945 cx: &mut App,
946 ) -> Task<Result<url::Url>> {
947 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
948 return Task::ready(Err(anyhow!("buffer has no file")));
949 };
950
951 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
952 &(file.worktree.read(cx).id(), file.path.clone()).into(),
953 cx,
954 ) else {
955 // If we're not in a Git repo, check whether this is a Rust source
956 // file in the Cargo registry (presumably opened with go-to-definition
957 // from a normal Rust file). If so, we can put together a permalink
958 // using crate metadata.
959 if buffer
960 .read(cx)
961 .language()
962 .is_none_or(|lang| lang.name() != "Rust".into())
963 {
964 return Task::ready(Err(anyhow!("no permalink available")));
965 }
966 let file_path = file.worktree.read(cx).absolutize(&file.path);
967 return cx.spawn(async move |cx| {
968 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
969 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
970 .context("no permalink available")
971 });
972 };
973
974 let buffer_id = buffer.read(cx).remote_id();
975 let branch = repo.read(cx).branch.clone();
976 let remote = branch
977 .as_ref()
978 .and_then(|b| b.upstream.as_ref())
979 .and_then(|b| b.remote_name())
980 .unwrap_or("origin")
981 .to_string();
982
983 let rx = repo.update(cx, |repo, _| {
984 repo.send_job(None, move |state, cx| async move {
985 match state {
986 RepositoryState::Local { backend, .. } => {
987 let origin_url = backend
988 .remote_url(&remote)
989 .with_context(|| format!("remote \"{remote}\" not found"))?;
990
991 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
992
993 let provider_registry =
994 cx.update(GitHostingProviderRegistry::default_global)?;
995
996 let (provider, remote) =
997 parse_git_remote_url(provider_registry, &origin_url)
998 .context("parsing Git remote URL")?;
999
1000 Ok(provider.build_permalink(
1001 remote,
1002 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1003 ))
1004 }
1005 RepositoryState::Remote { project_id, client } => {
1006 let response = client
1007 .request(proto::GetPermalinkToLine {
1008 project_id: project_id.to_proto(),
1009 buffer_id: buffer_id.into(),
1010 selection: Some(proto::Range {
1011 start: selection.start as u64,
1012 end: selection.end as u64,
1013 }),
1014 })
1015 .await?;
1016
1017 url::Url::parse(&response.permalink).context("failed to parse permalink")
1018 }
1019 }
1020 })
1021 });
1022 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1023 }
1024
1025 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1026 match &self.state {
1027 GitStoreState::Local {
1028 downstream: downstream_client,
1029 ..
1030 } => downstream_client
1031 .as_ref()
1032 .map(|state| (state.client.clone(), state.project_id)),
1033 GitStoreState::Remote {
1034 downstream: downstream_client,
1035 ..
1036 } => downstream_client.clone(),
1037 }
1038 }
1039
1040 fn upstream_client(&self) -> Option<AnyProtoClient> {
1041 match &self.state {
1042 GitStoreState::Local { .. } => None,
1043 GitStoreState::Remote {
1044 upstream_client, ..
1045 } => Some(upstream_client.clone()),
1046 }
1047 }
1048
1049 fn on_worktree_store_event(
1050 &mut self,
1051 worktree_store: Entity<WorktreeStore>,
1052 event: &WorktreeStoreEvent,
1053 cx: &mut Context<Self>,
1054 ) {
1055 let GitStoreState::Local {
1056 project_environment,
1057 downstream,
1058 next_repository_id,
1059 fs,
1060 } = &self.state
1061 else {
1062 return;
1063 };
1064
1065 match event {
1066 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1067 if let Some(worktree) = self
1068 .worktree_store
1069 .read(cx)
1070 .worktree_for_id(*worktree_id, cx)
1071 {
1072 let paths_by_git_repo =
1073 self.process_updated_entries(&worktree, updated_entries, cx);
1074 let downstream = downstream
1075 .as_ref()
1076 .map(|downstream| downstream.updates_tx.clone());
1077 cx.spawn(async move |_, cx| {
1078 let paths_by_git_repo = paths_by_git_repo.await;
1079 for (repo, paths) in paths_by_git_repo {
1080 repo.update(cx, |repo, cx| {
1081 repo.paths_changed(paths, downstream.clone(), cx);
1082 })
1083 .ok();
1084 }
1085 })
1086 .detach();
1087 }
1088 }
1089 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1090 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1091 else {
1092 return;
1093 };
1094 if !worktree.read(cx).is_visible() {
1095 log::debug!(
1096 "not adding repositories for local worktree {:?} because it's not visible",
1097 worktree.read(cx).abs_path()
1098 );
1099 return;
1100 }
1101 self.update_repositories_from_worktree(
1102 project_environment.clone(),
1103 next_repository_id.clone(),
1104 downstream
1105 .as_ref()
1106 .map(|downstream| downstream.updates_tx.clone()),
1107 changed_repos.clone(),
1108 fs.clone(),
1109 cx,
1110 );
1111 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1112 }
1113 _ => {}
1114 }
1115 }

    fn on_repository_event(
1117 &mut self,
1118 repo: Entity<Repository>,
1119 event: &RepositoryEvent,
1120 cx: &mut Context<Self>,
1121 ) {
1122 let id = repo.read(cx).id;
1123 let repo_snapshot = repo.read(cx).snapshot.clone();
1124 for (buffer_id, diff) in self.diffs.iter() {
1125 if let Some((buffer_repo, repo_path)) =
1126 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1127 && buffer_repo == repo
1128 {
1129 diff.update(cx, |diff, cx| {
1130 if let Some(conflict_set) = &diff.conflict_set {
1131 let conflict_status_changed =
1132 conflict_set.update(cx, |conflict_set, cx| {
1133 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1134 conflict_set.set_has_conflict(has_conflict, cx)
1135 })?;
1136 if conflict_status_changed {
1137 let buffer_store = self.buffer_store.read(cx);
1138 if let Some(buffer) = buffer_store.get(*buffer_id) {
1139 let _ = diff
1140 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1141 }
1142 }
1143 }
1144 anyhow::Ok(())
1145 })
1146 .ok();
1147 }
1148 }
1149 cx.emit(GitStoreEvent::RepositoryUpdated(
1150 id,
1151 event.clone(),
1152 self.active_repo_id == Some(id),
1153 ))
1154 }
1155
1156 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1157 cx.emit(GitStoreEvent::JobsUpdated)
1158 }
1159
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1161 fn update_repositories_from_worktree(
1162 &mut self,
1163 project_environment: Entity<ProjectEnvironment>,
1164 next_repository_id: Arc<AtomicU64>,
1165 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1166 updated_git_repositories: UpdatedGitRepositoriesSet,
1167 fs: Arc<dyn Fs>,
1168 cx: &mut Context<Self>,
1169 ) {
1170 let mut removed_ids = Vec::new();
1171 for update in updated_git_repositories.iter() {
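            // Match this update against an existing repository by its old or new work
            // directory: update the work directory and rescan if it still exists, mark it
            // for removal if it's gone, and otherwise register a brand-new repository.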
1172 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1173 let existing_work_directory_abs_path =
1174 repo.read(cx).work_directory_abs_path.clone();
1175 Some(&existing_work_directory_abs_path)
1176 == update.old_work_directory_abs_path.as_ref()
1177 || Some(&existing_work_directory_abs_path)
1178 == update.new_work_directory_abs_path.as_ref()
1179 }) {
1180 if let Some(new_work_directory_abs_path) =
1181 update.new_work_directory_abs_path.clone()
1182 {
1183 existing.update(cx, |existing, cx| {
1184 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1185 existing.schedule_scan(updates_tx.clone(), cx);
1186 });
1187 } else {
1188 removed_ids.push(*id);
1189 }
1190 } else if let UpdatedGitRepository {
1191 new_work_directory_abs_path: Some(work_directory_abs_path),
1192 dot_git_abs_path: Some(dot_git_abs_path),
1193 repository_dir_abs_path: Some(repository_dir_abs_path),
1194 common_dir_abs_path: Some(common_dir_abs_path),
1195 ..
1196 } = update
1197 {
1198 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1199 let git_store = cx.weak_entity();
1200 let repo = cx.new(|cx| {
1201 let mut repo = Repository::local(
1202 id,
1203 work_directory_abs_path.clone(),
1204 dot_git_abs_path.clone(),
1205 repository_dir_abs_path.clone(),
1206 common_dir_abs_path.clone(),
1207 project_environment.downgrade(),
1208 fs.clone(),
1209 git_store,
1210 cx,
1211 );
1212 repo.schedule_scan(updates_tx.clone(), cx);
1213 repo
1214 });
1215 self._subscriptions
1216 .push(cx.subscribe(&repo, Self::on_repository_event));
1217 self._subscriptions
1218 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1219 self.repositories.insert(id, repo);
1220 cx.emit(GitStoreEvent::RepositoryAdded(id));
1221 self.active_repo_id.get_or_insert_with(|| {
1222 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1223 id
1224 });
1225 }
1226 }
1227
1228 for id in removed_ids {
1229 if self.active_repo_id == Some(id) {
1230 self.active_repo_id = None;
1231 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1232 }
1233 self.repositories.remove(&id);
1234 if let Some(updates_tx) = updates_tx.as_ref() {
1235 updates_tx
1236 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1237 .ok();
1238 }
1239 }
1240 }
1241
1242 fn on_buffer_store_event(
1243 &mut self,
1244 _: Entity<BufferStore>,
1245 event: &BufferStoreEvent,
1246 cx: &mut Context<Self>,
1247 ) {
1248 match event {
1249 BufferStoreEvent::BufferAdded(buffer) => {
1250 cx.subscribe(buffer, |this, buffer, event, cx| {
1251 if let BufferEvent::LanguageChanged = event {
1252 let buffer_id = buffer.read(cx).remote_id();
1253 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1254 diff_state.update(cx, |diff_state, cx| {
1255 diff_state.buffer_language_changed(buffer, cx);
1256 });
1257 }
1258 }
1259 })
1260 .detach();
1261 }
1262 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1263 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1264 diffs.remove(buffer_id);
1265 }
1266 }
1267 BufferStoreEvent::BufferDropped(buffer_id) => {
1268 self.diffs.remove(buffer_id);
1269 for diffs in self.shared_diffs.values_mut() {
1270 diffs.remove(buffer_id);
1271 }
1272 }
1273
1274 _ => {}
1275 }
1276 }
1277
1278 pub fn recalculate_buffer_diffs(
1279 &mut self,
1280 buffers: Vec<Entity<Buffer>>,
1281 cx: &mut Context<Self>,
1282 ) -> impl Future<Output = ()> + use<> {
1283 let mut futures = Vec::new();
1284 for buffer in buffers {
1285 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1286 let buffer = buffer.read(cx).text_snapshot();
1287 diff_state.update(cx, |diff_state, cx| {
1288 diff_state.recalculate_diffs(buffer.clone(), cx);
1289 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1290 });
1291 futures.push(diff_state.update(cx, |diff_state, cx| {
1292 diff_state
1293 .reparse_conflict_markers(buffer, cx)
1294 .map(|_| {})
1295 .boxed()
1296 }));
1297 }
1298 }
1299 async move {
1300 futures::future::join_all(futures).await;
1301 }
1302 }
1303
1304 fn on_buffer_diff_event(
1305 &mut self,
1306 diff: Entity<buffer_diff::BufferDiff>,
1307 event: &BufferDiffEvent,
1308 cx: &mut Context<Self>,
1309 ) {
1310 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1311 let buffer_id = diff.read(cx).buffer_id;
1312 if let Some(diff_state) = self.diffs.get(&buffer_id) {
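                // Count the staging operation before spawning the index write, so that index
                // text read back from disk can be recognized as stale until the write lands
                // (see the comment on `hunk_staging_operation_count`).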
1313 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1314 diff_state.hunk_staging_operation_count += 1;
1315 diff_state.hunk_staging_operation_count
1316 });
1317 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1318 let recv = repo.update(cx, |repo, cx| {
1319 log::debug!("hunks changed for {}", path.as_unix_str());
1320 repo.spawn_set_index_text_job(
1321 path,
1322 new_index_text.as_ref().map(|rope| rope.to_string()),
1323 Some(hunk_staging_operation_count),
1324 cx,
1325 )
1326 });
1327 let diff = diff.downgrade();
1328 cx.spawn(async move |this, cx| {
1329 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1330 diff.update(cx, |diff, cx| {
1331 diff.clear_pending_hunks(cx);
1332 })
1333 .ok();
1334 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1335 .ok();
1336 }
1337 })
1338 .detach();
1339 }
1340 }
1341 }
1342 }
1343
1344 fn local_worktree_git_repos_changed(
1345 &mut self,
1346 worktree: Entity<Worktree>,
1347 changed_repos: &UpdatedGitRepositoriesSet,
1348 cx: &mut Context<Self>,
1349 ) {
1350 log::debug!("local worktree repos changed");
1351 debug_assert!(worktree.read(cx).is_local());
1352
1353 for repository in self.repositories.values() {
1354 repository.update(cx, |repository, cx| {
1355 let repo_abs_path = &repository.work_directory_abs_path;
1356 if changed_repos.iter().any(|update| {
1357 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1358 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1359 }) {
1360 repository.reload_buffer_diff_bases(cx);
1361 }
1362 });
1363 }
1364 }
1365
1366 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1367 &self.repositories
1368 }
1369
1370 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1371 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1372 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1373 Some(status.status)
1374 }
1375
1376 pub fn repository_and_path_for_buffer_id(
1377 &self,
1378 buffer_id: BufferId,
1379 cx: &App,
1380 ) -> Option<(Entity<Repository>, RepoPath)> {
1381 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1382 let project_path = buffer.read(cx).project_path(cx)?;
1383 self.repository_and_path_for_project_path(&project_path, cx)
1384 }
1385
1386 pub fn repository_and_path_for_project_path(
1387 &self,
1388 path: &ProjectPath,
1389 cx: &App,
1390 ) -> Option<(Entity<Repository>, RepoPath)> {
1391 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1392 self.repositories
1393 .values()
1394 .filter_map(|repo| {
1395 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1396 Some((repo.clone(), repo_path))
1397 })
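            // Prefer the innermost repository: the one with the deepest work directory
            // containing the path.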
1398 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1399 }
1400
1401 pub fn git_init(
1402 &self,
1403 path: Arc<Path>,
1404 fallback_branch_name: String,
1405 cx: &App,
1406 ) -> Task<Result<()>> {
1407 match &self.state {
1408 GitStoreState::Local { fs, .. } => {
1409 let fs = fs.clone();
1410 cx.background_executor()
1411 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1412 }
1413 GitStoreState::Remote {
1414 upstream_client,
1415 upstream_project_id: project_id,
1416 ..
1417 } => {
1418 let client = upstream_client.clone();
1419 let project_id = *project_id;
1420 cx.background_executor().spawn(async move {
1421 client
1422 .request(proto::GitInit {
                            project_id,
1424 abs_path: path.to_string_lossy().into_owned(),
1425 fallback_branch_name,
1426 })
1427 .await?;
1428 Ok(())
1429 })
1430 }
1431 }
1432 }
1433
1434 pub fn git_clone(
1435 &self,
1436 repo: String,
1437 path: impl Into<Arc<std::path::Path>>,
1438 cx: &App,
1439 ) -> Task<Result<()>> {
1440 let path = path.into();
1441 match &self.state {
1442 GitStoreState::Local { fs, .. } => {
1443 let fs = fs.clone();
1444 cx.background_executor()
1445 .spawn(async move { fs.git_clone(&repo, &path).await })
1446 }
1447 GitStoreState::Remote {
1448 upstream_client,
1449 upstream_project_id,
1450 ..
1451 } => {
1452 if upstream_client.is_via_collab() {
1453 return Task::ready(Err(anyhow!(
1454 "Git Clone isn't supported for project guests"
1455 )));
1456 }
1457 let request = upstream_client.request(proto::GitClone {
1458 project_id: *upstream_project_id,
1459 abs_path: path.to_string_lossy().into_owned(),
1460 remote_repo: repo,
1461 });
1462
1463 cx.background_spawn(async move {
1464 let result = request.await?;
1465
1466 match result.success {
1467 true => Ok(()),
1468 false => Err(anyhow!("Git Clone failed")),
1469 }
1470 })
1471 }
1472 }
1473 }
1474
1475 async fn handle_update_repository(
1476 this: Entity<Self>,
1477 envelope: TypedEnvelope<proto::UpdateRepository>,
1478 mut cx: AsyncApp,
1479 ) -> Result<()> {
1480 this.update(&mut cx, |this, cx| {
1481 let path_style = this.worktree_store.read(cx).path_style();
1482 let mut update = envelope.payload;
1483
1484 let id = RepositoryId::from_proto(update.id);
1485 let client = this.upstream_client().context("no upstream client")?;
1486
1487 let mut is_new = false;
1488 let repo = this.repositories.entry(id).or_insert_with(|| {
1489 is_new = true;
1490 let git_store = cx.weak_entity();
1491 cx.new(|cx| {
1492 Repository::remote(
1493 id,
1494 Path::new(&update.abs_path).into(),
1495 path_style,
1496 ProjectId(update.project_id),
1497 client,
1498 git_store,
1499 cx,
1500 )
1501 })
1502 });
1503 if is_new {
1504 this._subscriptions
1505 .push(cx.subscribe(repo, Self::on_repository_event))
1506 }
1507
1508 repo.update(cx, {
1509 let update = update.clone();
1510 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1511 })?;
1512
1513 this.active_repo_id.get_or_insert_with(|| {
1514 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1515 id
1516 });
1517
1518 if let Some((client, project_id)) = this.downstream_client() {
1519 update.project_id = project_id.to_proto();
1520 client.send(update).log_err();
1521 }
1522 Ok(())
1523 })?
1524 }
1525
1526 async fn handle_remove_repository(
1527 this: Entity<Self>,
1528 envelope: TypedEnvelope<proto::RemoveRepository>,
1529 mut cx: AsyncApp,
1530 ) -> Result<()> {
1531 this.update(&mut cx, |this, cx| {
1532 let mut update = envelope.payload;
1533 let id = RepositoryId::from_proto(update.id);
1534 this.repositories.remove(&id);
1535 if let Some((client, project_id)) = this.downstream_client() {
1536 update.project_id = project_id.to_proto();
1537 client.send(update).log_err();
1538 }
1539 if this.active_repo_id == Some(id) {
1540 this.active_repo_id = None;
1541 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1542 }
1543 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1544 })
1545 }
1546
1547 async fn handle_git_init(
1548 this: Entity<Self>,
1549 envelope: TypedEnvelope<proto::GitInit>,
1550 cx: AsyncApp,
1551 ) -> Result<proto::Ack> {
1552 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1553 let name = envelope.payload.fallback_branch_name;
1554 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1555 .await?;
1556
1557 Ok(proto::Ack {})
1558 }
1559
1560 async fn handle_git_clone(
1561 this: Entity<Self>,
1562 envelope: TypedEnvelope<proto::GitClone>,
1563 cx: AsyncApp,
1564 ) -> Result<proto::GitCloneResponse> {
1565 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1566 let repo_name = envelope.payload.remote_repo;
1567 let result = cx
1568 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1569 .await;
1570
1571 Ok(proto::GitCloneResponse {
1572 success: result.is_ok(),
1573 })
1574 }
1575
1576 async fn handle_fetch(
1577 this: Entity<Self>,
1578 envelope: TypedEnvelope<proto::Fetch>,
1579 mut cx: AsyncApp,
1580 ) -> Result<proto::RemoteMessageResponse> {
1581 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1582 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1583 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1584 let askpass_id = envelope.payload.askpass_id;
1585
1586 let askpass = make_remote_delegate(
1587 this,
1588 envelope.payload.project_id,
1589 repository_id,
1590 askpass_id,
1591 &mut cx,
1592 );
1593
1594 let remote_output = repository_handle
1595 .update(&mut cx, |repository_handle, cx| {
1596 repository_handle.fetch(fetch_options, askpass, cx)
1597 })?
1598 .await??;
1599
1600 Ok(proto::RemoteMessageResponse {
1601 stdout: remote_output.stdout,
1602 stderr: remote_output.stderr,
1603 })
1604 }
1605
1606 async fn handle_push(
1607 this: Entity<Self>,
1608 envelope: TypedEnvelope<proto::Push>,
1609 mut cx: AsyncApp,
1610 ) -> Result<proto::RemoteMessageResponse> {
1611 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1612 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1613
1614 let askpass_id = envelope.payload.askpass_id;
1615 let askpass = make_remote_delegate(
1616 this,
1617 envelope.payload.project_id,
1618 repository_id,
1619 askpass_id,
1620 &mut cx,
1621 );
1622
1623 let options = envelope
1624 .payload
1625 .options
1626 .as_ref()
1627 .map(|_| match envelope.payload.options() {
1628 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1629 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1630 });
1631
1632 let branch_name = envelope.payload.branch_name.into();
1633 let remote_name = envelope.payload.remote_name.into();
1634
1635 let remote_output = repository_handle
1636 .update(&mut cx, |repository_handle, cx| {
1637 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1638 })?
1639 .await??;
1640 Ok(proto::RemoteMessageResponse {
1641 stdout: remote_output.stdout,
1642 stderr: remote_output.stderr,
1643 })
1644 }
1645
1646 async fn handle_pull(
1647 this: Entity<Self>,
1648 envelope: TypedEnvelope<proto::Pull>,
1649 mut cx: AsyncApp,
1650 ) -> Result<proto::RemoteMessageResponse> {
1651 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1652 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1653 let askpass_id = envelope.payload.askpass_id;
1654 let askpass = make_remote_delegate(
1655 this,
1656 envelope.payload.project_id,
1657 repository_id,
1658 askpass_id,
1659 &mut cx,
1660 );
1661
1662 let branch_name = envelope.payload.branch_name.into();
1663 let remote_name = envelope.payload.remote_name.into();
1664
1665 let remote_message = repository_handle
1666 .update(&mut cx, |repository_handle, cx| {
1667 repository_handle.pull(branch_name, remote_name, askpass, cx)
1668 })?
1669 .await??;
1670
1671 Ok(proto::RemoteMessageResponse {
1672 stdout: remote_message.stdout,
1673 stderr: remote_message.stderr,
1674 })
1675 }
1676
1677 async fn handle_stage(
1678 this: Entity<Self>,
1679 envelope: TypedEnvelope<proto::Stage>,
1680 mut cx: AsyncApp,
1681 ) -> Result<proto::Ack> {
1682 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1683 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1684
1685 let entries = envelope
1686 .payload
1687 .paths
1688 .into_iter()
1689 .map(|path| RepoPath::new(&path))
1690 .collect::<Result<Vec<_>>>()?;
1691
1692 repository_handle
1693 .update(&mut cx, |repository_handle, cx| {
1694 repository_handle.stage_entries(entries, cx)
1695 })?
1696 .await?;
1697 Ok(proto::Ack {})
1698 }
1699
1700 async fn handle_unstage(
1701 this: Entity<Self>,
1702 envelope: TypedEnvelope<proto::Unstage>,
1703 mut cx: AsyncApp,
1704 ) -> Result<proto::Ack> {
1705 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1706 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1707
1708 let entries = envelope
1709 .payload
1710 .paths
1711 .into_iter()
1712 .map(|path| RepoPath::new(&path))
1713 .collect::<Result<Vec<_>>>()?;
1714
1715 repository_handle
1716 .update(&mut cx, |repository_handle, cx| {
1717 repository_handle.unstage_entries(entries, cx)
1718 })?
1719 .await?;
1720
1721 Ok(proto::Ack {})
1722 }
1723
1724 async fn handle_stash(
1725 this: Entity<Self>,
1726 envelope: TypedEnvelope<proto::Stash>,
1727 mut cx: AsyncApp,
1728 ) -> Result<proto::Ack> {
1729 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1730 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1731
1732 let entries = envelope
1733 .payload
1734 .paths
1735 .into_iter()
1736 .map(|path| RepoPath::new(&path))
1737 .collect::<Result<Vec<_>>>()?;
1738
1739 repository_handle
1740 .update(&mut cx, |repository_handle, cx| {
1741 repository_handle.stash_entries(entries, cx)
1742 })?
1743 .await?;
1744
1745 Ok(proto::Ack {})
1746 }
1747
1748 async fn handle_stash_pop(
1749 this: Entity<Self>,
1750 envelope: TypedEnvelope<proto::StashPop>,
1751 mut cx: AsyncApp,
1752 ) -> Result<proto::Ack> {
1753 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1754 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1755 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1756
1757 repository_handle
1758 .update(&mut cx, |repository_handle, cx| {
1759 repository_handle.stash_pop(stash_index, cx)
1760 })?
1761 .await?;
1762
1763 Ok(proto::Ack {})
1764 }
1765
1766 async fn handle_stash_apply(
1767 this: Entity<Self>,
1768 envelope: TypedEnvelope<proto::StashApply>,
1769 mut cx: AsyncApp,
1770 ) -> Result<proto::Ack> {
1771 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1772 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1773 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1774
1775 repository_handle
1776 .update(&mut cx, |repository_handle, cx| {
1777 repository_handle.stash_apply(stash_index, cx)
1778 })?
1779 .await?;
1780
1781 Ok(proto::Ack {})
1782 }
1783
1784 async fn handle_stash_drop(
1785 this: Entity<Self>,
1786 envelope: TypedEnvelope<proto::StashDrop>,
1787 mut cx: AsyncApp,
1788 ) -> Result<proto::Ack> {
1789 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1790 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1791 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1792
1793 repository_handle
1794 .update(&mut cx, |repository_handle, cx| {
1795 repository_handle.stash_drop(stash_index, cx)
1796 })?
1797 .await??;
1798
1799 Ok(proto::Ack {})
1800 }
1801
1802 async fn handle_set_index_text(
1803 this: Entity<Self>,
1804 envelope: TypedEnvelope<proto::SetIndexText>,
1805 mut cx: AsyncApp,
1806 ) -> Result<proto::Ack> {
1807 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1808 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1809 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1810
1811 repository_handle
1812 .update(&mut cx, |repository_handle, cx| {
1813 repository_handle.spawn_set_index_text_job(
1814 repo_path,
1815 envelope.payload.text,
1816 None,
1817 cx,
1818 )
1819 })?
1820 .await??;
1821 Ok(proto::Ack {})
1822 }
1823
1824 async fn handle_commit(
1825 this: Entity<Self>,
1826 envelope: TypedEnvelope<proto::Commit>,
1827 mut cx: AsyncApp,
1828 ) -> Result<proto::Ack> {
1829 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1830 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1831
1832 let message = SharedString::from(envelope.payload.message);
1833 let name = envelope.payload.name.map(SharedString::from);
1834 let email = envelope.payload.email.map(SharedString::from);
1835 let options = envelope.payload.options.unwrap_or_default();
1836
1837 repository_handle
1838 .update(&mut cx, |repository_handle, cx| {
1839 repository_handle.commit(
1840 message,
1841 name.zip(email),
1842 CommitOptions {
1843 amend: options.amend,
1844 signoff: options.signoff,
1845 },
1846 cx,
1847 )
1848 })?
1849 .await??;
1850 Ok(proto::Ack {})
1851 }
1852
1853 async fn handle_get_remotes(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::GetRemotes>,
1856 mut cx: AsyncApp,
1857 ) -> Result<proto::GetRemotesResponse> {
1858 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1859 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1860
1861 let branch_name = envelope.payload.branch_name;
1862
1863 let remotes = repository_handle
1864 .update(&mut cx, |repository_handle, _| {
1865 repository_handle.get_remotes(branch_name)
1866 })?
1867 .await??;
1868
1869 Ok(proto::GetRemotesResponse {
1870 remotes: remotes
1871 .into_iter()
1872 .map(|remotes| proto::get_remotes_response::Remote {
1873 name: remotes.name.to_string(),
1874 })
1875 .collect::<Vec<_>>(),
1876 })
1877 }
1878
1879 async fn handle_get_branches(
1880 this: Entity<Self>,
1881 envelope: TypedEnvelope<proto::GitGetBranches>,
1882 mut cx: AsyncApp,
1883 ) -> Result<proto::GitBranchesResponse> {
1884 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1885 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1886
1887 let branches = repository_handle
1888 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1889 .await??;
1890
1891 Ok(proto::GitBranchesResponse {
1892 branches: branches
1893 .into_iter()
1894 .map(|branch| branch_to_proto(&branch))
1895 .collect::<Vec<_>>(),
1896 })
1897 }
1898 async fn handle_get_default_branch(
1899 this: Entity<Self>,
1900 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1901 mut cx: AsyncApp,
1902 ) -> Result<proto::GetDefaultBranchResponse> {
1903 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1904 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1905
1906 let branch = repository_handle
1907 .update(&mut cx, |repository_handle, _| {
1908 repository_handle.default_branch()
1909 })?
1910 .await??
1911 .map(Into::into);
1912
1913 Ok(proto::GetDefaultBranchResponse { branch })
1914 }
1915 async fn handle_create_branch(
1916 this: Entity<Self>,
1917 envelope: TypedEnvelope<proto::GitCreateBranch>,
1918 mut cx: AsyncApp,
1919 ) -> Result<proto::Ack> {
1920 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1921 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1922 let branch_name = envelope.payload.branch_name;
1923
1924 repository_handle
1925 .update(&mut cx, |repository_handle, _| {
1926 repository_handle.create_branch(branch_name)
1927 })?
1928 .await??;
1929
1930 Ok(proto::Ack {})
1931 }
1932
1933 async fn handle_change_branch(
1934 this: Entity<Self>,
1935 envelope: TypedEnvelope<proto::GitChangeBranch>,
1936 mut cx: AsyncApp,
1937 ) -> Result<proto::Ack> {
1938 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1939 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1940 let branch_name = envelope.payload.branch_name;
1941
1942 repository_handle
1943 .update(&mut cx, |repository_handle, _| {
1944 repository_handle.change_branch(branch_name)
1945 })?
1946 .await??;
1947
1948 Ok(proto::Ack {})
1949 }
1950
1951 async fn handle_rename_branch(
1952 this: Entity<Self>,
1953 envelope: TypedEnvelope<proto::GitRenameBranch>,
1954 mut cx: AsyncApp,
1955 ) -> Result<proto::Ack> {
1956 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1957 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1958 let branch = envelope.payload.branch;
1959 let new_name = envelope.payload.new_name;
1960
1961 repository_handle
1962 .update(&mut cx, |repository_handle, _| {
1963 repository_handle.rename_branch(branch, new_name)
1964 })?
1965 .await??;
1966
1967 Ok(proto::Ack {})
1968 }
1969
1970 async fn handle_show(
1971 this: Entity<Self>,
1972 envelope: TypedEnvelope<proto::GitShow>,
1973 mut cx: AsyncApp,
1974 ) -> Result<proto::GitCommitDetails> {
1975 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1976 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1977
1978 let commit = repository_handle
1979 .update(&mut cx, |repository_handle, _| {
1980 repository_handle.show(envelope.payload.commit)
1981 })?
1982 .await??;
1983 Ok(proto::GitCommitDetails {
1984 sha: commit.sha.into(),
1985 message: commit.message.into(),
1986 commit_timestamp: commit.commit_timestamp,
1987 author_email: commit.author_email.into(),
1988 author_name: commit.author_name.into(),
1989 })
1990 }
1991
1992 async fn handle_load_commit_diff(
1993 this: Entity<Self>,
1994 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1995 mut cx: AsyncApp,
1996 ) -> Result<proto::LoadCommitDiffResponse> {
1997 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1998 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1999
2000 let commit_diff = repository_handle
2001 .update(&mut cx, |repository_handle, _| {
2002 repository_handle.load_commit_diff(envelope.payload.commit)
2003 })?
2004 .await??;
2005 Ok(proto::LoadCommitDiffResponse {
2006 files: commit_diff
2007 .files
2008 .into_iter()
2009 .map(|file| proto::CommitFile {
2010 path: file.path.to_proto(),
2011 old_text: file.old_text,
2012 new_text: file.new_text,
2013 })
2014 .collect(),
2015 })
2016 }
2017
2018 async fn handle_reset(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::GitReset>,
2021 mut cx: AsyncApp,
2022 ) -> Result<proto::Ack> {
2023 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2024 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2025
2026 let mode = match envelope.payload.mode() {
2027 git_reset::ResetMode::Soft => ResetMode::Soft,
2028 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2029 };
2030
2031 repository_handle
2032 .update(&mut cx, |repository_handle, cx| {
2033 repository_handle.reset(envelope.payload.commit, mode, cx)
2034 })?
2035 .await??;
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_checkout_files(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let paths = envelope
2047 .payload
2048 .paths
2049 .iter()
2050 .map(|s| RepoPath::from_proto(s))
2051 .collect::<Result<Vec<_>>>()?;
2052
2053 repository_handle
2054 .update(&mut cx, |repository_handle, cx| {
2055 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2056 })?
2057 .await??;
2058 Ok(proto::Ack {})
2059 }
2060
2061 async fn handle_open_commit_message_buffer(
2062 this: Entity<Self>,
2063 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2064 mut cx: AsyncApp,
2065 ) -> Result<proto::OpenBufferResponse> {
2066 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2067 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2068 let buffer = repository
2069 .update(&mut cx, |repository, cx| {
2070 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2071 })?
2072 .await?;
2073
2074 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2075 this.update(&mut cx, |this, cx| {
2076 this.buffer_store.update(cx, |buffer_store, cx| {
2077 buffer_store
2078 .create_buffer_for_peer(
2079 &buffer,
2080 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2081 cx,
2082 )
2083 .detach_and_log_err(cx);
2084 })
2085 })?;
2086
2087 Ok(proto::OpenBufferResponse {
2088 buffer_id: buffer_id.to_proto(),
2089 })
2090 }
2091
2092 async fn handle_askpass(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::AskPassRequest>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::AskPassResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099
2100 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2101 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2102 debug_panic!("no askpass found");
2103 anyhow::bail!("no askpass found");
2104 };
2105
2106 let response = askpass
2107 .ask_password(envelope.payload.prompt)
2108 .await
2109 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2110
2111 delegates
2112 .lock()
2113 .insert(envelope.payload.askpass_id, askpass);
2114
2115 // Note: the askpass password is decrypted here and sent back over the wire unencrypted; `IKnowWhatIAmDoingAndIHaveReadTheDocs` acknowledges that tradeoff.
2116 Ok(proto::AskPassResponse {
2117 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2118 })
2119 }
2120
2121 async fn handle_check_for_pushed_commits(
2122 this: Entity<Self>,
2123 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2124 mut cx: AsyncApp,
2125 ) -> Result<proto::CheckForPushedCommitsResponse> {
2126 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2127 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2128
2129 let branches = repository_handle
2130 .update(&mut cx, |repository_handle, _| {
2131 repository_handle.check_for_pushed_commits()
2132 })?
2133 .await??;
2134 Ok(proto::CheckForPushedCommitsResponse {
2135 pushed_to: branches
2136 .into_iter()
2137 .map(|commit| commit.to_string())
2138 .collect(),
2139 })
2140 }
2141
2142 async fn handle_git_diff(
2143 this: Entity<Self>,
2144 envelope: TypedEnvelope<proto::GitDiff>,
2145 mut cx: AsyncApp,
2146 ) -> Result<proto::GitDiffResponse> {
2147 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2148 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2149 let diff_type = match envelope.payload.diff_type() {
2150 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2151 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2152 };
2153
2154 let mut diff = repository_handle
2155 .update(&mut cx, |repository_handle, cx| {
2156 repository_handle.diff(diff_type, cx)
2157 })?
2158 .await??;
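        // Bound the RPC payload: truncate the diff text to at most one million characters.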
2159 const ONE_MB: usize = 1_000_000;
2160 if diff.len() > ONE_MB {
2161 diff = diff.chars().take(ONE_MB).collect()
2162 }
2163
2164 Ok(proto::GitDiffResponse { diff })
2165 }
2166
2167 async fn handle_open_unstaged_diff(
2168 this: Entity<Self>,
2169 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::OpenUnstagedDiffResponse> {
2172 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2173 let diff = this
2174 .update(&mut cx, |this, cx| {
2175 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2176 Some(this.open_unstaged_diff(buffer, cx))
2177 })?
2178 .context("missing buffer")?
2179 .await?;
2180 this.update(&mut cx, |this, _| {
2181 let shared_diffs = this
2182 .shared_diffs
2183 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2184 .or_default();
2185 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2186 })?;
2187 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2188 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2189 }
2190
2191 async fn handle_open_uncommitted_diff(
2192 this: Entity<Self>,
2193 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2194 mut cx: AsyncApp,
2195 ) -> Result<proto::OpenUncommittedDiffResponse> {
2196 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2197 let diff = this
2198 .update(&mut cx, |this, cx| {
2199 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2200 Some(this.open_uncommitted_diff(buffer, cx))
2201 })?
2202 .context("missing buffer")?
2203 .await?;
2204 this.update(&mut cx, |this, _| {
2205 let shared_diffs = this
2206 .shared_diffs
2207 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2208 .or_default();
2209 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2210 })?;
2211 diff.read_with(&cx, |diff, cx| {
2212 use proto::open_uncommitted_diff_response::Mode;
2213
2214 let unstaged_diff = diff.secondary_diff();
2215 let index_snapshot = unstaged_diff.and_then(|diff| {
2216 let diff = diff.read(cx);
2217 diff.base_text_exists().then(|| diff.base_text())
2218 });
2219
2220 let mode;
2221 let staged_text;
2222 let committed_text;
2223 if diff.base_text_exists() {
2224 let committed_snapshot = diff.base_text();
2225 committed_text = Some(committed_snapshot.text());
2226 if let Some(index_text) = index_snapshot {
2227 if index_text.remote_id() == committed_snapshot.remote_id() {
2228 mode = Mode::IndexMatchesHead;
2229 staged_text = None;
2230 } else {
2231 mode = Mode::IndexAndHead;
2232 staged_text = Some(index_text.text());
2233 }
2234 } else {
2235 mode = Mode::IndexAndHead;
2236 staged_text = None;
2237 }
2238 } else {
2239 mode = Mode::IndexAndHead;
2240 committed_text = None;
2241 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2242 }
2243
2244 proto::OpenUncommittedDiffResponse {
2245 committed_text,
2246 staged_text,
2247 mode: mode.into(),
2248 }
2249 })
2250 }
2251
2252 async fn handle_update_diff_bases(
2253 this: Entity<Self>,
2254 request: TypedEnvelope<proto::UpdateDiffBases>,
2255 mut cx: AsyncApp,
2256 ) -> Result<()> {
2257 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2258 this.update(&mut cx, |this, cx| {
2259 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2260 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2261 {
2262 let buffer = buffer.read(cx).text_snapshot();
2263 diff_state.update(cx, |diff_state, cx| {
2264 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2265 })
2266 }
2267 })
2268 }
2269
2270 async fn handle_blame_buffer(
2271 this: Entity<Self>,
2272 envelope: TypedEnvelope<proto::BlameBuffer>,
2273 mut cx: AsyncApp,
2274 ) -> Result<proto::BlameBufferResponse> {
2275 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2276 let version = deserialize_version(&envelope.payload.version);
2277 let buffer = this.read_with(&cx, |this, cx| {
2278 this.buffer_store.read(cx).get_existing(buffer_id)
2279 })??;
2280 buffer
2281 .update(&mut cx, |buffer, _| {
2282 buffer.wait_for_version(version.clone())
2283 })?
2284 .await?;
2285 let blame = this
2286 .update(&mut cx, |this, cx| {
2287 this.blame_buffer(&buffer, Some(version), cx)
2288 })?
2289 .await?;
2290 Ok(serialize_blame_buffer_response(blame))
2291 }
2292
2293 async fn handle_get_permalink_to_line(
2294 this: Entity<Self>,
2295 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2296 mut cx: AsyncApp,
2297 ) -> Result<proto::GetPermalinkToLineResponse> {
2298 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2299 // let version = deserialize_version(&envelope.payload.version);
2300 let selection = {
2301 let proto_selection = envelope
2302 .payload
2303 .selection
2304 .context("no selection to get permalink for defined")?;
2305 proto_selection.start as u32..proto_selection.end as u32
2306 };
2307 let buffer = this.read_with(&cx, |this, cx| {
2308 this.buffer_store.read(cx).get_existing(buffer_id)
2309 })??;
2310 let permalink = this
2311 .update(&mut cx, |this, cx| {
2312 this.get_permalink_to_line(&buffer, selection, cx)
2313 })?
2314 .await?;
2315 Ok(proto::GetPermalinkToLineResponse {
2316 permalink: permalink.to_string(),
2317 })
2318 }
2319
2320 fn repository_for_request(
2321 this: &Entity<Self>,
2322 id: RepositoryId,
2323 cx: &mut AsyncApp,
2324 ) -> Result<Entity<Repository>> {
2325 this.read_with(cx, |this, _| {
2326 this.repositories
2327 .get(&id)
2328 .context("missing repository handle")
2329 .cloned()
2330 })?
2331 }
2332
2333 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2334 self.repositories
2335 .iter()
2336 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2337 .collect()
2338 }
2339
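    /// Groups updated worktree entries by the repository that contains them, mapping each
    /// absolute path onto a [`RepoPath`] relative to that repository's work directory.
    /// Matching runs on the background executor, and nested repositories take precedence,
    /// so every path is attributed to its innermost repository.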
2340 fn process_updated_entries(
2341 &self,
2342 worktree: &Entity<Worktree>,
2343 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2344 cx: &mut App,
2345 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2346 let path_style = worktree.read(cx).path_style();
2347 let mut repo_paths = self
2348 .repositories
2349 .values()
2350 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2351 .collect::<Vec<_>>();
2352 let mut entries: Vec<_> = updated_entries
2353 .iter()
2354 .map(|(path, _, _)| path.clone())
2355 .collect();
2356 entries.sort();
2357 let worktree = worktree.read(cx);
2358
2359 let entries = entries
2360 .into_iter()
2361 .map(|path| worktree.absolutize(&path))
2362 .collect::<Arc<[_]>>();
2363
2364 let executor = cx.background_executor().clone();
2365 cx.background_executor().spawn(async move {
2366 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2367 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2368 let mut tasks = FuturesOrdered::new();
2369 for (repo_path, repo) in repo_paths.into_iter().rev() {
2370 let entries = entries.clone();
2371 let task = executor.spawn(async move {
2372 // Find all repository paths that belong to this repo
2373 let mut ix = entries.partition_point(|path| path < &*repo_path);
2374 if ix == entries.len() {
2375 return None;
2376 };
2377
2378 let mut paths = Vec::new();
2379 // All paths prefixed by a given repo will constitute a contiguous range.
2380 while let Some(path) = entries.get(ix)
2381 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2382 &repo_path, path, path_style,
2383 )
2384 {
2385 paths.push((repo_path, ix));
2386 ix += 1;
2387 }
2388 if paths.is_empty() {
2389 None
2390 } else {
2391 Some((repo, paths))
2392 }
2393 });
2394 tasks.push_back(task);
2395 }
2396
2397 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2398 let mut path_was_used = vec![false; entries.len()];
2399 let tasks = tasks.collect::<Vec<_>>().await;
2400 // The repositories were visited deepest-first (we iterated them in reverse above), so
2401 // more-specific paths are claimed first and each path is assigned to its innermost repository.
2402 for t in tasks {
2403 let Some((repo, paths)) = t else {
2404 continue;
2405 };
2406 let entry = paths_by_git_repo.entry(repo).or_default();
2407 for (repo_path, ix) in paths {
2408 if path_was_used[ix] {
2409 continue;
2410 }
2411 path_was_used[ix] = true;
2412 entry.push(repo_path);
2413 }
2414 }
2415
2416 paths_by_git_repo
2417 })
2418 }
2419}
2420
2421impl BufferGitState {
2422 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2423 Self {
2424 unstaged_diff: Default::default(),
2425 uncommitted_diff: Default::default(),
2426 recalculate_diff_task: Default::default(),
2427 language: Default::default(),
2428 language_registry: Default::default(),
2429 recalculating_tx: postage::watch::channel_with(false).0,
2430 hunk_staging_operation_count: 0,
2431 hunk_staging_operation_count_as_of_write: 0,
2432 head_text: Default::default(),
2433 index_text: Default::default(),
2434 head_changed: Default::default(),
2435 index_changed: Default::default(),
2436 language_changed: Default::default(),
2437 conflict_updated_futures: Default::default(),
2438 conflict_set: Default::default(),
2439 reparse_conflict_markers_task: Default::default(),
2440 }
2441 }
2442
2443 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2444 self.language = buffer.read(cx).language().cloned();
2445 self.language_changed = true;
2446 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2447 }
2448
2449 fn reparse_conflict_markers(
2450 &mut self,
2451 buffer: text::BufferSnapshot,
2452 cx: &mut Context<Self>,
2453 ) -> oneshot::Receiver<()> {
2454 let (tx, rx) = oneshot::channel();
2455
2456 let Some(conflict_set) = self
2457 .conflict_set
2458 .as_ref()
2459 .and_then(|conflict_set| conflict_set.upgrade())
2460 else {
2461 return rx;
2462 };
2463
2464 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2465 if conflict_set.has_conflict {
2466 Some(conflict_set.snapshot())
2467 } else {
2468 None
2469 }
2470 });
2471
2472 if let Some(old_snapshot) = old_snapshot {
2473 self.conflict_updated_futures.push(tx);
2474 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2475 let (snapshot, changed_range) = cx
2476 .background_spawn(async move {
2477 let new_snapshot = ConflictSet::parse(&buffer);
2478 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2479 (new_snapshot, changed_range)
2480 })
2481 .await;
2482 this.update(cx, |this, cx| {
2483 if let Some(conflict_set) = &this.conflict_set {
2484 conflict_set
2485 .update(cx, |conflict_set, cx| {
2486 conflict_set.set_snapshot(snapshot, changed_range, cx);
2487 })
2488 .ok();
2489 }
2490 let futures = std::mem::take(&mut this.conflict_updated_futures);
2491 for tx in futures {
2492 tx.send(()).ok();
2493 }
2494 })
2495 }))
2496 }
2497
2498 rx
2499 }
2500
2501 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2502 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2503 }
2504
2505 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2506 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2507 }
2508
2509 fn handle_base_texts_updated(
2510 &mut self,
2511 buffer: text::BufferSnapshot,
2512 message: proto::UpdateDiffBases,
2513 cx: &mut Context<Self>,
2514 ) {
2515 use proto::update_diff_bases::Mode;
2516
2517 let Some(mode) = Mode::from_i32(message.mode) else {
2518 return;
2519 };
2520
2521 let diff_bases_change = match mode {
2522 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2523 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2524 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2525 Mode::IndexAndHead => DiffBasesChange::SetEach {
2526 index: message.staged_text,
2527 head: message.committed_text,
2528 },
2529 };
2530
2531 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2532 }
2533
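    /// Returns a future that resolves once the in-flight diff recalculation (if any) has
    /// finished, or `None` if no recalculation is currently running.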
2534 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2535 if *self.recalculating_tx.borrow() {
2536 let mut rx = self.recalculating_tx.subscribe();
2537 Some(async move {
2538 loop {
2539 let is_recalculating = rx.recv().await;
2540 if is_recalculating != Some(true) {
2541 break;
2542 }
2543 }
2544 })
2545 } else {
2546 None
2547 }
2548 }
2549
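    /// Applies a [`DiffBasesChange`] to the cached index/HEAD base texts, normalizing line
    /// endings, and then kicks off a diff recalculation against the given buffer snapshot.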
2550 fn diff_bases_changed(
2551 &mut self,
2552 buffer: text::BufferSnapshot,
2553 diff_bases_change: Option<DiffBasesChange>,
2554 cx: &mut Context<Self>,
2555 ) {
2556 match diff_bases_change {
2557 Some(DiffBasesChange::SetIndex(index)) => {
2558 self.index_text = index.map(|mut index| {
2559 text::LineEnding::normalize(&mut index);
2560 Arc::new(index)
2561 });
2562 self.index_changed = true;
2563 }
2564 Some(DiffBasesChange::SetHead(head)) => {
2565 self.head_text = head.map(|mut head| {
2566 text::LineEnding::normalize(&mut head);
2567 Arc::new(head)
2568 });
2569 self.head_changed = true;
2570 }
2571 Some(DiffBasesChange::SetBoth(text)) => {
2572 let text = text.map(|mut text| {
2573 text::LineEnding::normalize(&mut text);
2574 Arc::new(text)
2575 });
2576 self.head_text = text.clone();
2577 self.index_text = text;
2578 self.head_changed = true;
2579 self.index_changed = true;
2580 }
2581 Some(DiffBasesChange::SetEach { index, head }) => {
2582 self.index_text = index.map(|mut index| {
2583 text::LineEnding::normalize(&mut index);
2584 Arc::new(index)
2585 });
2586 self.index_changed = true;
2587 self.head_text = head.map(|mut head| {
2588 text::LineEnding::normalize(&mut head);
2589 Arc::new(head)
2590 });
2591 self.head_changed = true;
2592 }
2593 None => {}
2594 }
2595
2596 self.recalculate_diffs(buffer, cx)
2597 }
2598
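    /// Recomputes the unstaged and uncommitted diffs on a background task. When the index
    /// matches HEAD, the unstaged snapshot is reused for the uncommitted diff, and the whole
    /// attempt is abandoned if further hunk staging operations arrive while it is running.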
2599 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2600 *self.recalculating_tx.borrow_mut() = true;
2601
2602 let language = self.language.clone();
2603 let language_registry = self.language_registry.clone();
2604 let unstaged_diff = self.unstaged_diff();
2605 let uncommitted_diff = self.uncommitted_diff();
2606 let head = self.head_text.clone();
2607 let index = self.index_text.clone();
2608 let index_changed = self.index_changed;
2609 let head_changed = self.head_changed;
2610 let language_changed = self.language_changed;
2611 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2612 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2613 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2614 (None, None) => true,
2615 _ => false,
2616 };
2617 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2618 log::debug!(
2619 "start recalculating diffs for buffer {}",
2620 buffer.remote_id()
2621 );
2622
2623 let mut new_unstaged_diff = None;
2624 if let Some(unstaged_diff) = &unstaged_diff {
2625 new_unstaged_diff = Some(
2626 BufferDiff::update_diff(
2627 unstaged_diff.clone(),
2628 buffer.clone(),
2629 index,
2630 index_changed,
2631 language_changed,
2632 language.clone(),
2633 language_registry.clone(),
2634 cx,
2635 )
2636 .await?,
2637 );
2638 }
2639
2640 let mut new_uncommitted_diff = None;
2641 if let Some(uncommitted_diff) = &uncommitted_diff {
2642 new_uncommitted_diff = if index_matches_head {
2643 new_unstaged_diff.clone()
2644 } else {
2645 Some(
2646 BufferDiff::update_diff(
2647 uncommitted_diff.clone(),
2648 buffer.clone(),
2649 head,
2650 head_changed,
2651 language_changed,
2652 language.clone(),
2653 language_registry.clone(),
2654 cx,
2655 )
2656 .await?,
2657 )
2658 }
2659 }
2660
2661 let cancel = this.update(cx, |this, _| {
2662 // This checks whether all pending stage/unstage operations
2663 // have quiesced (i.e. both the corresponding write and the
2664 // read of that write have completed). If not, then we cancel
2665 // this recalculation attempt to avoid invalidating pending
2666 // state too quickly; another recalculation will come along
2667 // later and clear the pending state once the state of the index has settled.
2668 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2669 *this.recalculating_tx.borrow_mut() = false;
2670 true
2671 } else {
2672 false
2673 }
2674 })?;
2675 if cancel {
2676 log::debug!(
2677 concat!(
2678 "aborting diff recalculation for buffer {} ",
2679 "due to subsequent hunk operations",
2680 ),
2681 buffer.remote_id()
2682 );
2683 return Ok(());
2684 }
2685
2686 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2687 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2688 {
2689 unstaged_diff.update(cx, |diff, cx| {
2690 if language_changed {
2691 diff.language_changed(cx);
2692 }
2693 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2694 })?
2695 } else {
2696 None
2697 };
2698
2699 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2700 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2701 {
2702 uncommitted_diff.update(cx, |diff, cx| {
2703 if language_changed {
2704 diff.language_changed(cx);
2705 }
2706 diff.set_snapshot_with_secondary(
2707 new_uncommitted_diff,
2708 &buffer,
2709 unstaged_changed_range,
2710 true,
2711 cx,
2712 );
2713 })?;
2714 }
2715
2716 log::debug!(
2717 "finished recalculating diffs for buffer {}",
2718 buffer.remote_id()
2719 );
2720
2721 if let Some(this) = this.upgrade() {
2722 this.update(cx, |this, _| {
2723 this.index_changed = false;
2724 this.head_changed = false;
2725 this.language_changed = false;
2726 *this.recalculating_tx.borrow_mut() = false;
2727 })?;
2728 }
2729
2730 Ok(())
2731 }));
2732 }
2733}
2734
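/// Builds an [`AskPassDelegate`] that forwards each credential prompt over RPC as a
/// `proto::AskPassRequest`, hands the encrypted response back to the waiting git
/// operation, and zeroizes the plaintext reply afterwards.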
2735fn make_remote_delegate(
2736 this: Entity<GitStore>,
2737 project_id: u64,
2738 repository_id: RepositoryId,
2739 askpass_id: u64,
2740 cx: &mut AsyncApp,
2741) -> AskPassDelegate {
2742 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2743 this.update(cx, |this, cx| {
2744 let Some((client, _)) = this.downstream_client() else {
2745 return;
2746 };
2747 let response = client.request(proto::AskPassRequest {
2748 project_id,
2749 repository_id: repository_id.to_proto(),
2750 askpass_id,
2751 prompt,
2752 });
2753 cx.spawn(async move |_, _| {
2754 let mut response = response.await?.response;
2755 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2756 .ok();
2757 response.zeroize();
2758 anyhow::Ok(())
2759 })
2760 .detach_and_log_err(cx);
2761 })
2762 .log_err();
2763 })
2764}
2765
2766impl RepositoryId {
2767 pub fn to_proto(self) -> u64 {
2768 self.0
2769 }
2770
2771 pub fn from_proto(id: u64) -> Self {
2772 RepositoryId(id)
2773 }
2774}
2775
2776impl RepositorySnapshot {
2777 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2778 Self {
2779 id,
2780 statuses_by_path: Default::default(),
2781 work_directory_abs_path,
2782 branch: None,
2783 head_commit: None,
2784 scan_id: 0,
2785 merge: Default::default(),
2786 remote_origin_url: None,
2787 remote_upstream_url: None,
2788 stash_entries: Default::default(),
2789 path_style,
2790 }
2791 }
2792
2793 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2794 proto::UpdateRepository {
2795 branch_summary: self.branch.as_ref().map(branch_to_proto),
2796 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2797 updated_statuses: self
2798 .statuses_by_path
2799 .iter()
2800 .map(|entry| entry.to_proto())
2801 .collect(),
2802 removed_statuses: Default::default(),
2803 current_merge_conflicts: self
2804 .merge
2805 .conflicted_paths
2806 .iter()
2807 .map(|repo_path| repo_path.to_proto())
2808 .collect(),
2809 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2810 project_id,
2811 id: self.id.to_proto(),
2812 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2813 entry_ids: vec![self.id.to_proto()],
2814 scan_id: self.scan_id,
2815 is_last_update: true,
2816 stash_entries: self
2817 .stash_entries
2818 .entries
2819 .iter()
2820 .map(stash_to_proto)
2821 .collect(),
2822 }
2823 }
2824
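    /// Computes an incremental `UpdateRepository` message relative to `old` by walking both
    /// sorted status lists in lockstep, emitting only entries that were added, changed, or
    /// removed.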
2825 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2826 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2827 let mut removed_statuses: Vec<String> = Vec::new();
2828
2829 let mut new_statuses = self.statuses_by_path.iter().peekable();
2830 let mut old_statuses = old.statuses_by_path.iter().peekable();
2831
2832 let mut current_new_entry = new_statuses.next();
2833 let mut current_old_entry = old_statuses.next();
2834 loop {
2835 match (current_new_entry, current_old_entry) {
2836 (Some(new_entry), Some(old_entry)) => {
2837 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2838 Ordering::Less => {
2839 updated_statuses.push(new_entry.to_proto());
2840 current_new_entry = new_statuses.next();
2841 }
2842 Ordering::Equal => {
2843 if new_entry.status != old_entry.status {
2844 updated_statuses.push(new_entry.to_proto());
2845 }
2846 current_old_entry = old_statuses.next();
2847 current_new_entry = new_statuses.next();
2848 }
2849 Ordering::Greater => {
2850 removed_statuses.push(old_entry.repo_path.to_proto());
2851 current_old_entry = old_statuses.next();
2852 }
2853 }
2854 }
2855 (None, Some(old_entry)) => {
2856 removed_statuses.push(old_entry.repo_path.to_proto());
2857 current_old_entry = old_statuses.next();
2858 }
2859 (Some(new_entry), None) => {
2860 updated_statuses.push(new_entry.to_proto());
2861 current_new_entry = new_statuses.next();
2862 }
2863 (None, None) => break,
2864 }
2865 }
2866
2867 proto::UpdateRepository {
2868 branch_summary: self.branch.as_ref().map(branch_to_proto),
2869 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2870 updated_statuses,
2871 removed_statuses,
2872 current_merge_conflicts: self
2873 .merge
2874 .conflicted_paths
2875 .iter()
2876 .map(|path| path.to_proto())
2877 .collect(),
2878 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2879 project_id,
2880 id: self.id.to_proto(),
2881 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2882 entry_ids: vec![],
2883 scan_id: self.scan_id,
2884 is_last_update: true,
2885 stash_entries: self
2886 .stash_entries
2887 .entries
2888 .iter()
2889 .map(stash_to_proto)
2890 .collect(),
2891 }
2892 }
2893
2894 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2895 self.statuses_by_path.iter().cloned()
2896 }
2897
2898 pub fn status_summary(&self) -> GitSummary {
2899 self.statuses_by_path.summary().item_summary
2900 }
2901
2902 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2903 self.statuses_by_path
2904 .get(&PathKey(path.0.clone()), ())
2905 .cloned()
2906 }
2907
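    /// Converts an absolute path into a path relative to this repository's work directory,
    /// returning `None` for paths that lie outside of it.
    ///
    /// Illustrative sketch (the paths and the `snapshot` binding are hypothetical):
    ///
    /// ```ignore
    /// // With a work directory of "/projects/zed":
    /// let repo_path = snapshot.abs_path_to_repo_path(Path::new("/projects/zed/crates/git/src/lib.rs"));
    /// // => Some repo path "crates/git/src/lib.rs"; "/tmp/other.rs" would yield None.
    /// ```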
2908 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2909 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2910 }
2911
2912 #[inline]
2913 fn abs_path_to_repo_path_inner(
2914 work_directory_abs_path: &Path,
2915 abs_path: &Path,
2916 path_style: PathStyle,
2917 ) -> Option<RepoPath> {
2918 abs_path
2919 .strip_prefix(&work_directory_abs_path)
2920 .ok()
2921 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2922 }
2923
2924 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2925 self.merge.conflicted_paths.contains(repo_path)
2926 }
2927
2928 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2929 let had_conflict_on_last_merge_head_change =
2930 self.merge.conflicted_paths.contains(repo_path);
2931 let has_conflict_currently = self
2932 .status_for_path(repo_path)
2933 .is_some_and(|entry| entry.status.is_conflicted());
2934 had_conflict_on_last_merge_head_change || has_conflict_currently
2935 }
2936
2937 /// This is the name that will be displayed in the repository selector for this repository.
2938 pub fn display_name(&self) -> SharedString {
2939 self.work_directory_abs_path
2940 .file_name()
2941 .unwrap_or_default()
2942 .to_string_lossy()
2943 .to_string()
2944 .into()
2945 }
2946}
2947
2948pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2949 proto::StashEntry {
2950 oid: entry.oid.as_bytes().to_vec(),
2951 message: entry.message.clone(),
2952 branch: entry.branch.clone(),
2953 index: entry.index as u64,
2954 timestamp: entry.timestamp,
2955 }
2956}
2957
2958pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2959 Ok(StashEntry {
2960 oid: Oid::from_bytes(&entry.oid)?,
2961 message: entry.message.clone(),
2962 index: entry.index as usize,
2963 branch: entry.branch.clone(),
2964 timestamp: entry.timestamp,
2965 })
2966}
2967
2968impl MergeDetails {
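    /// Loads the repository's merge state (merge message plus MERGE_HEAD, CHERRY_PICK_HEAD,
    /// REBASE_HEAD, REVERT_HEAD, and APPLY_HEAD) and recomputes the conflicted paths when the
    /// merge heads have changed. Returns the new details along with a flag indicating whether
    /// the merge heads should be treated as changed; the update is deferred while a merge is
    /// still underway but no conflicts are visible yet.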
2969 async fn load(
2970 backend: &Arc<dyn GitRepository>,
2971 status: &SumTree<StatusEntry>,
2972 prev_snapshot: &RepositorySnapshot,
2973 ) -> Result<(MergeDetails, bool)> {
2974 log::debug!("load merge details");
2975 let message = backend.merge_message().await;
2976 let heads = backend
2977 .revparse_batch(vec![
2978 "MERGE_HEAD".into(),
2979 "CHERRY_PICK_HEAD".into(),
2980 "REBASE_HEAD".into(),
2981 "REVERT_HEAD".into(),
2982 "APPLY_HEAD".into(),
2983 ])
2984 .await
2985 .log_err()
2986 .unwrap_or_default()
2987 .into_iter()
2988 .map(|opt| opt.map(SharedString::from))
2989 .collect::<Vec<_>>();
2990 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2991 let conflicted_paths = if merge_heads_changed {
2992 let current_conflicted_paths = TreeSet::from_ordered_entries(
2993 status
2994 .iter()
2995 .filter(|entry| entry.status.is_conflicted())
2996 .map(|entry| entry.repo_path.clone()),
2997 );
2998
2999 // A scan can run while a lengthy merge is in progress that will eventually
3000 // produce conflicts, but before those conflicts are reported by `git status`.
3001 // Since, for the moment, we only care about the merge heads state for the
3002 // purpose of tracking conflicts, don't update that state until we actually
3003 // see some conflicts.
3004 if heads.iter().any(Option::is_some)
3005 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3006 && current_conflicted_paths.is_empty()
3007 {
3008 log::debug!("not updating merge heads because no conflicts found");
3009 return Ok((
3010 MergeDetails {
3011 message: message.map(SharedString::from),
3012 ..prev_snapshot.merge.clone()
3013 },
3014 false,
3015 ));
3016 }
3017
3018 current_conflicted_paths
3019 } else {
3020 prev_snapshot.merge.conflicted_paths.clone()
3021 };
3022 let details = MergeDetails {
3023 conflicted_paths,
3024 message: message.map(SharedString::from),
3025 heads,
3026 };
3027 Ok((details, merge_heads_changed))
3028 }
3029}
3030
3031impl Repository {
3032 pub fn snapshot(&self) -> RepositorySnapshot {
3033 self.snapshot.clone()
3034 }
3035
3036 fn local(
3037 id: RepositoryId,
3038 work_directory_abs_path: Arc<Path>,
3039 dot_git_abs_path: Arc<Path>,
3040 repository_dir_abs_path: Arc<Path>,
3041 common_dir_abs_path: Arc<Path>,
3042 project_environment: WeakEntity<ProjectEnvironment>,
3043 fs: Arc<dyn Fs>,
3044 git_store: WeakEntity<GitStore>,
3045 cx: &mut Context<Self>,
3046 ) -> Self {
3047 let snapshot =
3048 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3049 Repository {
3050 this: cx.weak_entity(),
3051 git_store,
3052 snapshot,
3053 commit_message_buffer: None,
3054 askpass_delegates: Default::default(),
3055 paths_needing_status_update: Default::default(),
3056 latest_askpass_id: 0,
3057 job_sender: Repository::spawn_local_git_worker(
3058 work_directory_abs_path,
3059 dot_git_abs_path,
3060 repository_dir_abs_path,
3061 common_dir_abs_path,
3062 project_environment,
3063 fs,
3064 cx,
3065 ),
3066 job_id: 0,
3067 active_jobs: Default::default(),
3068 }
3069 }
3070
3071 fn remote(
3072 id: RepositoryId,
3073 work_directory_abs_path: Arc<Path>,
3074 path_style: PathStyle,
3075 project_id: ProjectId,
3076 client: AnyProtoClient,
3077 git_store: WeakEntity<GitStore>,
3078 cx: &mut Context<Self>,
3079 ) -> Self {
3080 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3081 Self {
3082 this: cx.weak_entity(),
3083 snapshot,
3084 commit_message_buffer: None,
3085 git_store,
3086 paths_needing_status_update: Default::default(),
3087 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3088 askpass_delegates: Default::default(),
3089 latest_askpass_id: 0,
3090 active_jobs: Default::default(),
3091 job_id: 0,
3092 }
3093 }
3094
3095 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3096 self.git_store.upgrade()
3097 }
3098
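    /// Re-reads the index and HEAD texts for every open buffer that belongs to this
    /// repository, applies any changes to that buffer's diff state, and forwards the new
    /// base texts to the downstream client when one is connected.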
3099 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3100 let this = cx.weak_entity();
3101 let git_store = self.git_store.clone();
3102 let _ = self.send_keyed_job(
3103 Some(GitJobKey::ReloadBufferDiffBases),
3104 None,
3105 |state, mut cx| async move {
3106 let RepositoryState::Local { backend, .. } = state else {
3107 log::error!("tried to recompute diffs for a non-local repository");
3108 return Ok(());
3109 };
3110
3111 let Some(this) = this.upgrade() else {
3112 return Ok(());
3113 };
3114
3115 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3116 git_store.update(cx, |git_store, cx| {
3117 git_store
3118 .diffs
3119 .iter()
3120 .filter_map(|(buffer_id, diff_state)| {
3121 let buffer_store = git_store.buffer_store.read(cx);
3122 let buffer = buffer_store.get(*buffer_id)?;
3123 let file = File::from_dyn(buffer.read(cx).file())?;
3124 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3125 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3126 log::debug!(
3127 "start reload diff bases for repo path {}",
3128 repo_path.as_unix_str()
3129 );
3130 diff_state.update(cx, |diff_state, _| {
3131 let has_unstaged_diff = diff_state
3132 .unstaged_diff
3133 .as_ref()
3134 .is_some_and(|diff| diff.is_upgradable());
3135 let has_uncommitted_diff = diff_state
3136 .uncommitted_diff
3137 .as_ref()
3138 .is_some_and(|set| set.is_upgradable());
3139
3140 Some((
3141 buffer,
3142 repo_path,
3143 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3144 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3145 ))
3146 })
3147 })
3148 .collect::<Vec<_>>()
3149 })
3150 })??;
3151
3152 let buffer_diff_base_changes = cx
3153 .background_spawn(async move {
3154 let mut changes = Vec::new();
3155 for (buffer, repo_path, current_index_text, current_head_text) in
3156 &repo_diff_state_updates
3157 {
3158 let index_text = if current_index_text.is_some() {
3159 backend.load_index_text(repo_path.clone()).await
3160 } else {
3161 None
3162 };
3163 let head_text = if current_head_text.is_some() {
3164 backend.load_committed_text(repo_path.clone()).await
3165 } else {
3166 None
3167 };
3168
3169 let change =
3170 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3171 (Some(current_index), Some(current_head)) => {
3172 let index_changed =
3173 index_text.as_ref() != current_index.as_deref();
3174 let head_changed =
3175 head_text.as_ref() != current_head.as_deref();
3176 if index_changed && head_changed {
3177 if index_text == head_text {
3178 Some(DiffBasesChange::SetBoth(head_text))
3179 } else {
3180 Some(DiffBasesChange::SetEach {
3181 index: index_text,
3182 head: head_text,
3183 })
3184 }
3185 } else if index_changed {
3186 Some(DiffBasesChange::SetIndex(index_text))
3187 } else if head_changed {
3188 Some(DiffBasesChange::SetHead(head_text))
3189 } else {
3190 None
3191 }
3192 }
3193 (Some(current_index), None) => {
3194 let index_changed =
3195 index_text.as_ref() != current_index.as_deref();
3196 index_changed
3197 .then_some(DiffBasesChange::SetIndex(index_text))
3198 }
3199 (None, Some(current_head)) => {
3200 let head_changed =
3201 head_text.as_ref() != current_head.as_deref();
3202 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3203 }
3204 (None, None) => None,
3205 };
3206
3207 changes.push((buffer.clone(), change))
3208 }
3209 changes
3210 })
3211 .await;
3212
3213 git_store.update(&mut cx, |git_store, cx| {
3214 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3215 let buffer_snapshot = buffer.read(cx).text_snapshot();
3216 let buffer_id = buffer_snapshot.remote_id();
3217 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3218 continue;
3219 };
3220
3221 let downstream_client = git_store.downstream_client();
3222 diff_state.update(cx, |diff_state, cx| {
3223 use proto::update_diff_bases::Mode;
3224
3225 if let Some((diff_bases_change, (client, project_id))) =
3226 diff_bases_change.clone().zip(downstream_client)
3227 {
3228 let (staged_text, committed_text, mode) = match diff_bases_change {
3229 DiffBasesChange::SetIndex(index) => {
3230 (index, None, Mode::IndexOnly)
3231 }
3232 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3233 DiffBasesChange::SetEach { index, head } => {
3234 (index, head, Mode::IndexAndHead)
3235 }
3236 DiffBasesChange::SetBoth(text) => {
3237 (None, text, Mode::IndexMatchesHead)
3238 }
3239 };
3240 client
3241 .send(proto::UpdateDiffBases {
3242 project_id: project_id.to_proto(),
3243 buffer_id: buffer_id.to_proto(),
3244 staged_text,
3245 committed_text,
3246 mode: mode as i32,
3247 })
3248 .log_err();
3249 }
3250
3251 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3252 });
3253 }
3254 })
3255 },
3256 );
3257 }
3258
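    /// Enqueues a job on this repository's background git worker. The optional `status`
    /// message is recorded in `active_jobs` while the job runs so it can be surfaced as an
    /// in-progress indicator.
    ///
    /// Illustrative sketch (the closure body is hypothetical):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git fetch".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => { /* call into `backend` directly */ }
    ///         RepositoryState::Remote { .. } => { /* forward an RPC request to the host */ }
    ///     }
    /// });
    /// ```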
3259 pub fn send_job<F, Fut, R>(
3260 &mut self,
3261 status: Option<SharedString>,
3262 job: F,
3263 ) -> oneshot::Receiver<R>
3264 where
3265 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3266 Fut: Future<Output = R> + 'static,
3267 R: Send + 'static,
3268 {
3269 self.send_keyed_job(None, status, job)
3270 }
3271
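    /// Like [`Self::send_job`], but the job can be tagged with a [`GitJobKey`], which lets
    /// the git worker recognize equivalent queued jobs (for example,
    /// [`GitJobKey::ReloadBufferDiffBases`]) rather than treating each request as unrelated.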
3272 fn send_keyed_job<F, Fut, R>(
3273 &mut self,
3274 key: Option<GitJobKey>,
3275 status: Option<SharedString>,
3276 job: F,
3277 ) -> oneshot::Receiver<R>
3278 where
3279 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3280 Fut: Future<Output = R> + 'static,
3281 R: Send + 'static,
3282 {
3283 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3284 let job_id = post_inc(&mut self.job_id);
3285 let this = self.this.clone();
3286 self.job_sender
3287 .unbounded_send(GitJob {
3288 key,
3289 job: Box::new(move |state, cx: &mut AsyncApp| {
3290 let job = job(state, cx.clone());
3291 cx.spawn(async move |cx| {
3292 if let Some(s) = status.clone() {
3293 this.update(cx, |this, cx| {
3294 this.active_jobs.insert(
3295 job_id,
3296 JobInfo {
3297 start: Instant::now(),
3298 message: s.clone(),
3299 },
3300 );
3301
3302 cx.notify();
3303 })
3304 .ok();
3305 }
3306 let result = job.await;
3307
3308 this.update(cx, |this, cx| {
3309 this.active_jobs.remove(&job_id);
3310 cx.notify();
3311 })
3312 .ok();
3313
3314 result_tx.send(result).ok();
3315 })
3316 }),
3317 })
3318 .ok();
3319 result_rx
3320 }
3321
3322 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3323 let Some(git_store) = self.git_store.upgrade() else {
3324 return;
3325 };
3326 let entity = cx.entity();
3327 git_store.update(cx, |git_store, cx| {
3328 let Some((&id, _)) = git_store
3329 .repositories
3330 .iter()
3331 .find(|(_, handle)| *handle == &entity)
3332 else {
3333 return;
3334 };
3335 git_store.active_repo_id = Some(id);
3336 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3337 });
3338 }
3339
3340 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3341 self.snapshot.status()
3342 }
3343
3344 pub fn cached_stash(&self) -> GitStash {
3345 self.snapshot.stash_entries.clone()
3346 }
3347
3348 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3349 let git_store = self.git_store.upgrade()?;
3350 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3351 let abs_path = self
3352 .snapshot
3353 .work_directory_abs_path
3354 .join(path.as_std_path());
3355 let abs_path = SanitizedPath::new(&abs_path);
3356 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3357 Some(ProjectPath {
3358 worktree_id: worktree.read(cx).id(),
3359 path: relative_path,
3360 })
3361 }
3362
3363 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3364 let git_store = self.git_store.upgrade()?;
3365 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3366 let abs_path = worktree_store.absolutize(path, cx)?;
3367 self.snapshot.abs_path_to_repo_path(&abs_path)
3368 }
3369
3370 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3371 other
3372 .read(cx)
3373 .snapshot
3374 .work_directory_abs_path
3375 .starts_with(&self.snapshot.work_directory_abs_path)
3376 }
3377
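    /// Returns the repository's commit-message buffer, creating it on first use and caching
    /// it on the repository. Locally a fresh buffer is created; for remote repositories the
    /// buffer is opened on the host and replicated here, with the "Git Commit" language
    /// applied when a language registry is available.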
3378 pub fn open_commit_buffer(
3379 &mut self,
3380 languages: Option<Arc<LanguageRegistry>>,
3381 buffer_store: Entity<BufferStore>,
3382 cx: &mut Context<Self>,
3383 ) -> Task<Result<Entity<Buffer>>> {
3384 let id = self.id;
3385 if let Some(buffer) = self.commit_message_buffer.clone() {
3386 return Task::ready(Ok(buffer));
3387 }
3388 let this = cx.weak_entity();
3389
3390 let rx = self.send_job(None, move |state, mut cx| async move {
3391 let Some(this) = this.upgrade() else {
3392 bail!("git store was dropped");
3393 };
3394 match state {
3395 RepositoryState::Local { .. } => {
3396 this.update(&mut cx, |_, cx| {
3397 Self::open_local_commit_buffer(languages, buffer_store, cx)
3398 })?
3399 .await
3400 }
3401 RepositoryState::Remote { project_id, client } => {
3402 let request = client.request(proto::OpenCommitMessageBuffer {
3403 project_id: project_id.0,
3404 repository_id: id.to_proto(),
3405 });
3406 let response = request.await.context("requesting to open commit buffer")?;
3407 let buffer_id = BufferId::new(response.buffer_id)?;
3408 let buffer = buffer_store
3409 .update(&mut cx, |buffer_store, cx| {
3410 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3411 })?
3412 .await?;
3413 if let Some(language_registry) = languages {
3414 let git_commit_language =
3415 language_registry.language_for_name("Git Commit").await?;
3416 buffer.update(&mut cx, |buffer, cx| {
3417 buffer.set_language(Some(git_commit_language), cx);
3418 })?;
3419 }
3420 this.update(&mut cx, |this, _| {
3421 this.commit_message_buffer = Some(buffer.clone());
3422 })?;
3423 Ok(buffer)
3424 }
3425 }
3426 });
3427
3428 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3429 }
3430
3431 fn open_local_commit_buffer(
3432 language_registry: Option<Arc<LanguageRegistry>>,
3433 buffer_store: Entity<BufferStore>,
3434 cx: &mut Context<Self>,
3435 ) -> Task<Result<Entity<Buffer>>> {
3436 cx.spawn(async move |repository, cx| {
3437 let buffer = buffer_store
3438 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3439 .await?;
3440
3441 if let Some(language_registry) = language_registry {
3442 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3443 buffer.update(cx, |buffer, cx| {
3444 buffer.set_language(Some(git_commit_language), cx);
3445 })?;
3446 }
3447
3448 repository.update(cx, |repository, _| {
3449 repository.commit_message_buffer = Some(buffer.clone());
3450 })?;
3451 Ok(buffer)
3452 })
3453 }
3454
3455 pub fn checkout_files(
3456 &mut self,
3457 commit: &str,
3458 paths: Vec<RepoPath>,
3459 _cx: &mut App,
3460 ) -> oneshot::Receiver<Result<()>> {
3461 let commit = commit.to_string();
3462 let id = self.id;
3463
3464 self.send_job(
3465 Some(format!("git checkout {}", commit).into()),
3466 move |git_repo, _| async move {
3467 match git_repo {
3468 RepositoryState::Local {
3469 backend,
3470 environment,
3471 ..
3472 } => {
3473 backend
3474 .checkout_files(commit, paths, environment.clone())
3475 .await
3476 }
3477 RepositoryState::Remote { project_id, client } => {
3478 client
3479 .request(proto::GitCheckoutFiles {
3480 project_id: project_id.0,
3481 repository_id: id.to_proto(),
3482 commit,
3483 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3484 })
3485 .await?;
3486
3487 Ok(())
3488 }
3489 }
3490 },
3491 )
3492 }
3493
3494 pub fn reset(
3495 &mut self,
3496 commit: String,
3497 reset_mode: ResetMode,
3498 _cx: &mut App,
3499 ) -> oneshot::Receiver<Result<()>> {
3500 let id = self.id;
3501
3502 self.send_job(None, move |git_repo, _| async move {
3503 match git_repo {
3504 RepositoryState::Local {
3505 backend,
3506 environment,
3507 ..
3508 } => backend.reset(commit, reset_mode, environment).await,
3509 RepositoryState::Remote { project_id, client } => {
3510 client
3511 .request(proto::GitReset {
3512 project_id: project_id.0,
3513 repository_id: id.to_proto(),
3514 commit,
3515 mode: match reset_mode {
3516 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3517 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3518 },
3519 })
3520 .await?;
3521
3522 Ok(())
3523 }
3524 }
3525 })
3526 }
3527
3528 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3529 let id = self.id;
3530 self.send_job(None, move |git_repo, _cx| async move {
3531 match git_repo {
3532 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3533 RepositoryState::Remote { project_id, client } => {
3534 let resp = client
3535 .request(proto::GitShow {
3536 project_id: project_id.0,
3537 repository_id: id.to_proto(),
3538 commit,
3539 })
3540 .await?;
3541
3542 Ok(CommitDetails {
3543 sha: resp.sha.into(),
3544 message: resp.message.into(),
3545 commit_timestamp: resp.commit_timestamp,
3546 author_email: resp.author_email.into(),
3547 author_name: resp.author_name.into(),
3548 })
3549 }
3550 }
3551 })
3552 }
3553
3554 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3555 let id = self.id;
3556 self.send_job(None, move |git_repo, cx| async move {
3557 match git_repo {
3558 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3559 RepositoryState::Remote {
3560 client, project_id, ..
3561 } => {
3562 let response = client
3563 .request(proto::LoadCommitDiff {
3564 project_id: project_id.0,
3565 repository_id: id.to_proto(),
3566 commit,
3567 })
3568 .await?;
3569 Ok(CommitDiff {
3570 files: response
3571 .files
3572 .into_iter()
3573 .map(|file| {
3574 Ok(CommitFile {
3575 path: RepoPath::from_proto(&file.path)?,
3576 old_text: file.old_text,
3577 new_text: file.new_text,
3578 })
3579 })
3580 .collect::<Result<Vec<_>>>()?,
3581 })
3582 }
3583 }
3584 })
3585 }
3586
3587 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3588 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3589 }
3590
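    /// Stages the given paths. Any open buffers for those paths that still exist on disk are
    /// saved first, so the content that gets staged matches what is shown in the editor.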
3591 pub fn stage_entries(
3592 &self,
3593 entries: Vec<RepoPath>,
3594 cx: &mut Context<Self>,
3595 ) -> Task<anyhow::Result<()>> {
3596 if entries.is_empty() {
3597 return Task::ready(Ok(()));
3598 }
3599 let id = self.id;
3600
3601 let mut save_futures = Vec::new();
3602 if let Some(buffer_store) = self.buffer_store(cx) {
3603 buffer_store.update(cx, |buffer_store, cx| {
3604 for path in &entries {
3605 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3606 continue;
3607 };
3608 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3609 && buffer
3610 .read(cx)
3611 .file()
3612 .is_some_and(|file| file.disk_state().exists())
3613 {
3614 save_futures.push(buffer_store.save_buffer(buffer, cx));
3615 }
3616 }
3617 })
3618 }
3619
3620 cx.spawn(async move |this, cx| {
3621 for save_future in save_futures {
3622 save_future.await?;
3623 }
3624
3625 this.update(cx, |this, _| {
3626 this.send_job(None, move |git_repo, _cx| async move {
3627 match git_repo {
3628 RepositoryState::Local {
3629 backend,
3630 environment,
3631 ..
3632 } => backend.stage_paths(entries, environment.clone()).await,
3633 RepositoryState::Remote { project_id, client } => {
3634 client
3635 .request(proto::Stage {
3636 project_id: project_id.0,
3637 repository_id: id.to_proto(),
3638 paths: entries
3639 .into_iter()
3640 .map(|repo_path| repo_path.to_proto())
3641 .collect(),
3642 })
3643 .await
3644 .context("sending stage request")?;
3645
3646 Ok(())
3647 }
3648 }
3649 })
3650 })?
3651 .await??;
3652
3653 Ok(())
3654 })
3655 }
3656
3657 pub fn unstage_entries(
3658 &self,
3659 entries: Vec<RepoPath>,
3660 cx: &mut Context<Self>,
3661 ) -> Task<anyhow::Result<()>> {
3662 if entries.is_empty() {
3663 return Task::ready(Ok(()));
3664 }
3665 let id = self.id;
3666
3667 let mut save_futures = Vec::new();
3668 if let Some(buffer_store) = self.buffer_store(cx) {
3669 buffer_store.update(cx, |buffer_store, cx| {
3670 for path in &entries {
3671 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3672 continue;
3673 };
3674 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3675 && buffer
3676 .read(cx)
3677 .file()
3678 .is_some_and(|file| file.disk_state().exists())
3679 {
3680 save_futures.push(buffer_store.save_buffer(buffer, cx));
3681 }
3682 }
3683 })
3684 }
3685
3686 cx.spawn(async move |this, cx| {
3687 for save_future in save_futures {
3688 save_future.await?;
3689 }
3690
3691 this.update(cx, |this, _| {
3692 this.send_job(None, move |git_repo, _cx| async move {
3693 match git_repo {
3694 RepositoryState::Local {
3695 backend,
3696 environment,
3697 ..
3698 } => backend.unstage_paths(entries, environment).await,
3699 RepositoryState::Remote { project_id, client } => {
3700 client
3701 .request(proto::Unstage {
3702 project_id: project_id.0,
3703 repository_id: id.to_proto(),
3704 paths: entries
3705 .into_iter()
3706 .map(|repo_path| repo_path.to_proto())
3707 .collect(),
3708 })
3709 .await
3710 .context("sending unstage request")?;
3711
3712 Ok(())
3713 }
3714 }
3715 })
3716 })?
3717 .await??;
3718
3719 Ok(())
3720 })
3721 }
3722
3723 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3724 let to_stage = self
3725 .cached_status()
3726 .filter(|entry| !entry.status.staging().is_fully_staged())
3727 .map(|entry| entry.repo_path)
3728 .collect();
3729 self.stage_entries(to_stage, cx)
3730 }
3731
3732 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3733 let to_unstage = self
3734 .cached_status()
3735 .filter(|entry| entry.status.staging().has_staged())
3736 .map(|entry| entry.repo_path)
3737 .collect();
3738 self.unstage_entries(to_unstage, cx)
3739 }
3740
3741 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3742 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3743
3744 self.stash_entries(to_stash, cx)
3745 }
3746
3747 pub fn stash_entries(
3748 &mut self,
3749 entries: Vec<RepoPath>,
3750 cx: &mut Context<Self>,
3751 ) -> Task<anyhow::Result<()>> {
3752 let id = self.id;
3753
3754 cx.spawn(async move |this, cx| {
3755 this.update(cx, |this, _| {
3756 this.send_job(None, move |git_repo, _cx| async move {
3757 match git_repo {
3758 RepositoryState::Local {
3759 backend,
3760 environment,
3761 ..
3762 } => backend.stash_paths(entries, environment).await,
3763 RepositoryState::Remote { project_id, client } => {
3764 client
3765 .request(proto::Stash {
3766 project_id: project_id.0,
3767 repository_id: id.to_proto(),
3768 paths: entries
3769 .into_iter()
3770 .map(|repo_path| repo_path.to_proto())
3771 .collect(),
3772 })
3773 .await
3774 .context("sending stash request")?;
3775 Ok(())
3776 }
3777 }
3778 })
3779 })?
3780 .await??;
3781 Ok(())
3782 })
3783 }
3784
3785 pub fn stash_pop(
3786 &mut self,
3787 index: Option<usize>,
3788 cx: &mut Context<Self>,
3789 ) -> Task<anyhow::Result<()>> {
3790 let id = self.id;
3791 cx.spawn(async move |this, cx| {
3792 this.update(cx, |this, _| {
3793 this.send_job(None, move |git_repo, _cx| async move {
3794 match git_repo {
3795 RepositoryState::Local {
3796 backend,
3797 environment,
3798 ..
3799 } => backend.stash_pop(index, environment).await,
3800 RepositoryState::Remote { project_id, client } => {
3801 client
3802 .request(proto::StashPop {
3803 project_id: project_id.0,
3804 repository_id: id.to_proto(),
3805 stash_index: index.map(|i| i as u64),
3806 })
3807 .await
3808 .context("sending stash pop request")?;
3809 Ok(())
3810 }
3811 }
3812 })
3813 })?
3814 .await??;
3815 Ok(())
3816 })
3817 }
3818
3819 pub fn stash_apply(
3820 &mut self,
3821 index: Option<usize>,
3822 cx: &mut Context<Self>,
3823 ) -> Task<anyhow::Result<()>> {
3824 let id = self.id;
3825 cx.spawn(async move |this, cx| {
3826 this.update(cx, |this, _| {
3827 this.send_job(None, move |git_repo, _cx| async move {
3828 match git_repo {
3829 RepositoryState::Local {
3830 backend,
3831 environment,
3832 ..
3833 } => backend.stash_apply(index, environment).await,
3834 RepositoryState::Remote { project_id, client } => {
3835 client
3836 .request(proto::StashApply {
3837 project_id: project_id.0,
3838 repository_id: id.to_proto(),
3839 stash_index: index.map(|i| i as u64),
3840 })
3841 .await
3842 .context("sending stash apply request")?;
3843 Ok(())
3844 }
3845 }
3846 })
3847 })?
3848 .await??;
3849 Ok(())
3850 })
3851 }
3852
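/// Drops the stash entry at `index` (the backend's default entry when `None`),
/// then reloads the stash list, emits an update event, and forwards the new
/// snapshot to any downstream clients.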
3853 pub fn stash_drop(
3854 &mut self,
3855 index: Option<usize>,
3856 cx: &mut Context<Self>,
3857 ) -> oneshot::Receiver<anyhow::Result<()>> {
3858 let id = self.id;
3859 let updates_tx = self
3860 .git_store()
3861 .and_then(|git_store| match &git_store.read(cx).state {
3862 GitStoreState::Local { downstream, .. } => downstream
3863 .as_ref()
3864 .map(|downstream| downstream.updates_tx.clone()),
3865 _ => None,
3866 });
3867 let this = cx.weak_entity();
3868 self.send_job(None, move |git_repo, mut cx| async move {
3869 match git_repo {
3870 RepositoryState::Local {
3871 backend,
3872 environment,
3873 ..
3874 } => {
3875 let result = backend.stash_drop(index, environment).await;
3876 if result.is_ok()
3877 && let Ok(stash_entries) = backend.stash_entries().await
3878 {
3879 let snapshot = this.update(&mut cx, |this, cx| {
3880 this.snapshot.stash_entries = stash_entries;
3881 let snapshot = this.snapshot.clone();
3882 cx.emit(RepositoryEvent::Updated {
3883 full_scan: false,
3884 new_instance: false,
3885 });
3886 snapshot
3887 })?;
3888 if let Some(updates_tx) = updates_tx {
3889 updates_tx
3890 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3891 .ok();
3892 }
3893 }
3894
3895 result
3896 }
3897 RepositoryState::Remote { project_id, client } => {
3898 client
3899 .request(proto::StashDrop {
3900 project_id: project_id.0,
3901 repository_id: id.to_proto(),
3902 stash_index: index.map(|i| i as u64),
3903 })
3904 .await
3905 .context("sending stash pop request")?;
3906 Ok(())
3907 }
3908 }
3909 })
3910 }
3911
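/// Commits the staged changes with the given message, optionally overriding the
/// author name and email. Remote repositories forward a `proto::Commit` request
/// to the host instead of invoking the backend directly.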
3912 pub fn commit(
3913 &mut self,
3914 message: SharedString,
3915 name_and_email: Option<(SharedString, SharedString)>,
3916 options: CommitOptions,
3917 _cx: &mut App,
3918 ) -> oneshot::Receiver<Result<()>> {
3919 let id = self.id;
3920
3921 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3922 match git_repo {
3923 RepositoryState::Local {
3924 backend,
3925 environment,
3926 ..
3927 } => {
3928 backend
3929 .commit(message, name_and_email, options, environment)
3930 .await
3931 }
3932 RepositoryState::Remote { project_id, client } => {
3933 let (name, email) = name_and_email.unzip();
3934 client
3935 .request(proto::Commit {
3936 project_id: project_id.0,
3937 repository_id: id.to_proto(),
3938 message: String::from(message),
3939 name: name.map(String::from),
3940 email: email.map(String::from),
3941 options: Some(proto::commit::CommitOptions {
3942 amend: options.amend,
3943 signoff: options.signoff,
3944 }),
3945 })
3946 .await
3947 .context("sending commit request")?;
3948
3949 Ok(())
3950 }
3951 }
3952 })
3953 }
3954
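/// Runs `git fetch` with the given options. For remote repositories the askpass
/// delegate is registered under a fresh `askpass_id` for the duration of the
/// request and removed again once the request completes.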
3955 pub fn fetch(
3956 &mut self,
3957 fetch_options: FetchOptions,
3958 askpass: AskPassDelegate,
3959 _cx: &mut App,
3960 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3961 let askpass_delegates = self.askpass_delegates.clone();
3962 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3963 let id = self.id;
3964
3965 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3966 match git_repo {
3967 RepositoryState::Local {
3968 backend,
3969 environment,
3970 ..
3971 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3972 RepositoryState::Remote { project_id, client } => {
3973 askpass_delegates.lock().insert(askpass_id, askpass);
3974 let _defer = util::defer(|| {
3975 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3976 debug_assert!(askpass_delegate.is_some());
3977 });
3978
3979 let response = client
3980 .request(proto::Fetch {
3981 project_id: project_id.0,
3982 repository_id: id.to_proto(),
3983 askpass_id,
3984 remote: fetch_options.to_proto(),
3985 })
3986 .await
3987 .context("sending fetch request")?;
3988
3989 Ok(RemoteCommandOutput {
3990 stdout: response.stdout,
3991 stderr: response.stderr,
3992 })
3993 }
3994 }
3995 })
3996 }
3997
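/// Pushes `branch` to `remote`. On success for local repositories, the head
/// branch is re-read and the updated snapshot is broadcast downstream.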
3998 pub fn push(
3999 &mut self,
4000 branch: SharedString,
4001 remote: SharedString,
4002 options: Option<PushOptions>,
4003 askpass: AskPassDelegate,
4004 cx: &mut Context<Self>,
4005 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4006 let askpass_delegates = self.askpass_delegates.clone();
4007 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4008 let id = self.id;
4009
4010 let args = options
4011 .map(|option| match option {
4012 PushOptions::SetUpstream => " --set-upstream",
4013 PushOptions::Force => " --force-with-lease",
4014 })
4015 .unwrap_or("");
4016
4017 let updates_tx = self
4018 .git_store()
4019 .and_then(|git_store| match &git_store.read(cx).state {
4020 GitStoreState::Local { downstream, .. } => downstream
4021 .as_ref()
4022 .map(|downstream| downstream.updates_tx.clone()),
4023 _ => None,
4024 });
4025
4026 let this = cx.weak_entity();
4027 self.send_job(
4028 Some(format!("git push {} {} {}", args, branch, remote).into()),
4029 move |git_repo, mut cx| async move {
4030 match git_repo {
4031 RepositoryState::Local {
4032 backend,
4033 environment,
4034 ..
4035 } => {
4036 let result = backend
4037 .push(
4038 branch.to_string(),
4039 remote.to_string(),
4040 options,
4041 askpass,
4042 environment.clone(),
4043 cx.clone(),
4044 )
4045 .await;
4046 if result.is_ok() {
4047 let branches = backend.branches().await?;
4048 let branch = branches.into_iter().find(|branch| branch.is_head);
4049 log::info!("head branch after scan is {branch:?}");
4050 let snapshot = this.update(&mut cx, |this, cx| {
4051 this.snapshot.branch = branch;
4052 let snapshot = this.snapshot.clone();
4053 cx.emit(RepositoryEvent::Updated {
4054 full_scan: false,
4055 new_instance: false,
4056 });
4057 snapshot
4058 })?;
4059 if let Some(updates_tx) = updates_tx {
4060 updates_tx
4061 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4062 .ok();
4063 }
4064 }
4065 result
4066 }
4067 RepositoryState::Remote { project_id, client } => {
4068 askpass_delegates.lock().insert(askpass_id, askpass);
4069 let _defer = util::defer(|| {
4070 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4071 debug_assert!(askpass_delegate.is_some());
4072 });
4073 let response = client
4074 .request(proto::Push {
4075 project_id: project_id.0,
4076 repository_id: id.to_proto(),
4077 askpass_id,
4078 branch_name: branch.to_string(),
4079 remote_name: remote.to_string(),
4080 options: options.map(|options| match options {
4081 PushOptions::Force => proto::push::PushOptions::Force,
4082 PushOptions::SetUpstream => {
4083 proto::push::PushOptions::SetUpstream
4084 }
4085 }
4086 as i32),
4087 })
4088 .await
4089 .context("sending push request")?;
4090
4091 Ok(RemoteCommandOutput {
4092 stdout: response.stdout,
4093 stderr: response.stderr,
4094 })
4095 }
4096 }
4097 },
4098 )
4099 }
4100
4101 pub fn pull(
4102 &mut self,
4103 branch: SharedString,
4104 remote: SharedString,
4105 askpass: AskPassDelegate,
4106 _cx: &mut App,
4107 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4108 let askpass_delegates = self.askpass_delegates.clone();
4109 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4110 let id = self.id;
4111
4112 self.send_job(
4113 Some(format!("git pull {} {}", remote, branch).into()),
4114 move |git_repo, cx| async move {
4115 match git_repo {
4116 RepositoryState::Local {
4117 backend,
4118 environment,
4119 ..
4120 } => {
4121 backend
4122 .pull(
4123 branch.to_string(),
4124 remote.to_string(),
4125 askpass,
4126 environment.clone(),
4127 cx,
4128 )
4129 .await
4130 }
4131 RepositoryState::Remote { project_id, client } => {
4132 askpass_delegates.lock().insert(askpass_id, askpass);
4133 let _defer = util::defer(|| {
4134 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4135 debug_assert!(askpass_delegate.is_some());
4136 });
4137 let response = client
4138 .request(proto::Pull {
4139 project_id: project_id.0,
4140 repository_id: id.to_proto(),
4141 askpass_id,
4142 branch_name: branch.to_string(),
4143 remote_name: remote.to_string(),
4144 })
4145 .await
4146 .context("sending pull request")?;
4147
4148 Ok(RemoteCommandOutput {
4149 stdout: response.stdout,
4150 stderr: response.stderr,
4151 })
4152 }
4153 }
4154 },
4155 )
4156 }
4157
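/// Enqueues a keyed job that writes `content` as the index text for `path`
/// (locally or via `proto::SetIndexText`) and, when a hunk staging operation
/// count is provided, records it on the corresponding buffer's diff state.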
4158 fn spawn_set_index_text_job(
4159 &mut self,
4160 path: RepoPath,
4161 content: Option<String>,
4162 hunk_staging_operation_count: Option<usize>,
4163 cx: &mut Context<Self>,
4164 ) -> oneshot::Receiver<anyhow::Result<()>> {
4165 let id = self.id;
4166 let this = cx.weak_entity();
4167 let git_store = self.git_store.clone();
4168 self.send_keyed_job(
4169 Some(GitJobKey::WriteIndex(path.clone())),
4170 None,
4171 move |git_repo, mut cx| async move {
4172 log::debug!(
4173 "start updating index text for buffer {}",
4174 path.as_unix_str()
4175 );
4176 match git_repo {
4177 RepositoryState::Local {
4178 backend,
4179 environment,
4180 ..
4181 } => {
4182 backend
4183 .set_index_text(path.clone(), content, environment.clone())
4184 .await?;
4185 }
4186 RepositoryState::Remote { project_id, client } => {
4187 client
4188 .request(proto::SetIndexText {
4189 project_id: project_id.0,
4190 repository_id: id.to_proto(),
4191 path: path.to_proto(),
4192 text: content,
4193 })
4194 .await?;
4195 }
4196 }
4197 log::debug!(
4198 "finish updating index text for buffer {}",
4199 path.as_unix_str()
4200 );
4201
4202 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4203 let project_path = this
4204 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4205 .ok()
4206 .flatten();
4207 git_store.update(&mut cx, |git_store, cx| {
4208 let buffer_id = git_store
4209 .buffer_store
4210 .read(cx)
4211 .get_by_path(&project_path?)?
4212 .read(cx)
4213 .remote_id();
4214 let diff_state = git_store.diffs.get(&buffer_id)?;
4215 diff_state.update(cx, |diff_state, _| {
4216 diff_state.hunk_staging_operation_count_as_of_write =
4217 hunk_staging_operation_count;
4218 });
4219 Some(())
4220 })?;
4221 }
4222 Ok(())
4223 },
4224 )
4225 }
4226
4227 pub fn get_remotes(
4228 &mut self,
4229 branch_name: Option<String>,
4230 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4231 let id = self.id;
4232 self.send_job(None, move |repo, _cx| async move {
4233 match repo {
4234 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4235 RepositoryState::Remote { project_id, client } => {
4236 let response = client
4237 .request(proto::GetRemotes {
4238 project_id: project_id.0,
4239 repository_id: id.to_proto(),
4240 branch_name,
4241 })
4242 .await?;
4243
4244 let remotes = response
4245 .remotes
4246 .into_iter()
4247 .map(|remote| git::repository::Remote {
4248 name: remote.name.into(),
4249 })
4250 .collect();
4251
4252 Ok(remotes)
4253 }
4254 }
4255 })
4256 }
4257
4258 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4259 let id = self.id;
4260 self.send_job(None, move |repo, _| async move {
4261 match repo {
4262 RepositoryState::Local { backend, .. } => backend.branches().await,
4263 RepositoryState::Remote { project_id, client } => {
4264 let response = client
4265 .request(proto::GitGetBranches {
4266 project_id: project_id.0,
4267 repository_id: id.to_proto(),
4268 })
4269 .await?;
4270
4271 let branches = response
4272 .branches
4273 .into_iter()
4274 .map(|branch| proto_to_branch(&branch))
4275 .collect();
4276
4277 Ok(branches)
4278 }
4279 }
4280 })
4281 }
4282
4283 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4284 let id = self.id;
4285 self.send_job(None, move |repo, _| async move {
4286 match repo {
4287 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4288 RepositoryState::Remote { project_id, client } => {
4289 let response = client
4290 .request(proto::GetDefaultBranch {
4291 project_id: project_id.0,
4292 repository_id: id.to_proto(),
4293 })
4294 .await?;
4295
4296 anyhow::Ok(response.branch.map(SharedString::from))
4297 }
4298 }
4299 })
4300 }
4301
4302 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4303 let id = self.id;
4304 self.send_job(None, move |repo, _cx| async move {
4305 match repo {
4306 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4307 RepositoryState::Remote { project_id, client } => {
4308 let response = client
4309 .request(proto::GitDiff {
4310 project_id: project_id.0,
4311 repository_id: id.to_proto(),
4312 diff_type: match diff_type {
4313 DiffType::HeadToIndex => {
4314 proto::git_diff::DiffType::HeadToIndex.into()
4315 }
4316 DiffType::HeadToWorktree => {
4317 proto::git_diff::DiffType::HeadToWorktree.into()
4318 }
4319 },
4320 })
4321 .await?;
4322
4323 Ok(response.diff)
4324 }
4325 }
4326 })
4327 }
4328
4329 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4330 let id = self.id;
4331 self.send_job(
4332 Some(format!("git switch -c {branch_name}").into()),
4333 move |repo, _cx| async move {
4334 match repo {
4335 RepositoryState::Local { backend, .. } => {
4336 backend.create_branch(branch_name).await
4337 }
4338 RepositoryState::Remote { project_id, client } => {
4339 client
4340 .request(proto::GitCreateBranch {
4341 project_id: project_id.0,
4342 repository_id: id.to_proto(),
4343 branch_name,
4344 })
4345 .await?;
4346
4347 Ok(())
4348 }
4349 }
4350 },
4351 )
4352 }
4353
4354 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4355 let id = self.id;
4356 self.send_job(
4357 Some(format!("git switch {branch_name}").into()),
4358 move |repo, _cx| async move {
4359 match repo {
4360 RepositoryState::Local { backend, .. } => {
4361 backend.change_branch(branch_name).await
4362 }
4363 RepositoryState::Remote { project_id, client } => {
4364 client
4365 .request(proto::GitChangeBranch {
4366 project_id: project_id.0,
4367 repository_id: id.to_proto(),
4368 branch_name,
4369 })
4370 .await?;
4371
4372 Ok(())
4373 }
4374 }
4375 },
4376 )
4377 }
4378
4379 pub fn rename_branch(
4380 &mut self,
4381 branch: String,
4382 new_name: String,
4383 ) -> oneshot::Receiver<Result<()>> {
4384 let id = self.id;
4385 self.send_job(
4386 Some(format!("git branch -m {branch} {new_name}").into()),
4387 move |repo, _cx| async move {
4388 match repo {
4389 RepositoryState::Local { backend, .. } => {
4390 backend.rename_branch(branch, new_name).await
4391 }
4392 RepositoryState::Remote { project_id, client } => {
4393 client
4394 .request(proto::GitRenameBranch {
4395 project_id: project_id.0,
4396 repository_id: id.to_proto(),
4397 branch,
4398 new_name,
4399 })
4400 .await?;
4401
4402 Ok(())
4403 }
4404 }
4405 },
4406 )
4407 }
4408
4409 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4410 let id = self.id;
4411 self.send_job(None, move |repo, _cx| async move {
4412 match repo {
4413 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4414 RepositoryState::Remote { project_id, client } => {
4415 let response = client
4416 .request(proto::CheckForPushedCommits {
4417 project_id: project_id.0,
4418 repository_id: id.to_proto(),
4419 })
4420 .await?;
4421
4422 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4423
4424 Ok(branches)
4425 }
4426 }
4427 })
4428 }
4429
4430 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4431 self.send_job(None, |repo, _cx| async move {
4432 match repo {
4433 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4434 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4435 }
4436 })
4437 }
4438
4439 pub fn restore_checkpoint(
4440 &mut self,
4441 checkpoint: GitRepositoryCheckpoint,
4442 ) -> oneshot::Receiver<Result<()>> {
4443 self.send_job(None, move |repo, _cx| async move {
4444 match repo {
4445 RepositoryState::Local { backend, .. } => {
4446 backend.restore_checkpoint(checkpoint).await
4447 }
4448 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4449 }
4450 })
4451 }
4452
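/// Applies an `UpdateRepository` message from the host to the local snapshot:
/// branch, head commit, merge state, stash entries, and per-path statuses.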
4453 pub(crate) fn apply_remote_update(
4454 &mut self,
4455 update: proto::UpdateRepository,
4456 is_new: bool,
4457 cx: &mut Context<Self>,
4458 ) -> Result<()> {
4459 let conflicted_paths = TreeSet::from_ordered_entries(
4460 update
4461 .current_merge_conflicts
4462 .into_iter()
4463 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4464 );
4465 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4466 self.snapshot.head_commit = update
4467 .head_commit_details
4468 .as_ref()
4469 .map(proto_to_commit_details);
4470
4471 self.snapshot.merge.conflicted_paths = conflicted_paths;
4472 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4473 self.snapshot.stash_entries = GitStash {
4474 entries: update
4475 .stash_entries
4476 .iter()
4477 .filter_map(|entry| proto_to_stash(entry).ok())
4478 .collect(),
4479 };
4480
4481 let edits = update
4482 .removed_statuses
4483 .into_iter()
4484 .filter_map(|path| {
4485 Some(sum_tree::Edit::Remove(PathKey(
4486 RelPath::from_proto(&path).log_err()?,
4487 )))
4488 })
4489 .chain(
4490 update
4491 .updated_statuses
4492 .into_iter()
4493 .filter_map(|updated_status| {
4494 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4495 }),
4496 )
4497 .collect::<Vec<_>>();
4498 self.snapshot.statuses_by_path.edit(edits, ());
4499 if update.is_last_update {
4500 self.snapshot.scan_id = update.scan_id;
4501 }
4502 cx.emit(RepositoryEvent::Updated {
4503 full_scan: true,
4504 new_instance: is_new,
4505 });
4506 Ok(())
4507 }
4508
4509 pub fn compare_checkpoints(
4510 &mut self,
4511 left: GitRepositoryCheckpoint,
4512 right: GitRepositoryCheckpoint,
4513 ) -> oneshot::Receiver<Result<bool>> {
4514 self.send_job(None, move |repo, _cx| async move {
4515 match repo {
4516 RepositoryState::Local { backend, .. } => {
4517 backend.compare_checkpoints(left, right).await
4518 }
4519 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4520 }
4521 })
4522 }
4523
4524 pub fn diff_checkpoints(
4525 &mut self,
4526 base_checkpoint: GitRepositoryCheckpoint,
4527 target_checkpoint: GitRepositoryCheckpoint,
4528 ) -> oneshot::Receiver<Result<String>> {
4529 self.send_job(None, move |repo, _cx| async move {
4530 match repo {
4531 RepositoryState::Local { backend, .. } => {
4532 backend
4533 .diff_checkpoints(base_checkpoint, target_checkpoint)
4534 .await
4535 }
4536 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4537 }
4538 })
4539 }
4540
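/// Schedules a full status scan as a keyed job; if several scans are queued,
/// only the most recent one runs. The resulting snapshot is stored and
/// forwarded to downstream clients.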
4541 fn schedule_scan(
4542 &mut self,
4543 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4544 cx: &mut Context<Self>,
4545 ) {
4546 let this = cx.weak_entity();
4547 let _ = self.send_keyed_job(
4548 Some(GitJobKey::ReloadGitState),
4549 None,
4550 |state, mut cx| async move {
4551 log::debug!("run scheduled git status scan");
4552
4553 let Some(this) = this.upgrade() else {
4554 return Ok(());
4555 };
4556 let RepositoryState::Local { backend, .. } = state else {
4557 bail!("not a local repository")
4558 };
4559 let (snapshot, events) = this
4560 .update(&mut cx, |this, _| {
4561 this.paths_needing_status_update.clear();
4562 compute_snapshot(
4563 this.id,
4564 this.work_directory_abs_path.clone(),
4565 this.snapshot.clone(),
4566 backend.clone(),
4567 )
4568 })?
4569 .await?;
4570 this.update(&mut cx, |this, cx| {
4571 this.snapshot = snapshot.clone();
4572 for event in events {
4573 cx.emit(event);
4574 }
4575 })?;
4576 if let Some(updates_tx) = updates_tx {
4577 updates_tx
4578 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4579 .ok();
4580 }
4581 Ok(())
4582 },
4583 );
4584 }
4585
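/// Spawns the background worker that owns the local git backend: it resolves
/// the directory environment, opens the repository, and then processes queued
/// `GitJob`s in order, skipping a keyed job when a newer job with the same key
/// is already waiting.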
4586 fn spawn_local_git_worker(
4587 work_directory_abs_path: Arc<Path>,
4588 dot_git_abs_path: Arc<Path>,
4589 _repository_dir_abs_path: Arc<Path>,
4590 _common_dir_abs_path: Arc<Path>,
4591 project_environment: WeakEntity<ProjectEnvironment>,
4592 fs: Arc<dyn Fs>,
4593 cx: &mut Context<Self>,
4594 ) -> mpsc::UnboundedSender<GitJob> {
4595 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4596
4597 cx.spawn(async move |_, cx| {
4598 let environment = project_environment
4599 .upgrade()
4600 .context("missing project environment")?
4601 .update(cx, |project_environment, cx| {
4602 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4603 })?
4604 .await
4605 .unwrap_or_else(|| {
4606 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4607 HashMap::default()
4608 });
4609 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4610 let backend = cx
4611 .background_spawn(async move {
4612 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4613 .or_else(|| which::which("git").ok());
4614 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4615 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4616 })
4617 .await?;
4618
4619 if let Some(git_hosting_provider_registry) =
4620 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4621 {
4622 git_hosting_providers::register_additional_providers(
4623 git_hosting_provider_registry,
4624 backend.clone(),
4625 );
4626 }
4627
4628 let state = RepositoryState::Local {
4629 backend,
4630 environment: Arc::new(environment),
4631 };
4632 let mut jobs = VecDeque::new();
4633 loop {
4634 while let Ok(Some(next_job)) = job_rx.try_next() {
4635 jobs.push_back(next_job);
4636 }
4637
4638 if let Some(job) = jobs.pop_front() {
4639 if let Some(current_key) = &job.key
4640 && jobs
4641 .iter()
4642 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4643 {
4644 continue;
4645 }
4646 (job.job)(state.clone(), cx.clone()).await;
4647 } else if let Some(job) = job_rx.next().await {
4648 jobs.push_back(job);
4649 } else {
4650 break;
4651 }
4652 }
4653 anyhow::Ok(())
4654 })
4655 .detach_and_log_err(cx);
4656
4657 job_tx
4658 }
4659
4660 fn spawn_remote_git_worker(
4661 project_id: ProjectId,
4662 client: AnyProtoClient,
4663 cx: &mut Context<Self>,
4664 ) -> mpsc::UnboundedSender<GitJob> {
4665 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4666
4667 cx.spawn(async move |_, cx| {
4668 let state = RepositoryState::Remote { project_id, client };
4669 let mut jobs = VecDeque::new();
4670 loop {
4671 while let Ok(Some(next_job)) = job_rx.try_next() {
4672 jobs.push_back(next_job);
4673 }
4674
4675 if let Some(job) = jobs.pop_front() {
4676 if let Some(current_key) = &job.key
4677 && jobs
4678 .iter()
4679 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4680 {
4681 continue;
4682 }
4683 (job.job)(state.clone(), cx.clone()).await;
4684 } else if let Some(job) = job_rx.next().await {
4685 jobs.push_back(job);
4686 } else {
4687 break;
4688 }
4689 }
4690 anyhow::Ok(())
4691 })
4692 .detach_and_log_err(cx);
4693
4694 job_tx
4695 }
4696
4697 fn load_staged_text(
4698 &mut self,
4699 buffer_id: BufferId,
4700 repo_path: RepoPath,
4701 cx: &App,
4702 ) -> Task<Result<Option<String>>> {
4703 let rx = self.send_job(None, move |state, _| async move {
4704 match state {
4705 RepositoryState::Local { backend, .. } => {
4706 anyhow::Ok(backend.load_index_text(repo_path).await)
4707 }
4708 RepositoryState::Remote { project_id, client } => {
4709 let response = client
4710 .request(proto::OpenUnstagedDiff {
4711 project_id: project_id.to_proto(),
4712 buffer_id: buffer_id.to_proto(),
4713 })
4714 .await?;
4715 Ok(response.staged_text)
4716 }
4717 }
4718 });
4719 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4720 }
4721
4722 fn load_committed_text(
4723 &mut self,
4724 buffer_id: BufferId,
4725 repo_path: RepoPath,
4726 cx: &App,
4727 ) -> Task<Result<DiffBasesChange>> {
4728 let rx = self.send_job(None, move |state, _| async move {
4729 match state {
4730 RepositoryState::Local { backend, .. } => {
4731 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4732 let staged_text = backend.load_index_text(repo_path).await;
4733 let diff_bases_change = if committed_text == staged_text {
4734 DiffBasesChange::SetBoth(committed_text)
4735 } else {
4736 DiffBasesChange::SetEach {
4737 index: staged_text,
4738 head: committed_text,
4739 }
4740 };
4741 anyhow::Ok(diff_bases_change)
4742 }
4743 RepositoryState::Remote { project_id, client } => {
4744 use proto::open_uncommitted_diff_response::Mode;
4745
4746 let response = client
4747 .request(proto::OpenUncommittedDiff {
4748 project_id: project_id.to_proto(),
4749 buffer_id: buffer_id.to_proto(),
4750 })
4751 .await?;
4752 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4753 let bases = match mode {
4754 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4755 Mode::IndexAndHead => DiffBasesChange::SetEach {
4756 head: response.committed_text,
4757 index: response.staged_text,
4758 },
4759 };
4760 Ok(bases)
4761 }
4762 }
4763 });
4764
4765 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4766 }
4767
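/// Queues a status refresh for the given paths, coalescing with any refresh
/// already pending; when statuses or stash entries change, the snapshot is
/// updated and forwarded downstream.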
4768 fn paths_changed(
4769 &mut self,
4770 paths: Vec<RepoPath>,
4771 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4772 cx: &mut Context<Self>,
4773 ) {
4774 self.paths_needing_status_update.extend(paths);
4775
4776 let this = cx.weak_entity();
4777 let _ = self.send_keyed_job(
4778 Some(GitJobKey::RefreshStatuses),
4779 None,
4780 |state, mut cx| async move {
4781 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4782 (
4783 this.snapshot.clone(),
4784 mem::take(&mut this.paths_needing_status_update),
4785 )
4786 })?;
4787 let RepositoryState::Local { backend, .. } = state else {
4788 bail!("not a local repository")
4789 };
4790
4791 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4792 if paths.is_empty() {
4793 return Ok(());
4794 }
4795 let statuses = backend.status(&paths).await?;
4796 let stash_entries = backend.stash_entries().await?;
4797
4798 let changed_path_statuses = cx
4799 .background_spawn(async move {
4800 let mut changed_path_statuses = Vec::new();
4801 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4802 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4803
4804 for (repo_path, status) in &*statuses.entries {
4805 changed_paths.remove(repo_path);
4806 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4807 && cursor.item().is_some_and(|entry| entry.status == *status)
4808 {
4809 continue;
4810 }
4811
4812 changed_path_statuses.push(Edit::Insert(StatusEntry {
4813 repo_path: repo_path.clone(),
4814 status: *status,
4815 }));
4816 }
4817 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4818 for path in changed_paths.into_iter() {
4819 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4820 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4821 }
4822 }
4823 changed_path_statuses
4824 })
4825 .await;
4826
4827 this.update(&mut cx, |this, cx| {
4828 let needs_update = !changed_path_statuses.is_empty()
4829 || this.snapshot.stash_entries != stash_entries;
4830 this.snapshot.stash_entries = stash_entries;
4831 if !changed_path_statuses.is_empty() {
4832 this.snapshot
4833 .statuses_by_path
4834 .edit(changed_path_statuses, ());
4835 this.snapshot.scan_id += 1;
4836 }
4837
4838 if needs_update {
4839 cx.emit(RepositoryEvent::Updated {
4840 full_scan: false,
4841 new_instance: false,
4842 });
4843 }
4844
4845 if let Some(updates_tx) = updates_tx {
4846 updates_tx
4847 .unbounded_send(DownstreamUpdate::UpdateRepository(
4848 this.snapshot.clone(),
4849 ))
4850 .ok();
4851 }
4852 cx.emit(RepositoryEvent::PathsChanged);
4853 })
4854 },
4855 );
4856 }
4857
4858 /// The currently running git command and when it started, if any.
4859 pub fn current_job(&self) -> Option<JobInfo> {
4860 self.active_jobs.values().next().cloned()
4861 }
4862
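/// Enqueues a no-op job; the returned receiver resolves once every job queued
/// before it has finished.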
4863 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4864 self.send_job(None, |_, _| async {})
4865 }
4866}
4867
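/// Builds a permalink into the upstream repository for a file that lives in a
/// Cargo registry checkout, locating `.cargo_vcs_info.json` and `Cargo.toml`
/// in the file's ancestor directories to recover the repository URL and commit.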
4868fn get_permalink_in_rust_registry_src(
4869 provider_registry: Arc<GitHostingProviderRegistry>,
4870 path: PathBuf,
4871 selection: Range<u32>,
4872) -> Result<url::Url> {
4873 #[derive(Deserialize)]
4874 struct CargoVcsGit {
4875 sha1: String,
4876 }
4877
4878 #[derive(Deserialize)]
4879 struct CargoVcsInfo {
4880 git: CargoVcsGit,
4881 path_in_vcs: String,
4882 }
4883
4884 #[derive(Deserialize)]
4885 struct CargoPackage {
4886 repository: String,
4887 }
4888
4889 #[derive(Deserialize)]
4890 struct CargoToml {
4891 package: CargoPackage,
4892 }
4893
4894 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4895 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4896 Some((dir, json))
4897 }) else {
4898 bail!("No .cargo_vcs_info.json found in parent directories")
4899 };
4900 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4901 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4902 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4903 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4904 .context("parsing package.repository field of manifest")?;
4905 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4906 let permalink = provider.build_permalink(
4907 remote,
4908 BuildPermalinkParams::new(
4909 &cargo_vcs_info.git.sha1,
4910 &RepoPath(
4911 RelPath::new(&path, PathStyle::local())
4912 .context("invalid path")?
4913 .into_arc(),
4914 ),
4915 Some(selection),
4916 ),
4917 );
4918 Ok(permalink)
4919}
4920
4921fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4922 let Some(blame) = blame else {
4923 return proto::BlameBufferResponse {
4924 blame_response: None,
4925 };
4926 };
4927
4928 let entries = blame
4929 .entries
4930 .into_iter()
4931 .map(|entry| proto::BlameEntry {
4932 sha: entry.sha.as_bytes().into(),
4933 start_line: entry.range.start,
4934 end_line: entry.range.end,
4935 original_line_number: entry.original_line_number,
4936 author: entry.author,
4937 author_mail: entry.author_mail,
4938 author_time: entry.author_time,
4939 author_tz: entry.author_tz,
4940 committer: entry.committer_name,
4941 committer_mail: entry.committer_email,
4942 committer_time: entry.committer_time,
4943 committer_tz: entry.committer_tz,
4944 summary: entry.summary,
4945 previous: entry.previous,
4946 filename: entry.filename,
4947 })
4948 .collect::<Vec<_>>();
4949
4950 let messages = blame
4951 .messages
4952 .into_iter()
4953 .map(|(oid, message)| proto::CommitMessage {
4954 oid: oid.as_bytes().into(),
4955 message,
4956 })
4957 .collect::<Vec<_>>();
4958
4959 proto::BlameBufferResponse {
4960 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4961 entries,
4962 messages,
4963 remote_url: blame.remote_url,
4964 }),
4965 }
4966}
4967
4968fn deserialize_blame_buffer_response(
4969 response: proto::BlameBufferResponse,
4970) -> Option<git::blame::Blame> {
4971 let response = response.blame_response?;
4972 let entries = response
4973 .entries
4974 .into_iter()
4975 .filter_map(|entry| {
4976 Some(git::blame::BlameEntry {
4977 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4978 range: entry.start_line..entry.end_line,
4979 original_line_number: entry.original_line_number,
4980 committer_name: entry.committer,
4981 committer_time: entry.committer_time,
4982 committer_tz: entry.committer_tz,
4983 committer_email: entry.committer_mail,
4984 author: entry.author,
4985 author_mail: entry.author_mail,
4986 author_time: entry.author_time,
4987 author_tz: entry.author_tz,
4988 summary: entry.summary,
4989 previous: entry.previous,
4990 filename: entry.filename,
4991 })
4992 })
4993 .collect::<Vec<_>>();
4994
4995 let messages = response
4996 .messages
4997 .into_iter()
4998 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4999 .collect::<HashMap<_, _>>();
5000
5001 Some(Blame {
5002 entries,
5003 messages,
5004 remote_url: response.remote_url,
5005 })
5006}
5007
5008fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5009 proto::Branch {
5010 is_head: branch.is_head,
5011 ref_name: branch.ref_name.to_string(),
5012 unix_timestamp: branch
5013 .most_recent_commit
5014 .as_ref()
5015 .map(|commit| commit.commit_timestamp as u64),
5016 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5017 ref_name: upstream.ref_name.to_string(),
5018 tracking: upstream
5019 .tracking
5020 .status()
5021 .map(|upstream| proto::UpstreamTracking {
5022 ahead: upstream.ahead as u64,
5023 behind: upstream.behind as u64,
5024 }),
5025 }),
5026 most_recent_commit: branch
5027 .most_recent_commit
5028 .as_ref()
5029 .map(|commit| proto::CommitSummary {
5030 sha: commit.sha.to_string(),
5031 subject: commit.subject.to_string(),
5032 commit_timestamp: commit.commit_timestamp,
5033 author_name: commit.author_name.to_string(),
5034 }),
5035 }
5036}
5037
5038fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5039 git::repository::Branch {
5040 is_head: proto.is_head,
5041 ref_name: proto.ref_name.clone().into(),
5042 upstream: proto
5043 .upstream
5044 .as_ref()
5045 .map(|upstream| git::repository::Upstream {
5046 ref_name: upstream.ref_name.to_string().into(),
5047 tracking: upstream
5048 .tracking
5049 .as_ref()
5050 .map(|tracking| {
5051 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5052 ahead: tracking.ahead as u32,
5053 behind: tracking.behind as u32,
5054 })
5055 })
5056 .unwrap_or(git::repository::UpstreamTracking::Gone),
5057 }),
5058 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5059 git::repository::CommitSummary {
5060 sha: commit.sha.to_string().into(),
5061 subject: commit.subject.to_string().into(),
5062 commit_timestamp: commit.commit_timestamp,
5063 author_name: commit.author_name.to_string().into(),
5064 has_parent: true,
5065 }
5066 }),
5067 }
5068}
5069
5070fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5071 proto::GitCommitDetails {
5072 sha: commit.sha.to_string(),
5073 message: commit.message.to_string(),
5074 commit_timestamp: commit.commit_timestamp,
5075 author_email: commit.author_email.to_string(),
5076 author_name: commit.author_name.to_string(),
5077 }
5078}
5079
5080fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5081 CommitDetails {
5082 sha: proto.sha.clone().into(),
5083 message: proto.message.clone().into(),
5084 commit_timestamp: proto.commit_timestamp,
5085 author_email: proto.author_email.clone().into(),
5086 author_name: proto.author_name.clone().into(),
5087 }
5088}
5089
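/// Queries the backend for branches, statuses, stash entries, merge state, and
/// remote URLs, builds a new `RepositorySnapshot`, and returns the events that
/// should be emitted relative to `prev_snapshot`.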
5090async fn compute_snapshot(
5091 id: RepositoryId,
5092 work_directory_abs_path: Arc<Path>,
5093 prev_snapshot: RepositorySnapshot,
5094 backend: Arc<dyn GitRepository>,
5095) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5096 let mut events = Vec::new();
5097 let branches = backend.branches().await?;
5098 let branch = branches.into_iter().find(|branch| branch.is_head);
5099 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5100 let stash_entries = backend.stash_entries().await?;
5101 let statuses_by_path = SumTree::from_iter(
5102 statuses
5103 .entries
5104 .iter()
5105 .map(|(repo_path, status)| StatusEntry {
5106 repo_path: repo_path.clone(),
5107 status: *status,
5108 }),
5109 (),
5110 );
5111 let (merge_details, merge_heads_changed) =
5112 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5113 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5114
5115 if merge_heads_changed
5116 || branch != prev_snapshot.branch
5117 || statuses_by_path != prev_snapshot.statuses_by_path
5118 {
5119 events.push(RepositoryEvent::Updated {
5120 full_scan: true,
5121 new_instance: false,
5122 });
5123 }
5124
5125 // Merge conflict paths are cached so that staging/unstaging doesn't change them;
5126 // they are only recomputed when the merge heads change (e.g. at commit time).
5127 if merge_heads_changed {
5128 events.push(RepositoryEvent::MergeHeadsChanged);
5129 }
5130
5131 // Useful when `branch` is None, e.g. in a detached HEAD state.
5132 let head_commit = match backend.head_sha().await {
5133 Some(head_sha) => backend.show(head_sha).await.log_err(),
5134 None => None,
5135 };
5136
5137 // Used by edit prediction data collection
5138 let remote_origin_url = backend.remote_url("origin");
5139 let remote_upstream_url = backend.remote_url("upstream");
5140
5141 let snapshot = RepositorySnapshot {
5142 id,
5143 statuses_by_path,
5144 work_directory_abs_path,
5145 path_style: prev_snapshot.path_style,
5146 scan_id: prev_snapshot.scan_id + 1,
5147 branch,
5148 head_commit,
5149 merge: merge_details,
5150 remote_origin_url,
5151 remote_upstream_url,
5152 stash_entries,
5153 };
5154
5155 Ok((snapshot, events))
5156}
5157
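/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy simple status code when no detailed variant is present.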
5158fn status_from_proto(
5159 simple_status: i32,
5160 status: Option<proto::GitFileStatus>,
5161) -> anyhow::Result<FileStatus> {
5162 use proto::git_file_status::Variant;
5163
5164 let Some(variant) = status.and_then(|status| status.variant) else {
5165 let code = proto::GitStatus::from_i32(simple_status)
5166 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5167 let result = match code {
5168 proto::GitStatus::Added => TrackedStatus {
5169 worktree_status: StatusCode::Added,
5170 index_status: StatusCode::Unmodified,
5171 }
5172 .into(),
5173 proto::GitStatus::Modified => TrackedStatus {
5174 worktree_status: StatusCode::Modified,
5175 index_status: StatusCode::Unmodified,
5176 }
5177 .into(),
5178 proto::GitStatus::Conflict => UnmergedStatus {
5179 first_head: UnmergedStatusCode::Updated,
5180 second_head: UnmergedStatusCode::Updated,
5181 }
5182 .into(),
5183 proto::GitStatus::Deleted => TrackedStatus {
5184 worktree_status: StatusCode::Deleted,
5185 index_status: StatusCode::Unmodified,
5186 }
5187 .into(),
5188 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5189 };
5190 return Ok(result);
5191 };
5192
5193 let result = match variant {
5194 Variant::Untracked(_) => FileStatus::Untracked,
5195 Variant::Ignored(_) => FileStatus::Ignored,
5196 Variant::Unmerged(unmerged) => {
5197 let [first_head, second_head] =
5198 [unmerged.first_head, unmerged.second_head].map(|head| {
5199 let code = proto::GitStatus::from_i32(head)
5200 .with_context(|| format!("Invalid git status code: {head}"))?;
5201 let result = match code {
5202 proto::GitStatus::Added => UnmergedStatusCode::Added,
5203 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5204 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5205 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5206 };
5207 Ok(result)
5208 });
5209 let [first_head, second_head] = [first_head?, second_head?];
5210 UnmergedStatus {
5211 first_head,
5212 second_head,
5213 }
5214 .into()
5215 }
5216 Variant::Tracked(tracked) => {
5217 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5218 .map(|status| {
5219 let code = proto::GitStatus::from_i32(status)
5220 .with_context(|| format!("Invalid git status code: {status}"))?;
5221 let result = match code {
5222 proto::GitStatus::Modified => StatusCode::Modified,
5223 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5224 proto::GitStatus::Added => StatusCode::Added,
5225 proto::GitStatus::Deleted => StatusCode::Deleted,
5226 proto::GitStatus::Renamed => StatusCode::Renamed,
5227 proto::GitStatus::Copied => StatusCode::Copied,
5228 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5229 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5230 };
5231 Ok(result)
5232 });
5233 let [index_status, worktree_status] = [index_status?, worktree_status?];
5234 TrackedStatus {
5235 index_status,
5236 worktree_status,
5237 }
5238 .into()
5239 }
5240 };
5241 Ok(result)
5242}
5243
5244fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5245 use proto::git_file_status::{Tracked, Unmerged, Variant};
5246
5247 let variant = match status {
5248 FileStatus::Untracked => Variant::Untracked(Default::default()),
5249 FileStatus::Ignored => Variant::Ignored(Default::default()),
5250 FileStatus::Unmerged(UnmergedStatus {
5251 first_head,
5252 second_head,
5253 }) => Variant::Unmerged(Unmerged {
5254 first_head: unmerged_status_to_proto(first_head),
5255 second_head: unmerged_status_to_proto(second_head),
5256 }),
5257 FileStatus::Tracked(TrackedStatus {
5258 index_status,
5259 worktree_status,
5260 }) => Variant::Tracked(Tracked {
5261 index_status: tracked_status_to_proto(index_status),
5262 worktree_status: tracked_status_to_proto(worktree_status),
5263 }),
5264 };
5265 proto::GitFileStatus {
5266 variant: Some(variant),
5267 }
5268}
5269
5270fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5271 match code {
5272 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5273 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5274 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5275 }
5276}
5277
5278fn tracked_status_to_proto(code: StatusCode) -> i32 {
5279 match code {
5280 StatusCode::Added => proto::GitStatus::Added as _,
5281 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5282 StatusCode::Modified => proto::GitStatus::Modified as _,
5283 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5284 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5285 StatusCode::Copied => proto::GitStatus::Copied as _,
5286 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5287 }
5288}