1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
109 /// These operation counts are used to ensure that head and index text
110 /// values read from the git repository are up-to-date with any hunk staging
111 /// operations that have been performed on the BufferDiff.
112 ///
113 /// The operation count is incremented immediately when the user initiates a
114 /// hunk stage/unstage operation. Then, upon finishing writing the new index
115 /// text do disk, the `operation count as of write` is updated to reflect
116 /// the operation count that prompted the write.
117 hunk_staging_operation_count: usize,
118 hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305}
306
307#[derive(Clone, Debug)]
308pub struct JobsUpdated;
309
310#[derive(Debug)]
311pub enum GitStoreEvent {
312 ActiveRepositoryChanged(Option<RepositoryId>),
313 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
314 RepositoryAdded(RepositoryId),
315 RepositoryRemoved(RepositoryId),
316 IndexWriteError(anyhow::Error),
317 JobsUpdated,
318 ConflictsUpdated,
319}
320
321impl EventEmitter<RepositoryEvent> for Repository {}
322impl EventEmitter<JobsUpdated> for Repository {}
323impl EventEmitter<GitStoreEvent> for GitStore {}
324
325pub struct GitJob {
326 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
327 key: Option<GitJobKey>,
328}
329
330#[derive(PartialEq, Eq)]
331enum GitJobKey {
332 WriteIndex(RepoPath),
333 ReloadBufferDiffBases,
334 RefreshStatuses,
335 ReloadGitState,
336}
337
338impl GitStore {
339 pub fn local(
340 worktree_store: &Entity<WorktreeStore>,
341 buffer_store: Entity<BufferStore>,
342 environment: Entity<ProjectEnvironment>,
343 fs: Arc<dyn Fs>,
344 cx: &mut Context<Self>,
345 ) -> Self {
346 Self::new(
347 worktree_store.clone(),
348 buffer_store,
349 GitStoreState::Local {
350 next_repository_id: Arc::new(AtomicU64::new(1)),
351 downstream: None,
352 project_environment: environment,
353 fs,
354 },
355 cx,
356 )
357 }
358
359 pub fn remote(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 upstream_client: AnyProtoClient,
363 project_id: u64,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Remote {
370 upstream_client,
371 upstream_project_id: project_id,
372 downstream: None,
373 },
374 cx,
375 )
376 }
377
378 fn new(
379 worktree_store: Entity<WorktreeStore>,
380 buffer_store: Entity<BufferStore>,
381 state: GitStoreState,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 let _subscriptions = vec![
385 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
386 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
387 ];
388
389 GitStore {
390 state,
391 buffer_store,
392 worktree_store,
393 repositories: HashMap::default(),
394 active_repo_id: None,
395 _subscriptions,
396 loading_diffs: HashMap::default(),
397 shared_diffs: HashMap::default(),
398 diffs: HashMap::default(),
399 }
400 }
401
402 pub fn init(client: &AnyProtoClient) {
403 client.add_entity_request_handler(Self::handle_get_remotes);
404 client.add_entity_request_handler(Self::handle_get_branches);
405 client.add_entity_request_handler(Self::handle_get_default_branch);
406 client.add_entity_request_handler(Self::handle_change_branch);
407 client.add_entity_request_handler(Self::handle_create_branch);
408 client.add_entity_request_handler(Self::handle_rename_branch);
409 client.add_entity_request_handler(Self::handle_git_init);
410 client.add_entity_request_handler(Self::handle_push);
411 client.add_entity_request_handler(Self::handle_pull);
412 client.add_entity_request_handler(Self::handle_fetch);
413 client.add_entity_request_handler(Self::handle_stage);
414 client.add_entity_request_handler(Self::handle_unstage);
415 client.add_entity_request_handler(Self::handle_stash);
416 client.add_entity_request_handler(Self::handle_stash_pop);
417 client.add_entity_request_handler(Self::handle_stash_apply);
418 client.add_entity_request_handler(Self::handle_stash_drop);
419 client.add_entity_request_handler(Self::handle_commit);
420 client.add_entity_request_handler(Self::handle_reset);
421 client.add_entity_request_handler(Self::handle_show);
422 client.add_entity_request_handler(Self::handle_load_commit_diff);
423 client.add_entity_request_handler(Self::handle_checkout_files);
424 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
425 client.add_entity_request_handler(Self::handle_set_index_text);
426 client.add_entity_request_handler(Self::handle_askpass);
427 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
428 client.add_entity_request_handler(Self::handle_git_diff);
429 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
430 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
431 client.add_entity_message_handler(Self::handle_update_diff_bases);
432 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
433 client.add_entity_request_handler(Self::handle_blame_buffer);
434 client.add_entity_message_handler(Self::handle_update_repository);
435 client.add_entity_message_handler(Self::handle_remove_repository);
436 client.add_entity_request_handler(Self::handle_git_clone);
437 }
438
439 pub fn is_local(&self) -> bool {
440 matches!(self.state, GitStoreState::Local { .. })
441 }
442
443 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
444 match &mut self.state {
445 GitStoreState::Remote {
446 downstream: downstream_client,
447 ..
448 } => {
449 for repo in self.repositories.values() {
450 let update = repo.read(cx).snapshot.initial_update(project_id);
451 for update in split_repository_update(update) {
452 client.send(update).log_err();
453 }
454 }
455 *downstream_client = Some((client, ProjectId(project_id)));
456 }
457 GitStoreState::Local {
458 downstream: downstream_client,
459 ..
460 } => {
461 let mut snapshots = HashMap::default();
462 let (updates_tx, mut updates_rx) = mpsc::unbounded();
463 for repo in self.repositories.values() {
464 updates_tx
465 .unbounded_send(DownstreamUpdate::UpdateRepository(
466 repo.read(cx).snapshot.clone(),
467 ))
468 .ok();
469 }
470 *downstream_client = Some(LocalDownstreamState {
471 client: client.clone(),
472 project_id: ProjectId(project_id),
473 updates_tx,
474 _task: cx.spawn(async move |this, cx| {
475 cx.background_spawn(async move {
476 while let Some(update) = updates_rx.next().await {
477 match update {
478 DownstreamUpdate::UpdateRepository(snapshot) => {
479 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
480 {
481 let update =
482 snapshot.build_update(old_snapshot, project_id);
483 *old_snapshot = snapshot;
484 for update in split_repository_update(update) {
485 client.send(update)?;
486 }
487 } else {
488 let update = snapshot.initial_update(project_id);
489 for update in split_repository_update(update) {
490 client.send(update)?;
491 }
492 snapshots.insert(snapshot.id, snapshot);
493 }
494 }
495 DownstreamUpdate::RemoveRepository(id) => {
496 client.send(proto::RemoveRepository {
497 project_id,
498 id: id.to_proto(),
499 })?;
500 }
501 }
502 }
503 anyhow::Ok(())
504 })
505 .await
506 .ok();
507 this.update(cx, |this, _| {
508 if let GitStoreState::Local {
509 downstream: downstream_client,
510 ..
511 } = &mut this.state
512 {
513 downstream_client.take();
514 } else {
515 unreachable!("unshared called on remote store");
516 }
517 })
518 }),
519 });
520 }
521 }
522 }
523
524 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
525 match &mut self.state {
526 GitStoreState::Local {
527 downstream: downstream_client,
528 ..
529 } => {
530 downstream_client.take();
531 }
532 GitStoreState::Remote {
533 downstream: downstream_client,
534 ..
535 } => {
536 downstream_client.take();
537 }
538 }
539 self.shared_diffs.clear();
540 }
541
542 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
543 self.shared_diffs.remove(peer_id);
544 }
545
546 pub fn active_repository(&self) -> Option<Entity<Repository>> {
547 self.active_repo_id
548 .as_ref()
549 .map(|id| self.repositories[id].clone())
550 }
551
552 pub fn open_unstaged_diff(
553 &mut self,
554 buffer: Entity<Buffer>,
555 cx: &mut Context<Self>,
556 ) -> Task<Result<Entity<BufferDiff>>> {
557 let buffer_id = buffer.read(cx).remote_id();
558 if let Some(diff_state) = self.diffs.get(&buffer_id)
559 && let Some(unstaged_diff) = diff_state
560 .read(cx)
561 .unstaged_diff
562 .as_ref()
563 .and_then(|weak| weak.upgrade())
564 {
565 if let Some(task) =
566 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
567 {
568 return cx.background_executor().spawn(async move {
569 task.await;
570 Ok(unstaged_diff)
571 });
572 }
573 return Task::ready(Ok(unstaged_diff));
574 }
575
576 let Some((repo, repo_path)) =
577 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
578 else {
579 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
580 };
581
582 let task = self
583 .loading_diffs
584 .entry((buffer_id, DiffKind::Unstaged))
585 .or_insert_with(|| {
586 let staged_text = repo.update(cx, |repo, cx| {
587 repo.load_staged_text(buffer_id, repo_path, cx)
588 });
589 cx.spawn(async move |this, cx| {
590 Self::open_diff_internal(
591 this,
592 DiffKind::Unstaged,
593 staged_text.await.map(DiffBasesChange::SetIndex),
594 buffer,
595 cx,
596 )
597 .await
598 .map_err(Arc::new)
599 })
600 .shared()
601 })
602 .clone();
603
604 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
605 }
606
607 pub fn open_uncommitted_diff(
608 &mut self,
609 buffer: Entity<Buffer>,
610 cx: &mut Context<Self>,
611 ) -> Task<Result<Entity<BufferDiff>>> {
612 let buffer_id = buffer.read(cx).remote_id();
613
614 if let Some(diff_state) = self.diffs.get(&buffer_id)
615 && let Some(uncommitted_diff) = diff_state
616 .read(cx)
617 .uncommitted_diff
618 .as_ref()
619 .and_then(|weak| weak.upgrade())
620 {
621 if let Some(task) =
622 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
623 {
624 return cx.background_executor().spawn(async move {
625 task.await;
626 Ok(uncommitted_diff)
627 });
628 }
629 return Task::ready(Ok(uncommitted_diff));
630 }
631
632 let Some((repo, repo_path)) =
633 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
634 else {
635 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
636 };
637
638 let task = self
639 .loading_diffs
640 .entry((buffer_id, DiffKind::Uncommitted))
641 .or_insert_with(|| {
642 let changes = repo.update(cx, |repo, cx| {
643 repo.load_committed_text(buffer_id, repo_path, cx)
644 });
645
646 cx.spawn(async move |this, cx| {
647 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
648 .await
649 .map_err(Arc::new)
650 })
651 .shared()
652 })
653 .clone();
654
655 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
656 }
657
658 async fn open_diff_internal(
659 this: WeakEntity<Self>,
660 kind: DiffKind,
661 texts: Result<DiffBasesChange>,
662 buffer_entity: Entity<Buffer>,
663 cx: &mut AsyncApp,
664 ) -> Result<Entity<BufferDiff>> {
665 let diff_bases_change = match texts {
666 Err(e) => {
667 this.update(cx, |this, cx| {
668 let buffer = buffer_entity.read(cx);
669 let buffer_id = buffer.remote_id();
670 this.loading_diffs.remove(&(buffer_id, kind));
671 })?;
672 return Err(e);
673 }
674 Ok(change) => change,
675 };
676
677 this.update(cx, |this, cx| {
678 let buffer = buffer_entity.read(cx);
679 let buffer_id = buffer.remote_id();
680 let language = buffer.language().cloned();
681 let language_registry = buffer.language_registry();
682 let text_snapshot = buffer.text_snapshot();
683 this.loading_diffs.remove(&(buffer_id, kind));
684
685 let git_store = cx.weak_entity();
686 let diff_state = this
687 .diffs
688 .entry(buffer_id)
689 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
690
691 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
692
693 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
694 diff_state.update(cx, |diff_state, cx| {
695 diff_state.language = language;
696 diff_state.language_registry = language_registry;
697
698 match kind {
699 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
700 DiffKind::Uncommitted => {
701 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
702 diff
703 } else {
704 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
705 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
706 unstaged_diff
707 };
708
709 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
710 diff_state.uncommitted_diff = Some(diff.downgrade())
711 }
712 }
713
714 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
715 let rx = diff_state.wait_for_recalculation();
716
717 anyhow::Ok(async move {
718 if let Some(rx) = rx {
719 rx.await;
720 }
721 Ok(diff)
722 })
723 })
724 })??
725 .await
726 }
727
728 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
729 let diff_state = self.diffs.get(&buffer_id)?;
730 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
731 }
732
733 pub fn get_uncommitted_diff(
734 &self,
735 buffer_id: BufferId,
736 cx: &App,
737 ) -> Option<Entity<BufferDiff>> {
738 let diff_state = self.diffs.get(&buffer_id)?;
739 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
740 }
741
742 pub fn open_conflict_set(
743 &mut self,
744 buffer: Entity<Buffer>,
745 cx: &mut Context<Self>,
746 ) -> Entity<ConflictSet> {
747 log::debug!("open conflict set");
748 let buffer_id = buffer.read(cx).remote_id();
749
750 if let Some(git_state) = self.diffs.get(&buffer_id)
751 && let Some(conflict_set) = git_state
752 .read(cx)
753 .conflict_set
754 .as_ref()
755 .and_then(|weak| weak.upgrade())
756 {
757 let conflict_set = conflict_set;
758 let buffer_snapshot = buffer.read(cx).text_snapshot();
759
760 git_state.update(cx, |state, cx| {
761 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
762 });
763
764 return conflict_set;
765 }
766
767 let is_unmerged = self
768 .repository_and_path_for_buffer_id(buffer_id, cx)
769 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
770 let git_store = cx.weak_entity();
771 let buffer_git_state = self
772 .diffs
773 .entry(buffer_id)
774 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
775 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
776
777 self._subscriptions
778 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
779 cx.emit(GitStoreEvent::ConflictsUpdated);
780 }));
781
782 buffer_git_state.update(cx, |state, cx| {
783 state.conflict_set = Some(conflict_set.downgrade());
784 let buffer_snapshot = buffer.read(cx).text_snapshot();
785 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
786 });
787
788 conflict_set
789 }
790
791 pub fn project_path_git_status(
792 &self,
793 project_path: &ProjectPath,
794 cx: &App,
795 ) -> Option<FileStatus> {
796 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
797 Some(repo.read(cx).status_for_path(&repo_path)?.status)
798 }
799
800 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
801 let mut work_directory_abs_paths = Vec::new();
802 let mut checkpoints = Vec::new();
803 for repository in self.repositories.values() {
804 repository.update(cx, |repository, _| {
805 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
806 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
807 });
808 }
809
810 cx.background_executor().spawn(async move {
811 let checkpoints = future::try_join_all(checkpoints).await?;
812 Ok(GitStoreCheckpoint {
813 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
814 .into_iter()
815 .zip(checkpoints)
816 .collect(),
817 })
818 })
819 }
820
821 pub fn restore_checkpoint(
822 &self,
823 checkpoint: GitStoreCheckpoint,
824 cx: &mut App,
825 ) -> Task<Result<()>> {
826 let repositories_by_work_dir_abs_path = self
827 .repositories
828 .values()
829 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
830 .collect::<HashMap<_, _>>();
831
832 let mut tasks = Vec::new();
833 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
834 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
835 let restore = repository.update(cx, |repository, _| {
836 repository.restore_checkpoint(checkpoint)
837 });
838 tasks.push(async move { restore.await? });
839 }
840 }
841 cx.background_spawn(async move {
842 future::try_join_all(tasks).await?;
843 Ok(())
844 })
845 }
846
847 /// Compares two checkpoints, returning true if they are equal.
848 pub fn compare_checkpoints(
849 &self,
850 left: GitStoreCheckpoint,
851 mut right: GitStoreCheckpoint,
852 cx: &mut App,
853 ) -> Task<Result<bool>> {
854 let repositories_by_work_dir_abs_path = self
855 .repositories
856 .values()
857 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
858 .collect::<HashMap<_, _>>();
859
860 let mut tasks = Vec::new();
861 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
862 if let Some(right_checkpoint) = right
863 .checkpoints_by_work_dir_abs_path
864 .remove(&work_dir_abs_path)
865 {
866 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
867 {
868 let compare = repository.update(cx, |repository, _| {
869 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
870 });
871
872 tasks.push(async move { compare.await? });
873 }
874 } else {
875 return Task::ready(Ok(false));
876 }
877 }
878 cx.background_spawn(async move {
879 Ok(future::try_join_all(tasks)
880 .await?
881 .into_iter()
882 .all(|result| result))
883 })
884 }
885
886 /// Blames a buffer.
887 pub fn blame_buffer(
888 &self,
889 buffer: &Entity<Buffer>,
890 version: Option<clock::Global>,
891 cx: &mut App,
892 ) -> Task<Result<Option<Blame>>> {
893 let buffer = buffer.read(cx);
894 let Some((repo, repo_path)) =
895 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
896 else {
897 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
898 };
899 let content = match &version {
900 Some(version) => buffer.rope_for_version(version),
901 None => buffer.as_rope().clone(),
902 };
903 let version = version.unwrap_or(buffer.version());
904 let buffer_id = buffer.remote_id();
905
906 let rx = repo.update(cx, |repo, _| {
907 repo.send_job(None, move |state, _| async move {
908 match state {
909 RepositoryState::Local { backend, .. } => backend
910 .blame(repo_path.clone(), content)
911 .await
912 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
913 .map(Some),
914 RepositoryState::Remote { project_id, client } => {
915 let response = client
916 .request(proto::BlameBuffer {
917 project_id: project_id.to_proto(),
918 buffer_id: buffer_id.into(),
919 version: serialize_version(&version),
920 })
921 .await?;
922 Ok(deserialize_blame_buffer_response(response))
923 }
924 }
925 })
926 });
927
928 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
929 }
930
931 pub fn get_permalink_to_line(
932 &self,
933 buffer: &Entity<Buffer>,
934 selection: Range<u32>,
935 cx: &mut App,
936 ) -> Task<Result<url::Url>> {
937 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
938 return Task::ready(Err(anyhow!("buffer has no file")));
939 };
940
941 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
942 &(file.worktree.read(cx).id(), file.path.clone()).into(),
943 cx,
944 ) else {
945 // If we're not in a Git repo, check whether this is a Rust source
946 // file in the Cargo registry (presumably opened with go-to-definition
947 // from a normal Rust file). If so, we can put together a permalink
948 // using crate metadata.
949 if buffer
950 .read(cx)
951 .language()
952 .is_none_or(|lang| lang.name() != "Rust".into())
953 {
954 return Task::ready(Err(anyhow!("no permalink available")));
955 }
956 let file_path = file.worktree.read(cx).absolutize(&file.path);
957 return cx.spawn(async move |cx| {
958 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
959 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
960 .context("no permalink available")
961 });
962
963 // TODO remote case
964 };
965
966 let buffer_id = buffer.read(cx).remote_id();
967 let branch = repo.read(cx).branch.clone();
968 let remote = branch
969 .as_ref()
970 .and_then(|b| b.upstream.as_ref())
971 .and_then(|b| b.remote_name())
972 .unwrap_or("origin")
973 .to_string();
974
975 let rx = repo.update(cx, |repo, _| {
976 repo.send_job(None, move |state, cx| async move {
977 match state {
978 RepositoryState::Local { backend, .. } => {
979 let origin_url = backend
980 .remote_url(&remote)
981 .with_context(|| format!("remote \"{remote}\" not found"))?;
982
983 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
984
985 let provider_registry =
986 cx.update(GitHostingProviderRegistry::default_global)?;
987
988 let (provider, remote) =
989 parse_git_remote_url(provider_registry, &origin_url)
990 .context("parsing Git remote URL")?;
991
992 let path = repo_path.as_unix_str();
993
994 Ok(provider.build_permalink(
995 remote,
996 BuildPermalinkParams {
997 sha: &sha,
998 path,
999 selection: Some(selection),
1000 },
1001 ))
1002 }
1003 RepositoryState::Remote { project_id, client } => {
1004 let response = client
1005 .request(proto::GetPermalinkToLine {
1006 project_id: project_id.to_proto(),
1007 buffer_id: buffer_id.into(),
1008 selection: Some(proto::Range {
1009 start: selection.start as u64,
1010 end: selection.end as u64,
1011 }),
1012 })
1013 .await?;
1014
1015 url::Url::parse(&response.permalink).context("failed to parse permalink")
1016 }
1017 }
1018 })
1019 });
1020 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1021 }
1022
1023 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1024 match &self.state {
1025 GitStoreState::Local {
1026 downstream: downstream_client,
1027 ..
1028 } => downstream_client
1029 .as_ref()
1030 .map(|state| (state.client.clone(), state.project_id)),
1031 GitStoreState::Remote {
1032 downstream: downstream_client,
1033 ..
1034 } => downstream_client.clone(),
1035 }
1036 }
1037
1038 fn upstream_client(&self) -> Option<AnyProtoClient> {
1039 match &self.state {
1040 GitStoreState::Local { .. } => None,
1041 GitStoreState::Remote {
1042 upstream_client, ..
1043 } => Some(upstream_client.clone()),
1044 }
1045 }
1046
1047 fn on_worktree_store_event(
1048 &mut self,
1049 worktree_store: Entity<WorktreeStore>,
1050 event: &WorktreeStoreEvent,
1051 cx: &mut Context<Self>,
1052 ) {
1053 let GitStoreState::Local {
1054 project_environment,
1055 downstream,
1056 next_repository_id,
1057 fs,
1058 } = &self.state
1059 else {
1060 return;
1061 };
1062
1063 match event {
1064 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1065 if let Some(worktree) = self
1066 .worktree_store
1067 .read(cx)
1068 .worktree_for_id(*worktree_id, cx)
1069 {
1070 let paths_by_git_repo =
1071 self.process_updated_entries(&worktree, updated_entries, cx);
1072 let downstream = downstream
1073 .as_ref()
1074 .map(|downstream| downstream.updates_tx.clone());
1075 cx.spawn(async move |_, cx| {
1076 let paths_by_git_repo = paths_by_git_repo.await;
1077 for (repo, paths) in paths_by_git_repo {
1078 repo.update(cx, |repo, cx| {
1079 repo.paths_changed(paths, downstream.clone(), cx);
1080 })
1081 .ok();
1082 }
1083 })
1084 .detach();
1085 }
1086 }
1087 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1088 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1089 else {
1090 return;
1091 };
1092 if !worktree.read(cx).is_visible() {
1093 log::debug!(
1094 "not adding repositories for local worktree {:?} because it's not visible",
1095 worktree.read(cx).abs_path()
1096 );
1097 return;
1098 }
1099 self.update_repositories_from_worktree(
1100 project_environment.clone(),
1101 next_repository_id.clone(),
1102 downstream
1103 .as_ref()
1104 .map(|downstream| downstream.updates_tx.clone()),
1105 changed_repos.clone(),
1106 fs.clone(),
1107 cx,
1108 );
1109 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1110 }
1111 _ => {}
1112 }
1113 }
1114
1115 fn on_repository_event(
1116 &mut self,
1117 repo: Entity<Repository>,
1118 event: &RepositoryEvent,
1119 cx: &mut Context<Self>,
1120 ) {
1121 let id = repo.read(cx).id;
1122 let repo_snapshot = repo.read(cx).snapshot.clone();
1123 for (buffer_id, diff) in self.diffs.iter() {
1124 if let Some((buffer_repo, repo_path)) =
1125 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1126 && buffer_repo == repo
1127 {
1128 diff.update(cx, |diff, cx| {
1129 if let Some(conflict_set) = &diff.conflict_set {
1130 let conflict_status_changed =
1131 conflict_set.update(cx, |conflict_set, cx| {
1132 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1133 conflict_set.set_has_conflict(has_conflict, cx)
1134 })?;
1135 if conflict_status_changed {
1136 let buffer_store = self.buffer_store.read(cx);
1137 if let Some(buffer) = buffer_store.get(*buffer_id) {
1138 let _ = diff
1139 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1140 }
1141 }
1142 }
1143 anyhow::Ok(())
1144 })
1145 .ok();
1146 }
1147 }
1148 cx.emit(GitStoreEvent::RepositoryUpdated(
1149 id,
1150 event.clone(),
1151 self.active_repo_id == Some(id),
1152 ))
1153 }
1154
1155 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1156 cx.emit(GitStoreEvent::JobsUpdated)
1157 }
1158
1159 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1160 fn update_repositories_from_worktree(
1161 &mut self,
1162 project_environment: Entity<ProjectEnvironment>,
1163 next_repository_id: Arc<AtomicU64>,
1164 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1165 updated_git_repositories: UpdatedGitRepositoriesSet,
1166 fs: Arc<dyn Fs>,
1167 cx: &mut Context<Self>,
1168 ) {
1169 let mut removed_ids = Vec::new();
1170 for update in updated_git_repositories.iter() {
1171 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1172 let existing_work_directory_abs_path =
1173 repo.read(cx).work_directory_abs_path.clone();
1174 Some(&existing_work_directory_abs_path)
1175 == update.old_work_directory_abs_path.as_ref()
1176 || Some(&existing_work_directory_abs_path)
1177 == update.new_work_directory_abs_path.as_ref()
1178 }) {
1179 if let Some(new_work_directory_abs_path) =
1180 update.new_work_directory_abs_path.clone()
1181 {
1182 existing.update(cx, |existing, cx| {
1183 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1184 existing.schedule_scan(updates_tx.clone(), cx);
1185 });
1186 } else {
1187 removed_ids.push(*id);
1188 }
1189 } else if let UpdatedGitRepository {
1190 new_work_directory_abs_path: Some(work_directory_abs_path),
1191 dot_git_abs_path: Some(dot_git_abs_path),
1192 repository_dir_abs_path: Some(repository_dir_abs_path),
1193 common_dir_abs_path: Some(common_dir_abs_path),
1194 ..
1195 } = update
1196 {
1197 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1198 let git_store = cx.weak_entity();
1199 let repo = cx.new(|cx| {
1200 let mut repo = Repository::local(
1201 id,
1202 work_directory_abs_path.clone(),
1203 dot_git_abs_path.clone(),
1204 repository_dir_abs_path.clone(),
1205 common_dir_abs_path.clone(),
1206 project_environment.downgrade(),
1207 fs.clone(),
1208 git_store,
1209 cx,
1210 );
1211 repo.schedule_scan(updates_tx.clone(), cx);
1212 repo
1213 });
1214 self._subscriptions
1215 .push(cx.subscribe(&repo, Self::on_repository_event));
1216 self._subscriptions
1217 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1218 self.repositories.insert(id, repo);
1219 cx.emit(GitStoreEvent::RepositoryAdded(id));
1220 self.active_repo_id.get_or_insert_with(|| {
1221 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1222 id
1223 });
1224 }
1225 }
1226
1227 for id in removed_ids {
1228 if self.active_repo_id == Some(id) {
1229 self.active_repo_id = None;
1230 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1231 }
1232 self.repositories.remove(&id);
1233 if let Some(updates_tx) = updates_tx.as_ref() {
1234 updates_tx
1235 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1236 .ok();
1237 }
1238 }
1239 }
1240
1241 fn on_buffer_store_event(
1242 &mut self,
1243 _: Entity<BufferStore>,
1244 event: &BufferStoreEvent,
1245 cx: &mut Context<Self>,
1246 ) {
1247 match event {
1248 BufferStoreEvent::BufferAdded(buffer) => {
1249 cx.subscribe(buffer, |this, buffer, event, cx| {
1250 if let BufferEvent::LanguageChanged = event {
1251 let buffer_id = buffer.read(cx).remote_id();
1252 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1253 diff_state.update(cx, |diff_state, cx| {
1254 diff_state.buffer_language_changed(buffer, cx);
1255 });
1256 }
1257 }
1258 })
1259 .detach();
1260 }
1261 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1262 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1263 diffs.remove(buffer_id);
1264 }
1265 }
1266 BufferStoreEvent::BufferDropped(buffer_id) => {
1267 self.diffs.remove(buffer_id);
1268 for diffs in self.shared_diffs.values_mut() {
1269 diffs.remove(buffer_id);
1270 }
1271 }
1272
1273 _ => {}
1274 }
1275 }
1276
1277 pub fn recalculate_buffer_diffs(
1278 &mut self,
1279 buffers: Vec<Entity<Buffer>>,
1280 cx: &mut Context<Self>,
1281 ) -> impl Future<Output = ()> + use<> {
1282 let mut futures = Vec::new();
1283 for buffer in buffers {
1284 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1285 let buffer = buffer.read(cx).text_snapshot();
1286 diff_state.update(cx, |diff_state, cx| {
1287 diff_state.recalculate_diffs(buffer.clone(), cx);
1288 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1289 });
1290 futures.push(diff_state.update(cx, |diff_state, cx| {
1291 diff_state
1292 .reparse_conflict_markers(buffer, cx)
1293 .map(|_| {})
1294 .boxed()
1295 }));
1296 }
1297 }
1298 async move {
1299 futures::future::join_all(futures).await;
1300 }
1301 }
1302
1303 fn on_buffer_diff_event(
1304 &mut self,
1305 diff: Entity<buffer_diff::BufferDiff>,
1306 event: &BufferDiffEvent,
1307 cx: &mut Context<Self>,
1308 ) {
1309 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1310 let buffer_id = diff.read(cx).buffer_id;
1311 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1312 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1313 diff_state.hunk_staging_operation_count += 1;
1314 diff_state.hunk_staging_operation_count
1315 });
1316 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1317 let recv = repo.update(cx, |repo, cx| {
1318 log::debug!("hunks changed for {}", path.as_unix_str());
1319 repo.spawn_set_index_text_job(
1320 path,
1321 new_index_text.as_ref().map(|rope| rope.to_string()),
1322 Some(hunk_staging_operation_count),
1323 cx,
1324 )
1325 });
1326 let diff = diff.downgrade();
1327 cx.spawn(async move |this, cx| {
1328 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1329 diff.update(cx, |diff, cx| {
1330 diff.clear_pending_hunks(cx);
1331 })
1332 .ok();
1333 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1334 .ok();
1335 }
1336 })
1337 .detach();
1338 }
1339 }
1340 }
1341 }
1342
1343 fn local_worktree_git_repos_changed(
1344 &mut self,
1345 worktree: Entity<Worktree>,
1346 changed_repos: &UpdatedGitRepositoriesSet,
1347 cx: &mut Context<Self>,
1348 ) {
1349 log::debug!("local worktree repos changed");
1350 debug_assert!(worktree.read(cx).is_local());
1351
1352 for repository in self.repositories.values() {
1353 repository.update(cx, |repository, cx| {
1354 let repo_abs_path = &repository.work_directory_abs_path;
1355 if changed_repos.iter().any(|update| {
1356 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1357 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1358 }) {
1359 repository.reload_buffer_diff_bases(cx);
1360 }
1361 });
1362 }
1363 }
1364
1365 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1366 &self.repositories
1367 }
1368
1369 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1370 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1371 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1372 Some(status.status)
1373 }
1374
1375 pub fn repository_and_path_for_buffer_id(
1376 &self,
1377 buffer_id: BufferId,
1378 cx: &App,
1379 ) -> Option<(Entity<Repository>, RepoPath)> {
1380 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1381 let project_path = buffer.read(cx).project_path(cx)?;
1382 self.repository_and_path_for_project_path(&project_path, cx)
1383 }
1384
1385 pub fn repository_and_path_for_project_path(
1386 &self,
1387 path: &ProjectPath,
1388 cx: &App,
1389 ) -> Option<(Entity<Repository>, RepoPath)> {
1390 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1391 self.repositories
1392 .values()
1393 .filter_map(|repo| {
1394 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1395 Some((repo.clone(), repo_path))
1396 })
1397 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1398 }
1399
1400 pub fn git_init(
1401 &self,
1402 path: Arc<Path>,
1403 fallback_branch_name: String,
1404 cx: &App,
1405 ) -> Task<Result<()>> {
1406 match &self.state {
1407 GitStoreState::Local { fs, .. } => {
1408 let fs = fs.clone();
1409 cx.background_executor()
1410 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1411 }
1412 GitStoreState::Remote {
1413 upstream_client,
1414 upstream_project_id: project_id,
1415 ..
1416 } => {
1417 let client = upstream_client.clone();
1418 let project_id = *project_id;
1419 cx.background_executor().spawn(async move {
1420 client
1421 .request(proto::GitInit {
1422 project_id: project_id,
1423 abs_path: path.to_string_lossy().into_owned(),
1424 fallback_branch_name,
1425 })
1426 .await?;
1427 Ok(())
1428 })
1429 }
1430 }
1431 }
1432
1433 pub fn git_clone(
1434 &self,
1435 repo: String,
1436 path: impl Into<Arc<std::path::Path>>,
1437 cx: &App,
1438 ) -> Task<Result<()>> {
1439 let path = path.into();
1440 match &self.state {
1441 GitStoreState::Local { fs, .. } => {
1442 let fs = fs.clone();
1443 cx.background_executor()
1444 .spawn(async move { fs.git_clone(&repo, &path).await })
1445 }
1446 GitStoreState::Remote {
1447 upstream_client,
1448 upstream_project_id,
1449 ..
1450 } => {
1451 if upstream_client.is_via_collab() {
1452 return Task::ready(Err(anyhow!(
1453 "Git Clone isn't supported for project guests"
1454 )));
1455 }
1456 let request = upstream_client.request(proto::GitClone {
1457 project_id: *upstream_project_id,
1458 abs_path: path.to_string_lossy().into_owned(),
1459 remote_repo: repo,
1460 });
1461
1462 cx.background_spawn(async move {
1463 let result = request.await?;
1464
1465 match result.success {
1466 true => Ok(()),
1467 false => Err(anyhow!("Git Clone failed")),
1468 }
1469 })
1470 }
1471 }
1472 }
1473
1474 async fn handle_update_repository(
1475 this: Entity<Self>,
1476 envelope: TypedEnvelope<proto::UpdateRepository>,
1477 mut cx: AsyncApp,
1478 ) -> Result<()> {
1479 this.update(&mut cx, |this, cx| {
1480 let path_style = this.worktree_store.read(cx).path_style();
1481 let mut update = envelope.payload;
1482
1483 let id = RepositoryId::from_proto(update.id);
1484 let client = this.upstream_client().context("no upstream client")?;
1485
1486 let mut is_new = false;
1487 let repo = this.repositories.entry(id).or_insert_with(|| {
1488 is_new = true;
1489 let git_store = cx.weak_entity();
1490 cx.new(|cx| {
1491 Repository::remote(
1492 id,
1493 Path::new(&update.abs_path).into(),
1494 path_style,
1495 ProjectId(update.project_id),
1496 client,
1497 git_store,
1498 cx,
1499 )
1500 })
1501 });
1502 if is_new {
1503 this._subscriptions
1504 .push(cx.subscribe(repo, Self::on_repository_event))
1505 }
1506
1507 repo.update(cx, {
1508 let update = update.clone();
1509 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1510 })?;
1511
1512 this.active_repo_id.get_or_insert_with(|| {
1513 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1514 id
1515 });
1516
1517 if let Some((client, project_id)) = this.downstream_client() {
1518 update.project_id = project_id.to_proto();
1519 client.send(update).log_err();
1520 }
1521 Ok(())
1522 })?
1523 }
1524
1525 async fn handle_remove_repository(
1526 this: Entity<Self>,
1527 envelope: TypedEnvelope<proto::RemoveRepository>,
1528 mut cx: AsyncApp,
1529 ) -> Result<()> {
1530 this.update(&mut cx, |this, cx| {
1531 let mut update = envelope.payload;
1532 let id = RepositoryId::from_proto(update.id);
1533 this.repositories.remove(&id);
1534 if let Some((client, project_id)) = this.downstream_client() {
1535 update.project_id = project_id.to_proto();
1536 client.send(update).log_err();
1537 }
1538 if this.active_repo_id == Some(id) {
1539 this.active_repo_id = None;
1540 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1541 }
1542 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1543 })
1544 }
1545
1546 async fn handle_git_init(
1547 this: Entity<Self>,
1548 envelope: TypedEnvelope<proto::GitInit>,
1549 cx: AsyncApp,
1550 ) -> Result<proto::Ack> {
1551 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1552 let name = envelope.payload.fallback_branch_name;
1553 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1554 .await?;
1555
1556 Ok(proto::Ack {})
1557 }
1558
1559 async fn handle_git_clone(
1560 this: Entity<Self>,
1561 envelope: TypedEnvelope<proto::GitClone>,
1562 cx: AsyncApp,
1563 ) -> Result<proto::GitCloneResponse> {
1564 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1565 let repo_name = envelope.payload.remote_repo;
1566 let result = cx
1567 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1568 .await;
1569
1570 Ok(proto::GitCloneResponse {
1571 success: result.is_ok(),
1572 })
1573 }
1574
1575 async fn handle_fetch(
1576 this: Entity<Self>,
1577 envelope: TypedEnvelope<proto::Fetch>,
1578 mut cx: AsyncApp,
1579 ) -> Result<proto::RemoteMessageResponse> {
1580 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1581 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1582 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1583 let askpass_id = envelope.payload.askpass_id;
1584
1585 let askpass = make_remote_delegate(
1586 this,
1587 envelope.payload.project_id,
1588 repository_id,
1589 askpass_id,
1590 &mut cx,
1591 );
1592
1593 let remote_output = repository_handle
1594 .update(&mut cx, |repository_handle, cx| {
1595 repository_handle.fetch(fetch_options, askpass, cx)
1596 })?
1597 .await??;
1598
1599 Ok(proto::RemoteMessageResponse {
1600 stdout: remote_output.stdout,
1601 stderr: remote_output.stderr,
1602 })
1603 }
1604
1605 async fn handle_push(
1606 this: Entity<Self>,
1607 envelope: TypedEnvelope<proto::Push>,
1608 mut cx: AsyncApp,
1609 ) -> Result<proto::RemoteMessageResponse> {
1610 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1611 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1612
1613 let askpass_id = envelope.payload.askpass_id;
1614 let askpass = make_remote_delegate(
1615 this,
1616 envelope.payload.project_id,
1617 repository_id,
1618 askpass_id,
1619 &mut cx,
1620 );
1621
1622 let options = envelope
1623 .payload
1624 .options
1625 .as_ref()
1626 .map(|_| match envelope.payload.options() {
1627 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1628 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1629 });
1630
1631 let branch_name = envelope.payload.branch_name.into();
1632 let remote_name = envelope.payload.remote_name.into();
1633
1634 let remote_output = repository_handle
1635 .update(&mut cx, |repository_handle, cx| {
1636 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1637 })?
1638 .await??;
1639 Ok(proto::RemoteMessageResponse {
1640 stdout: remote_output.stdout,
1641 stderr: remote_output.stderr,
1642 })
1643 }
1644
1645 async fn handle_pull(
1646 this: Entity<Self>,
1647 envelope: TypedEnvelope<proto::Pull>,
1648 mut cx: AsyncApp,
1649 ) -> Result<proto::RemoteMessageResponse> {
1650 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1651 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1652 let askpass_id = envelope.payload.askpass_id;
1653 let askpass = make_remote_delegate(
1654 this,
1655 envelope.payload.project_id,
1656 repository_id,
1657 askpass_id,
1658 &mut cx,
1659 );
1660
1661 let branch_name = envelope.payload.branch_name.into();
1662 let remote_name = envelope.payload.remote_name.into();
1663
1664 let remote_message = repository_handle
1665 .update(&mut cx, |repository_handle, cx| {
1666 repository_handle.pull(branch_name, remote_name, askpass, cx)
1667 })?
1668 .await??;
1669
1670 Ok(proto::RemoteMessageResponse {
1671 stdout: remote_message.stdout,
1672 stderr: remote_message.stderr,
1673 })
1674 }
1675
1676 async fn handle_stage(
1677 this: Entity<Self>,
1678 envelope: TypedEnvelope<proto::Stage>,
1679 mut cx: AsyncApp,
1680 ) -> Result<proto::Ack> {
1681 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1682 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1683
1684 let entries = envelope
1685 .payload
1686 .paths
1687 .into_iter()
1688 .map(|path| RepoPath::new(&path))
1689 .collect::<Result<Vec<_>>>()?;
1690
1691 repository_handle
1692 .update(&mut cx, |repository_handle, cx| {
1693 repository_handle.stage_entries(entries, cx)
1694 })?
1695 .await?;
1696 Ok(proto::Ack {})
1697 }
1698
1699 async fn handle_unstage(
1700 this: Entity<Self>,
1701 envelope: TypedEnvelope<proto::Unstage>,
1702 mut cx: AsyncApp,
1703 ) -> Result<proto::Ack> {
1704 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1705 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1706
1707 let entries = envelope
1708 .payload
1709 .paths
1710 .into_iter()
1711 .map(|path| RepoPath::new(&path))
1712 .collect::<Result<Vec<_>>>()?;
1713
1714 repository_handle
1715 .update(&mut cx, |repository_handle, cx| {
1716 repository_handle.unstage_entries(entries, cx)
1717 })?
1718 .await?;
1719
1720 Ok(proto::Ack {})
1721 }
1722
1723 async fn handle_stash(
1724 this: Entity<Self>,
1725 envelope: TypedEnvelope<proto::Stash>,
1726 mut cx: AsyncApp,
1727 ) -> Result<proto::Ack> {
1728 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1729 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1730
1731 let entries = envelope
1732 .payload
1733 .paths
1734 .into_iter()
1735 .map(|path| RepoPath::new(&path))
1736 .collect::<Result<Vec<_>>>()?;
1737
1738 repository_handle
1739 .update(&mut cx, |repository_handle, cx| {
1740 repository_handle.stash_entries(entries, cx)
1741 })?
1742 .await?;
1743
1744 Ok(proto::Ack {})
1745 }
1746
1747 async fn handle_stash_pop(
1748 this: Entity<Self>,
1749 envelope: TypedEnvelope<proto::StashPop>,
1750 mut cx: AsyncApp,
1751 ) -> Result<proto::Ack> {
1752 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1753 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1754 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1755
1756 repository_handle
1757 .update(&mut cx, |repository_handle, cx| {
1758 repository_handle.stash_pop(stash_index, cx)
1759 })?
1760 .await?;
1761
1762 Ok(proto::Ack {})
1763 }
1764
1765 async fn handle_stash_apply(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::StashApply>,
1768 mut cx: AsyncApp,
1769 ) -> Result<proto::Ack> {
1770 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1771 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1772 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1773
1774 repository_handle
1775 .update(&mut cx, |repository_handle, cx| {
1776 repository_handle.stash_apply(stash_index, cx)
1777 })?
1778 .await?;
1779
1780 Ok(proto::Ack {})
1781 }
1782
1783 async fn handle_stash_drop(
1784 this: Entity<Self>,
1785 envelope: TypedEnvelope<proto::StashDrop>,
1786 mut cx: AsyncApp,
1787 ) -> Result<proto::Ack> {
1788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1790 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1791
1792 repository_handle
1793 .update(&mut cx, |repository_handle, cx| {
1794 repository_handle.stash_drop(stash_index, cx)
1795 })?
1796 .await??;
1797
1798 Ok(proto::Ack {})
1799 }
1800
1801 async fn handle_set_index_text(
1802 this: Entity<Self>,
1803 envelope: TypedEnvelope<proto::SetIndexText>,
1804 mut cx: AsyncApp,
1805 ) -> Result<proto::Ack> {
1806 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1807 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1808 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1809
1810 repository_handle
1811 .update(&mut cx, |repository_handle, cx| {
1812 repository_handle.spawn_set_index_text_job(
1813 repo_path,
1814 envelope.payload.text,
1815 None,
1816 cx,
1817 )
1818 })?
1819 .await??;
1820 Ok(proto::Ack {})
1821 }
1822
1823 async fn handle_commit(
1824 this: Entity<Self>,
1825 envelope: TypedEnvelope<proto::Commit>,
1826 mut cx: AsyncApp,
1827 ) -> Result<proto::Ack> {
1828 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1829 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1830
1831 let message = SharedString::from(envelope.payload.message);
1832 let name = envelope.payload.name.map(SharedString::from);
1833 let email = envelope.payload.email.map(SharedString::from);
1834 let options = envelope.payload.options.unwrap_or_default();
1835
1836 repository_handle
1837 .update(&mut cx, |repository_handle, cx| {
1838 repository_handle.commit(
1839 message,
1840 name.zip(email),
1841 CommitOptions {
1842 amend: options.amend,
1843 signoff: options.signoff,
1844 },
1845 cx,
1846 )
1847 })?
1848 .await??;
1849 Ok(proto::Ack {})
1850 }
1851
1852 async fn handle_get_remotes(
1853 this: Entity<Self>,
1854 envelope: TypedEnvelope<proto::GetRemotes>,
1855 mut cx: AsyncApp,
1856 ) -> Result<proto::GetRemotesResponse> {
1857 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1858 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1859
1860 let branch_name = envelope.payload.branch_name;
1861
1862 let remotes = repository_handle
1863 .update(&mut cx, |repository_handle, _| {
1864 repository_handle.get_remotes(branch_name)
1865 })?
1866 .await??;
1867
1868 Ok(proto::GetRemotesResponse {
1869 remotes: remotes
1870 .into_iter()
1871 .map(|remotes| proto::get_remotes_response::Remote {
1872 name: remotes.name.to_string(),
1873 })
1874 .collect::<Vec<_>>(),
1875 })
1876 }
1877
1878 async fn handle_get_branches(
1879 this: Entity<Self>,
1880 envelope: TypedEnvelope<proto::GitGetBranches>,
1881 mut cx: AsyncApp,
1882 ) -> Result<proto::GitBranchesResponse> {
1883 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1884 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1885
1886 let branches = repository_handle
1887 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1888 .await??;
1889
1890 Ok(proto::GitBranchesResponse {
1891 branches: branches
1892 .into_iter()
1893 .map(|branch| branch_to_proto(&branch))
1894 .collect::<Vec<_>>(),
1895 })
1896 }
1897 async fn handle_get_default_branch(
1898 this: Entity<Self>,
1899 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1900 mut cx: AsyncApp,
1901 ) -> Result<proto::GetDefaultBranchResponse> {
1902 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1903 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1904
1905 let branch = repository_handle
1906 .update(&mut cx, |repository_handle, _| {
1907 repository_handle.default_branch()
1908 })?
1909 .await??
1910 .map(Into::into);
1911
1912 Ok(proto::GetDefaultBranchResponse { branch })
1913 }
1914 async fn handle_create_branch(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::GitCreateBranch>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::Ack> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921 let branch_name = envelope.payload.branch_name;
1922
1923 repository_handle
1924 .update(&mut cx, |repository_handle, _| {
1925 repository_handle.create_branch(branch_name)
1926 })?
1927 .await??;
1928
1929 Ok(proto::Ack {})
1930 }
1931
1932 async fn handle_change_branch(
1933 this: Entity<Self>,
1934 envelope: TypedEnvelope<proto::GitChangeBranch>,
1935 mut cx: AsyncApp,
1936 ) -> Result<proto::Ack> {
1937 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1938 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1939 let branch_name = envelope.payload.branch_name;
1940
1941 repository_handle
1942 .update(&mut cx, |repository_handle, _| {
1943 repository_handle.change_branch(branch_name)
1944 })?
1945 .await??;
1946
1947 Ok(proto::Ack {})
1948 }
1949
1950 async fn handle_rename_branch(
1951 this: Entity<Self>,
1952 envelope: TypedEnvelope<proto::GitRenameBranch>,
1953 mut cx: AsyncApp,
1954 ) -> Result<proto::Ack> {
1955 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1956 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1957 let branch = envelope.payload.branch;
1958 let new_name = envelope.payload.new_name;
1959
1960 repository_handle
1961 .update(&mut cx, |repository_handle, _| {
1962 repository_handle.rename_branch(branch, new_name)
1963 })?
1964 .await??;
1965
1966 Ok(proto::Ack {})
1967 }
1968
1969 async fn handle_show(
1970 this: Entity<Self>,
1971 envelope: TypedEnvelope<proto::GitShow>,
1972 mut cx: AsyncApp,
1973 ) -> Result<proto::GitCommitDetails> {
1974 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1975 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1976
1977 let commit = repository_handle
1978 .update(&mut cx, |repository_handle, _| {
1979 repository_handle.show(envelope.payload.commit)
1980 })?
1981 .await??;
1982 Ok(proto::GitCommitDetails {
1983 sha: commit.sha.into(),
1984 message: commit.message.into(),
1985 commit_timestamp: commit.commit_timestamp,
1986 author_email: commit.author_email.into(),
1987 author_name: commit.author_name.into(),
1988 })
1989 }
1990
1991 async fn handle_load_commit_diff(
1992 this: Entity<Self>,
1993 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1994 mut cx: AsyncApp,
1995 ) -> Result<proto::LoadCommitDiffResponse> {
1996 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1997 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1998
1999 let commit_diff = repository_handle
2000 .update(&mut cx, |repository_handle, _| {
2001 repository_handle.load_commit_diff(envelope.payload.commit)
2002 })?
2003 .await??;
2004 Ok(proto::LoadCommitDiffResponse {
2005 files: commit_diff
2006 .files
2007 .into_iter()
2008 .map(|file| proto::CommitFile {
2009 path: file.path.to_proto(),
2010 old_text: file.old_text,
2011 new_text: file.new_text,
2012 })
2013 .collect(),
2014 })
2015 }
2016
2017 async fn handle_reset(
2018 this: Entity<Self>,
2019 envelope: TypedEnvelope<proto::GitReset>,
2020 mut cx: AsyncApp,
2021 ) -> Result<proto::Ack> {
2022 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2023 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2024
2025 let mode = match envelope.payload.mode() {
2026 git_reset::ResetMode::Soft => ResetMode::Soft,
2027 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2028 };
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, cx| {
2032 repository_handle.reset(envelope.payload.commit, mode, cx)
2033 })?
2034 .await??;
2035 Ok(proto::Ack {})
2036 }
2037
2038 async fn handle_checkout_files(
2039 this: Entity<Self>,
2040 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2041 mut cx: AsyncApp,
2042 ) -> Result<proto::Ack> {
2043 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2044 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2045 let paths = envelope
2046 .payload
2047 .paths
2048 .iter()
2049 .map(|s| RepoPath::from_proto(s))
2050 .collect::<Result<Vec<_>>>()?;
2051
2052 repository_handle
2053 .update(&mut cx, |repository_handle, cx| {
2054 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2055 })?
2056 .await??;
2057 Ok(proto::Ack {})
2058 }
2059
2060 async fn handle_open_commit_message_buffer(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2063 mut cx: AsyncApp,
2064 ) -> Result<proto::OpenBufferResponse> {
2065 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2066 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2067 let buffer = repository
2068 .update(&mut cx, |repository, cx| {
2069 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2070 })?
2071 .await?;
2072
2073 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2074 this.update(&mut cx, |this, cx| {
2075 this.buffer_store.update(cx, |buffer_store, cx| {
2076 buffer_store
2077 .create_buffer_for_peer(
2078 &buffer,
2079 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2080 cx,
2081 )
2082 .detach_and_log_err(cx);
2083 })
2084 })?;
2085
2086 Ok(proto::OpenBufferResponse {
2087 buffer_id: buffer_id.to_proto(),
2088 })
2089 }
2090
2091 async fn handle_askpass(
2092 this: Entity<Self>,
2093 envelope: TypedEnvelope<proto::AskPassRequest>,
2094 mut cx: AsyncApp,
2095 ) -> Result<proto::AskPassResponse> {
2096 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2097 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2098
2099 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2100 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2101 debug_panic!("no askpass found");
2102 anyhow::bail!("no askpass found");
2103 };
2104
2105 let response = askpass.ask_password(envelope.payload.prompt).await?;
2106
2107 delegates
2108 .lock()
2109 .insert(envelope.payload.askpass_id, askpass);
2110
2111 response.try_into()
2112 }
2113
2114 async fn handle_check_for_pushed_commits(
2115 this: Entity<Self>,
2116 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2117 mut cx: AsyncApp,
2118 ) -> Result<proto::CheckForPushedCommitsResponse> {
2119 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2120 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2121
2122 let branches = repository_handle
2123 .update(&mut cx, |repository_handle, _| {
2124 repository_handle.check_for_pushed_commits()
2125 })?
2126 .await??;
2127 Ok(proto::CheckForPushedCommitsResponse {
2128 pushed_to: branches
2129 .into_iter()
2130 .map(|commit| commit.to_string())
2131 .collect(),
2132 })
2133 }
2134
2135 async fn handle_git_diff(
2136 this: Entity<Self>,
2137 envelope: TypedEnvelope<proto::GitDiff>,
2138 mut cx: AsyncApp,
2139 ) -> Result<proto::GitDiffResponse> {
2140 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2141 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2142 let diff_type = match envelope.payload.diff_type() {
2143 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2144 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2145 };
2146
2147 let mut diff = repository_handle
2148 .update(&mut cx, |repository_handle, cx| {
2149 repository_handle.diff(diff_type, cx)
2150 })?
2151 .await??;
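// Cap very large diffs so the RPC response stays a manageable size.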
2152 const ONE_MB: usize = 1_000_000;
2153 if diff.len() > ONE_MB {
2154 diff = diff.chars().take(ONE_MB).collect()
2155 }
2156
2157 Ok(proto::GitDiffResponse { diff })
2158 }
2159
2160 async fn handle_open_unstaged_diff(
2161 this: Entity<Self>,
2162 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2163 mut cx: AsyncApp,
2164 ) -> Result<proto::OpenUnstagedDiffResponse> {
2165 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2166 let diff = this
2167 .update(&mut cx, |this, cx| {
2168 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2169 Some(this.open_unstaged_diff(buffer, cx))
2170 })?
2171 .context("missing buffer")?
2172 .await?;
2173 this.update(&mut cx, |this, _| {
2174 let shared_diffs = this
2175 .shared_diffs
2176 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2177 .or_default();
2178 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2179 })?;
2180 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2181 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2182 }
2183
2184 async fn handle_open_uncommitted_diff(
2185 this: Entity<Self>,
2186 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2187 mut cx: AsyncApp,
2188 ) -> Result<proto::OpenUncommittedDiffResponse> {
2189 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2190 let diff = this
2191 .update(&mut cx, |this, cx| {
2192 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2193 Some(this.open_uncommitted_diff(buffer, cx))
2194 })?
2195 .context("missing buffer")?
2196 .await?;
2197 this.update(&mut cx, |this, _| {
2198 let shared_diffs = this
2199 .shared_diffs
2200 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2201 .or_default();
2202 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2203 })?;
2204 diff.read_with(&cx, |diff, cx| {
2205 use proto::open_uncommitted_diff_response::Mode;
2206
2207 let unstaged_diff = diff.secondary_diff();
2208 let index_snapshot = unstaged_diff.and_then(|diff| {
2209 let diff = diff.read(cx);
2210 diff.base_text_exists().then(|| diff.base_text())
2211 });
2212
2213 let mode;
2214 let staged_text;
2215 let committed_text;
2216 if diff.base_text_exists() {
2217 let committed_snapshot = diff.base_text();
2218 committed_text = Some(committed_snapshot.text());
2219 if let Some(index_text) = index_snapshot {
2220 if index_text.remote_id() == committed_snapshot.remote_id() {
2221 mode = Mode::IndexMatchesHead;
2222 staged_text = None;
2223 } else {
2224 mode = Mode::IndexAndHead;
2225 staged_text = Some(index_text.text());
2226 }
2227 } else {
2228 mode = Mode::IndexAndHead;
2229 staged_text = None;
2230 }
2231 } else {
2232 mode = Mode::IndexAndHead;
2233 committed_text = None;
2234 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2235 }
2236
2237 proto::OpenUncommittedDiffResponse {
2238 committed_text,
2239 staged_text,
2240 mode: mode.into(),
2241 }
2242 })
2243 }
2244
2245 async fn handle_update_diff_bases(
2246 this: Entity<Self>,
2247 request: TypedEnvelope<proto::UpdateDiffBases>,
2248 mut cx: AsyncApp,
2249 ) -> Result<()> {
2250 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2251 this.update(&mut cx, |this, cx| {
2252 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2253 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2254 {
2255 let buffer = buffer.read(cx).text_snapshot();
2256 diff_state.update(cx, |diff_state, cx| {
2257 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2258 })
2259 }
2260 })
2261 }
2262
2263 async fn handle_blame_buffer(
2264 this: Entity<Self>,
2265 envelope: TypedEnvelope<proto::BlameBuffer>,
2266 mut cx: AsyncApp,
2267 ) -> Result<proto::BlameBufferResponse> {
2268 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2269 let version = deserialize_version(&envelope.payload.version);
2270 let buffer = this.read_with(&cx, |this, cx| {
2271 this.buffer_store.read(cx).get_existing(buffer_id)
2272 })??;
2273 buffer
2274 .update(&mut cx, |buffer, _| {
2275 buffer.wait_for_version(version.clone())
2276 })?
2277 .await?;
2278 let blame = this
2279 .update(&mut cx, |this, cx| {
2280 this.blame_buffer(&buffer, Some(version), cx)
2281 })?
2282 .await?;
2283 Ok(serialize_blame_buffer_response(blame))
2284 }
2285
2286 async fn handle_get_permalink_to_line(
2287 this: Entity<Self>,
2288 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2289 mut cx: AsyncApp,
2290 ) -> Result<proto::GetPermalinkToLineResponse> {
2291 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2292 // let version = deserialize_version(&envelope.payload.version);
2293 let selection = {
2294 let proto_selection = envelope
2295 .payload
2296 .selection
2297 .context("no selection to get permalink for defined")?;
2298 proto_selection.start as u32..proto_selection.end as u32
2299 };
2300 let buffer = this.read_with(&cx, |this, cx| {
2301 this.buffer_store.read(cx).get_existing(buffer_id)
2302 })??;
2303 let permalink = this
2304 .update(&mut cx, |this, cx| {
2305 this.get_permalink_to_line(&buffer, selection, cx)
2306 })?
2307 .await?;
2308 Ok(proto::GetPermalinkToLineResponse {
2309 permalink: permalink.to_string(),
2310 })
2311 }
2312
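/// Resolves the repository targeted by an incoming RPC request, failing if it is no longer tracked by this store.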
2313 fn repository_for_request(
2314 this: &Entity<Self>,
2315 id: RepositoryId,
2316 cx: &mut AsyncApp,
2317 ) -> Result<Entity<Repository>> {
2318 this.read_with(cx, |this, _| {
2319 this.repositories
2320 .get(&id)
2321 .context("missing repository handle")
2322 .cloned()
2323 })?
2324 }
2325
2326 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2327 self.repositories
2328 .iter()
2329 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2330 .collect()
2331 }
2332
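/// Groups updated worktree entries by the repository that contains them, converting each
/// absolute path into a repository-relative path. When repositories are nested, every path
/// is attributed to its innermost containing repository.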
2333 fn process_updated_entries(
2334 &self,
2335 worktree: &Entity<Worktree>,
2336 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2337 cx: &mut App,
2338 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2339 let path_style = worktree.read(cx).path_style();
2340 let mut repo_paths = self
2341 .repositories
2342 .values()
2343 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2344 .collect::<Vec<_>>();
2345 let mut entries: Vec<_> = updated_entries
2346 .iter()
2347 .map(|(path, _, _)| path.clone())
2348 .collect();
2349 entries.sort();
2350 let worktree = worktree.read(cx);
2351
2352 let entries = entries
2353 .into_iter()
2354 .map(|path| worktree.absolutize(&path))
2355 .collect::<Arc<[_]>>();
2356
2357 let executor = cx.background_executor().clone();
2358 cx.background_executor().spawn(async move {
2359 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2360 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2361 let mut tasks = FuturesOrdered::new();
2362 for (repo_path, repo) in repo_paths.into_iter().rev() {
2363 let entries = entries.clone();
2364 let task = executor.spawn(async move {
2365 // Find all repository paths that belong to this repo
2366 let mut ix = entries.partition_point(|path| path < &*repo_path);
2367 if ix == entries.len() {
2368 return None;
2369 };
2370
2371 let mut paths = Vec::new();
2372 // All paths prefixed by a given repo will constitute a contiguous range.
2373 while let Some(path) = entries.get(ix)
2374 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2375 &repo_path, path, path_style,
2376 )
2377 {
2378 paths.push((repo_path, ix));
2379 ix += 1;
2380 }
2381 if paths.is_empty() {
2382 None
2383 } else {
2384 Some((repo, paths))
2385 }
2386 });
2387 tasks.push_back(task);
2388 }
2389
2390 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2391 let mut path_was_used = vec![false; entries.len()];
2392 let tasks = tasks.collect::<Vec<_>>().await;
2393 // The tasks were queued with innermost repositories first (repo paths were visited in reverse sorted order), so more-specific paths are claimed first.
2394 // We always want to assign a path to its innermost repository.
2395 for t in tasks {
2396 let Some((repo, paths)) = t else {
2397 continue;
2398 };
2399 let entry = paths_by_git_repo.entry(repo).or_default();
2400 for (repo_path, ix) in paths {
2401 if path_was_used[ix] {
2402 continue;
2403 }
2404 path_was_used[ix] = true;
2405 entry.push(repo_path);
2406 }
2407 }
2408
2409 paths_by_git_repo
2410 })
2411 }
2412}
2413
2414impl BufferGitState {
2415 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2416 Self {
2417 unstaged_diff: Default::default(),
2418 uncommitted_diff: Default::default(),
2419 recalculate_diff_task: Default::default(),
2420 language: Default::default(),
2421 language_registry: Default::default(),
2422 recalculating_tx: postage::watch::channel_with(false).0,
2423 hunk_staging_operation_count: 0,
2424 hunk_staging_operation_count_as_of_write: 0,
2425 head_text: Default::default(),
2426 index_text: Default::default(),
2427 head_changed: Default::default(),
2428 index_changed: Default::default(),
2429 language_changed: Default::default(),
2430 conflict_updated_futures: Default::default(),
2431 conflict_set: Default::default(),
2432 reparse_conflict_markers_task: Default::default(),
2433 }
2434 }
2435
2436 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2437 self.language = buffer.read(cx).language().cloned();
2438 self.language_changed = true;
2439 self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2440 }
2441
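/// Re-scans the buffer for conflict markers and refreshes the associated conflict set.
/// The returned receiver completes once the new snapshot has been applied; if there is no
/// live conflict set (or no existing conflicts), the sender is dropped immediately.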
2442 fn reparse_conflict_markers(
2443 &mut self,
2444 buffer: text::BufferSnapshot,
2445 cx: &mut Context<Self>,
2446 ) -> oneshot::Receiver<()> {
2447 let (tx, rx) = oneshot::channel();
2448
2449 let Some(conflict_set) = self
2450 .conflict_set
2451 .as_ref()
2452 .and_then(|conflict_set| conflict_set.upgrade())
2453 else {
2454 return rx;
2455 };
2456
2457 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2458 if conflict_set.has_conflict {
2459 Some(conflict_set.snapshot())
2460 } else {
2461 None
2462 }
2463 });
2464
2465 if let Some(old_snapshot) = old_snapshot {
2466 self.conflict_updated_futures.push(tx);
2467 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2468 let (snapshot, changed_range) = cx
2469 .background_spawn(async move {
2470 let new_snapshot = ConflictSet::parse(&buffer);
2471 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2472 (new_snapshot, changed_range)
2473 })
2474 .await;
2475 this.update(cx, |this, cx| {
2476 if let Some(conflict_set) = &this.conflict_set {
2477 conflict_set
2478 .update(cx, |conflict_set, cx| {
2479 conflict_set.set_snapshot(snapshot, changed_range, cx);
2480 })
2481 .ok();
2482 }
2483 let futures = std::mem::take(&mut this.conflict_updated_futures);
2484 for tx in futures {
2485 tx.send(()).ok();
2486 }
2487 })
2488 }))
2489 }
2490
2491 rx
2492 }
2493
2494 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2495 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2496 }
2497
2498 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2499 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2500 }
2501
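/// Translates an incoming `UpdateDiffBases` message into the corresponding
/// `DiffBasesChange` and recalculates this buffer's diffs.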
2502 fn handle_base_texts_updated(
2503 &mut self,
2504 buffer: text::BufferSnapshot,
2505 message: proto::UpdateDiffBases,
2506 cx: &mut Context<Self>,
2507 ) {
2508 use proto::update_diff_bases::Mode;
2509
2510 let Some(mode) = Mode::from_i32(message.mode) else {
2511 return;
2512 };
2513
2514 let diff_bases_change = match mode {
2515 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2516 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2517 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2518 Mode::IndexAndHead => DiffBasesChange::SetEach {
2519 index: message.staged_text,
2520 head: message.committed_text,
2521 },
2522 };
2523
2524 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2525 }
2526
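/// If a diff recalculation is currently in flight, returns a future that resolves once it
/// finishes; otherwise returns `None`.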
2527 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2528 if *self.recalculating_tx.borrow() {
2529 let mut rx = self.recalculating_tx.subscribe();
2530 Some(async move {
2531 loop {
2532 let is_recalculating = rx.recv().await;
2533 if is_recalculating != Some(true) {
2534 break;
2535 }
2536 }
2537 })
2538 } else {
2539 None
2540 }
2541 }
2542
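/// Applies a change to the cached index and/or HEAD base texts (normalizing line endings)
/// and kicks off a diff recalculation against the given buffer snapshot.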
2543 fn diff_bases_changed(
2544 &mut self,
2545 buffer: text::BufferSnapshot,
2546 diff_bases_change: Option<DiffBasesChange>,
2547 cx: &mut Context<Self>,
2548 ) {
2549 match diff_bases_change {
2550 Some(DiffBasesChange::SetIndex(index)) => {
2551 self.index_text = index.map(|mut index| {
2552 text::LineEnding::normalize(&mut index);
2553 Arc::new(index)
2554 });
2555 self.index_changed = true;
2556 }
2557 Some(DiffBasesChange::SetHead(head)) => {
2558 self.head_text = head.map(|mut head| {
2559 text::LineEnding::normalize(&mut head);
2560 Arc::new(head)
2561 });
2562 self.head_changed = true;
2563 }
2564 Some(DiffBasesChange::SetBoth(text)) => {
2565 let text = text.map(|mut text| {
2566 text::LineEnding::normalize(&mut text);
2567 Arc::new(text)
2568 });
2569 self.head_text = text.clone();
2570 self.index_text = text;
2571 self.head_changed = true;
2572 self.index_changed = true;
2573 }
2574 Some(DiffBasesChange::SetEach { index, head }) => {
2575 self.index_text = index.map(|mut index| {
2576 text::LineEnding::normalize(&mut index);
2577 Arc::new(index)
2578 });
2579 self.index_changed = true;
2580 self.head_text = head.map(|mut head| {
2581 text::LineEnding::normalize(&mut head);
2582 Arc::new(head)
2583 });
2584 self.head_changed = true;
2585 }
2586 None => {}
2587 }
2588
2589 self.recalculate_diffs(buffer, cx)
2590 }
2591
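/// Recomputes the unstaged and uncommitted diffs against the cached index and HEAD texts on
/// a background task. The update is abandoned if new hunk staging operations arrive while it
/// runs, since a later recalculation will supersede it.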
2592 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2593 *self.recalculating_tx.borrow_mut() = true;
2594
2595 let language = self.language.clone();
2596 let language_registry = self.language_registry.clone();
2597 let unstaged_diff = self.unstaged_diff();
2598 let uncommitted_diff = self.uncommitted_diff();
2599 let head = self.head_text.clone();
2600 let index = self.index_text.clone();
2601 let index_changed = self.index_changed;
2602 let head_changed = self.head_changed;
2603 let language_changed = self.language_changed;
2604 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2605 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2606 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2607 (None, None) => true,
2608 _ => false,
2609 };
2610 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2611 log::debug!(
2612 "start recalculating diffs for buffer {}",
2613 buffer.remote_id()
2614 );
2615
2616 let mut new_unstaged_diff = None;
2617 if let Some(unstaged_diff) = &unstaged_diff {
2618 new_unstaged_diff = Some(
2619 BufferDiff::update_diff(
2620 unstaged_diff.clone(),
2621 buffer.clone(),
2622 index,
2623 index_changed,
2624 language_changed,
2625 language.clone(),
2626 language_registry.clone(),
2627 cx,
2628 )
2629 .await?,
2630 );
2631 }
2632
2633 let mut new_uncommitted_diff = None;
2634 if let Some(uncommitted_diff) = &uncommitted_diff {
2635 new_uncommitted_diff = if index_matches_head {
2636 new_unstaged_diff.clone()
2637 } else {
2638 Some(
2639 BufferDiff::update_diff(
2640 uncommitted_diff.clone(),
2641 buffer.clone(),
2642 head,
2643 head_changed,
2644 language_changed,
2645 language.clone(),
2646 language_registry.clone(),
2647 cx,
2648 )
2649 .await?,
2650 )
2651 }
2652 }
2653
2654 let cancel = this.update(cx, |this, _| {
2655 // This checks whether all pending stage/unstage operations
2656 // have quiesced (i.e. both the corresponding write and the
2657 // read of that write have completed). If not, then we cancel
2658 // this recalculation attempt to avoid invalidating pending
2659 // state too quickly; another recalculation will come along
2660 // later and clear the pending state once the state of the index has settled.
2661 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2662 *this.recalculating_tx.borrow_mut() = false;
2663 true
2664 } else {
2665 false
2666 }
2667 })?;
2668 if cancel {
2669 log::debug!(
2670 concat!(
2671 "aborting recalculating diffs for buffer {}",
2672 "due to subsequent hunk operations",
2673 ),
2674 buffer.remote_id()
2675 );
2676 return Ok(());
2677 }
2678
2679 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2680 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2681 {
2682 unstaged_diff.update(cx, |diff, cx| {
2683 if language_changed {
2684 diff.language_changed(cx);
2685 }
2686 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2687 })?
2688 } else {
2689 None
2690 };
2691
2692 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2693 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2694 {
2695 uncommitted_diff.update(cx, |diff, cx| {
2696 if language_changed {
2697 diff.language_changed(cx);
2698 }
2699 diff.set_snapshot_with_secondary(
2700 new_uncommitted_diff,
2701 &buffer,
2702 unstaged_changed_range,
2703 true,
2704 cx,
2705 );
2706 })?;
2707 }
2708
2709 log::debug!(
2710 "finished recalculating diffs for buffer {}",
2711 buffer.remote_id()
2712 );
2713
2714 if let Some(this) = this.upgrade() {
2715 this.update(cx, |this, _| {
2716 this.index_changed = false;
2717 this.head_changed = false;
2718 this.language_changed = false;
2719 *this.recalculating_tx.borrow_mut() = false;
2720 })?;
2721 }
2722
2723 Ok(())
2724 }));
2725 }
2726}
2727
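/// Builds an askpass delegate that forwards credential prompts to the downstream client over
/// RPC and relays the encrypted response back to the waiting git operation.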
2728fn make_remote_delegate(
2729 this: Entity<GitStore>,
2730 project_id: u64,
2731 repository_id: RepositoryId,
2732 askpass_id: u64,
2733 cx: &mut AsyncApp,
2734) -> AskPassDelegate {
2735 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2736 this.update(cx, |this, cx| {
2737 let Some((client, _)) = this.downstream_client() else {
2738 return;
2739 };
2740 let response = client.request(proto::AskPassRequest {
2741 project_id,
2742 repository_id: repository_id.to_proto(),
2743 askpass_id,
2744 prompt,
2745 });
2746 cx.spawn(async move |_, _| {
2747 let mut response = response.await?.response;
2748 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2749 .ok();
2750 response.zeroize();
2751 anyhow::Ok(())
2752 })
2753 .detach_and_log_err(cx);
2754 })
2755 .log_err();
2756 })
2757}
2758
2759impl RepositoryId {
2760 pub fn to_proto(self) -> u64 {
2761 self.0
2762 }
2763
2764 pub fn from_proto(id: u64) -> Self {
2765 RepositoryId(id)
2766 }
2767}
2768
2769impl RepositorySnapshot {
2770 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2771 Self {
2772 id,
2773 statuses_by_path: Default::default(),
2774 work_directory_abs_path,
2775 branch: None,
2776 head_commit: None,
2777 scan_id: 0,
2778 merge: Default::default(),
2779 remote_origin_url: None,
2780 remote_upstream_url: None,
2781 stash_entries: Default::default(),
2782 path_style,
2783 }
2784 }
2785
2786 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2787 proto::UpdateRepository {
2788 branch_summary: self.branch.as_ref().map(branch_to_proto),
2789 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2790 updated_statuses: self
2791 .statuses_by_path
2792 .iter()
2793 .map(|entry| entry.to_proto())
2794 .collect(),
2795 removed_statuses: Default::default(),
2796 current_merge_conflicts: self
2797 .merge
2798 .conflicted_paths
2799 .iter()
2800 .map(|repo_path| repo_path.to_proto())
2801 .collect(),
2802 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2803 project_id,
2804 id: self.id.to_proto(),
2805 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2806 entry_ids: vec![self.id.to_proto()],
2807 scan_id: self.scan_id,
2808 is_last_update: true,
2809 stash_entries: self
2810 .stash_entries
2811 .entries
2812 .iter()
2813 .map(stash_to_proto)
2814 .collect(),
2815 }
2816 }
2817
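/// Diffs this snapshot against `old`, walking both status lists in lockstep to compute the
/// incremental `UpdateRepository` message for downstream clients.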
2818 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2819 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2820 let mut removed_statuses: Vec<String> = Vec::new();
2821
2822 let mut new_statuses = self.statuses_by_path.iter().peekable();
2823 let mut old_statuses = old.statuses_by_path.iter().peekable();
2824
2825 let mut current_new_entry = new_statuses.next();
2826 let mut current_old_entry = old_statuses.next();
2827 loop {
2828 match (current_new_entry, current_old_entry) {
2829 (Some(new_entry), Some(old_entry)) => {
2830 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2831 Ordering::Less => {
2832 updated_statuses.push(new_entry.to_proto());
2833 current_new_entry = new_statuses.next();
2834 }
2835 Ordering::Equal => {
2836 if new_entry.status != old_entry.status {
2837 updated_statuses.push(new_entry.to_proto());
2838 }
2839 current_old_entry = old_statuses.next();
2840 current_new_entry = new_statuses.next();
2841 }
2842 Ordering::Greater => {
2843 removed_statuses.push(old_entry.repo_path.to_proto());
2844 current_old_entry = old_statuses.next();
2845 }
2846 }
2847 }
2848 (None, Some(old_entry)) => {
2849 removed_statuses.push(old_entry.repo_path.to_proto());
2850 current_old_entry = old_statuses.next();
2851 }
2852 (Some(new_entry), None) => {
2853 updated_statuses.push(new_entry.to_proto());
2854 current_new_entry = new_statuses.next();
2855 }
2856 (None, None) => break,
2857 }
2858 }
2859
2860 proto::UpdateRepository {
2861 branch_summary: self.branch.as_ref().map(branch_to_proto),
2862 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2863 updated_statuses,
2864 removed_statuses,
2865 current_merge_conflicts: self
2866 .merge
2867 .conflicted_paths
2868 .iter()
2869 .map(|path| path.to_proto())
2870 .collect(),
2871 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2872 project_id,
2873 id: self.id.to_proto(),
2874 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2875 entry_ids: vec![],
2876 scan_id: self.scan_id,
2877 is_last_update: true,
2878 stash_entries: self
2879 .stash_entries
2880 .entries
2881 .iter()
2882 .map(stash_to_proto)
2883 .collect(),
2884 }
2885 }
2886
2887 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2888 self.statuses_by_path.iter().cloned()
2889 }
2890
2891 pub fn status_summary(&self) -> GitSummary {
2892 self.statuses_by_path.summary().item_summary
2893 }
2894
2895 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2896 self.statuses_by_path
2897 .get(&PathKey(path.0.clone()), ())
2898 .cloned()
2899 }
2900
2901 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2902 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2903 }
2904
2905 #[inline]
2906 fn abs_path_to_repo_path_inner(
2907 work_directory_abs_path: &Path,
2908 abs_path: &Path,
2909 path_style: PathStyle,
2910 ) -> Option<RepoPath> {
2911 abs_path
2912 .strip_prefix(&work_directory_abs_path)
2913 .ok()
2914 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2915 }
2916
2917 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2918 self.merge.conflicted_paths.contains(repo_path)
2919 }
2920
2921 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2922 let had_conflict_on_last_merge_head_change =
2923 self.merge.conflicted_paths.contains(repo_path);
2924 let has_conflict_currently = self
2925 .status_for_path(repo_path)
2926 .is_some_and(|entry| entry.status.is_conflicted());
2927 had_conflict_on_last_merge_head_change || has_conflict_currently
2928 }
2929
2930 /// This is the name that will be displayed in the repository selector for this repository.
2931 pub fn display_name(&self) -> SharedString {
2932 self.work_directory_abs_path
2933 .file_name()
2934 .unwrap_or_default()
2935 .to_string_lossy()
2936 .to_string()
2937 .into()
2938 }
2939}
2940
2941pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2942 proto::StashEntry {
2943 oid: entry.oid.as_bytes().to_vec(),
2944 message: entry.message.clone(),
2945 branch: entry.branch.clone(),
2946 index: entry.index as u64,
2947 timestamp: entry.timestamp,
2948 }
2949}
2950
2951pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2952 Ok(StashEntry {
2953 oid: Oid::from_bytes(&entry.oid)?,
2954 message: entry.message.clone(),
2955 index: entry.index as usize,
2956 branch: entry.branch.clone(),
2957 timestamp: entry.timestamp,
2958 })
2959}
2960
2961impl MergeDetails {
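/// Reads the in-progress merge refs (MERGE_HEAD, CHERRY_PICK_HEAD, etc.) and recomputes the
/// set of conflicted paths when those refs change. Returns the merge details along with a
/// flag indicating whether the recorded merge heads should be treated as changed.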
2962 async fn load(
2963 backend: &Arc<dyn GitRepository>,
2964 status: &SumTree<StatusEntry>,
2965 prev_snapshot: &RepositorySnapshot,
2966 ) -> Result<(MergeDetails, bool)> {
2967 log::debug!("load merge details");
2968 let message = backend.merge_message().await;
2969 let heads = backend
2970 .revparse_batch(vec![
2971 "MERGE_HEAD".into(),
2972 "CHERRY_PICK_HEAD".into(),
2973 "REBASE_HEAD".into(),
2974 "REVERT_HEAD".into(),
2975 "APPLY_HEAD".into(),
2976 ])
2977 .await
2978 .log_err()
2979 .unwrap_or_default()
2980 .into_iter()
2981 .map(|opt| opt.map(SharedString::from))
2982 .collect::<Vec<_>>();
2983 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2984 let conflicted_paths = if merge_heads_changed {
2985 let current_conflicted_paths = TreeSet::from_ordered_entries(
2986 status
2987 .iter()
2988 .filter(|entry| entry.status.is_conflicted())
2989 .map(|entry| entry.repo_path.clone()),
2990 );
2991
2992 // It can happen that we run a scan while a lengthy merge is in progress
2993 // that will eventually result in conflicts, but before those conflicts
2994 // are reported by `git status`. Since for the moment we only care about
2995 // the merge heads state for the purposes of tracking conflicts, don't update
2996 // this state until we see some conflicts.
2997 if heads.iter().any(Option::is_some)
2998 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2999 && current_conflicted_paths.is_empty()
3000 {
3001 log::debug!("not updating merge heads because no conflicts found");
3002 return Ok((
3003 MergeDetails {
3004 message: message.map(SharedString::from),
3005 ..prev_snapshot.merge.clone()
3006 },
3007 false,
3008 ));
3009 }
3010
3011 current_conflicted_paths
3012 } else {
3013 prev_snapshot.merge.conflicted_paths.clone()
3014 };
3015 let details = MergeDetails {
3016 conflicted_paths,
3017 message: message.map(SharedString::from),
3018 heads,
3019 };
3020 Ok((details, merge_heads_changed))
3021 }
3022}
3023
3024impl Repository {
3025 pub fn snapshot(&self) -> RepositorySnapshot {
3026 self.snapshot.clone()
3027 }
3028
3029 fn local(
3030 id: RepositoryId,
3031 work_directory_abs_path: Arc<Path>,
3032 dot_git_abs_path: Arc<Path>,
3033 repository_dir_abs_path: Arc<Path>,
3034 common_dir_abs_path: Arc<Path>,
3035 project_environment: WeakEntity<ProjectEnvironment>,
3036 fs: Arc<dyn Fs>,
3037 git_store: WeakEntity<GitStore>,
3038 cx: &mut Context<Self>,
3039 ) -> Self {
3040 let snapshot =
3041 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3042 Repository {
3043 this: cx.weak_entity(),
3044 git_store,
3045 snapshot,
3046 commit_message_buffer: None,
3047 askpass_delegates: Default::default(),
3048 paths_needing_status_update: Default::default(),
3049 latest_askpass_id: 0,
3050 job_sender: Repository::spawn_local_git_worker(
3051 work_directory_abs_path,
3052 dot_git_abs_path,
3053 repository_dir_abs_path,
3054 common_dir_abs_path,
3055 project_environment,
3056 fs,
3057 cx,
3058 ),
3059 job_id: 0,
3060 active_jobs: Default::default(),
3061 }
3062 }
3063
3064 fn remote(
3065 id: RepositoryId,
3066 work_directory_abs_path: Arc<Path>,
3067 path_style: PathStyle,
3068 project_id: ProjectId,
3069 client: AnyProtoClient,
3070 git_store: WeakEntity<GitStore>,
3071 cx: &mut Context<Self>,
3072 ) -> Self {
3073 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3074 Self {
3075 this: cx.weak_entity(),
3076 snapshot,
3077 commit_message_buffer: None,
3078 git_store,
3079 paths_needing_status_update: Default::default(),
3080 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3081 askpass_delegates: Default::default(),
3082 latest_askpass_id: 0,
3083 active_jobs: Default::default(),
3084 job_id: 0,
3085 }
3086 }
3087
3088 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3089 self.git_store.upgrade()
3090 }
3091
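/// Reloads the index and HEAD base texts for every open buffer belonging to this repository,
/// then updates the corresponding diff states with any changed base texts, forwarding those
/// changes to downstream clients as well.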
3092 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3093 let this = cx.weak_entity();
3094 let git_store = self.git_store.clone();
3095 let _ = self.send_keyed_job(
3096 Some(GitJobKey::ReloadBufferDiffBases),
3097 None,
3098 |state, mut cx| async move {
3099 let RepositoryState::Local { backend, .. } = state else {
3100 log::error!("tried to recompute diffs for a non-local repository");
3101 return Ok(());
3102 };
3103
3104 let Some(this) = this.upgrade() else {
3105 return Ok(());
3106 };
3107
3108 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3109 git_store.update(cx, |git_store, cx| {
3110 git_store
3111 .diffs
3112 .iter()
3113 .filter_map(|(buffer_id, diff_state)| {
3114 let buffer_store = git_store.buffer_store.read(cx);
3115 let buffer = buffer_store.get(*buffer_id)?;
3116 let file = File::from_dyn(buffer.read(cx).file())?;
3117 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3118 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3119 log::debug!(
3120 "start reload diff bases for repo path {}",
3121 repo_path.as_unix_str()
3122 );
3123 diff_state.update(cx, |diff_state, _| {
3124 let has_unstaged_diff = diff_state
3125 .unstaged_diff
3126 .as_ref()
3127 .is_some_and(|diff| diff.is_upgradable());
3128 let has_uncommitted_diff = diff_state
3129 .uncommitted_diff
3130 .as_ref()
3131 .is_some_and(|set| set.is_upgradable());
3132
3133 Some((
3134 buffer,
3135 repo_path,
3136 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3137 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3138 ))
3139 })
3140 })
3141 .collect::<Vec<_>>()
3142 })
3143 })??;
3144
3145 let buffer_diff_base_changes = cx
3146 .background_spawn(async move {
3147 let mut changes = Vec::new();
3148 for (buffer, repo_path, current_index_text, current_head_text) in
3149 &repo_diff_state_updates
3150 {
3151 let index_text = if current_index_text.is_some() {
3152 backend.load_index_text(repo_path.clone()).await
3153 } else {
3154 None
3155 };
3156 let head_text = if current_head_text.is_some() {
3157 backend.load_committed_text(repo_path.clone()).await
3158 } else {
3159 None
3160 };
3161
3162 let change =
3163 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3164 (Some(current_index), Some(current_head)) => {
3165 let index_changed =
3166 index_text.as_ref() != current_index.as_deref();
3167 let head_changed =
3168 head_text.as_ref() != current_head.as_deref();
3169 if index_changed && head_changed {
3170 if index_text == head_text {
3171 Some(DiffBasesChange::SetBoth(head_text))
3172 } else {
3173 Some(DiffBasesChange::SetEach {
3174 index: index_text,
3175 head: head_text,
3176 })
3177 }
3178 } else if index_changed {
3179 Some(DiffBasesChange::SetIndex(index_text))
3180 } else if head_changed {
3181 Some(DiffBasesChange::SetHead(head_text))
3182 } else {
3183 None
3184 }
3185 }
3186 (Some(current_index), None) => {
3187 let index_changed =
3188 index_text.as_ref() != current_index.as_deref();
3189 index_changed
3190 .then_some(DiffBasesChange::SetIndex(index_text))
3191 }
3192 (None, Some(current_head)) => {
3193 let head_changed =
3194 head_text.as_ref() != current_head.as_deref();
3195 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3196 }
3197 (None, None) => None,
3198 };
3199
3200 changes.push((buffer.clone(), change))
3201 }
3202 changes
3203 })
3204 .await;
3205
3206 git_store.update(&mut cx, |git_store, cx| {
3207 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3208 let buffer_snapshot = buffer.read(cx).text_snapshot();
3209 let buffer_id = buffer_snapshot.remote_id();
3210 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3211 continue;
3212 };
3213
3214 let downstream_client = git_store.downstream_client();
3215 diff_state.update(cx, |diff_state, cx| {
3216 use proto::update_diff_bases::Mode;
3217
3218 if let Some((diff_bases_change, (client, project_id))) =
3219 diff_bases_change.clone().zip(downstream_client)
3220 {
3221 let (staged_text, committed_text, mode) = match diff_bases_change {
3222 DiffBasesChange::SetIndex(index) => {
3223 (index, None, Mode::IndexOnly)
3224 }
3225 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3226 DiffBasesChange::SetEach { index, head } => {
3227 (index, head, Mode::IndexAndHead)
3228 }
3229 DiffBasesChange::SetBoth(text) => {
3230 (None, text, Mode::IndexMatchesHead)
3231 }
3232 };
3233 client
3234 .send(proto::UpdateDiffBases {
3235 project_id: project_id.to_proto(),
3236 buffer_id: buffer_id.to_proto(),
3237 staged_text,
3238 committed_text,
3239 mode: mode as i32,
3240 })
3241 .log_err();
3242 }
3243
3244 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3245 });
3246 }
3247 })
3248 },
3249 );
3250 }
3251
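/// Enqueues a job on this repository's git worker and returns a receiver for its result.
/// When `status` is provided, the job is reported as an active operation while it runs.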
3252 pub fn send_job<F, Fut, R>(
3253 &mut self,
3254 status: Option<SharedString>,
3255 job: F,
3256 ) -> oneshot::Receiver<R>
3257 where
3258 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3259 Fut: Future<Output = R> + 'static,
3260 R: Send + 'static,
3261 {
3262 self.send_keyed_job(None, status, job)
3263 }
3264
3265 fn send_keyed_job<F, Fut, R>(
3266 &mut self,
3267 key: Option<GitJobKey>,
3268 status: Option<SharedString>,
3269 job: F,
3270 ) -> oneshot::Receiver<R>
3271 where
3272 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3273 Fut: Future<Output = R> + 'static,
3274 R: Send + 'static,
3275 {
3276 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3277 let job_id = post_inc(&mut self.job_id);
3278 let this = self.this.clone();
3279 self.job_sender
3280 .unbounded_send(GitJob {
3281 key,
3282 job: Box::new(move |state, cx: &mut AsyncApp| {
3283 let job = job(state, cx.clone());
3284 cx.spawn(async move |cx| {
3285 if let Some(s) = status.clone() {
3286 this.update(cx, |this, cx| {
3287 this.active_jobs.insert(
3288 job_id,
3289 JobInfo {
3290 start: Instant::now(),
3291 message: s.clone(),
3292 },
3293 );
3294
3295 cx.notify();
3296 })
3297 .ok();
3298 }
3299 let result = job.await;
3300
3301 this.update(cx, |this, cx| {
3302 this.active_jobs.remove(&job_id);
3303 cx.notify();
3304 })
3305 .ok();
3306
3307 result_tx.send(result).ok();
3308 })
3309 }),
3310 })
3311 .ok();
3312 result_rx
3313 }
3314
3315 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3316 let Some(git_store) = self.git_store.upgrade() else {
3317 return;
3318 };
3319 let entity = cx.entity();
3320 git_store.update(cx, |git_store, cx| {
3321 let Some((&id, _)) = git_store
3322 .repositories
3323 .iter()
3324 .find(|(_, handle)| *handle == &entity)
3325 else {
3326 return;
3327 };
3328 git_store.active_repo_id = Some(id);
3329 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3330 });
3331 }
3332
3333 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3334 self.snapshot.status()
3335 }
3336
3337 pub fn cached_stash(&self) -> GitStash {
3338 self.snapshot.stash_entries.clone()
3339 }
3340
3341 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3342 let git_store = self.git_store.upgrade()?;
3343 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3344 let abs_path = self
3345 .snapshot
3346 .work_directory_abs_path
3347 .join(path.as_std_path());
3348 let abs_path = SanitizedPath::new(&abs_path);
3349 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3350 Some(ProjectPath {
3351 worktree_id: worktree.read(cx).id(),
3352 path: relative_path,
3353 })
3354 }
3355
3356 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3357 let git_store = self.git_store.upgrade()?;
3358 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3359 let abs_path = worktree_store.absolutize(path, cx)?;
3360 self.snapshot.abs_path_to_repo_path(&abs_path)
3361 }
3362
3363 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3364 other
3365 .read(cx)
3366 .snapshot
3367 .work_directory_abs_path
3368 .starts_with(&self.snapshot.work_directory_abs_path)
3369 }
3370
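/// Returns the shared commit message buffer for this repository, creating it on first use
/// (locally, or by requesting it from the remote peer).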
3371 pub fn open_commit_buffer(
3372 &mut self,
3373 languages: Option<Arc<LanguageRegistry>>,
3374 buffer_store: Entity<BufferStore>,
3375 cx: &mut Context<Self>,
3376 ) -> Task<Result<Entity<Buffer>>> {
3377 let id = self.id;
3378 if let Some(buffer) = self.commit_message_buffer.clone() {
3379 return Task::ready(Ok(buffer));
3380 }
3381 let this = cx.weak_entity();
3382
3383 let rx = self.send_job(None, move |state, mut cx| async move {
3384 let Some(this) = this.upgrade() else {
3385 bail!("git store was dropped");
3386 };
3387 match state {
3388 RepositoryState::Local { .. } => {
3389 this.update(&mut cx, |_, cx| {
3390 Self::open_local_commit_buffer(languages, buffer_store, cx)
3391 })?
3392 .await
3393 }
3394 RepositoryState::Remote { project_id, client } => {
3395 let request = client.request(proto::OpenCommitMessageBuffer {
3396 project_id: project_id.0,
3397 repository_id: id.to_proto(),
3398 });
3399 let response = request.await.context("requesting to open commit buffer")?;
3400 let buffer_id = BufferId::new(response.buffer_id)?;
3401 let buffer = buffer_store
3402 .update(&mut cx, |buffer_store, cx| {
3403 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3404 })?
3405 .await?;
3406 if let Some(language_registry) = languages {
3407 let git_commit_language =
3408 language_registry.language_for_name("Git Commit").await?;
3409 buffer.update(&mut cx, |buffer, cx| {
3410 buffer.set_language(Some(git_commit_language), cx);
3411 })?;
3412 }
3413 this.update(&mut cx, |this, _| {
3414 this.commit_message_buffer = Some(buffer.clone());
3415 })?;
3416 Ok(buffer)
3417 }
3418 }
3419 });
3420
3421 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3422 }
3423
3424 fn open_local_commit_buffer(
3425 language_registry: Option<Arc<LanguageRegistry>>,
3426 buffer_store: Entity<BufferStore>,
3427 cx: &mut Context<Self>,
3428 ) -> Task<Result<Entity<Buffer>>> {
3429 cx.spawn(async move |repository, cx| {
3430 let buffer = buffer_store
3431 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3432 .await?;
3433
3434 if let Some(language_registry) = language_registry {
3435 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3436 buffer.update(cx, |buffer, cx| {
3437 buffer.set_language(Some(git_commit_language), cx);
3438 })?;
3439 }
3440
3441 repository.update(cx, |repository, _| {
3442 repository.commit_message_buffer = Some(buffer.clone());
3443 })?;
3444 Ok(buffer)
3445 })
3446 }
3447
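/// Restores the given paths to their contents at `commit`, either via the local backend or
/// by forwarding the request over RPC.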
3448 pub fn checkout_files(
3449 &mut self,
3450 commit: &str,
3451 paths: Vec<RepoPath>,
3452 _cx: &mut App,
3453 ) -> oneshot::Receiver<Result<()>> {
3454 let commit = commit.to_string();
3455 let id = self.id;
3456
3457 self.send_job(
3458 Some(format!("git checkout {}", commit).into()),
3459 move |git_repo, _| async move {
3460 match git_repo {
3461 RepositoryState::Local {
3462 backend,
3463 environment,
3464 ..
3465 } => {
3466 backend
3467 .checkout_files(commit, paths, environment.clone())
3468 .await
3469 }
3470 RepositoryState::Remote { project_id, client } => {
3471 client
3472 .request(proto::GitCheckoutFiles {
3473 project_id: project_id.0,
3474 repository_id: id.to_proto(),
3475 commit,
3476 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3477 })
3478 .await?;
3479
3480 Ok(())
3481 }
3482 }
3483 },
3484 )
3485 }
3486
3487 pub fn reset(
3488 &mut self,
3489 commit: String,
3490 reset_mode: ResetMode,
3491 _cx: &mut App,
3492 ) -> oneshot::Receiver<Result<()>> {
3493 let id = self.id;
3494
3495 self.send_job(None, move |git_repo, _| async move {
3496 match git_repo {
3497 RepositoryState::Local {
3498 backend,
3499 environment,
3500 ..
3501 } => backend.reset(commit, reset_mode, environment).await,
3502 RepositoryState::Remote { project_id, client } => {
3503 client
3504 .request(proto::GitReset {
3505 project_id: project_id.0,
3506 repository_id: id.to_proto(),
3507 commit,
3508 mode: match reset_mode {
3509 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3510 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3511 },
3512 })
3513 .await?;
3514
3515 Ok(())
3516 }
3517 }
3518 })
3519 }
3520
3521 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3522 let id = self.id;
3523 self.send_job(None, move |git_repo, _cx| async move {
3524 match git_repo {
3525 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3526 RepositoryState::Remote { project_id, client } => {
3527 let resp = client
3528 .request(proto::GitShow {
3529 project_id: project_id.0,
3530 repository_id: id.to_proto(),
3531 commit,
3532 })
3533 .await?;
3534
3535 Ok(CommitDetails {
3536 sha: resp.sha.into(),
3537 message: resp.message.into(),
3538 commit_timestamp: resp.commit_timestamp,
3539 author_email: resp.author_email.into(),
3540 author_name: resp.author_name.into(),
3541 })
3542 }
3543 }
3544 })
3545 }
3546
3547 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3548 let id = self.id;
3549 self.send_job(None, move |git_repo, cx| async move {
3550 match git_repo {
3551 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3552 RepositoryState::Remote {
3553 client, project_id, ..
3554 } => {
3555 let response = client
3556 .request(proto::LoadCommitDiff {
3557 project_id: project_id.0,
3558 repository_id: id.to_proto(),
3559 commit,
3560 })
3561 .await?;
3562 Ok(CommitDiff {
3563 files: response
3564 .files
3565 .into_iter()
3566 .map(|file| {
3567 Ok(CommitFile {
3568 path: RepoPath::from_proto(&file.path)?,
3569 old_text: file.old_text,
3570 new_text: file.new_text,
3571 })
3572 })
3573 .collect::<Result<Vec<_>>>()?,
3574 })
3575 }
3576 }
3577 })
3578 }
3579
3580 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3581 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3582 }
3583
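/// Saves any open, on-disk buffers for the given paths so the staged content matches the
/// editor, then stages the paths locally or via a `Stage` request over RPC.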
3584 pub fn stage_entries(
3585 &self,
3586 entries: Vec<RepoPath>,
3587 cx: &mut Context<Self>,
3588 ) -> Task<anyhow::Result<()>> {
3589 if entries.is_empty() {
3590 return Task::ready(Ok(()));
3591 }
3592 let id = self.id;
3593
3594 let mut save_futures = Vec::new();
3595 if let Some(buffer_store) = self.buffer_store(cx) {
3596 buffer_store.update(cx, |buffer_store, cx| {
3597 for path in &entries {
3598 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3599 continue;
3600 };
3601 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3602 && buffer
3603 .read(cx)
3604 .file()
3605 .is_some_and(|file| file.disk_state().exists())
3606 {
3607 save_futures.push(buffer_store.save_buffer(buffer, cx));
3608 }
3609 }
3610 })
3611 }
3612
3613 cx.spawn(async move |this, cx| {
3614 for save_future in save_futures {
3615 save_future.await?;
3616 }
3617
3618 this.update(cx, |this, _| {
3619 this.send_job(None, move |git_repo, _cx| async move {
3620 match git_repo {
3621 RepositoryState::Local {
3622 backend,
3623 environment,
3624 ..
3625 } => backend.stage_paths(entries, environment.clone()).await,
3626 RepositoryState::Remote { project_id, client } => {
3627 client
3628 .request(proto::Stage {
3629 project_id: project_id.0,
3630 repository_id: id.to_proto(),
3631 paths: entries
3632 .into_iter()
3633 .map(|repo_path| repo_path.to_proto())
3634 .collect(),
3635 })
3636 .await
3637 .context("sending stage request")?;
3638
3639 Ok(())
3640 }
3641 }
3642 })
3643 })?
3644 .await??;
3645
3646 Ok(())
3647 })
3648 }
3649
3650 pub fn unstage_entries(
3651 &self,
3652 entries: Vec<RepoPath>,
3653 cx: &mut Context<Self>,
3654 ) -> Task<anyhow::Result<()>> {
3655 if entries.is_empty() {
3656 return Task::ready(Ok(()));
3657 }
3658 let id = self.id;
3659
3660 let mut save_futures = Vec::new();
3661 if let Some(buffer_store) = self.buffer_store(cx) {
3662 buffer_store.update(cx, |buffer_store, cx| {
3663 for path in &entries {
3664 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3665 continue;
3666 };
3667 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3668 && buffer
3669 .read(cx)
3670 .file()
3671 .is_some_and(|file| file.disk_state().exists())
3672 {
3673 save_futures.push(buffer_store.save_buffer(buffer, cx));
3674 }
3675 }
3676 })
3677 }
3678
3679 cx.spawn(async move |this, cx| {
3680 for save_future in save_futures {
3681 save_future.await?;
3682 }
3683
3684 this.update(cx, |this, _| {
3685 this.send_job(None, move |git_repo, _cx| async move {
3686 match git_repo {
3687 RepositoryState::Local {
3688 backend,
3689 environment,
3690 ..
3691 } => backend.unstage_paths(entries, environment).await,
3692 RepositoryState::Remote { project_id, client } => {
3693 client
3694 .request(proto::Unstage {
3695 project_id: project_id.0,
3696 repository_id: id.to_proto(),
3697 paths: entries
3698 .into_iter()
3699 .map(|repo_path| repo_path.to_proto())
3700 .collect(),
3701 })
3702 .await
3703 .context("sending unstage request")?;
3704
3705 Ok(())
3706 }
3707 }
3708 })
3709 })?
3710 .await??;
3711
3712 Ok(())
3713 })
3714 }
3715
3716 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3717 let to_stage = self
3718 .cached_status()
3719 .filter(|entry| !entry.status.staging().is_fully_staged())
3720 .map(|entry| entry.repo_path)
3721 .collect();
3722 self.stage_entries(to_stage, cx)
3723 }
3724
3725 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3726 let to_unstage = self
3727 .cached_status()
3728 .filter(|entry| entry.status.staging().has_staged())
3729 .map(|entry| entry.repo_path)
3730 .collect();
3731 self.unstage_entries(to_unstage, cx)
3732 }
3733
3734 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3735 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3736
3737 self.stash_entries(to_stash, cx)
3738 }
3739
3740 pub fn stash_entries(
3741 &mut self,
3742 entries: Vec<RepoPath>,
3743 cx: &mut Context<Self>,
3744 ) -> Task<anyhow::Result<()>> {
3745 let id = self.id;
3746
3747 cx.spawn(async move |this, cx| {
3748 this.update(cx, |this, _| {
3749 this.send_job(None, move |git_repo, _cx| async move {
3750 match git_repo {
3751 RepositoryState::Local {
3752 backend,
3753 environment,
3754 ..
3755 } => backend.stash_paths(entries, environment).await,
3756 RepositoryState::Remote { project_id, client } => {
3757 client
3758 .request(proto::Stash {
3759 project_id: project_id.0,
3760 repository_id: id.to_proto(),
3761 paths: entries
3762 .into_iter()
3763 .map(|repo_path| repo_path.to_proto())
3764 .collect(),
3765 })
3766 .await
3767 .context("sending stash request")?;
3768 Ok(())
3769 }
3770 }
3771 })
3772 })?
3773 .await??;
3774 Ok(())
3775 })
3776 }
3777
3778 pub fn stash_pop(
3779 &mut self,
3780 index: Option<usize>,
3781 cx: &mut Context<Self>,
3782 ) -> Task<anyhow::Result<()>> {
3783 let id = self.id;
3784 cx.spawn(async move |this, cx| {
3785 this.update(cx, |this, _| {
3786 this.send_job(None, move |git_repo, _cx| async move {
3787 match git_repo {
3788 RepositoryState::Local {
3789 backend,
3790 environment,
3791 ..
3792 } => backend.stash_pop(index, environment).await,
3793 RepositoryState::Remote { project_id, client } => {
3794 client
3795 .request(proto::StashPop {
3796 project_id: project_id.0,
3797 repository_id: id.to_proto(),
3798 stash_index: index.map(|i| i as u64),
3799 })
3800 .await
3801 .context("sending stash pop request")?;
3802 Ok(())
3803 }
3804 }
3805 })
3806 })?
3807 .await??;
3808 Ok(())
3809 })
3810 }
3811
3812 pub fn stash_apply(
3813 &mut self,
3814 index: Option<usize>,
3815 cx: &mut Context<Self>,
3816 ) -> Task<anyhow::Result<()>> {
3817 let id = self.id;
3818 cx.spawn(async move |this, cx| {
3819 this.update(cx, |this, _| {
3820 this.send_job(None, move |git_repo, _cx| async move {
3821 match git_repo {
3822 RepositoryState::Local {
3823 backend,
3824 environment,
3825 ..
3826 } => backend.stash_apply(index, environment).await,
3827 RepositoryState::Remote { project_id, client } => {
3828 client
3829 .request(proto::StashApply {
3830 project_id: project_id.0,
3831 repository_id: id.to_proto(),
3832 stash_index: index.map(|i| i as u64),
3833 })
3834 .await
3835 .context("sending stash apply request")?;
3836 Ok(())
3837 }
3838 }
3839 })
3840 })?
3841 .await??;
3842 Ok(())
3843 })
3844 }
3845
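/// Drops the stash entry at `index`, or the most recent entry when `index` is `None`.
/// For local repositories, the cached stash entries are refreshed and the updated
/// snapshot is forwarded downstream.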
3846 pub fn stash_drop(
3847 &mut self,
3848 index: Option<usize>,
3849 cx: &mut Context<Self>,
3850 ) -> oneshot::Receiver<anyhow::Result<()>> {
3851 let id = self.id;
3852 let updates_tx = self
3853 .git_store()
3854 .and_then(|git_store| match &git_store.read(cx).state {
3855 GitStoreState::Local { downstream, .. } => downstream
3856 .as_ref()
3857 .map(|downstream| downstream.updates_tx.clone()),
3858 _ => None,
3859 });
3860 let this = cx.weak_entity();
3861 self.send_job(None, move |git_repo, mut cx| async move {
3862 match git_repo {
3863 RepositoryState::Local {
3864 backend,
3865 environment,
3866 ..
3867 } => {
3868 let result = backend.stash_drop(index, environment).await;
3869 if result.is_ok()
3870 && let Ok(stash_entries) = backend.stash_entries().await
3871 {
3872 let snapshot = this.update(&mut cx, |this, cx| {
3873 this.snapshot.stash_entries = stash_entries;
3874 let snapshot = this.snapshot.clone();
3875 cx.emit(RepositoryEvent::Updated {
3876 full_scan: false,
3877 new_instance: false,
3878 });
3879 snapshot
3880 })?;
3881 if let Some(updates_tx) = updates_tx {
3882 updates_tx
3883 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3884 .ok();
3885 }
3886 }
3887
3888 result
3889 }
3890 RepositoryState::Remote { project_id, client } => {
3891 client
3892 .request(proto::StashDrop {
3893 project_id: project_id.0,
3894 repository_id: id.to_proto(),
3895 stash_index: index.map(|i| i as u64),
3896 })
3897 .await
3898 .context("sending stash drop request")?;
3899 Ok(())
3900 }
3901 }
3902 })
3903 }
3904
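/// Creates a commit with the given message and options, optionally overriding
/// the author name and email.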
3905 pub fn commit(
3906 &mut self,
3907 message: SharedString,
3908 name_and_email: Option<(SharedString, SharedString)>,
3909 options: CommitOptions,
3910 _cx: &mut App,
3911 ) -> oneshot::Receiver<Result<()>> {
3912 let id = self.id;
3913
3914 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3915 match git_repo {
3916 RepositoryState::Local {
3917 backend,
3918 environment,
3919 ..
3920 } => {
3921 backend
3922 .commit(message, name_and_email, options, environment)
3923 .await
3924 }
3925 RepositoryState::Remote { project_id, client } => {
3926 let (name, email) = name_and_email.unzip();
3927 client
3928 .request(proto::Commit {
3929 project_id: project_id.0,
3930 repository_id: id.to_proto(),
3931 message: String::from(message),
3932 name: name.map(String::from),
3933 email: email.map(String::from),
3934 options: Some(proto::commit::CommitOptions {
3935 amend: options.amend,
3936 signoff: options.signoff,
3937 }),
3938 })
3939 .await
3940 .context("sending commit request")?;
3941
3942 Ok(())
3943 }
3944 }
3945 })
3946 }
3947
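/// Runs `git fetch` with the given options, using `askpass` to answer any
/// credential prompts.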
3948 pub fn fetch(
3949 &mut self,
3950 fetch_options: FetchOptions,
3951 askpass: AskPassDelegate,
3952 _cx: &mut App,
3953 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3954 let askpass_delegates = self.askpass_delegates.clone();
3955 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3956 let id = self.id;
3957
3958 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3959 match git_repo {
3960 RepositoryState::Local {
3961 backend,
3962 environment,
3963 ..
3964 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3965 RepositoryState::Remote { project_id, client } => {
3966 askpass_delegates.lock().insert(askpass_id, askpass);
3967 let _defer = util::defer(|| {
3968 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3969 debug_assert!(askpass_delegate.is_some());
3970 });
3971
3972 let response = client
3973 .request(proto::Fetch {
3974 project_id: project_id.0,
3975 repository_id: id.to_proto(),
3976 askpass_id,
3977 remote: fetch_options.to_proto(),
3978 })
3979 .await
3980 .context("sending fetch request")?;
3981
3982 Ok(RemoteCommandOutput {
3983 stdout: response.stdout,
3984 stderr: response.stderr,
3985 })
3986 }
3987 }
3988 })
3989 }
3990
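/// Pushes `branch` to `remote`. After a successful local push, the cached head
/// branch is refreshed and the updated snapshot is forwarded downstream.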
3991 pub fn push(
3992 &mut self,
3993 branch: SharedString,
3994 remote: SharedString,
3995 options: Option<PushOptions>,
3996 askpass: AskPassDelegate,
3997 cx: &mut Context<Self>,
3998 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3999 let askpass_delegates = self.askpass_delegates.clone();
4000 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4001 let id = self.id;
4002
4003 let args = options
4004 .map(|option| match option {
4005 PushOptions::SetUpstream => " --set-upstream",
4006 PushOptions::Force => " --force-with-lease",
4007 })
4008 .unwrap_or("");
4009
4010 let updates_tx = self
4011 .git_store()
4012 .and_then(|git_store| match &git_store.read(cx).state {
4013 GitStoreState::Local { downstream, .. } => downstream
4014 .as_ref()
4015 .map(|downstream| downstream.updates_tx.clone()),
4016 _ => None,
4017 });
4018
4019 let this = cx.weak_entity();
4020 self.send_job(
4021 Some(format!("git push{} {} {}", args, remote, branch).into()),
4022 move |git_repo, mut cx| async move {
4023 match git_repo {
4024 RepositoryState::Local {
4025 backend,
4026 environment,
4027 ..
4028 } => {
4029 let result = backend
4030 .push(
4031 branch.to_string(),
4032 remote.to_string(),
4033 options,
4034 askpass,
4035 environment.clone(),
4036 cx.clone(),
4037 )
4038 .await;
4039 if result.is_ok() {
4040 let branches = backend.branches().await?;
4041 let branch = branches.into_iter().find(|branch| branch.is_head);
4042 log::info!("head branch after push is {branch:?}");
4043 let snapshot = this.update(&mut cx, |this, cx| {
4044 this.snapshot.branch = branch;
4045 let snapshot = this.snapshot.clone();
4046 cx.emit(RepositoryEvent::Updated {
4047 full_scan: false,
4048 new_instance: false,
4049 });
4050 snapshot
4051 })?;
4052 if let Some(updates_tx) = updates_tx {
4053 updates_tx
4054 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4055 .ok();
4056 }
4057 }
4058 result
4059 }
4060 RepositoryState::Remote { project_id, client } => {
4061 askpass_delegates.lock().insert(askpass_id, askpass);
4062 let _defer = util::defer(|| {
4063 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4064 debug_assert!(askpass_delegate.is_some());
4065 });
4066 let response = client
4067 .request(proto::Push {
4068 project_id: project_id.0,
4069 repository_id: id.to_proto(),
4070 askpass_id,
4071 branch_name: branch.to_string(),
4072 remote_name: remote.to_string(),
4073 options: options.map(|options| match options {
4074 PushOptions::Force => proto::push::PushOptions::Force,
4075 PushOptions::SetUpstream => {
4076 proto::push::PushOptions::SetUpstream
4077 }
4078 }
4079 as i32),
4080 })
4081 .await
4082 .context("sending push request")?;
4083
4084 Ok(RemoteCommandOutput {
4085 stdout: response.stdout,
4086 stderr: response.stderr,
4087 })
4088 }
4089 }
4090 },
4091 )
4092 }
4093
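/// Pulls `branch` from `remote`, using `askpass` to answer any credential prompts.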
4094 pub fn pull(
4095 &mut self,
4096 branch: SharedString,
4097 remote: SharedString,
4098 askpass: AskPassDelegate,
4099 _cx: &mut App,
4100 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4101 let askpass_delegates = self.askpass_delegates.clone();
4102 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4103 let id = self.id;
4104
4105 self.send_job(
4106 Some(format!("git pull {} {}", remote, branch).into()),
4107 move |git_repo, cx| async move {
4108 match git_repo {
4109 RepositoryState::Local {
4110 backend,
4111 environment,
4112 ..
4113 } => {
4114 backend
4115 .pull(
4116 branch.to_string(),
4117 remote.to_string(),
4118 askpass,
4119 environment.clone(),
4120 cx,
4121 )
4122 .await
4123 }
4124 RepositoryState::Remote { project_id, client } => {
4125 askpass_delegates.lock().insert(askpass_id, askpass);
4126 let _defer = util::defer(|| {
4127 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4128 debug_assert!(askpass_delegate.is_some());
4129 });
4130 let response = client
4131 .request(proto::Pull {
4132 project_id: project_id.0,
4133 repository_id: id.to_proto(),
4134 askpass_id,
4135 branch_name: branch.to_string(),
4136 remote_name: remote.to_string(),
4137 })
4138 .await
4139 .context("sending pull request")?;
4140
4141 Ok(RemoteCommandOutput {
4142 stdout: response.stdout,
4143 stderr: response.stderr,
4144 })
4145 }
4146 }
4147 },
4148 )
4149 }
4150
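/// Queues a keyed job that writes `content` as the index text for `path`, recording
/// the associated hunk staging operation count on the buffer's diff state once the
/// write completes.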
4151 fn spawn_set_index_text_job(
4152 &mut self,
4153 path: RepoPath,
4154 content: Option<String>,
4155 hunk_staging_operation_count: Option<usize>,
4156 cx: &mut Context<Self>,
4157 ) -> oneshot::Receiver<anyhow::Result<()>> {
4158 let id = self.id;
4159 let this = cx.weak_entity();
4160 let git_store = self.git_store.clone();
4161 self.send_keyed_job(
4162 Some(GitJobKey::WriteIndex(path.clone())),
4163 None,
4164 move |git_repo, mut cx| async move {
4165 log::debug!(
4166 "start updating index text for buffer {}",
4167 path.as_unix_str()
4168 );
4169 match git_repo {
4170 RepositoryState::Local {
4171 backend,
4172 environment,
4173 ..
4174 } => {
4175 backend
4176 .set_index_text(path.clone(), content, environment.clone())
4177 .await?;
4178 }
4179 RepositoryState::Remote { project_id, client } => {
4180 client
4181 .request(proto::SetIndexText {
4182 project_id: project_id.0,
4183 repository_id: id.to_proto(),
4184 path: path.to_proto(),
4185 text: content,
4186 })
4187 .await?;
4188 }
4189 }
4190 log::debug!(
4191 "finish updating index text for buffer {}",
4192 path.as_unix_str()
4193 );
4194
4195 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4196 let project_path = this
4197 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4198 .ok()
4199 .flatten();
4200 git_store.update(&mut cx, |git_store, cx| {
4201 let buffer_id = git_store
4202 .buffer_store
4203 .read(cx)
4204 .get_by_path(&project_path?)?
4205 .read(cx)
4206 .remote_id();
4207 let diff_state = git_store.diffs.get(&buffer_id)?;
4208 diff_state.update(cx, |diff_state, _| {
4209 diff_state.hunk_staging_operation_count_as_of_write =
4210 hunk_staging_operation_count;
4211 });
4212 Some(())
4213 })?;
4214 }
4215 Ok(())
4216 },
4217 )
4218 }
4219
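/// Returns the repository's remotes, optionally scoped to `branch_name`.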
4220 pub fn get_remotes(
4221 &mut self,
4222 branch_name: Option<String>,
4223 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4224 let id = self.id;
4225 self.send_job(None, move |repo, _cx| async move {
4226 match repo {
4227 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4228 RepositoryState::Remote { project_id, client } => {
4229 let response = client
4230 .request(proto::GetRemotes {
4231 project_id: project_id.0,
4232 repository_id: id.to_proto(),
4233 branch_name,
4234 })
4235 .await?;
4236
4237 let remotes = response
4238 .remotes
4239 .into_iter()
4240 .map(|remote| git::repository::Remote {
4241 name: remote.name.into(),
4242 })
4243 .collect();
4244
4245 Ok(remotes)
4246 }
4247 }
4248 })
4249 }
4250
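/// Lists the repository's branches.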
4251 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4252 let id = self.id;
4253 self.send_job(None, move |repo, _| async move {
4254 match repo {
4255 RepositoryState::Local { backend, .. } => backend.branches().await,
4256 RepositoryState::Remote { project_id, client } => {
4257 let response = client
4258 .request(proto::GitGetBranches {
4259 project_id: project_id.0,
4260 repository_id: id.to_proto(),
4261 })
4262 .await?;
4263
4264 let branches = response
4265 .branches
4266 .into_iter()
4267 .map(|branch| proto_to_branch(&branch))
4268 .collect();
4269
4270 Ok(branches)
4271 }
4272 }
4273 })
4274 }
4275
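/// Returns the repository's default branch, if one can be determined.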
4276 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4277 let id = self.id;
4278 self.send_job(None, move |repo, _| async move {
4279 match repo {
4280 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4281 RepositoryState::Remote { project_id, client } => {
4282 let response = client
4283 .request(proto::GetDefaultBranch {
4284 project_id: project_id.0,
4285 repository_id: id.to_proto(),
4286 })
4287 .await?;
4288
4289 anyhow::Ok(response.branch.map(SharedString::from))
4290 }
4291 }
4292 })
4293 }
4294
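/// Produces a textual diff of the requested type, either HEAD-to-index or
/// HEAD-to-worktree.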
4295 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4296 let id = self.id;
4297 self.send_job(None, move |repo, _cx| async move {
4298 match repo {
4299 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4300 RepositoryState::Remote { project_id, client } => {
4301 let response = client
4302 .request(proto::GitDiff {
4303 project_id: project_id.0,
4304 repository_id: id.to_proto(),
4305 diff_type: match diff_type {
4306 DiffType::HeadToIndex => {
4307 proto::git_diff::DiffType::HeadToIndex.into()
4308 }
4309 DiffType::HeadToWorktree => {
4310 proto::git_diff::DiffType::HeadToWorktree.into()
4311 }
4312 },
4313 })
4314 .await?;
4315
4316 Ok(response.diff)
4317 }
4318 }
4319 })
4320 }
4321
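/// Creates a new branch named `branch_name` and switches to it.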
4322 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4323 let id = self.id;
4324 self.send_job(
4325 Some(format!("git switch -c {branch_name}").into()),
4326 move |repo, _cx| async move {
4327 match repo {
4328 RepositoryState::Local { backend, .. } => {
4329 backend.create_branch(branch_name).await
4330 }
4331 RepositoryState::Remote { project_id, client } => {
4332 client
4333 .request(proto::GitCreateBranch {
4334 project_id: project_id.0,
4335 repository_id: id.to_proto(),
4336 branch_name,
4337 })
4338 .await?;
4339
4340 Ok(())
4341 }
4342 }
4343 },
4344 )
4345 }
4346
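/// Switches to the branch named `branch_name`.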
4347 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4348 let id = self.id;
4349 self.send_job(
4350 Some(format!("git switch {branch_name}").into()),
4351 move |repo, _cx| async move {
4352 match repo {
4353 RepositoryState::Local { backend, .. } => {
4354 backend.change_branch(branch_name).await
4355 }
4356 RepositoryState::Remote { project_id, client } => {
4357 client
4358 .request(proto::GitChangeBranch {
4359 project_id: project_id.0,
4360 repository_id: id.to_proto(),
4361 branch_name,
4362 })
4363 .await?;
4364
4365 Ok(())
4366 }
4367 }
4368 },
4369 )
4370 }
4371
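/// Renames `branch` to `new_name`.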
4372 pub fn rename_branch(
4373 &mut self,
4374 branch: String,
4375 new_name: String,
4376 ) -> oneshot::Receiver<Result<()>> {
4377 let id = self.id;
4378 self.send_job(
4379 Some(format!("git branch -m {branch} {new_name}").into()),
4380 move |repo, _cx| async move {
4381 match repo {
4382 RepositoryState::Local { backend, .. } => {
4383 backend.rename_branch(branch, new_name).await
4384 }
4385 RepositoryState::Remote { project_id, client } => {
4386 client
4387 .request(proto::GitRenameBranch {
4388 project_id: project_id.0,
4389 repository_id: id.to_proto(),
4390 branch,
4391 new_name,
4392 })
4393 .await?;
4394
4395 Ok(())
4396 }
4397 }
4398 },
4399 )
4400 }
4401
4402 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4403 let id = self.id;
4404 self.send_job(None, move |repo, _cx| async move {
4405 match repo {
4406 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4407 RepositoryState::Remote { project_id, client } => {
4408 let response = client
4409 .request(proto::CheckForPushedCommits {
4410 project_id: project_id.0,
4411 repository_id: id.to_proto(),
4412 })
4413 .await?;
4414
4415 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4416
4417 Ok(branches)
4418 }
4419 }
4420 })
4421 }
4422
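/// Captures a checkpoint of the repository's current state. Only supported for
/// local repositories.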
4423 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4424 self.send_job(None, |repo, _cx| async move {
4425 match repo {
4426 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4427 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4428 }
4429 })
4430 }
4431
4432 pub fn restore_checkpoint(
4433 &mut self,
4434 checkpoint: GitRepositoryCheckpoint,
4435 ) -> oneshot::Receiver<Result<()>> {
4436 self.send_job(None, move |repo, _cx| async move {
4437 match repo {
4438 RepositoryState::Local { backend, .. } => {
4439 backend.restore_checkpoint(checkpoint).await
4440 }
4441 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4442 }
4443 })
4444 }
4445
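/// Applies a repository update received from the host, replacing the snapshot's
/// branch, head commit, merge state, stash entries, and changed statuses.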
4446 pub(crate) fn apply_remote_update(
4447 &mut self,
4448 update: proto::UpdateRepository,
4449 is_new: bool,
4450 cx: &mut Context<Self>,
4451 ) -> Result<()> {
4452 let conflicted_paths = TreeSet::from_ordered_entries(
4453 update
4454 .current_merge_conflicts
4455 .into_iter()
4456 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4457 );
4458 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4459 self.snapshot.head_commit = update
4460 .head_commit_details
4461 .as_ref()
4462 .map(proto_to_commit_details);
4463
4464 self.snapshot.merge.conflicted_paths = conflicted_paths;
4465 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4466 self.snapshot.stash_entries = GitStash {
4467 entries: update
4468 .stash_entries
4469 .iter()
4470 .filter_map(|entry| proto_to_stash(entry).ok())
4471 .collect(),
4472 };
4473
4474 let edits = update
4475 .removed_statuses
4476 .into_iter()
4477 .filter_map(|path| {
4478 Some(sum_tree::Edit::Remove(PathKey(
4479 RelPath::from_proto(&path).log_err()?,
4480 )))
4481 })
4482 .chain(
4483 update
4484 .updated_statuses
4485 .into_iter()
4486 .filter_map(|updated_status| {
4487 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4488 }),
4489 )
4490 .collect::<Vec<_>>();
4491 self.snapshot.statuses_by_path.edit(edits, ());
4492 if update.is_last_update {
4493 self.snapshot.scan_id = update.scan_id;
4494 }
4495 cx.emit(RepositoryEvent::Updated {
4496 full_scan: true,
4497 new_instance: is_new,
4498 });
4499 Ok(())
4500 }
4501
4502 pub fn compare_checkpoints(
4503 &mut self,
4504 left: GitRepositoryCheckpoint,
4505 right: GitRepositoryCheckpoint,
4506 ) -> oneshot::Receiver<Result<bool>> {
4507 self.send_job(None, move |repo, _cx| async move {
4508 match repo {
4509 RepositoryState::Local { backend, .. } => {
4510 backend.compare_checkpoints(left, right).await
4511 }
4512 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4513 }
4514 })
4515 }
4516
4517 pub fn diff_checkpoints(
4518 &mut self,
4519 base_checkpoint: GitRepositoryCheckpoint,
4520 target_checkpoint: GitRepositoryCheckpoint,
4521 ) -> oneshot::Receiver<Result<String>> {
4522 self.send_job(None, move |repo, _cx| async move {
4523 match repo {
4524 RepositoryState::Local { backend, .. } => {
4525 backend
4526 .diff_checkpoints(base_checkpoint, target_checkpoint)
4527 .await
4528 }
4529 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4530 }
4531 })
4532 }
4533
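/// Schedules a full git status scan, recomputing the repository snapshot and
/// forwarding it downstream when an update sender is provided.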
4534 fn schedule_scan(
4535 &mut self,
4536 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4537 cx: &mut Context<Self>,
4538 ) {
4539 let this = cx.weak_entity();
4540 let _ = self.send_keyed_job(
4541 Some(GitJobKey::ReloadGitState),
4542 None,
4543 |state, mut cx| async move {
4544 log::debug!("run scheduled git status scan");
4545
4546 let Some(this) = this.upgrade() else {
4547 return Ok(());
4548 };
4549 let RepositoryState::Local { backend, .. } = state else {
4550 bail!("not a local repository")
4551 };
4552 let (snapshot, events) = this
4553 .update(&mut cx, |this, _| {
4554 this.paths_needing_status_update.clear();
4555 compute_snapshot(
4556 this.id,
4557 this.work_directory_abs_path.clone(),
4558 this.snapshot.clone(),
4559 backend.clone(),
4560 )
4561 })?
4562 .await?;
4563 this.update(&mut cx, |this, cx| {
4564 this.snapshot = snapshot.clone();
4565 for event in events {
4566 cx.emit(event);
4567 }
4568 })?;
4569 if let Some(updates_tx) = updates_tx {
4570 updates_tx
4571 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4572 .ok();
4573 }
4574 Ok(())
4575 },
4576 );
4577 }
4578
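/// Spawns the background worker for a local repository: it resolves the directory
/// environment, opens the git backend, and then processes queued jobs, skipping a
/// job when a newer job with the same key is already queued.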
4579 fn spawn_local_git_worker(
4580 work_directory_abs_path: Arc<Path>,
4581 dot_git_abs_path: Arc<Path>,
4582 _repository_dir_abs_path: Arc<Path>,
4583 _common_dir_abs_path: Arc<Path>,
4584 project_environment: WeakEntity<ProjectEnvironment>,
4585 fs: Arc<dyn Fs>,
4586 cx: &mut Context<Self>,
4587 ) -> mpsc::UnboundedSender<GitJob> {
4588 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4589
4590 cx.spawn(async move |_, cx| {
4591 let environment = project_environment
4592 .upgrade()
4593 .context("missing project environment")?
4594 .update(cx, |project_environment, cx| {
4595 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4596 })?
4597 .await
4598 .unwrap_or_else(|| {
4599 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4600 HashMap::default()
4601 });
4602 let backend = cx
4603 .background_spawn(async move {
4604 fs.open_repo(&dot_git_abs_path)
4605 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4606 })
4607 .await?;
4608
4609 if let Some(git_hosting_provider_registry) =
4610 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4611 {
4612 git_hosting_providers::register_additional_providers(
4613 git_hosting_provider_registry,
4614 backend.clone(),
4615 );
4616 }
4617
4618 let state = RepositoryState::Local {
4619 backend,
4620 environment: Arc::new(environment),
4621 };
4622 let mut jobs = VecDeque::new();
4623 loop {
4624 while let Ok(Some(next_job)) = job_rx.try_next() {
4625 jobs.push_back(next_job);
4626 }
4627
4628 if let Some(job) = jobs.pop_front() {
4629 if let Some(current_key) = &job.key
4630 && jobs
4631 .iter()
4632 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4633 {
4634 continue;
4635 }
4636 (job.job)(state.clone(), cx).await;
4637 } else if let Some(job) = job_rx.next().await {
4638 jobs.push_back(job);
4639 } else {
4640 break;
4641 }
4642 }
4643 anyhow::Ok(())
4644 })
4645 .detach_and_log_err(cx);
4646
4647 job_tx
4648 }
4649
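/// Spawns the background worker that forwards queued git jobs to the remote host,
/// skipping a job when a newer job with the same key is already queued.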
4650 fn spawn_remote_git_worker(
4651 project_id: ProjectId,
4652 client: AnyProtoClient,
4653 cx: &mut Context<Self>,
4654 ) -> mpsc::UnboundedSender<GitJob> {
4655 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4656
4657 cx.spawn(async move |_, cx| {
4658 let state = RepositoryState::Remote { project_id, client };
4659 let mut jobs = VecDeque::new();
4660 loop {
4661 while let Ok(Some(next_job)) = job_rx.try_next() {
4662 jobs.push_back(next_job);
4663 }
4664
4665 if let Some(job) = jobs.pop_front() {
4666 if let Some(current_key) = &job.key
4667 && jobs
4668 .iter()
4669 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4670 {
4671 continue;
4672 }
4673 (job.job)(state.clone(), cx).await;
4674 } else if let Some(job) = job_rx.next().await {
4675 jobs.push_back(job);
4676 } else {
4677 break;
4678 }
4679 }
4680 anyhow::Ok(())
4681 })
4682 .detach_and_log_err(cx);
4683
4684 job_tx
4685 }
4686
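/// Loads the index (staged) text for the given path, if any.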
4687 fn load_staged_text(
4688 &mut self,
4689 buffer_id: BufferId,
4690 repo_path: RepoPath,
4691 cx: &App,
4692 ) -> Task<Result<Option<String>>> {
4693 let rx = self.send_job(None, move |state, _| async move {
4694 match state {
4695 RepositoryState::Local { backend, .. } => {
4696 anyhow::Ok(backend.load_index_text(repo_path).await)
4697 }
4698 RepositoryState::Remote { project_id, client } => {
4699 let response = client
4700 .request(proto::OpenUnstagedDiff {
4701 project_id: project_id.to_proto(),
4702 buffer_id: buffer_id.to_proto(),
4703 })
4704 .await?;
4705 Ok(response.staged_text)
4706 }
4707 }
4708 });
4709 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4710 }
4711
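/// Loads the committed and staged texts for the given path, collapsing them into a
/// single value when the index matches HEAD.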
4712 fn load_committed_text(
4713 &mut self,
4714 buffer_id: BufferId,
4715 repo_path: RepoPath,
4716 cx: &App,
4717 ) -> Task<Result<DiffBasesChange>> {
4718 let rx = self.send_job(None, move |state, _| async move {
4719 match state {
4720 RepositoryState::Local { backend, .. } => {
4721 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4722 let staged_text = backend.load_index_text(repo_path).await;
4723 let diff_bases_change = if committed_text == staged_text {
4724 DiffBasesChange::SetBoth(committed_text)
4725 } else {
4726 DiffBasesChange::SetEach {
4727 index: staged_text,
4728 head: committed_text,
4729 }
4730 };
4731 anyhow::Ok(diff_bases_change)
4732 }
4733 RepositoryState::Remote { project_id, client } => {
4734 use proto::open_uncommitted_diff_response::Mode;
4735
4736 let response = client
4737 .request(proto::OpenUncommittedDiff {
4738 project_id: project_id.to_proto(),
4739 buffer_id: buffer_id.to_proto(),
4740 })
4741 .await?;
4742 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4743 let bases = match mode {
4744 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4745 Mode::IndexAndHead => DiffBasesChange::SetEach {
4746 head: response.committed_text,
4747 index: response.staged_text,
4748 },
4749 };
4750 Ok(bases)
4751 }
4752 }
4753 });
4754
4755 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4756 }
4757
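/// Records that the given paths may have changed and schedules an incremental
/// status refresh for them.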
4758 fn paths_changed(
4759 &mut self,
4760 paths: Vec<RepoPath>,
4761 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4762 cx: &mut Context<Self>,
4763 ) {
4764 self.paths_needing_status_update.extend(paths);
4765
4766 let this = cx.weak_entity();
4767 let _ = self.send_keyed_job(
4768 Some(GitJobKey::RefreshStatuses),
4769 None,
4770 |state, mut cx| async move {
4771 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4772 (
4773 this.snapshot.clone(),
4774 mem::take(&mut this.paths_needing_status_update),
4775 )
4776 })?;
4777 let RepositoryState::Local { backend, .. } = state else {
4778 bail!("not a local repository")
4779 };
4780
4781 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4782 if paths.is_empty() {
4783 return Ok(());
4784 }
4785 let statuses = backend.status(&paths).await?;
4786 let stash_entries = backend.stash_entries().await?;
4787
4788 let changed_path_statuses = cx
4789 .background_spawn(async move {
4790 let mut changed_path_statuses = Vec::new();
4791 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4792 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4793
4794 for (repo_path, status) in &*statuses.entries {
4795 changed_paths.remove(repo_path);
4796 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4797 && cursor.item().is_some_and(|entry| entry.status == *status)
4798 {
4799 continue;
4800 }
4801
4802 changed_path_statuses.push(Edit::Insert(StatusEntry {
4803 repo_path: repo_path.clone(),
4804 status: *status,
4805 }));
4806 }
4807 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4808 for path in changed_paths.into_iter() {
4809 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4810 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4811 }
4812 }
4813 changed_path_statuses
4814 })
4815 .await;
4816
4817 this.update(&mut cx, |this, cx| {
4818 let needs_update = !changed_path_statuses.is_empty()
4819 || this.snapshot.stash_entries != stash_entries;
4820 this.snapshot.stash_entries = stash_entries;
4821 if !changed_path_statuses.is_empty() {
4822 this.snapshot
4823 .statuses_by_path
4824 .edit(changed_path_statuses, ());
4825 this.snapshot.scan_id += 1;
4826 }
4827
4828 if needs_update && let Some(updates_tx) = updates_tx {
4829 updates_tx
4830 .unbounded_send(DownstreamUpdate::UpdateRepository(
4831 this.snapshot.clone(),
4832 ))
4833 .ok();
4834 }
4835 cx.emit(RepositoryEvent::Updated {
4836 full_scan: false,
4837 new_instance: false,
4838 });
4839 })
4840 },
4841 );
4842 }
4843
4844 /// Returns the currently running git command, if any, along with when it started.
4845 pub fn current_job(&self) -> Option<JobInfo> {
4846 self.active_jobs.values().next().cloned()
4847 }
4848
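/// Returns a receiver that resolves once every git job enqueued before this call
/// has been processed.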
4849 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4850 self.send_job(None, |_, _| async {})
4851 }
4852}
4853
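/// Builds a permalink to a file inside a crate unpacked from the Cargo registry, using
/// the `.cargo_vcs_info.json` and `Cargo.toml` files in its parent directories to locate
/// the upstream repository and commit.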
4854fn get_permalink_in_rust_registry_src(
4855 provider_registry: Arc<GitHostingProviderRegistry>,
4856 path: PathBuf,
4857 selection: Range<u32>,
4858) -> Result<url::Url> {
4859 #[derive(Deserialize)]
4860 struct CargoVcsGit {
4861 sha1: String,
4862 }
4863
4864 #[derive(Deserialize)]
4865 struct CargoVcsInfo {
4866 git: CargoVcsGit,
4867 path_in_vcs: String,
4868 }
4869
4870 #[derive(Deserialize)]
4871 struct CargoPackage {
4872 repository: String,
4873 }
4874
4875 #[derive(Deserialize)]
4876 struct CargoToml {
4877 package: CargoPackage,
4878 }
4879
4880 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4881 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4882 Some((dir, json))
4883 }) else {
4884 bail!("No .cargo_vcs_info.json found in parent directories")
4885 };
4886 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4887 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4888 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4889 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4890 .context("parsing package.repository field of manifest")?;
4891 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4892 let permalink = provider.build_permalink(
4893 remote,
4894 BuildPermalinkParams {
4895 sha: &cargo_vcs_info.git.sha1,
4896 path: &path.to_string_lossy(),
4897 selection: Some(selection),
4898 },
4899 );
4900 Ok(permalink)
4901}
4902
4903fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4904 let Some(blame) = blame else {
4905 return proto::BlameBufferResponse {
4906 blame_response: None,
4907 };
4908 };
4909
4910 let entries = blame
4911 .entries
4912 .into_iter()
4913 .map(|entry| proto::BlameEntry {
4914 sha: entry.sha.as_bytes().into(),
4915 start_line: entry.range.start,
4916 end_line: entry.range.end,
4917 original_line_number: entry.original_line_number,
4918 author: entry.author,
4919 author_mail: entry.author_mail,
4920 author_time: entry.author_time,
4921 author_tz: entry.author_tz,
4922 committer: entry.committer_name,
4923 committer_mail: entry.committer_email,
4924 committer_time: entry.committer_time,
4925 committer_tz: entry.committer_tz,
4926 summary: entry.summary,
4927 previous: entry.previous,
4928 filename: entry.filename,
4929 })
4930 .collect::<Vec<_>>();
4931
4932 let messages = blame
4933 .messages
4934 .into_iter()
4935 .map(|(oid, message)| proto::CommitMessage {
4936 oid: oid.as_bytes().into(),
4937 message,
4938 })
4939 .collect::<Vec<_>>();
4940
4941 proto::BlameBufferResponse {
4942 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4943 entries,
4944 messages,
4945 remote_url: blame.remote_url,
4946 }),
4947 }
4948}
4949
4950fn deserialize_blame_buffer_response(
4951 response: proto::BlameBufferResponse,
4952) -> Option<git::blame::Blame> {
4953 let response = response.blame_response?;
4954 let entries = response
4955 .entries
4956 .into_iter()
4957 .filter_map(|entry| {
4958 Some(git::blame::BlameEntry {
4959 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4960 range: entry.start_line..entry.end_line,
4961 original_line_number: entry.original_line_number,
4962 committer_name: entry.committer,
4963 committer_time: entry.committer_time,
4964 committer_tz: entry.committer_tz,
4965 committer_email: entry.committer_mail,
4966 author: entry.author,
4967 author_mail: entry.author_mail,
4968 author_time: entry.author_time,
4969 author_tz: entry.author_tz,
4970 summary: entry.summary,
4971 previous: entry.previous,
4972 filename: entry.filename,
4973 })
4974 })
4975 .collect::<Vec<_>>();
4976
4977 let messages = response
4978 .messages
4979 .into_iter()
4980 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4981 .collect::<HashMap<_, _>>();
4982
4983 Some(Blame {
4984 entries,
4985 messages,
4986 remote_url: response.remote_url,
4987 })
4988}
4989
4990fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4991 proto::Branch {
4992 is_head: branch.is_head,
4993 ref_name: branch.ref_name.to_string(),
4994 unix_timestamp: branch
4995 .most_recent_commit
4996 .as_ref()
4997 .map(|commit| commit.commit_timestamp as u64),
4998 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4999 ref_name: upstream.ref_name.to_string(),
5000 tracking: upstream
5001 .tracking
5002 .status()
5003 .map(|upstream| proto::UpstreamTracking {
5004 ahead: upstream.ahead as u64,
5005 behind: upstream.behind as u64,
5006 }),
5007 }),
5008 most_recent_commit: branch
5009 .most_recent_commit
5010 .as_ref()
5011 .map(|commit| proto::CommitSummary {
5012 sha: commit.sha.to_string(),
5013 subject: commit.subject.to_string(),
5014 commit_timestamp: commit.commit_timestamp,
5015 author_name: commit.author_name.to_string(),
5016 }),
5017 }
5018}
5019
5020fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5021 git::repository::Branch {
5022 is_head: proto.is_head,
5023 ref_name: proto.ref_name.clone().into(),
5024 upstream: proto
5025 .upstream
5026 .as_ref()
5027 .map(|upstream| git::repository::Upstream {
5028 ref_name: upstream.ref_name.to_string().into(),
5029 tracking: upstream
5030 .tracking
5031 .as_ref()
5032 .map(|tracking| {
5033 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5034 ahead: tracking.ahead as u32,
5035 behind: tracking.behind as u32,
5036 })
5037 })
5038 .unwrap_or(git::repository::UpstreamTracking::Gone),
5039 }),
5040 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5041 git::repository::CommitSummary {
5042 sha: commit.sha.to_string().into(),
5043 subject: commit.subject.to_string().into(),
5044 commit_timestamp: commit.commit_timestamp,
5045 author_name: commit.author_name.to_string().into(),
5046 has_parent: true,
5047 }
5048 }),
5049 }
5050}
5051
5052fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5053 proto::GitCommitDetails {
5054 sha: commit.sha.to_string(),
5055 message: commit.message.to_string(),
5056 commit_timestamp: commit.commit_timestamp,
5057 author_email: commit.author_email.to_string(),
5058 author_name: commit.author_name.to_string(),
5059 }
5060}
5061
5062fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5063 CommitDetails {
5064 sha: proto.sha.clone().into(),
5065 message: proto.message.clone().into(),
5066 commit_timestamp: proto.commit_timestamp,
5067 author_email: proto.author_email.clone().into(),
5068 author_name: proto.author_name.clone().into(),
5069 }
5070}
5071
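/// Recomputes the repository snapshot (branch, statuses, merge state, stash entries,
/// head commit, and remote URLs), returning it along with the events implied by its
/// differences from `prev_snapshot`.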
5072async fn compute_snapshot(
5073 id: RepositoryId,
5074 work_directory_abs_path: Arc<Path>,
5075 prev_snapshot: RepositorySnapshot,
5076 backend: Arc<dyn GitRepository>,
5077) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5078 let mut events = Vec::new();
5079 let branches = backend.branches().await?;
5080 let branch = branches.into_iter().find(|branch| branch.is_head);
5081 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5082 let stash_entries = backend.stash_entries().await?;
5083 let statuses_by_path = SumTree::from_iter(
5084 statuses
5085 .entries
5086 .iter()
5087 .map(|(repo_path, status)| StatusEntry {
5088 repo_path: repo_path.clone(),
5089 status: *status,
5090 }),
5091 (),
5092 );
5093 let (merge_details, merge_heads_changed) =
5094 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5095 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5096
5097 if merge_heads_changed
5098 || branch != prev_snapshot.branch
5099 || statuses_by_path != prev_snapshot.statuses_by_path
5100 {
5101 events.push(RepositoryEvent::Updated {
5102 full_scan: true,
5103 new_instance: false,
5104 });
5105 }
5106
5107 // Cache merge conflict paths so they don't change from staging/unstaging,
5108 // until the merge heads change (at commit time, etc.).
5109 if merge_heads_changed {
5110 events.push(RepositoryEvent::MergeHeadsChanged);
5111 }
5112
5113 // Useful when `branch` is `None`, e.g. in a detached HEAD state.
5114 let head_commit = match backend.head_sha().await {
5115 Some(head_sha) => backend.show(head_sha).await.log_err(),
5116 None => None,
5117 };
5118
5119 // Used by edit prediction data collection
5120 let remote_origin_url = backend.remote_url("origin");
5121 let remote_upstream_url = backend.remote_url("upstream");
5122
5123 let snapshot = RepositorySnapshot {
5124 id,
5125 statuses_by_path,
5126 work_directory_abs_path,
5127 path_style: prev_snapshot.path_style,
5128 scan_id: prev_snapshot.scan_id + 1,
5129 branch,
5130 head_commit,
5131 merge: merge_details,
5132 remote_origin_url,
5133 remote_upstream_url,
5134 stash_entries,
5135 };
5136
5137 Ok((snapshot, events))
5138}
5139
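/// Converts a protobuf git file status (or the legacy simple status code) into a
/// `FileStatus`.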
5140fn status_from_proto(
5141 simple_status: i32,
5142 status: Option<proto::GitFileStatus>,
5143) -> anyhow::Result<FileStatus> {
5144 use proto::git_file_status::Variant;
5145
5146 let Some(variant) = status.and_then(|status| status.variant) else {
5147 let code = proto::GitStatus::from_i32(simple_status)
5148 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5149 let result = match code {
5150 proto::GitStatus::Added => TrackedStatus {
5151 worktree_status: StatusCode::Added,
5152 index_status: StatusCode::Unmodified,
5153 }
5154 .into(),
5155 proto::GitStatus::Modified => TrackedStatus {
5156 worktree_status: StatusCode::Modified,
5157 index_status: StatusCode::Unmodified,
5158 }
5159 .into(),
5160 proto::GitStatus::Conflict => UnmergedStatus {
5161 first_head: UnmergedStatusCode::Updated,
5162 second_head: UnmergedStatusCode::Updated,
5163 }
5164 .into(),
5165 proto::GitStatus::Deleted => TrackedStatus {
5166 worktree_status: StatusCode::Deleted,
5167 index_status: StatusCode::Unmodified,
5168 }
5169 .into(),
5170 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5171 };
5172 return Ok(result);
5173 };
5174
5175 let result = match variant {
5176 Variant::Untracked(_) => FileStatus::Untracked,
5177 Variant::Ignored(_) => FileStatus::Ignored,
5178 Variant::Unmerged(unmerged) => {
5179 let [first_head, second_head] =
5180 [unmerged.first_head, unmerged.second_head].map(|head| {
5181 let code = proto::GitStatus::from_i32(head)
5182 .with_context(|| format!("Invalid git status code: {head}"))?;
5183 let result = match code {
5184 proto::GitStatus::Added => UnmergedStatusCode::Added,
5185 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5186 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5187 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5188 };
5189 Ok(result)
5190 });
5191 let [first_head, second_head] = [first_head?, second_head?];
5192 UnmergedStatus {
5193 first_head,
5194 second_head,
5195 }
5196 .into()
5197 }
5198 Variant::Tracked(tracked) => {
5199 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5200 .map(|status| {
5201 let code = proto::GitStatus::from_i32(status)
5202 .with_context(|| format!("Invalid git status code: {status}"))?;
5203 let result = match code {
5204 proto::GitStatus::Modified => StatusCode::Modified,
5205 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5206 proto::GitStatus::Added => StatusCode::Added,
5207 proto::GitStatus::Deleted => StatusCode::Deleted,
5208 proto::GitStatus::Renamed => StatusCode::Renamed,
5209 proto::GitStatus::Copied => StatusCode::Copied,
5210 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5211 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5212 };
5213 Ok(result)
5214 });
5215 let [index_status, worktree_status] = [index_status?, worktree_status?];
5216 TrackedStatus {
5217 index_status,
5218 worktree_status,
5219 }
5220 .into()
5221 }
5222 };
5223 Ok(result)
5224}
5225
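/// Converts a `FileStatus` into its protobuf representation.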
5226fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5227 use proto::git_file_status::{Tracked, Unmerged, Variant};
5228
5229 let variant = match status {
5230 FileStatus::Untracked => Variant::Untracked(Default::default()),
5231 FileStatus::Ignored => Variant::Ignored(Default::default()),
5232 FileStatus::Unmerged(UnmergedStatus {
5233 first_head,
5234 second_head,
5235 }) => Variant::Unmerged(Unmerged {
5236 first_head: unmerged_status_to_proto(first_head),
5237 second_head: unmerged_status_to_proto(second_head),
5238 }),
5239 FileStatus::Tracked(TrackedStatus {
5240 index_status,
5241 worktree_status,
5242 }) => Variant::Tracked(Tracked {
5243 index_status: tracked_status_to_proto(index_status),
5244 worktree_status: tracked_status_to_proto(worktree_status),
5245 }),
5246 };
5247 proto::GitFileStatus {
5248 variant: Some(variant),
5249 }
5250}
5251
5252fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5253 match code {
5254 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5255 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5256 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5257 }
5258}
5259
5260fn tracked_status_to_proto(code: StatusCode) -> i32 {
5261 match code {
5262 StatusCode::Added => proto::GitStatus::Added as _,
5263 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5264 StatusCode::Modified => proto::GitStatus::Modified as _,
5265 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5266 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5267 StatusCode::Copied => proto::GitStatus::Copied as _,
5268 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5269 }
5270}