1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
109 /// These operation counts are used to ensure that head and index text
110 /// values read from the git repository are up-to-date with any hunk staging
111 /// operations that have been performed on the BufferDiff.
112 ///
113 /// The operation count is incremented immediately when the user initiates a
114 /// hunk stage/unstage operation. Then, upon finishing writing the new index
115 /// text do disk, the `operation count as of write` is updated to reflect
116 /// the operation count that prompted the write.
117 hunk_staging_operation_count: usize,
118 hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug, PartialEq, Eq)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305}
306
307#[derive(Clone, Debug)]
308pub struct JobsUpdated;
309
310#[derive(Debug)]
311pub enum GitStoreEvent {
312 ActiveRepositoryChanged(Option<RepositoryId>),
313 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
314 RepositoryAdded(RepositoryId),
315 RepositoryRemoved(RepositoryId),
316 IndexWriteError(anyhow::Error),
317 JobsUpdated,
318 ConflictsUpdated,
319}
320
321impl EventEmitter<RepositoryEvent> for Repository {}
322impl EventEmitter<JobsUpdated> for Repository {}
323impl EventEmitter<GitStoreEvent> for GitStore {}
324
325pub struct GitJob {
326 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
327 key: Option<GitJobKey>,
328}
329
330#[derive(PartialEq, Eq)]
331enum GitJobKey {
332 WriteIndex(RepoPath),
333 ReloadBufferDiffBases,
334 RefreshStatuses,
335 ReloadGitState,
336}
337
338impl GitStore {
339 pub fn local(
340 worktree_store: &Entity<WorktreeStore>,
341 buffer_store: Entity<BufferStore>,
342 environment: Entity<ProjectEnvironment>,
343 fs: Arc<dyn Fs>,
344 cx: &mut Context<Self>,
345 ) -> Self {
346 Self::new(
347 worktree_store.clone(),
348 buffer_store,
349 GitStoreState::Local {
350 next_repository_id: Arc::new(AtomicU64::new(1)),
351 downstream: None,
352 project_environment: environment,
353 fs,
354 },
355 cx,
356 )
357 }
358
359 pub fn remote(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 upstream_client: AnyProtoClient,
363 project_id: u64,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Remote {
370 upstream_client,
371 upstream_project_id: project_id,
372 downstream: None,
373 },
374 cx,
375 )
376 }
377
378 fn new(
379 worktree_store: Entity<WorktreeStore>,
380 buffer_store: Entity<BufferStore>,
381 state: GitStoreState,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 let _subscriptions = vec![
385 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
386 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
387 ];
388
389 GitStore {
390 state,
391 buffer_store,
392 worktree_store,
393 repositories: HashMap::default(),
394 active_repo_id: None,
395 _subscriptions,
396 loading_diffs: HashMap::default(),
397 shared_diffs: HashMap::default(),
398 diffs: HashMap::default(),
399 }
400 }
401
402 pub fn init(client: &AnyProtoClient) {
403 client.add_entity_request_handler(Self::handle_get_remotes);
404 client.add_entity_request_handler(Self::handle_get_branches);
405 client.add_entity_request_handler(Self::handle_get_default_branch);
406 client.add_entity_request_handler(Self::handle_change_branch);
407 client.add_entity_request_handler(Self::handle_create_branch);
408 client.add_entity_request_handler(Self::handle_rename_branch);
409 client.add_entity_request_handler(Self::handle_git_init);
410 client.add_entity_request_handler(Self::handle_push);
411 client.add_entity_request_handler(Self::handle_pull);
412 client.add_entity_request_handler(Self::handle_fetch);
413 client.add_entity_request_handler(Self::handle_stage);
414 client.add_entity_request_handler(Self::handle_unstage);
415 client.add_entity_request_handler(Self::handle_stash);
416 client.add_entity_request_handler(Self::handle_stash_pop);
417 client.add_entity_request_handler(Self::handle_stash_apply);
418 client.add_entity_request_handler(Self::handle_stash_drop);
419 client.add_entity_request_handler(Self::handle_commit);
420 client.add_entity_request_handler(Self::handle_reset);
421 client.add_entity_request_handler(Self::handle_show);
422 client.add_entity_request_handler(Self::handle_load_commit_diff);
423 client.add_entity_request_handler(Self::handle_checkout_files);
424 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
425 client.add_entity_request_handler(Self::handle_set_index_text);
426 client.add_entity_request_handler(Self::handle_askpass);
427 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
428 client.add_entity_request_handler(Self::handle_git_diff);
429 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
430 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
431 client.add_entity_message_handler(Self::handle_update_diff_bases);
432 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
433 client.add_entity_request_handler(Self::handle_blame_buffer);
434 client.add_entity_message_handler(Self::handle_update_repository);
435 client.add_entity_message_handler(Self::handle_remove_repository);
436 client.add_entity_request_handler(Self::handle_git_clone);
437 }
438
439 pub fn is_local(&self) -> bool {
440 matches!(self.state, GitStoreState::Local { .. })
441 }
442 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
443 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
444 let id = repo.read(cx).id;
445 if self.active_repo_id != Some(id) {
446 self.active_repo_id = Some(id);
447 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
448 }
449 }
450 }
451
452 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
453 match &mut self.state {
454 GitStoreState::Remote {
455 downstream: downstream_client,
456 ..
457 } => {
458 for repo in self.repositories.values() {
459 let update = repo.read(cx).snapshot.initial_update(project_id);
460 for update in split_repository_update(update) {
461 client.send(update).log_err();
462 }
463 }
464 *downstream_client = Some((client, ProjectId(project_id)));
465 }
466 GitStoreState::Local {
467 downstream: downstream_client,
468 ..
469 } => {
470 let mut snapshots = HashMap::default();
471 let (updates_tx, mut updates_rx) = mpsc::unbounded();
472 for repo in self.repositories.values() {
473 updates_tx
474 .unbounded_send(DownstreamUpdate::UpdateRepository(
475 repo.read(cx).snapshot.clone(),
476 ))
477 .ok();
478 }
479 *downstream_client = Some(LocalDownstreamState {
480 client: client.clone(),
481 project_id: ProjectId(project_id),
482 updates_tx,
483 _task: cx.spawn(async move |this, cx| {
484 cx.background_spawn(async move {
485 while let Some(update) = updates_rx.next().await {
486 match update {
487 DownstreamUpdate::UpdateRepository(snapshot) => {
488 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
489 {
490 let update =
491 snapshot.build_update(old_snapshot, project_id);
492 *old_snapshot = snapshot;
493 for update in split_repository_update(update) {
494 client.send(update)?;
495 }
496 } else {
497 let update = snapshot.initial_update(project_id);
498 for update in split_repository_update(update) {
499 client.send(update)?;
500 }
501 snapshots.insert(snapshot.id, snapshot);
502 }
503 }
504 DownstreamUpdate::RemoveRepository(id) => {
505 client.send(proto::RemoveRepository {
506 project_id,
507 id: id.to_proto(),
508 })?;
509 }
510 }
511 }
512 anyhow::Ok(())
513 })
514 .await
515 .ok();
516 this.update(cx, |this, _| {
517 if let GitStoreState::Local {
518 downstream: downstream_client,
519 ..
520 } = &mut this.state
521 {
522 downstream_client.take();
523 } else {
524 unreachable!("unshared called on remote store");
525 }
526 })
527 }),
528 });
529 }
530 }
531 }
532
533 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
534 match &mut self.state {
535 GitStoreState::Local {
536 downstream: downstream_client,
537 ..
538 } => {
539 downstream_client.take();
540 }
541 GitStoreState::Remote {
542 downstream: downstream_client,
543 ..
544 } => {
545 downstream_client.take();
546 }
547 }
548 self.shared_diffs.clear();
549 }
550
551 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
552 self.shared_diffs.remove(peer_id);
553 }
554
555 pub fn active_repository(&self) -> Option<Entity<Repository>> {
556 self.active_repo_id
557 .as_ref()
558 .map(|id| self.repositories[id].clone())
559 }
560
561 pub fn open_unstaged_diff(
562 &mut self,
563 buffer: Entity<Buffer>,
564 cx: &mut Context<Self>,
565 ) -> Task<Result<Entity<BufferDiff>>> {
566 let buffer_id = buffer.read(cx).remote_id();
567 if let Some(diff_state) = self.diffs.get(&buffer_id)
568 && let Some(unstaged_diff) = diff_state
569 .read(cx)
570 .unstaged_diff
571 .as_ref()
572 .and_then(|weak| weak.upgrade())
573 {
574 if let Some(task) =
575 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
576 {
577 return cx.background_executor().spawn(async move {
578 task.await;
579 Ok(unstaged_diff)
580 });
581 }
582 return Task::ready(Ok(unstaged_diff));
583 }
584
585 let Some((repo, repo_path)) =
586 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
587 else {
588 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
589 };
590
591 let task = self
592 .loading_diffs
593 .entry((buffer_id, DiffKind::Unstaged))
594 .or_insert_with(|| {
595 let staged_text = repo.update(cx, |repo, cx| {
596 repo.load_staged_text(buffer_id, repo_path, cx)
597 });
598 cx.spawn(async move |this, cx| {
599 Self::open_diff_internal(
600 this,
601 DiffKind::Unstaged,
602 staged_text.await.map(DiffBasesChange::SetIndex),
603 buffer,
604 cx,
605 )
606 .await
607 .map_err(Arc::new)
608 })
609 .shared()
610 })
611 .clone();
612
613 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
614 }
615
616 pub fn open_uncommitted_diff(
617 &mut self,
618 buffer: Entity<Buffer>,
619 cx: &mut Context<Self>,
620 ) -> Task<Result<Entity<BufferDiff>>> {
621 let buffer_id = buffer.read(cx).remote_id();
622
623 if let Some(diff_state) = self.diffs.get(&buffer_id)
624 && let Some(uncommitted_diff) = diff_state
625 .read(cx)
626 .uncommitted_diff
627 .as_ref()
628 .and_then(|weak| weak.upgrade())
629 {
630 if let Some(task) =
631 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
632 {
633 return cx.background_executor().spawn(async move {
634 task.await;
635 Ok(uncommitted_diff)
636 });
637 }
638 return Task::ready(Ok(uncommitted_diff));
639 }
640
641 let Some((repo, repo_path)) =
642 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
643 else {
644 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
645 };
646
647 let task = self
648 .loading_diffs
649 .entry((buffer_id, DiffKind::Uncommitted))
650 .or_insert_with(|| {
651 let changes = repo.update(cx, |repo, cx| {
652 repo.load_committed_text(buffer_id, repo_path, cx)
653 });
654
655 cx.spawn(async move |this, cx| {
656 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
657 .await
658 .map_err(Arc::new)
659 })
660 .shared()
661 })
662 .clone();
663
664 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
665 }
666
667 async fn open_diff_internal(
668 this: WeakEntity<Self>,
669 kind: DiffKind,
670 texts: Result<DiffBasesChange>,
671 buffer_entity: Entity<Buffer>,
672 cx: &mut AsyncApp,
673 ) -> Result<Entity<BufferDiff>> {
674 let diff_bases_change = match texts {
675 Err(e) => {
676 this.update(cx, |this, cx| {
677 let buffer = buffer_entity.read(cx);
678 let buffer_id = buffer.remote_id();
679 this.loading_diffs.remove(&(buffer_id, kind));
680 })?;
681 return Err(e);
682 }
683 Ok(change) => change,
684 };
685
686 this.update(cx, |this, cx| {
687 let buffer = buffer_entity.read(cx);
688 let buffer_id = buffer.remote_id();
689 let language = buffer.language().cloned();
690 let language_registry = buffer.language_registry();
691 let text_snapshot = buffer.text_snapshot();
692 this.loading_diffs.remove(&(buffer_id, kind));
693
694 let git_store = cx.weak_entity();
695 let diff_state = this
696 .diffs
697 .entry(buffer_id)
698 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
699
700 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
701
702 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
703 diff_state.update(cx, |diff_state, cx| {
704 diff_state.language = language;
705 diff_state.language_registry = language_registry;
706
707 match kind {
708 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
709 DiffKind::Uncommitted => {
710 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
711 diff
712 } else {
713 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
714 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
715 unstaged_diff
716 };
717
718 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
719 diff_state.uncommitted_diff = Some(diff.downgrade())
720 }
721 }
722
723 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
724 let rx = diff_state.wait_for_recalculation();
725
726 anyhow::Ok(async move {
727 if let Some(rx) = rx {
728 rx.await;
729 }
730 Ok(diff)
731 })
732 })
733 })??
734 .await
735 }
736
737 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
738 let diff_state = self.diffs.get(&buffer_id)?;
739 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
740 }
741
742 pub fn get_uncommitted_diff(
743 &self,
744 buffer_id: BufferId,
745 cx: &App,
746 ) -> Option<Entity<BufferDiff>> {
747 let diff_state = self.diffs.get(&buffer_id)?;
748 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
749 }
750
751 pub fn open_conflict_set(
752 &mut self,
753 buffer: Entity<Buffer>,
754 cx: &mut Context<Self>,
755 ) -> Entity<ConflictSet> {
756 log::debug!("open conflict set");
757 let buffer_id = buffer.read(cx).remote_id();
758
759 if let Some(git_state) = self.diffs.get(&buffer_id)
760 && let Some(conflict_set) = git_state
761 .read(cx)
762 .conflict_set
763 .as_ref()
764 .and_then(|weak| weak.upgrade())
765 {
766 let conflict_set = conflict_set;
767 let buffer_snapshot = buffer.read(cx).text_snapshot();
768
769 git_state.update(cx, |state, cx| {
770 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
771 });
772
773 return conflict_set;
774 }
775
776 let is_unmerged = self
777 .repository_and_path_for_buffer_id(buffer_id, cx)
778 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
779 let git_store = cx.weak_entity();
780 let buffer_git_state = self
781 .diffs
782 .entry(buffer_id)
783 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
784 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
785
786 self._subscriptions
787 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
788 cx.emit(GitStoreEvent::ConflictsUpdated);
789 }));
790
791 buffer_git_state.update(cx, |state, cx| {
792 state.conflict_set = Some(conflict_set.downgrade());
793 let buffer_snapshot = buffer.read(cx).text_snapshot();
794 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
795 });
796
797 conflict_set
798 }
799
800 pub fn project_path_git_status(
801 &self,
802 project_path: &ProjectPath,
803 cx: &App,
804 ) -> Option<FileStatus> {
805 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
806 Some(repo.read(cx).status_for_path(&repo_path)?.status)
807 }
808
809 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
810 let mut work_directory_abs_paths = Vec::new();
811 let mut checkpoints = Vec::new();
812 for repository in self.repositories.values() {
813 repository.update(cx, |repository, _| {
814 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
815 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
816 });
817 }
818
819 cx.background_executor().spawn(async move {
820 let checkpoints = future::try_join_all(checkpoints).await?;
821 Ok(GitStoreCheckpoint {
822 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
823 .into_iter()
824 .zip(checkpoints)
825 .collect(),
826 })
827 })
828 }
829
830 pub fn restore_checkpoint(
831 &self,
832 checkpoint: GitStoreCheckpoint,
833 cx: &mut App,
834 ) -> Task<Result<()>> {
835 let repositories_by_work_dir_abs_path = self
836 .repositories
837 .values()
838 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
839 .collect::<HashMap<_, _>>();
840
841 let mut tasks = Vec::new();
842 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
843 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
844 let restore = repository.update(cx, |repository, _| {
845 repository.restore_checkpoint(checkpoint)
846 });
847 tasks.push(async move { restore.await? });
848 }
849 }
850 cx.background_spawn(async move {
851 future::try_join_all(tasks).await?;
852 Ok(())
853 })
854 }
855
856 /// Compares two checkpoints, returning true if they are equal.
857 pub fn compare_checkpoints(
858 &self,
859 left: GitStoreCheckpoint,
860 mut right: GitStoreCheckpoint,
861 cx: &mut App,
862 ) -> Task<Result<bool>> {
863 let repositories_by_work_dir_abs_path = self
864 .repositories
865 .values()
866 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
867 .collect::<HashMap<_, _>>();
868
869 let mut tasks = Vec::new();
870 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
871 if let Some(right_checkpoint) = right
872 .checkpoints_by_work_dir_abs_path
873 .remove(&work_dir_abs_path)
874 {
875 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
876 {
877 let compare = repository.update(cx, |repository, _| {
878 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
879 });
880
881 tasks.push(async move { compare.await? });
882 }
883 } else {
884 return Task::ready(Ok(false));
885 }
886 }
887 cx.background_spawn(async move {
888 Ok(future::try_join_all(tasks)
889 .await?
890 .into_iter()
891 .all(|result| result))
892 })
893 }
894
895 /// Blames a buffer.
896 pub fn blame_buffer(
897 &self,
898 buffer: &Entity<Buffer>,
899 version: Option<clock::Global>,
900 cx: &mut App,
901 ) -> Task<Result<Option<Blame>>> {
902 let buffer = buffer.read(cx);
903 let Some((repo, repo_path)) =
904 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
905 else {
906 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
907 };
908 let content = match &version {
909 Some(version) => buffer.rope_for_version(version),
910 None => buffer.as_rope().clone(),
911 };
912 let version = version.unwrap_or(buffer.version());
913 let buffer_id = buffer.remote_id();
914
915 let rx = repo.update(cx, |repo, _| {
916 repo.send_job(None, move |state, _| async move {
917 match state {
918 RepositoryState::Local { backend, .. } => backend
919 .blame(repo_path.clone(), content)
920 .await
921 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
922 .map(Some),
923 RepositoryState::Remote { project_id, client } => {
924 let response = client
925 .request(proto::BlameBuffer {
926 project_id: project_id.to_proto(),
927 buffer_id: buffer_id.into(),
928 version: serialize_version(&version),
929 })
930 .await?;
931 Ok(deserialize_blame_buffer_response(response))
932 }
933 }
934 })
935 });
936
937 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
938 }
939
940 pub fn get_permalink_to_line(
941 &self,
942 buffer: &Entity<Buffer>,
943 selection: Range<u32>,
944 cx: &mut App,
945 ) -> Task<Result<url::Url>> {
946 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
947 return Task::ready(Err(anyhow!("buffer has no file")));
948 };
949
950 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
951 &(file.worktree.read(cx).id(), file.path.clone()).into(),
952 cx,
953 ) else {
954 // If we're not in a Git repo, check whether this is a Rust source
955 // file in the Cargo registry (presumably opened with go-to-definition
956 // from a normal Rust file). If so, we can put together a permalink
957 // using crate metadata.
958 if buffer
959 .read(cx)
960 .language()
961 .is_none_or(|lang| lang.name() != "Rust".into())
962 {
963 return Task::ready(Err(anyhow!("no permalink available")));
964 }
965 let file_path = file.worktree.read(cx).absolutize(&file.path);
966 return cx.spawn(async move |cx| {
967 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
968 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
969 .context("no permalink available")
970 });
971
972 // TODO remote case
973 };
974
975 let buffer_id = buffer.read(cx).remote_id();
976 let branch = repo.read(cx).branch.clone();
977 let remote = branch
978 .as_ref()
979 .and_then(|b| b.upstream.as_ref())
980 .and_then(|b| b.remote_name())
981 .unwrap_or("origin")
982 .to_string();
983
984 let rx = repo.update(cx, |repo, _| {
985 repo.send_job(None, move |state, cx| async move {
986 match state {
987 RepositoryState::Local { backend, .. } => {
988 let origin_url = backend
989 .remote_url(&remote)
990 .with_context(|| format!("remote \"{remote}\" not found"))?;
991
992 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
993
994 let provider_registry =
995 cx.update(GitHostingProviderRegistry::default_global)?;
996
997 let (provider, remote) =
998 parse_git_remote_url(provider_registry, &origin_url)
999 .context("parsing Git remote URL")?;
1000
1001 let path = repo_path.as_unix_str();
1002
1003 Ok(provider.build_permalink(
1004 remote,
1005 BuildPermalinkParams {
1006 sha: &sha,
1007 path,
1008 selection: Some(selection),
1009 },
1010 ))
1011 }
1012 RepositoryState::Remote { project_id, client } => {
1013 let response = client
1014 .request(proto::GetPermalinkToLine {
1015 project_id: project_id.to_proto(),
1016 buffer_id: buffer_id.into(),
1017 selection: Some(proto::Range {
1018 start: selection.start as u64,
1019 end: selection.end as u64,
1020 }),
1021 })
1022 .await?;
1023
1024 url::Url::parse(&response.permalink).context("failed to parse permalink")
1025 }
1026 }
1027 })
1028 });
1029 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1030 }
1031
1032 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1033 match &self.state {
1034 GitStoreState::Local {
1035 downstream: downstream_client,
1036 ..
1037 } => downstream_client
1038 .as_ref()
1039 .map(|state| (state.client.clone(), state.project_id)),
1040 GitStoreState::Remote {
1041 downstream: downstream_client,
1042 ..
1043 } => downstream_client.clone(),
1044 }
1045 }
1046
1047 fn upstream_client(&self) -> Option<AnyProtoClient> {
1048 match &self.state {
1049 GitStoreState::Local { .. } => None,
1050 GitStoreState::Remote {
1051 upstream_client, ..
1052 } => Some(upstream_client.clone()),
1053 }
1054 }
1055
1056 fn on_worktree_store_event(
1057 &mut self,
1058 worktree_store: Entity<WorktreeStore>,
1059 event: &WorktreeStoreEvent,
1060 cx: &mut Context<Self>,
1061 ) {
1062 let GitStoreState::Local {
1063 project_environment,
1064 downstream,
1065 next_repository_id,
1066 fs,
1067 } = &self.state
1068 else {
1069 return;
1070 };
1071
1072 match event {
1073 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1074 if let Some(worktree) = self
1075 .worktree_store
1076 .read(cx)
1077 .worktree_for_id(*worktree_id, cx)
1078 {
1079 let paths_by_git_repo =
1080 self.process_updated_entries(&worktree, updated_entries, cx);
1081 let downstream = downstream
1082 .as_ref()
1083 .map(|downstream| downstream.updates_tx.clone());
1084 cx.spawn(async move |_, cx| {
1085 let paths_by_git_repo = paths_by_git_repo.await;
1086 for (repo, paths) in paths_by_git_repo {
1087 repo.update(cx, |repo, cx| {
1088 repo.paths_changed(paths, downstream.clone(), cx);
1089 })
1090 .ok();
1091 }
1092 })
1093 .detach();
1094 }
1095 }
1096 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1097 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1098 else {
1099 return;
1100 };
1101 if !worktree.read(cx).is_visible() {
1102 log::debug!(
1103 "not adding repositories for local worktree {:?} because it's not visible",
1104 worktree.read(cx).abs_path()
1105 );
1106 return;
1107 }
1108 self.update_repositories_from_worktree(
1109 project_environment.clone(),
1110 next_repository_id.clone(),
1111 downstream
1112 .as_ref()
1113 .map(|downstream| downstream.updates_tx.clone()),
1114 changed_repos.clone(),
1115 fs.clone(),
1116 cx,
1117 );
1118 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1119 }
1120 _ => {}
1121 }
1122 }
1123 fn on_repository_event(
1124 &mut self,
1125 repo: Entity<Repository>,
1126 event: &RepositoryEvent,
1127 cx: &mut Context<Self>,
1128 ) {
1129 let id = repo.read(cx).id;
1130 let repo_snapshot = repo.read(cx).snapshot.clone();
1131 for (buffer_id, diff) in self.diffs.iter() {
1132 if let Some((buffer_repo, repo_path)) =
1133 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1134 && buffer_repo == repo
1135 {
1136 diff.update(cx, |diff, cx| {
1137 if let Some(conflict_set) = &diff.conflict_set {
1138 let conflict_status_changed =
1139 conflict_set.update(cx, |conflict_set, cx| {
1140 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1141 conflict_set.set_has_conflict(has_conflict, cx)
1142 })?;
1143 if conflict_status_changed {
1144 let buffer_store = self.buffer_store.read(cx);
1145 if let Some(buffer) = buffer_store.get(*buffer_id) {
1146 let _ = diff
1147 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1148 }
1149 }
1150 }
1151 anyhow::Ok(())
1152 })
1153 .ok();
1154 }
1155 }
1156 cx.emit(GitStoreEvent::RepositoryUpdated(
1157 id,
1158 event.clone(),
1159 self.active_repo_id == Some(id),
1160 ))
1161 }
1162
1163 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1164 cx.emit(GitStoreEvent::JobsUpdated)
1165 }
1166
1167 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1168 fn update_repositories_from_worktree(
1169 &mut self,
1170 project_environment: Entity<ProjectEnvironment>,
1171 next_repository_id: Arc<AtomicU64>,
1172 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1173 updated_git_repositories: UpdatedGitRepositoriesSet,
1174 fs: Arc<dyn Fs>,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let mut removed_ids = Vec::new();
1178 for update in updated_git_repositories.iter() {
1179 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1180 let existing_work_directory_abs_path =
1181 repo.read(cx).work_directory_abs_path.clone();
1182 Some(&existing_work_directory_abs_path)
1183 == update.old_work_directory_abs_path.as_ref()
1184 || Some(&existing_work_directory_abs_path)
1185 == update.new_work_directory_abs_path.as_ref()
1186 }) {
1187 if let Some(new_work_directory_abs_path) =
1188 update.new_work_directory_abs_path.clone()
1189 {
1190 existing.update(cx, |existing, cx| {
1191 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1192 existing.schedule_scan(updates_tx.clone(), cx);
1193 });
1194 } else {
1195 removed_ids.push(*id);
1196 }
1197 } else if let UpdatedGitRepository {
1198 new_work_directory_abs_path: Some(work_directory_abs_path),
1199 dot_git_abs_path: Some(dot_git_abs_path),
1200 repository_dir_abs_path: Some(repository_dir_abs_path),
1201 common_dir_abs_path: Some(common_dir_abs_path),
1202 ..
1203 } = update
1204 {
1205 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1206 let git_store = cx.weak_entity();
1207 let repo = cx.new(|cx| {
1208 let mut repo = Repository::local(
1209 id,
1210 work_directory_abs_path.clone(),
1211 dot_git_abs_path.clone(),
1212 repository_dir_abs_path.clone(),
1213 common_dir_abs_path.clone(),
1214 project_environment.downgrade(),
1215 fs.clone(),
1216 git_store,
1217 cx,
1218 );
1219 repo.schedule_scan(updates_tx.clone(), cx);
1220 repo
1221 });
1222 self._subscriptions
1223 .push(cx.subscribe(&repo, Self::on_repository_event));
1224 self._subscriptions
1225 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1226 self.repositories.insert(id, repo);
1227 cx.emit(GitStoreEvent::RepositoryAdded(id));
1228 self.active_repo_id.get_or_insert_with(|| {
1229 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1230 id
1231 });
1232 }
1233 }
1234
1235 for id in removed_ids {
1236 if self.active_repo_id == Some(id) {
1237 self.active_repo_id = None;
1238 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1239 }
1240 self.repositories.remove(&id);
1241 if let Some(updates_tx) = updates_tx.as_ref() {
1242 updates_tx
1243 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1244 .ok();
1245 }
1246 }
1247 }
1248
1249 fn on_buffer_store_event(
1250 &mut self,
1251 _: Entity<BufferStore>,
1252 event: &BufferStoreEvent,
1253 cx: &mut Context<Self>,
1254 ) {
1255 match event {
1256 BufferStoreEvent::BufferAdded(buffer) => {
1257 cx.subscribe(buffer, |this, buffer, event, cx| {
1258 if let BufferEvent::LanguageChanged = event {
1259 let buffer_id = buffer.read(cx).remote_id();
1260 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1261 diff_state.update(cx, |diff_state, cx| {
1262 diff_state.buffer_language_changed(buffer, cx);
1263 });
1264 }
1265 }
1266 })
1267 .detach();
1268 }
1269 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1270 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1271 diffs.remove(buffer_id);
1272 }
1273 }
1274 BufferStoreEvent::BufferDropped(buffer_id) => {
1275 self.diffs.remove(buffer_id);
1276 for diffs in self.shared_diffs.values_mut() {
1277 diffs.remove(buffer_id);
1278 }
1279 }
1280
1281 _ => {}
1282 }
1283 }
1284
1285 pub fn recalculate_buffer_diffs(
1286 &mut self,
1287 buffers: Vec<Entity<Buffer>>,
1288 cx: &mut Context<Self>,
1289 ) -> impl Future<Output = ()> + use<> {
1290 let mut futures = Vec::new();
1291 for buffer in buffers {
1292 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1293 let buffer = buffer.read(cx).text_snapshot();
1294 diff_state.update(cx, |diff_state, cx| {
1295 diff_state.recalculate_diffs(buffer.clone(), cx);
1296 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1297 });
1298 futures.push(diff_state.update(cx, |diff_state, cx| {
1299 diff_state
1300 .reparse_conflict_markers(buffer, cx)
1301 .map(|_| {})
1302 .boxed()
1303 }));
1304 }
1305 }
1306 async move {
1307 futures::future::join_all(futures).await;
1308 }
1309 }
1310
1311 fn on_buffer_diff_event(
1312 &mut self,
1313 diff: Entity<buffer_diff::BufferDiff>,
1314 event: &BufferDiffEvent,
1315 cx: &mut Context<Self>,
1316 ) {
1317 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1318 let buffer_id = diff.read(cx).buffer_id;
1319 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1320 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1321 diff_state.hunk_staging_operation_count += 1;
1322 diff_state.hunk_staging_operation_count
1323 });
1324 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1325 let recv = repo.update(cx, |repo, cx| {
1326 log::debug!("hunks changed for {}", path.as_unix_str());
1327 repo.spawn_set_index_text_job(
1328 path,
1329 new_index_text.as_ref().map(|rope| rope.to_string()),
1330 Some(hunk_staging_operation_count),
1331 cx,
1332 )
1333 });
1334 let diff = diff.downgrade();
1335 cx.spawn(async move |this, cx| {
1336 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1337 diff.update(cx, |diff, cx| {
1338 diff.clear_pending_hunks(cx);
1339 })
1340 .ok();
1341 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1342 .ok();
1343 }
1344 })
1345 .detach();
1346 }
1347 }
1348 }
1349 }
1350
1351 fn local_worktree_git_repos_changed(
1352 &mut self,
1353 worktree: Entity<Worktree>,
1354 changed_repos: &UpdatedGitRepositoriesSet,
1355 cx: &mut Context<Self>,
1356 ) {
1357 log::debug!("local worktree repos changed");
1358 debug_assert!(worktree.read(cx).is_local());
1359
1360 for repository in self.repositories.values() {
1361 repository.update(cx, |repository, cx| {
1362 let repo_abs_path = &repository.work_directory_abs_path;
1363 if changed_repos.iter().any(|update| {
1364 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1365 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1366 }) {
1367 repository.reload_buffer_diff_bases(cx);
1368 }
1369 });
1370 }
1371 }
1372
1373 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1374 &self.repositories
1375 }
1376
1377 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1378 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1379 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1380 Some(status.status)
1381 }
1382
1383 pub fn repository_and_path_for_buffer_id(
1384 &self,
1385 buffer_id: BufferId,
1386 cx: &App,
1387 ) -> Option<(Entity<Repository>, RepoPath)> {
1388 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1389 let project_path = buffer.read(cx).project_path(cx)?;
1390 self.repository_and_path_for_project_path(&project_path, cx)
1391 }
1392
1393 pub fn repository_and_path_for_project_path(
1394 &self,
1395 path: &ProjectPath,
1396 cx: &App,
1397 ) -> Option<(Entity<Repository>, RepoPath)> {
1398 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1399 self.repositories
1400 .values()
1401 .filter_map(|repo| {
1402 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1403 Some((repo.clone(), repo_path))
1404 })
1405 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1406 }
1407
1408 pub fn git_init(
1409 &self,
1410 path: Arc<Path>,
1411 fallback_branch_name: String,
1412 cx: &App,
1413 ) -> Task<Result<()>> {
1414 match &self.state {
1415 GitStoreState::Local { fs, .. } => {
1416 let fs = fs.clone();
1417 cx.background_executor()
1418 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1419 }
1420 GitStoreState::Remote {
1421 upstream_client,
1422 upstream_project_id: project_id,
1423 ..
1424 } => {
1425 let client = upstream_client.clone();
1426 let project_id = *project_id;
1427 cx.background_executor().spawn(async move {
1428 client
1429 .request(proto::GitInit {
1430 project_id: project_id,
1431 abs_path: path.to_string_lossy().into_owned(),
1432 fallback_branch_name,
1433 })
1434 .await?;
1435 Ok(())
1436 })
1437 }
1438 }
1439 }
1440
1441 pub fn git_clone(
1442 &self,
1443 repo: String,
1444 path: impl Into<Arc<std::path::Path>>,
1445 cx: &App,
1446 ) -> Task<Result<()>> {
1447 let path = path.into();
1448 match &self.state {
1449 GitStoreState::Local { fs, .. } => {
1450 let fs = fs.clone();
1451 cx.background_executor()
1452 .spawn(async move { fs.git_clone(&repo, &path).await })
1453 }
1454 GitStoreState::Remote {
1455 upstream_client,
1456 upstream_project_id,
1457 ..
1458 } => {
1459 if upstream_client.is_via_collab() {
1460 return Task::ready(Err(anyhow!(
1461 "Git Clone isn't supported for project guests"
1462 )));
1463 }
1464 let request = upstream_client.request(proto::GitClone {
1465 project_id: *upstream_project_id,
1466 abs_path: path.to_string_lossy().into_owned(),
1467 remote_repo: repo,
1468 });
1469
1470 cx.background_spawn(async move {
1471 let result = request.await?;
1472
1473 match result.success {
1474 true => Ok(()),
1475 false => Err(anyhow!("Git Clone failed")),
1476 }
1477 })
1478 }
1479 }
1480 }
1481
1482 async fn handle_update_repository(
1483 this: Entity<Self>,
1484 envelope: TypedEnvelope<proto::UpdateRepository>,
1485 mut cx: AsyncApp,
1486 ) -> Result<()> {
1487 this.update(&mut cx, |this, cx| {
1488 let path_style = this.worktree_store.read(cx).path_style();
1489 let mut update = envelope.payload;
1490
1491 let id = RepositoryId::from_proto(update.id);
1492 let client = this.upstream_client().context("no upstream client")?;
1493
1494 let mut is_new = false;
1495 let repo = this.repositories.entry(id).or_insert_with(|| {
1496 is_new = true;
1497 let git_store = cx.weak_entity();
1498 cx.new(|cx| {
1499 Repository::remote(
1500 id,
1501 Path::new(&update.abs_path).into(),
1502 path_style,
1503 ProjectId(update.project_id),
1504 client,
1505 git_store,
1506 cx,
1507 )
1508 })
1509 });
1510 if is_new {
1511 this._subscriptions
1512 .push(cx.subscribe(repo, Self::on_repository_event))
1513 }
1514
1515 repo.update(cx, {
1516 let update = update.clone();
1517 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1518 })?;
1519
1520 this.active_repo_id.get_or_insert_with(|| {
1521 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1522 id
1523 });
1524
1525 if let Some((client, project_id)) = this.downstream_client() {
1526 update.project_id = project_id.to_proto();
1527 client.send(update).log_err();
1528 }
1529 Ok(())
1530 })?
1531 }
1532
1533 async fn handle_remove_repository(
1534 this: Entity<Self>,
1535 envelope: TypedEnvelope<proto::RemoveRepository>,
1536 mut cx: AsyncApp,
1537 ) -> Result<()> {
1538 this.update(&mut cx, |this, cx| {
1539 let mut update = envelope.payload;
1540 let id = RepositoryId::from_proto(update.id);
1541 this.repositories.remove(&id);
1542 if let Some((client, project_id)) = this.downstream_client() {
1543 update.project_id = project_id.to_proto();
1544 client.send(update).log_err();
1545 }
1546 if this.active_repo_id == Some(id) {
1547 this.active_repo_id = None;
1548 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1549 }
1550 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1551 })
1552 }
1553
1554 async fn handle_git_init(
1555 this: Entity<Self>,
1556 envelope: TypedEnvelope<proto::GitInit>,
1557 cx: AsyncApp,
1558 ) -> Result<proto::Ack> {
1559 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1560 let name = envelope.payload.fallback_branch_name;
1561 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1562 .await?;
1563
1564 Ok(proto::Ack {})
1565 }
1566
1567 async fn handle_git_clone(
1568 this: Entity<Self>,
1569 envelope: TypedEnvelope<proto::GitClone>,
1570 cx: AsyncApp,
1571 ) -> Result<proto::GitCloneResponse> {
1572 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1573 let repo_name = envelope.payload.remote_repo;
1574 let result = cx
1575 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1576 .await;
1577
1578 Ok(proto::GitCloneResponse {
1579 success: result.is_ok(),
1580 })
1581 }
1582
1583 async fn handle_fetch(
1584 this: Entity<Self>,
1585 envelope: TypedEnvelope<proto::Fetch>,
1586 mut cx: AsyncApp,
1587 ) -> Result<proto::RemoteMessageResponse> {
1588 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1589 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1590 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1591 let askpass_id = envelope.payload.askpass_id;
1592
1593 let askpass = make_remote_delegate(
1594 this,
1595 envelope.payload.project_id,
1596 repository_id,
1597 askpass_id,
1598 &mut cx,
1599 );
1600
1601 let remote_output = repository_handle
1602 .update(&mut cx, |repository_handle, cx| {
1603 repository_handle.fetch(fetch_options, askpass, cx)
1604 })?
1605 .await??;
1606
1607 Ok(proto::RemoteMessageResponse {
1608 stdout: remote_output.stdout,
1609 stderr: remote_output.stderr,
1610 })
1611 }
1612
1613 async fn handle_push(
1614 this: Entity<Self>,
1615 envelope: TypedEnvelope<proto::Push>,
1616 mut cx: AsyncApp,
1617 ) -> Result<proto::RemoteMessageResponse> {
1618 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1619 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1620
1621 let askpass_id = envelope.payload.askpass_id;
1622 let askpass = make_remote_delegate(
1623 this,
1624 envelope.payload.project_id,
1625 repository_id,
1626 askpass_id,
1627 &mut cx,
1628 );
1629
1630 let options = envelope
1631 .payload
1632 .options
1633 .as_ref()
1634 .map(|_| match envelope.payload.options() {
1635 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1636 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1637 });
1638
1639 let branch_name = envelope.payload.branch_name.into();
1640 let remote_name = envelope.payload.remote_name.into();
1641
1642 let remote_output = repository_handle
1643 .update(&mut cx, |repository_handle, cx| {
1644 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1645 })?
1646 .await??;
1647 Ok(proto::RemoteMessageResponse {
1648 stdout: remote_output.stdout,
1649 stderr: remote_output.stderr,
1650 })
1651 }
1652
1653 async fn handle_pull(
1654 this: Entity<Self>,
1655 envelope: TypedEnvelope<proto::Pull>,
1656 mut cx: AsyncApp,
1657 ) -> Result<proto::RemoteMessageResponse> {
1658 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1659 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1660 let askpass_id = envelope.payload.askpass_id;
1661 let askpass = make_remote_delegate(
1662 this,
1663 envelope.payload.project_id,
1664 repository_id,
1665 askpass_id,
1666 &mut cx,
1667 );
1668
1669 let branch_name = envelope.payload.branch_name.into();
1670 let remote_name = envelope.payload.remote_name.into();
1671
1672 let remote_message = repository_handle
1673 .update(&mut cx, |repository_handle, cx| {
1674 repository_handle.pull(branch_name, remote_name, askpass, cx)
1675 })?
1676 .await??;
1677
1678 Ok(proto::RemoteMessageResponse {
1679 stdout: remote_message.stdout,
1680 stderr: remote_message.stderr,
1681 })
1682 }
1683
1684 async fn handle_stage(
1685 this: Entity<Self>,
1686 envelope: TypedEnvelope<proto::Stage>,
1687 mut cx: AsyncApp,
1688 ) -> Result<proto::Ack> {
1689 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1690 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1691
1692 let entries = envelope
1693 .payload
1694 .paths
1695 .into_iter()
1696 .map(|path| RepoPath::new(&path))
1697 .collect::<Result<Vec<_>>>()?;
1698
1699 repository_handle
1700 .update(&mut cx, |repository_handle, cx| {
1701 repository_handle.stage_entries(entries, cx)
1702 })?
1703 .await?;
1704 Ok(proto::Ack {})
1705 }
1706
1707 async fn handle_unstage(
1708 this: Entity<Self>,
1709 envelope: TypedEnvelope<proto::Unstage>,
1710 mut cx: AsyncApp,
1711 ) -> Result<proto::Ack> {
1712 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1713 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1714
1715 let entries = envelope
1716 .payload
1717 .paths
1718 .into_iter()
1719 .map(|path| RepoPath::new(&path))
1720 .collect::<Result<Vec<_>>>()?;
1721
1722 repository_handle
1723 .update(&mut cx, |repository_handle, cx| {
1724 repository_handle.unstage_entries(entries, cx)
1725 })?
1726 .await?;
1727
1728 Ok(proto::Ack {})
1729 }
1730
1731 async fn handle_stash(
1732 this: Entity<Self>,
1733 envelope: TypedEnvelope<proto::Stash>,
1734 mut cx: AsyncApp,
1735 ) -> Result<proto::Ack> {
1736 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1737 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1738
1739 let entries = envelope
1740 .payload
1741 .paths
1742 .into_iter()
1743 .map(|path| RepoPath::new(&path))
1744 .collect::<Result<Vec<_>>>()?;
1745
1746 repository_handle
1747 .update(&mut cx, |repository_handle, cx| {
1748 repository_handle.stash_entries(entries, cx)
1749 })?
1750 .await?;
1751
1752 Ok(proto::Ack {})
1753 }
1754
1755 async fn handle_stash_pop(
1756 this: Entity<Self>,
1757 envelope: TypedEnvelope<proto::StashPop>,
1758 mut cx: AsyncApp,
1759 ) -> Result<proto::Ack> {
1760 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1761 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1762 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1763
1764 repository_handle
1765 .update(&mut cx, |repository_handle, cx| {
1766 repository_handle.stash_pop(stash_index, cx)
1767 })?
1768 .await?;
1769
1770 Ok(proto::Ack {})
1771 }
1772
1773 async fn handle_stash_apply(
1774 this: Entity<Self>,
1775 envelope: TypedEnvelope<proto::StashApply>,
1776 mut cx: AsyncApp,
1777 ) -> Result<proto::Ack> {
1778 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1779 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1780 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1781
1782 repository_handle
1783 .update(&mut cx, |repository_handle, cx| {
1784 repository_handle.stash_apply(stash_index, cx)
1785 })?
1786 .await?;
1787
1788 Ok(proto::Ack {})
1789 }
1790
1791 async fn handle_stash_drop(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::StashDrop>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::Ack> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1799
1800 repository_handle
1801 .update(&mut cx, |repository_handle, cx| {
1802 repository_handle.stash_drop(stash_index, cx)
1803 })?
1804 .await??;
1805
1806 Ok(proto::Ack {})
1807 }
1808
1809 async fn handle_set_index_text(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::SetIndexText>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::Ack> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1817
1818 repository_handle
1819 .update(&mut cx, |repository_handle, cx| {
1820 repository_handle.spawn_set_index_text_job(
1821 repo_path,
1822 envelope.payload.text,
1823 None,
1824 cx,
1825 )
1826 })?
1827 .await??;
1828 Ok(proto::Ack {})
1829 }
1830
1831 async fn handle_commit(
1832 this: Entity<Self>,
1833 envelope: TypedEnvelope<proto::Commit>,
1834 mut cx: AsyncApp,
1835 ) -> Result<proto::Ack> {
1836 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1837 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1838
1839 let message = SharedString::from(envelope.payload.message);
1840 let name = envelope.payload.name.map(SharedString::from);
1841 let email = envelope.payload.email.map(SharedString::from);
1842 let options = envelope.payload.options.unwrap_or_default();
1843
1844 repository_handle
1845 .update(&mut cx, |repository_handle, cx| {
1846 repository_handle.commit(
1847 message,
1848 name.zip(email),
1849 CommitOptions {
1850 amend: options.amend,
1851 signoff: options.signoff,
1852 },
1853 cx,
1854 )
1855 })?
1856 .await??;
1857 Ok(proto::Ack {})
1858 }
1859
1860 async fn handle_get_remotes(
1861 this: Entity<Self>,
1862 envelope: TypedEnvelope<proto::GetRemotes>,
1863 mut cx: AsyncApp,
1864 ) -> Result<proto::GetRemotesResponse> {
1865 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1866 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1867
1868 let branch_name = envelope.payload.branch_name;
1869
1870 let remotes = repository_handle
1871 .update(&mut cx, |repository_handle, _| {
1872 repository_handle.get_remotes(branch_name)
1873 })?
1874 .await??;
1875
1876 Ok(proto::GetRemotesResponse {
1877 remotes: remotes
1878 .into_iter()
1879 .map(|remotes| proto::get_remotes_response::Remote {
1880 name: remotes.name.to_string(),
1881 })
1882 .collect::<Vec<_>>(),
1883 })
1884 }
1885
1886 async fn handle_get_branches(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::GitGetBranches>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::GitBranchesResponse> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893
1894 let branches = repository_handle
1895 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1896 .await??;
1897
1898 Ok(proto::GitBranchesResponse {
1899 branches: branches
1900 .into_iter()
1901 .map(|branch| branch_to_proto(&branch))
1902 .collect::<Vec<_>>(),
1903 })
1904 }
1905 async fn handle_get_default_branch(
1906 this: Entity<Self>,
1907 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1908 mut cx: AsyncApp,
1909 ) -> Result<proto::GetDefaultBranchResponse> {
1910 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1911 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1912
1913 let branch = repository_handle
1914 .update(&mut cx, |repository_handle, _| {
1915 repository_handle.default_branch()
1916 })?
1917 .await??
1918 .map(Into::into);
1919
1920 Ok(proto::GetDefaultBranchResponse { branch })
1921 }
1922 async fn handle_create_branch(
1923 this: Entity<Self>,
1924 envelope: TypedEnvelope<proto::GitCreateBranch>,
1925 mut cx: AsyncApp,
1926 ) -> Result<proto::Ack> {
1927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1928 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1929 let branch_name = envelope.payload.branch_name;
1930
1931 repository_handle
1932 .update(&mut cx, |repository_handle, _| {
1933 repository_handle.create_branch(branch_name)
1934 })?
1935 .await??;
1936
1937 Ok(proto::Ack {})
1938 }
1939
1940 async fn handle_change_branch(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::GitChangeBranch>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947 let branch_name = envelope.payload.branch_name;
1948
1949 repository_handle
1950 .update(&mut cx, |repository_handle, _| {
1951 repository_handle.change_branch(branch_name)
1952 })?
1953 .await??;
1954
1955 Ok(proto::Ack {})
1956 }
1957
1958 async fn handle_rename_branch(
1959 this: Entity<Self>,
1960 envelope: TypedEnvelope<proto::GitRenameBranch>,
1961 mut cx: AsyncApp,
1962 ) -> Result<proto::Ack> {
1963 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1964 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1965 let branch = envelope.payload.branch;
1966 let new_name = envelope.payload.new_name;
1967
1968 repository_handle
1969 .update(&mut cx, |repository_handle, _| {
1970 repository_handle.rename_branch(branch, new_name)
1971 })?
1972 .await??;
1973
1974 Ok(proto::Ack {})
1975 }
1976
1977 async fn handle_show(
1978 this: Entity<Self>,
1979 envelope: TypedEnvelope<proto::GitShow>,
1980 mut cx: AsyncApp,
1981 ) -> Result<proto::GitCommitDetails> {
1982 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1983 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1984
1985 let commit = repository_handle
1986 .update(&mut cx, |repository_handle, _| {
1987 repository_handle.show(envelope.payload.commit)
1988 })?
1989 .await??;
1990 Ok(proto::GitCommitDetails {
1991 sha: commit.sha.into(),
1992 message: commit.message.into(),
1993 commit_timestamp: commit.commit_timestamp,
1994 author_email: commit.author_email.into(),
1995 author_name: commit.author_name.into(),
1996 })
1997 }
1998
1999 async fn handle_load_commit_diff(
2000 this: Entity<Self>,
2001 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2002 mut cx: AsyncApp,
2003 ) -> Result<proto::LoadCommitDiffResponse> {
2004 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2005 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2006
2007 let commit_diff = repository_handle
2008 .update(&mut cx, |repository_handle, _| {
2009 repository_handle.load_commit_diff(envelope.payload.commit)
2010 })?
2011 .await??;
2012 Ok(proto::LoadCommitDiffResponse {
2013 files: commit_diff
2014 .files
2015 .into_iter()
2016 .map(|file| proto::CommitFile {
2017 path: file.path.to_proto(),
2018 old_text: file.old_text,
2019 new_text: file.new_text,
2020 })
2021 .collect(),
2022 })
2023 }
2024
2025 async fn handle_reset(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::GitReset>,
2028 mut cx: AsyncApp,
2029 ) -> Result<proto::Ack> {
2030 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2031 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2032
2033 let mode = match envelope.payload.mode() {
2034 git_reset::ResetMode::Soft => ResetMode::Soft,
2035 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2036 };
2037
2038 repository_handle
2039 .update(&mut cx, |repository_handle, cx| {
2040 repository_handle.reset(envelope.payload.commit, mode, cx)
2041 })?
2042 .await??;
2043 Ok(proto::Ack {})
2044 }
2045
2046 async fn handle_checkout_files(
2047 this: Entity<Self>,
2048 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2049 mut cx: AsyncApp,
2050 ) -> Result<proto::Ack> {
2051 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2052 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2053 let paths = envelope
2054 .payload
2055 .paths
2056 .iter()
2057 .map(|s| RepoPath::from_proto(s))
2058 .collect::<Result<Vec<_>>>()?;
2059
2060 repository_handle
2061 .update(&mut cx, |repository_handle, cx| {
2062 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2063 })?
2064 .await??;
2065 Ok(proto::Ack {})
2066 }
2067
2068 async fn handle_open_commit_message_buffer(
2069 this: Entity<Self>,
2070 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2071 mut cx: AsyncApp,
2072 ) -> Result<proto::OpenBufferResponse> {
2073 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2074 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2075 let buffer = repository
2076 .update(&mut cx, |repository, cx| {
2077 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2078 })?
2079 .await?;
2080
2081 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2082 this.update(&mut cx, |this, cx| {
2083 this.buffer_store.update(cx, |buffer_store, cx| {
2084 buffer_store
2085 .create_buffer_for_peer(
2086 &buffer,
2087 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2088 cx,
2089 )
2090 .detach_and_log_err(cx);
2091 })
2092 })?;
2093
2094 Ok(proto::OpenBufferResponse {
2095 buffer_id: buffer_id.to_proto(),
2096 })
2097 }
2098
2099 async fn handle_askpass(
2100 this: Entity<Self>,
2101 envelope: TypedEnvelope<proto::AskPassRequest>,
2102 mut cx: AsyncApp,
2103 ) -> Result<proto::AskPassResponse> {
2104 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2105 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2106
2107 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2108 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2109 debug_panic!("no askpass found");
2110 anyhow::bail!("no askpass found");
2111 };
2112
2113 let response = askpass
2114 .ask_password(envelope.payload.prompt)
2115 .await
2116 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2117
2118 delegates
2119 .lock()
2120 .insert(envelope.payload.askpass_id, askpass);
2121
2122 response.try_into()
2123 }
2124
2125 async fn handle_check_for_pushed_commits(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::CheckForPushedCommitsResponse> {
2130 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2131 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2132
2133 let branches = repository_handle
2134 .update(&mut cx, |repository_handle, _| {
2135 repository_handle.check_for_pushed_commits()
2136 })?
2137 .await??;
2138 Ok(proto::CheckForPushedCommitsResponse {
2139 pushed_to: branches
2140 .into_iter()
                .map(|branch| branch.to_string())
2142 .collect(),
2143 })
2144 }
2145
2146 async fn handle_git_diff(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitDiff>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::GitDiffResponse> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153 let diff_type = match envelope.payload.diff_type() {
2154 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2155 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2156 };
2157
2158 let mut diff = repository_handle
2159 .update(&mut cx, |repository_handle, cx| {
2160 repository_handle.diff(diff_type, cx)
2161 })?
2162 .await??;
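        // Cap the diff text returned over RPC at roughly one megabyte of characters,
        // truncating anything beyond that to keep the response message bounded.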
2163 const ONE_MB: usize = 1_000_000;
2164 if diff.len() > ONE_MB {
2165 diff = diff.chars().take(ONE_MB).collect()
2166 }
2167
2168 Ok(proto::GitDiffResponse { diff })
2169 }
2170
2171 async fn handle_open_unstaged_diff(
2172 this: Entity<Self>,
2173 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::OpenUnstagedDiffResponse> {
2176 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2177 let diff = this
2178 .update(&mut cx, |this, cx| {
2179 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2180 Some(this.open_unstaged_diff(buffer, cx))
2181 })?
2182 .context("missing buffer")?
2183 .await?;
2184 this.update(&mut cx, |this, _| {
2185 let shared_diffs = this
2186 .shared_diffs
2187 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2188 .or_default();
2189 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2190 })?;
2191 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2192 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2193 }
2194
2195 async fn handle_open_uncommitted_diff(
2196 this: Entity<Self>,
2197 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::OpenUncommittedDiffResponse> {
2200 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2201 let diff = this
2202 .update(&mut cx, |this, cx| {
2203 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2204 Some(this.open_uncommitted_diff(buffer, cx))
2205 })?
2206 .context("missing buffer")?
2207 .await?;
2208 this.update(&mut cx, |this, _| {
2209 let shared_diffs = this
2210 .shared_diffs
2211 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2212 .or_default();
2213 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2214 })?;
2215 diff.read_with(&cx, |diff, cx| {
2216 use proto::open_uncommitted_diff_response::Mode;
2217
2218 let unstaged_diff = diff.secondary_diff();
2219 let index_snapshot = unstaged_diff.and_then(|diff| {
2220 let diff = diff.read(cx);
2221 diff.base_text_exists().then(|| diff.base_text())
2222 });
2223
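            // Pick which base texts to send back: when the index snapshot is the same
            // buffer as the committed snapshot, report that the index matches HEAD and
            // omit the staged text; otherwise send whichever base texts are available.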
2224 let mode;
2225 let staged_text;
2226 let committed_text;
2227 if diff.base_text_exists() {
2228 let committed_snapshot = diff.base_text();
2229 committed_text = Some(committed_snapshot.text());
2230 if let Some(index_text) = index_snapshot {
2231 if index_text.remote_id() == committed_snapshot.remote_id() {
2232 mode = Mode::IndexMatchesHead;
2233 staged_text = None;
2234 } else {
2235 mode = Mode::IndexAndHead;
2236 staged_text = Some(index_text.text());
2237 }
2238 } else {
2239 mode = Mode::IndexAndHead;
2240 staged_text = None;
2241 }
2242 } else {
2243 mode = Mode::IndexAndHead;
2244 committed_text = None;
2245 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2246 }
2247
2248 proto::OpenUncommittedDiffResponse {
2249 committed_text,
2250 staged_text,
2251 mode: mode.into(),
2252 }
2253 })
2254 }
2255
2256 async fn handle_update_diff_bases(
2257 this: Entity<Self>,
2258 request: TypedEnvelope<proto::UpdateDiffBases>,
2259 mut cx: AsyncApp,
2260 ) -> Result<()> {
2261 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2262 this.update(&mut cx, |this, cx| {
2263 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2264 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2265 {
2266 let buffer = buffer.read(cx).text_snapshot();
2267 diff_state.update(cx, |diff_state, cx| {
2268 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2269 })
2270 }
2271 })
2272 }
2273
2274 async fn handle_blame_buffer(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::BlameBuffer>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::BlameBufferResponse> {
2279 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2280 let version = deserialize_version(&envelope.payload.version);
2281 let buffer = this.read_with(&cx, |this, cx| {
2282 this.buffer_store.read(cx).get_existing(buffer_id)
2283 })??;
2284 buffer
2285 .update(&mut cx, |buffer, _| {
2286 buffer.wait_for_version(version.clone())
2287 })?
2288 .await?;
2289 let blame = this
2290 .update(&mut cx, |this, cx| {
2291 this.blame_buffer(&buffer, Some(version), cx)
2292 })?
2293 .await?;
2294 Ok(serialize_blame_buffer_response(blame))
2295 }
2296
2297 async fn handle_get_permalink_to_line(
2298 this: Entity<Self>,
2299 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2300 mut cx: AsyncApp,
2301 ) -> Result<proto::GetPermalinkToLineResponse> {
2302 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2303 // let version = deserialize_version(&envelope.payload.version);
2304 let selection = {
2305 let proto_selection = envelope
2306 .payload
2307 .selection
                .context("no selection provided to get permalink for")?;
2309 proto_selection.start as u32..proto_selection.end as u32
2310 };
2311 let buffer = this.read_with(&cx, |this, cx| {
2312 this.buffer_store.read(cx).get_existing(buffer_id)
2313 })??;
2314 let permalink = this
2315 .update(&mut cx, |this, cx| {
2316 this.get_permalink_to_line(&buffer, selection, cx)
2317 })?
2318 .await?;
2319 Ok(proto::GetPermalinkToLineResponse {
2320 permalink: permalink.to_string(),
2321 })
2322 }
2323
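    /// Looks up the repository entity referenced by an incoming request, failing if
    /// it is no longer present in this store.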
2324 fn repository_for_request(
2325 this: &Entity<Self>,
2326 id: RepositoryId,
2327 cx: &mut AsyncApp,
2328 ) -> Result<Entity<Repository>> {
2329 this.read_with(cx, |this, _| {
2330 this.repositories
2331 .get(&id)
2332 .context("missing repository handle")
2333 .cloned()
2334 })?
2335 }
2336
2337 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2338 self.repositories
2339 .iter()
2340 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2341 .collect()
2342 }
2343
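    /// Groups the updated worktree entries by the repository whose work directory
    /// contains them, converting each one to a `RepoPath` relative to that repository.
    /// When repositories are nested, a path is attributed to its innermost repository.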
2344 fn process_updated_entries(
2345 &self,
2346 worktree: &Entity<Worktree>,
2347 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2348 cx: &mut App,
2349 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2350 let path_style = worktree.read(cx).path_style();
2351 let mut repo_paths = self
2352 .repositories
2353 .values()
2354 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2355 .collect::<Vec<_>>();
2356 let mut entries: Vec<_> = updated_entries
2357 .iter()
2358 .map(|(path, _, _)| path.clone())
2359 .collect();
2360 entries.sort();
2361 let worktree = worktree.read(cx);
2362
2363 let entries = entries
2364 .into_iter()
2365 .map(|path| worktree.absolutize(&path))
2366 .collect::<Arc<[_]>>();
2367
2368 let executor = cx.background_executor().clone();
2369 cx.background_executor().spawn(async move {
2370 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2371 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2372 let mut tasks = FuturesOrdered::new();
2373 for (repo_path, repo) in repo_paths.into_iter().rev() {
2374 let entries = entries.clone();
2375 let task = executor.spawn(async move {
2376 // Find all repository paths that belong to this repo
2377 let mut ix = entries.partition_point(|path| path < &*repo_path);
2378 if ix == entries.len() {
2379 return None;
2380 };
2381
2382 let mut paths = Vec::new();
                    // All paths within a given repository's work directory form a contiguous range.
2384 while let Some(path) = entries.get(ix)
2385 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2386 &repo_path, path, path_style,
2387 )
2388 {
2389 paths.push((repo_path, ix));
2390 ix += 1;
2391 }
2392 if paths.is_empty() {
2393 None
2394 } else {
2395 Some((repo, paths))
2396 }
2397 });
2398 tasks.push_back(task);
2399 }
2400
2401 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2402 let mut path_was_used = vec![false; entries.len()];
2403 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were built over the repositories in deepest-first order, so more-specific
            // work directories are seen first; we always want to assign a path to its innermost repository.
2406 for t in tasks {
2407 let Some((repo, paths)) = t else {
2408 continue;
2409 };
2410 let entry = paths_by_git_repo.entry(repo).or_default();
2411 for (repo_path, ix) in paths {
2412 if path_was_used[ix] {
2413 continue;
2414 }
2415 path_was_used[ix] = true;
2416 entry.push(repo_path);
2417 }
2418 }
2419
2420 paths_by_git_repo
2421 })
2422 }
2423}
2424
2425impl BufferGitState {
2426 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2427 Self {
2428 unstaged_diff: Default::default(),
2429 uncommitted_diff: Default::default(),
2430 recalculate_diff_task: Default::default(),
2431 language: Default::default(),
2432 language_registry: Default::default(),
2433 recalculating_tx: postage::watch::channel_with(false).0,
2434 hunk_staging_operation_count: 0,
2435 hunk_staging_operation_count_as_of_write: 0,
2436 head_text: Default::default(),
2437 index_text: Default::default(),
2438 head_changed: Default::default(),
2439 index_changed: Default::default(),
2440 language_changed: Default::default(),
2441 conflict_updated_futures: Default::default(),
2442 conflict_set: Default::default(),
2443 reparse_conflict_markers_task: Default::default(),
2444 }
2445 }
2446
2447 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2448 self.language = buffer.read(cx).language().cloned();
2449 self.language_changed = true;
2450 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2451 }
2452
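    /// Re-parses conflict markers in the given buffer snapshot on the background
    /// executor and applies the result to the associated `ConflictSet`, if any. The
    /// returned receiver resolves once the conflict set has been updated; it is
    /// dropped without firing when there is no conflict set or no prior conflicts
    /// to compare against.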
2453 fn reparse_conflict_markers(
2454 &mut self,
2455 buffer: text::BufferSnapshot,
2456 cx: &mut Context<Self>,
2457 ) -> oneshot::Receiver<()> {
2458 let (tx, rx) = oneshot::channel();
2459
2460 let Some(conflict_set) = self
2461 .conflict_set
2462 .as_ref()
2463 .and_then(|conflict_set| conflict_set.upgrade())
2464 else {
2465 return rx;
2466 };
2467
2468 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2469 if conflict_set.has_conflict {
2470 Some(conflict_set.snapshot())
2471 } else {
2472 None
2473 }
2474 });
2475
2476 if let Some(old_snapshot) = old_snapshot {
2477 self.conflict_updated_futures.push(tx);
2478 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2479 let (snapshot, changed_range) = cx
2480 .background_spawn(async move {
2481 let new_snapshot = ConflictSet::parse(&buffer);
2482 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2483 (new_snapshot, changed_range)
2484 })
2485 .await;
2486 this.update(cx, |this, cx| {
2487 if let Some(conflict_set) = &this.conflict_set {
2488 conflict_set
2489 .update(cx, |conflict_set, cx| {
2490 conflict_set.set_snapshot(snapshot, changed_range, cx);
2491 })
2492 .ok();
2493 }
2494 let futures = std::mem::take(&mut this.conflict_updated_futures);
2495 for tx in futures {
2496 tx.send(()).ok();
2497 }
2498 })
2499 }))
2500 }
2501
2502 rx
2503 }
2504
2505 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2506 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2507 }
2508
2509 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2510 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2511 }
2512
2513 fn handle_base_texts_updated(
2514 &mut self,
2515 buffer: text::BufferSnapshot,
2516 message: proto::UpdateDiffBases,
2517 cx: &mut Context<Self>,
2518 ) {
2519 use proto::update_diff_bases::Mode;
2520
2521 let Some(mode) = Mode::from_i32(message.mode) else {
2522 return;
2523 };
2524
2525 let diff_bases_change = match mode {
2526 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2527 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2528 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2529 Mode::IndexAndHead => DiffBasesChange::SetEach {
2530 index: message.staged_text,
2531 head: message.committed_text,
2532 },
2533 };
2534
2535 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2536 }
2537
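    /// If a diff recalculation is currently in progress, returns a future that
    /// resolves once it finishes; otherwise returns `None`.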
2538 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2539 if *self.recalculating_tx.borrow() {
2540 let mut rx = self.recalculating_tx.subscribe();
2541 Some(async move {
2542 loop {
2543 let is_recalculating = rx.recv().await;
2544 if is_recalculating != Some(true) {
2545 break;
2546 }
2547 }
2548 })
2549 } else {
2550 None
2551 }
2552 }
2553
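    /// Applies a change to the cached HEAD and/or index base texts (normalizing
    /// their line endings) and kicks off a diff recalculation for the buffer.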
2554 fn diff_bases_changed(
2555 &mut self,
2556 buffer: text::BufferSnapshot,
2557 diff_bases_change: Option<DiffBasesChange>,
2558 cx: &mut Context<Self>,
2559 ) {
2560 match diff_bases_change {
2561 Some(DiffBasesChange::SetIndex(index)) => {
2562 self.index_text = index.map(|mut index| {
2563 text::LineEnding::normalize(&mut index);
2564 Arc::new(index)
2565 });
2566 self.index_changed = true;
2567 }
2568 Some(DiffBasesChange::SetHead(head)) => {
2569 self.head_text = head.map(|mut head| {
2570 text::LineEnding::normalize(&mut head);
2571 Arc::new(head)
2572 });
2573 self.head_changed = true;
2574 }
2575 Some(DiffBasesChange::SetBoth(text)) => {
2576 let text = text.map(|mut text| {
2577 text::LineEnding::normalize(&mut text);
2578 Arc::new(text)
2579 });
2580 self.head_text = text.clone();
2581 self.index_text = text;
2582 self.head_changed = true;
2583 self.index_changed = true;
2584 }
2585 Some(DiffBasesChange::SetEach { index, head }) => {
2586 self.index_text = index.map(|mut index| {
2587 text::LineEnding::normalize(&mut index);
2588 Arc::new(index)
2589 });
2590 self.index_changed = true;
2591 self.head_text = head.map(|mut head| {
2592 text::LineEnding::normalize(&mut head);
2593 Arc::new(head)
2594 });
2595 self.head_changed = true;
2596 }
2597 None => {}
2598 }
2599
2600 self.recalculate_diffs(buffer, cx)
2601 }
2602
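    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background
    /// task. When the index matches HEAD, the unstaged diff is reused as the
    /// uncommitted diff. The recalculation is abandoned if new hunk staging
    /// operations were issued after the base texts were read.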
2603 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2604 *self.recalculating_tx.borrow_mut() = true;
2605
2606 let language = self.language.clone();
2607 let language_registry = self.language_registry.clone();
2608 let unstaged_diff = self.unstaged_diff();
2609 let uncommitted_diff = self.uncommitted_diff();
2610 let head = self.head_text.clone();
2611 let index = self.index_text.clone();
2612 let index_changed = self.index_changed;
2613 let head_changed = self.head_changed;
2614 let language_changed = self.language_changed;
2615 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
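        // The index and HEAD texts share the same `Arc` when they were set together
        // (see `DiffBasesChange::SetBoth`), so pointer equality detects that case cheaply.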
2616 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2617 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2618 (None, None) => true,
2619 _ => false,
2620 };
2621 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2622 log::debug!(
2623 "start recalculating diffs for buffer {}",
2624 buffer.remote_id()
2625 );
2626
2627 let mut new_unstaged_diff = None;
2628 if let Some(unstaged_diff) = &unstaged_diff {
2629 new_unstaged_diff = Some(
2630 BufferDiff::update_diff(
2631 unstaged_diff.clone(),
2632 buffer.clone(),
2633 index,
2634 index_changed,
2635 language_changed,
2636 language.clone(),
2637 language_registry.clone(),
2638 cx,
2639 )
2640 .await?,
2641 );
2642 }
2643
2644 let mut new_uncommitted_diff = None;
2645 if let Some(uncommitted_diff) = &uncommitted_diff {
2646 new_uncommitted_diff = if index_matches_head {
2647 new_unstaged_diff.clone()
2648 } else {
2649 Some(
2650 BufferDiff::update_diff(
2651 uncommitted_diff.clone(),
2652 buffer.clone(),
2653 head,
2654 head_changed,
2655 language_changed,
2656 language.clone(),
2657 language_registry.clone(),
2658 cx,
2659 )
2660 .await?,
2661 )
2662 }
2663 }
2664
2665 let cancel = this.update(cx, |this, _| {
2666 // This checks whether all pending stage/unstage operations
2667 // have quiesced (i.e. both the corresponding write and the
2668 // read of that write have completed). If not, then we cancel
2669 // this recalculation attempt to avoid invalidating pending
2670 // state too quickly; another recalculation will come along
2671 // later and clear the pending state once the state of the index has settled.
2672 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2673 *this.recalculating_tx.borrow_mut() = false;
2674 true
2675 } else {
2676 false
2677 }
2678 })?;
2679 if cancel {
                log::debug!(
                    concat!(
                        "aborting diff recalculation for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
2687 return Ok(());
2688 }
2689
2690 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2691 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2692 {
2693 unstaged_diff.update(cx, |diff, cx| {
2694 if language_changed {
2695 diff.language_changed(cx);
2696 }
2697 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2698 })?
2699 } else {
2700 None
2701 };
2702
2703 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2704 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2705 {
2706 uncommitted_diff.update(cx, |diff, cx| {
2707 if language_changed {
2708 diff.language_changed(cx);
2709 }
2710 diff.set_snapshot_with_secondary(
2711 new_uncommitted_diff,
2712 &buffer,
2713 unstaged_changed_range,
2714 true,
2715 cx,
2716 );
2717 })?;
2718 }
2719
2720 log::debug!(
2721 "finished recalculating diffs for buffer {}",
2722 buffer.remote_id()
2723 );
2724
2725 if let Some(this) = this.upgrade() {
2726 this.update(cx, |this, _| {
2727 this.index_changed = false;
2728 this.head_changed = false;
2729 this.language_changed = false;
2730 *this.recalculating_tx.borrow_mut() = false;
2731 })?;
2732 }
2733
2734 Ok(())
2735 }));
2736 }
2737}
2738
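/// Builds an `AskPassDelegate` that forwards askpass prompts from a local git
/// operation to the downstream client over RPC, relays the encrypted response back
/// to the prompting operation, and zeroizes the raw response text afterwards.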
2739fn make_remote_delegate(
2740 this: Entity<GitStore>,
2741 project_id: u64,
2742 repository_id: RepositoryId,
2743 askpass_id: u64,
2744 cx: &mut AsyncApp,
2745) -> AskPassDelegate {
2746 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2747 this.update(cx, |this, cx| {
2748 let Some((client, _)) = this.downstream_client() else {
2749 return;
2750 };
2751 let response = client.request(proto::AskPassRequest {
2752 project_id,
2753 repository_id: repository_id.to_proto(),
2754 askpass_id,
2755 prompt,
2756 });
2757 cx.spawn(async move |_, _| {
2758 let mut response = response.await?.response;
2759 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2760 .ok();
2761 response.zeroize();
2762 anyhow::Ok(())
2763 })
2764 .detach_and_log_err(cx);
2765 })
2766 .log_err();
2767 })
2768}
2769
2770impl RepositoryId {
2771 pub fn to_proto(self) -> u64 {
2772 self.0
2773 }
2774
2775 pub fn from_proto(id: u64) -> Self {
2776 RepositoryId(id)
2777 }
2778}
2779
2780impl RepositorySnapshot {
2781 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2782 Self {
2783 id,
2784 statuses_by_path: Default::default(),
2785 work_directory_abs_path,
2786 branch: None,
2787 head_commit: None,
2788 scan_id: 0,
2789 merge: Default::default(),
2790 remote_origin_url: None,
2791 remote_upstream_url: None,
2792 stash_entries: Default::default(),
2793 path_style,
2794 }
2795 }
2796
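    /// Builds the `proto::UpdateRepository` message describing this snapshot in full,
    /// as sent when the repository is first shared with a downstream client.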
2797 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2798 proto::UpdateRepository {
2799 branch_summary: self.branch.as_ref().map(branch_to_proto),
2800 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2801 updated_statuses: self
2802 .statuses_by_path
2803 .iter()
2804 .map(|entry| entry.to_proto())
2805 .collect(),
2806 removed_statuses: Default::default(),
2807 current_merge_conflicts: self
2808 .merge
2809 .conflicted_paths
2810 .iter()
2811 .map(|repo_path| repo_path.to_proto())
2812 .collect(),
2813 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2814 project_id,
2815 id: self.id.to_proto(),
2816 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2817 entry_ids: vec![self.id.to_proto()],
2818 scan_id: self.scan_id,
2819 is_last_update: true,
2820 stash_entries: self
2821 .stash_entries
2822 .entries
2823 .iter()
2824 .map(stash_to_proto)
2825 .collect(),
2826 }
2827 }
2828
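    /// Builds a `proto::UpdateRepository` describing the delta between `old` and this
    /// snapshot: a merge-join over the two sorted status lists collects the entries
    /// that were added or changed and the paths whose statuses were removed.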
2829 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2830 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2831 let mut removed_statuses: Vec<String> = Vec::new();
2832
2833 let mut new_statuses = self.statuses_by_path.iter().peekable();
2834 let mut old_statuses = old.statuses_by_path.iter().peekable();
2835
2836 let mut current_new_entry = new_statuses.next();
2837 let mut current_old_entry = old_statuses.next();
2838 loop {
2839 match (current_new_entry, current_old_entry) {
2840 (Some(new_entry), Some(old_entry)) => {
2841 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2842 Ordering::Less => {
2843 updated_statuses.push(new_entry.to_proto());
2844 current_new_entry = new_statuses.next();
2845 }
2846 Ordering::Equal => {
2847 if new_entry.status != old_entry.status {
2848 updated_statuses.push(new_entry.to_proto());
2849 }
2850 current_old_entry = old_statuses.next();
2851 current_new_entry = new_statuses.next();
2852 }
2853 Ordering::Greater => {
2854 removed_statuses.push(old_entry.repo_path.to_proto());
2855 current_old_entry = old_statuses.next();
2856 }
2857 }
2858 }
2859 (None, Some(old_entry)) => {
2860 removed_statuses.push(old_entry.repo_path.to_proto());
2861 current_old_entry = old_statuses.next();
2862 }
2863 (Some(new_entry), None) => {
2864 updated_statuses.push(new_entry.to_proto());
2865 current_new_entry = new_statuses.next();
2866 }
2867 (None, None) => break,
2868 }
2869 }
2870
2871 proto::UpdateRepository {
2872 branch_summary: self.branch.as_ref().map(branch_to_proto),
2873 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2874 updated_statuses,
2875 removed_statuses,
2876 current_merge_conflicts: self
2877 .merge
2878 .conflicted_paths
2879 .iter()
2880 .map(|path| path.to_proto())
2881 .collect(),
2882 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2883 project_id,
2884 id: self.id.to_proto(),
2885 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2886 entry_ids: vec![],
2887 scan_id: self.scan_id,
2888 is_last_update: true,
2889 stash_entries: self
2890 .stash_entries
2891 .entries
2892 .iter()
2893 .map(stash_to_proto)
2894 .collect(),
2895 }
2896 }
2897
2898 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2899 self.statuses_by_path.iter().cloned()
2900 }
2901
2902 pub fn status_summary(&self) -> GitSummary {
2903 self.statuses_by_path.summary().item_summary
2904 }
2905
2906 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2907 self.statuses_by_path
2908 .get(&PathKey(path.0.clone()), ())
2909 .cloned()
2910 }
2911
2912 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2913 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2914 }
2915
2916 #[inline]
2917 fn abs_path_to_repo_path_inner(
2918 work_directory_abs_path: &Path,
2919 abs_path: &Path,
2920 path_style: PathStyle,
2921 ) -> Option<RepoPath> {
2922 abs_path
2923 .strip_prefix(&work_directory_abs_path)
2924 .ok()
2925 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2926 }
2927
2928 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2929 self.merge.conflicted_paths.contains(repo_path)
2930 }
2931
2932 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2933 let had_conflict_on_last_merge_head_change =
2934 self.merge.conflicted_paths.contains(repo_path);
2935 let has_conflict_currently = self
2936 .status_for_path(repo_path)
2937 .is_some_and(|entry| entry.status.is_conflicted());
2938 had_conflict_on_last_merge_head_change || has_conflict_currently
2939 }
2940
2941 /// This is the name that will be displayed in the repository selector for this repository.
2942 pub fn display_name(&self) -> SharedString {
2943 self.work_directory_abs_path
2944 .file_name()
2945 .unwrap_or_default()
2946 .to_string_lossy()
2947 .to_string()
2948 .into()
2949 }
2950}
2951
2952pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2953 proto::StashEntry {
2954 oid: entry.oid.as_bytes().to_vec(),
2955 message: entry.message.clone(),
2956 branch: entry.branch.clone(),
2957 index: entry.index as u64,
2958 timestamp: entry.timestamp,
2959 }
2960}
2961
2962pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2963 Ok(StashEntry {
2964 oid: Oid::from_bytes(&entry.oid)?,
2965 message: entry.message.clone(),
2966 index: entry.index as usize,
2967 branch: entry.branch.clone(),
2968 timestamp: entry.timestamp,
2969 })
2970}
2971
2972impl MergeDetails {
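    /// Reloads the merge state (merge message, merge/rebase/cherry-pick/revert/apply
    /// heads, and conflicted paths) from the repository. Returns the new details
    /// along with a flag indicating whether the tracked merge-head state changed;
    /// the update is deferred while a merge is in progress but `git status` has not
    /// reported any conflicts yet.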
2973 async fn load(
2974 backend: &Arc<dyn GitRepository>,
2975 status: &SumTree<StatusEntry>,
2976 prev_snapshot: &RepositorySnapshot,
2977 ) -> Result<(MergeDetails, bool)> {
2978 log::debug!("load merge details");
2979 let message = backend.merge_message().await;
2980 let heads = backend
2981 .revparse_batch(vec![
2982 "MERGE_HEAD".into(),
2983 "CHERRY_PICK_HEAD".into(),
2984 "REBASE_HEAD".into(),
2985 "REVERT_HEAD".into(),
2986 "APPLY_HEAD".into(),
2987 ])
2988 .await
2989 .log_err()
2990 .unwrap_or_default()
2991 .into_iter()
2992 .map(|opt| opt.map(SharedString::from))
2993 .collect::<Vec<_>>();
2994 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2995 let conflicted_paths = if merge_heads_changed {
2996 let current_conflicted_paths = TreeSet::from_ordered_entries(
2997 status
2998 .iter()
2999 .filter(|entry| entry.status.is_conflicted())
3000 .map(|entry| entry.repo_path.clone()),
3001 );
3002
3003 // It can happen that we run a scan while a lengthy merge is in progress
3004 // that will eventually result in conflicts, but before those conflicts
3005 // are reported by `git status`. Since for the moment we only care about
3006 // the merge heads state for the purposes of tracking conflicts, don't update
3007 // this state until we see some conflicts.
3008 if heads.iter().any(Option::is_some)
3009 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3010 && current_conflicted_paths.is_empty()
3011 {
3012 log::debug!("not updating merge heads because no conflicts found");
3013 return Ok((
3014 MergeDetails {
3015 message: message.map(SharedString::from),
3016 ..prev_snapshot.merge.clone()
3017 },
3018 false,
3019 ));
3020 }
3021
3022 current_conflicted_paths
3023 } else {
3024 prev_snapshot.merge.conflicted_paths.clone()
3025 };
3026 let details = MergeDetails {
3027 conflicted_paths,
3028 message: message.map(SharedString::from),
3029 heads,
3030 };
3031 Ok((details, merge_heads_changed))
3032 }
3033}
3034
3035impl Repository {
3036 pub fn snapshot(&self) -> RepositorySnapshot {
3037 self.snapshot.clone()
3038 }
3039
3040 fn local(
3041 id: RepositoryId,
3042 work_directory_abs_path: Arc<Path>,
3043 dot_git_abs_path: Arc<Path>,
3044 repository_dir_abs_path: Arc<Path>,
3045 common_dir_abs_path: Arc<Path>,
3046 project_environment: WeakEntity<ProjectEnvironment>,
3047 fs: Arc<dyn Fs>,
3048 git_store: WeakEntity<GitStore>,
3049 cx: &mut Context<Self>,
3050 ) -> Self {
3051 let snapshot =
3052 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3053 Repository {
3054 this: cx.weak_entity(),
3055 git_store,
3056 snapshot,
3057 commit_message_buffer: None,
3058 askpass_delegates: Default::default(),
3059 paths_needing_status_update: Default::default(),
3060 latest_askpass_id: 0,
3061 job_sender: Repository::spawn_local_git_worker(
3062 work_directory_abs_path,
3063 dot_git_abs_path,
3064 repository_dir_abs_path,
3065 common_dir_abs_path,
3066 project_environment,
3067 fs,
3068 cx,
3069 ),
3070 job_id: 0,
3071 active_jobs: Default::default(),
3072 }
3073 }
3074
3075 fn remote(
3076 id: RepositoryId,
3077 work_directory_abs_path: Arc<Path>,
3078 path_style: PathStyle,
3079 project_id: ProjectId,
3080 client: AnyProtoClient,
3081 git_store: WeakEntity<GitStore>,
3082 cx: &mut Context<Self>,
3083 ) -> Self {
3084 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3085 Self {
3086 this: cx.weak_entity(),
3087 snapshot,
3088 commit_message_buffer: None,
3089 git_store,
3090 paths_needing_status_update: Default::default(),
3091 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3092 askpass_delegates: Default::default(),
3093 latest_askpass_id: 0,
3094 active_jobs: Default::default(),
3095 job_id: 0,
3096 }
3097 }
3098
3099 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3100 self.git_store.upgrade()
3101 }
3102
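    /// For every open buffer belonging to this repository, re-reads the index and
    /// HEAD texts from the local git backend, forwards any resulting
    /// `DiffBasesChange` to the buffer's diff state, and mirrors the update to the
    /// downstream client when one is connected.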
3103 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3104 let this = cx.weak_entity();
3105 let git_store = self.git_store.clone();
3106 let _ = self.send_keyed_job(
3107 Some(GitJobKey::ReloadBufferDiffBases),
3108 None,
3109 |state, mut cx| async move {
3110 let RepositoryState::Local { backend, .. } = state else {
3111 log::error!("tried to recompute diffs for a non-local repository");
3112 return Ok(());
3113 };
3114
3115 let Some(this) = this.upgrade() else {
3116 return Ok(());
3117 };
3118
3119 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3120 git_store.update(cx, |git_store, cx| {
3121 git_store
3122 .diffs
3123 .iter()
3124 .filter_map(|(buffer_id, diff_state)| {
3125 let buffer_store = git_store.buffer_store.read(cx);
3126 let buffer = buffer_store.get(*buffer_id)?;
3127 let file = File::from_dyn(buffer.read(cx).file())?;
3128 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3129 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3130 log::debug!(
3131 "start reload diff bases for repo path {}",
3132 repo_path.as_unix_str()
3133 );
3134 diff_state.update(cx, |diff_state, _| {
3135 let has_unstaged_diff = diff_state
3136 .unstaged_diff
3137 .as_ref()
3138 .is_some_and(|diff| diff.is_upgradable());
3139 let has_uncommitted_diff = diff_state
3140 .uncommitted_diff
3141 .as_ref()
3142 .is_some_and(|set| set.is_upgradable());
3143
3144 Some((
3145 buffer,
3146 repo_path,
3147 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3148 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3149 ))
3150 })
3151 })
3152 .collect::<Vec<_>>()
3153 })
3154 })??;
3155
3156 let buffer_diff_base_changes = cx
3157 .background_spawn(async move {
3158 let mut changes = Vec::new();
3159 for (buffer, repo_path, current_index_text, current_head_text) in
3160 &repo_diff_state_updates
3161 {
3162 let index_text = if current_index_text.is_some() {
3163 backend.load_index_text(repo_path.clone()).await
3164 } else {
3165 None
3166 };
3167 let head_text = if current_head_text.is_some() {
3168 backend.load_committed_text(repo_path.clone()).await
3169 } else {
3170 None
3171 };
3172
3173 let change =
3174 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3175 (Some(current_index), Some(current_head)) => {
3176 let index_changed =
3177 index_text.as_ref() != current_index.as_deref();
3178 let head_changed =
3179 head_text.as_ref() != current_head.as_deref();
3180 if index_changed && head_changed {
3181 if index_text == head_text {
3182 Some(DiffBasesChange::SetBoth(head_text))
3183 } else {
3184 Some(DiffBasesChange::SetEach {
3185 index: index_text,
3186 head: head_text,
3187 })
3188 }
3189 } else if index_changed {
3190 Some(DiffBasesChange::SetIndex(index_text))
3191 } else if head_changed {
3192 Some(DiffBasesChange::SetHead(head_text))
3193 } else {
3194 None
3195 }
3196 }
3197 (Some(current_index), None) => {
3198 let index_changed =
3199 index_text.as_ref() != current_index.as_deref();
3200 index_changed
3201 .then_some(DiffBasesChange::SetIndex(index_text))
3202 }
3203 (None, Some(current_head)) => {
3204 let head_changed =
3205 head_text.as_ref() != current_head.as_deref();
3206 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3207 }
3208 (None, None) => None,
3209 };
3210
3211 changes.push((buffer.clone(), change))
3212 }
3213 changes
3214 })
3215 .await;
3216
3217 git_store.update(&mut cx, |git_store, cx| {
3218 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3219 let buffer_snapshot = buffer.read(cx).text_snapshot();
3220 let buffer_id = buffer_snapshot.remote_id();
3221 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3222 continue;
3223 };
3224
3225 let downstream_client = git_store.downstream_client();
3226 diff_state.update(cx, |diff_state, cx| {
3227 use proto::update_diff_bases::Mode;
3228
3229 if let Some((diff_bases_change, (client, project_id))) =
3230 diff_bases_change.clone().zip(downstream_client)
3231 {
3232 let (staged_text, committed_text, mode) = match diff_bases_change {
3233 DiffBasesChange::SetIndex(index) => {
3234 (index, None, Mode::IndexOnly)
3235 }
3236 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3237 DiffBasesChange::SetEach { index, head } => {
3238 (index, head, Mode::IndexAndHead)
3239 }
3240 DiffBasesChange::SetBoth(text) => {
3241 (None, text, Mode::IndexMatchesHead)
3242 }
3243 };
3244 client
3245 .send(proto::UpdateDiffBases {
3246 project_id: project_id.to_proto(),
3247 buffer_id: buffer_id.to_proto(),
3248 staged_text,
3249 committed_text,
3250 mode: mode as i32,
3251 })
3252 .log_err();
3253 }
3254
3255 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3256 });
3257 }
3258 })
3259 },
3260 );
3261 }
3262
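    /// Enqueues a job on this repository's git worker and returns a receiver for its
    /// result. If a status message is provided, the job is tracked in `active_jobs`
    /// while it runs.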
3263 pub fn send_job<F, Fut, R>(
3264 &mut self,
3265 status: Option<SharedString>,
3266 job: F,
3267 ) -> oneshot::Receiver<R>
3268 where
3269 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3270 Fut: Future<Output = R> + 'static,
3271 R: Send + 'static,
3272 {
3273 self.send_keyed_job(None, status, job)
3274 }
3275
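    /// Like `send_job`, but tags the job with a `GitJobKey` so the worker can
    /// recognize jobs of the same kind (for example `GitJobKey::ReloadBufferDiffBases`).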
3276 fn send_keyed_job<F, Fut, R>(
3277 &mut self,
3278 key: Option<GitJobKey>,
3279 status: Option<SharedString>,
3280 job: F,
3281 ) -> oneshot::Receiver<R>
3282 where
3283 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3284 Fut: Future<Output = R> + 'static,
3285 R: Send + 'static,
3286 {
3287 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3288 let job_id = post_inc(&mut self.job_id);
3289 let this = self.this.clone();
3290 self.job_sender
3291 .unbounded_send(GitJob {
3292 key,
3293 job: Box::new(move |state, cx: &mut AsyncApp| {
3294 let job = job(state, cx.clone());
3295 cx.spawn(async move |cx| {
3296 if let Some(s) = status.clone() {
3297 this.update(cx, |this, cx| {
3298 this.active_jobs.insert(
3299 job_id,
3300 JobInfo {
3301 start: Instant::now(),
3302 message: s.clone(),
3303 },
3304 );
3305
3306 cx.notify();
3307 })
3308 .ok();
3309 }
3310 let result = job.await;
3311
3312 this.update(cx, |this, cx| {
3313 this.active_jobs.remove(&job_id);
3314 cx.notify();
3315 })
3316 .ok();
3317
3318 result_tx.send(result).ok();
3319 })
3320 }),
3321 })
3322 .ok();
3323 result_rx
3324 }
3325
3326 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3327 let Some(git_store) = self.git_store.upgrade() else {
3328 return;
3329 };
3330 let entity = cx.entity();
3331 git_store.update(cx, |git_store, cx| {
3332 let Some((&id, _)) = git_store
3333 .repositories
3334 .iter()
3335 .find(|(_, handle)| *handle == &entity)
3336 else {
3337 return;
3338 };
3339 git_store.active_repo_id = Some(id);
3340 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3341 });
3342 }
3343
3344 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3345 self.snapshot.status()
3346 }
3347
3348 pub fn cached_stash(&self) -> GitStash {
3349 self.snapshot.stash_entries.clone()
3350 }
3351
3352 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3353 let git_store = self.git_store.upgrade()?;
3354 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3355 let abs_path = self
3356 .snapshot
3357 .work_directory_abs_path
3358 .join(path.as_std_path());
3359 let abs_path = SanitizedPath::new(&abs_path);
3360 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3361 Some(ProjectPath {
3362 worktree_id: worktree.read(cx).id(),
3363 path: relative_path,
3364 })
3365 }
3366
3367 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3368 let git_store = self.git_store.upgrade()?;
3369 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3370 let abs_path = worktree_store.absolutize(path, cx)?;
3371 self.snapshot.abs_path_to_repo_path(&abs_path)
3372 }
3373
3374 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3375 other
3376 .read(cx)
3377 .snapshot
3378 .work_directory_abs_path
3379 .starts_with(&self.snapshot.work_directory_abs_path)
3380 }
3381
3382 pub fn open_commit_buffer(
3383 &mut self,
3384 languages: Option<Arc<LanguageRegistry>>,
3385 buffer_store: Entity<BufferStore>,
3386 cx: &mut Context<Self>,
3387 ) -> Task<Result<Entity<Buffer>>> {
3388 let id = self.id;
3389 if let Some(buffer) = self.commit_message_buffer.clone() {
3390 return Task::ready(Ok(buffer));
3391 }
3392 let this = cx.weak_entity();
3393
3394 let rx = self.send_job(None, move |state, mut cx| async move {
3395 let Some(this) = this.upgrade() else {
3396 bail!("git store was dropped");
3397 };
3398 match state {
3399 RepositoryState::Local { .. } => {
3400 this.update(&mut cx, |_, cx| {
3401 Self::open_local_commit_buffer(languages, buffer_store, cx)
3402 })?
3403 .await
3404 }
3405 RepositoryState::Remote { project_id, client } => {
3406 let request = client.request(proto::OpenCommitMessageBuffer {
3407 project_id: project_id.0,
3408 repository_id: id.to_proto(),
3409 });
3410 let response = request.await.context("requesting to open commit buffer")?;
3411 let buffer_id = BufferId::new(response.buffer_id)?;
3412 let buffer = buffer_store
3413 .update(&mut cx, |buffer_store, cx| {
3414 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3415 })?
3416 .await?;
3417 if let Some(language_registry) = languages {
3418 let git_commit_language =
3419 language_registry.language_for_name("Git Commit").await?;
3420 buffer.update(&mut cx, |buffer, cx| {
3421 buffer.set_language(Some(git_commit_language), cx);
3422 })?;
3423 }
3424 this.update(&mut cx, |this, _| {
3425 this.commit_message_buffer = Some(buffer.clone());
3426 })?;
3427 Ok(buffer)
3428 }
3429 }
3430 });
3431
3432 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3433 }
3434
3435 fn open_local_commit_buffer(
3436 language_registry: Option<Arc<LanguageRegistry>>,
3437 buffer_store: Entity<BufferStore>,
3438 cx: &mut Context<Self>,
3439 ) -> Task<Result<Entity<Buffer>>> {
3440 cx.spawn(async move |repository, cx| {
3441 let buffer = buffer_store
3442 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3443 .await?;
3444
3445 if let Some(language_registry) = language_registry {
3446 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3447 buffer.update(cx, |buffer, cx| {
3448 buffer.set_language(Some(git_commit_language), cx);
3449 })?;
3450 }
3451
3452 repository.update(cx, |repository, _| {
3453 repository.commit_message_buffer = Some(buffer.clone());
3454 })?;
3455 Ok(buffer)
3456 })
3457 }
3458
3459 pub fn checkout_files(
3460 &mut self,
3461 commit: &str,
3462 paths: Vec<RepoPath>,
3463 _cx: &mut App,
3464 ) -> oneshot::Receiver<Result<()>> {
3465 let commit = commit.to_string();
3466 let id = self.id;
3467
3468 self.send_job(
3469 Some(format!("git checkout {}", commit).into()),
3470 move |git_repo, _| async move {
3471 match git_repo {
3472 RepositoryState::Local {
3473 backend,
3474 environment,
3475 ..
3476 } => {
3477 backend
3478 .checkout_files(commit, paths, environment.clone())
3479 .await
3480 }
3481 RepositoryState::Remote { project_id, client } => {
3482 client
3483 .request(proto::GitCheckoutFiles {
3484 project_id: project_id.0,
3485 repository_id: id.to_proto(),
3486 commit,
3487 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3488 })
3489 .await?;
3490
3491 Ok(())
3492 }
3493 }
3494 },
3495 )
3496 }
3497
3498 pub fn reset(
3499 &mut self,
3500 commit: String,
3501 reset_mode: ResetMode,
3502 _cx: &mut App,
3503 ) -> oneshot::Receiver<Result<()>> {
3504 let id = self.id;
3505
3506 self.send_job(None, move |git_repo, _| async move {
3507 match git_repo {
3508 RepositoryState::Local {
3509 backend,
3510 environment,
3511 ..
3512 } => backend.reset(commit, reset_mode, environment).await,
3513 RepositoryState::Remote { project_id, client } => {
3514 client
3515 .request(proto::GitReset {
3516 project_id: project_id.0,
3517 repository_id: id.to_proto(),
3518 commit,
3519 mode: match reset_mode {
3520 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3521 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3522 },
3523 })
3524 .await?;
3525
3526 Ok(())
3527 }
3528 }
3529 })
3530 }
3531
3532 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3533 let id = self.id;
3534 self.send_job(None, move |git_repo, _cx| async move {
3535 match git_repo {
3536 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3537 RepositoryState::Remote { project_id, client } => {
3538 let resp = client
3539 .request(proto::GitShow {
3540 project_id: project_id.0,
3541 repository_id: id.to_proto(),
3542 commit,
3543 })
3544 .await?;
3545
3546 Ok(CommitDetails {
3547 sha: resp.sha.into(),
3548 message: resp.message.into(),
3549 commit_timestamp: resp.commit_timestamp,
3550 author_email: resp.author_email.into(),
3551 author_name: resp.author_name.into(),
3552 })
3553 }
3554 }
3555 })
3556 }
3557
3558 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3559 let id = self.id;
3560 self.send_job(None, move |git_repo, cx| async move {
3561 match git_repo {
3562 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3563 RepositoryState::Remote {
3564 client, project_id, ..
3565 } => {
3566 let response = client
3567 .request(proto::LoadCommitDiff {
3568 project_id: project_id.0,
3569 repository_id: id.to_proto(),
3570 commit,
3571 })
3572 .await?;
3573 Ok(CommitDiff {
3574 files: response
3575 .files
3576 .into_iter()
3577 .map(|file| {
3578 Ok(CommitFile {
3579 path: RepoPath::from_proto(&file.path)?,
3580 old_text: file.old_text,
3581 new_text: file.new_text,
3582 })
3583 })
3584 .collect::<Result<Vec<_>>>()?,
3585 })
3586 }
3587 }
3588 })
3589 }
3590
3591 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3592 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3593 }
3594
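    /// Stages the given paths. Any open buffers for those paths whose files exist on
    /// disk are saved first, so that the index picks up the editor's latest contents.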
3595 pub fn stage_entries(
3596 &self,
3597 entries: Vec<RepoPath>,
3598 cx: &mut Context<Self>,
3599 ) -> Task<anyhow::Result<()>> {
3600 if entries.is_empty() {
3601 return Task::ready(Ok(()));
3602 }
3603 let id = self.id;
3604
3605 let mut save_futures = Vec::new();
3606 if let Some(buffer_store) = self.buffer_store(cx) {
3607 buffer_store.update(cx, |buffer_store, cx| {
3608 for path in &entries {
3609 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3610 continue;
3611 };
3612 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3613 && buffer
3614 .read(cx)
3615 .file()
3616 .is_some_and(|file| file.disk_state().exists())
3617 {
3618 save_futures.push(buffer_store.save_buffer(buffer, cx));
3619 }
3620 }
3621 })
3622 }
3623
3624 cx.spawn(async move |this, cx| {
3625 for save_future in save_futures {
3626 save_future.await?;
3627 }
3628
3629 this.update(cx, |this, _| {
3630 this.send_job(None, move |git_repo, _cx| async move {
3631 match git_repo {
3632 RepositoryState::Local {
3633 backend,
3634 environment,
3635 ..
3636 } => backend.stage_paths(entries, environment.clone()).await,
3637 RepositoryState::Remote { project_id, client } => {
3638 client
3639 .request(proto::Stage {
3640 project_id: project_id.0,
3641 repository_id: id.to_proto(),
3642 paths: entries
3643 .into_iter()
3644 .map(|repo_path| repo_path.to_proto())
3645 .collect(),
3646 })
3647 .await
3648 .context("sending stage request")?;
3649
3650 Ok(())
3651 }
3652 }
3653 })
3654 })?
3655 .await??;
3656
3657 Ok(())
3658 })
3659 }
3660
3661 pub fn unstage_entries(
3662 &self,
3663 entries: Vec<RepoPath>,
3664 cx: &mut Context<Self>,
3665 ) -> Task<anyhow::Result<()>> {
3666 if entries.is_empty() {
3667 return Task::ready(Ok(()));
3668 }
3669 let id = self.id;
3670
3671 let mut save_futures = Vec::new();
3672 if let Some(buffer_store) = self.buffer_store(cx) {
3673 buffer_store.update(cx, |buffer_store, cx| {
3674 for path in &entries {
3675 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3676 continue;
3677 };
3678 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3679 && buffer
3680 .read(cx)
3681 .file()
3682 .is_some_and(|file| file.disk_state().exists())
3683 {
3684 save_futures.push(buffer_store.save_buffer(buffer, cx));
3685 }
3686 }
3687 })
3688 }
3689
3690 cx.spawn(async move |this, cx| {
3691 for save_future in save_futures {
3692 save_future.await?;
3693 }
3694
3695 this.update(cx, |this, _| {
3696 this.send_job(None, move |git_repo, _cx| async move {
3697 match git_repo {
3698 RepositoryState::Local {
3699 backend,
3700 environment,
3701 ..
3702 } => backend.unstage_paths(entries, environment).await,
3703 RepositoryState::Remote { project_id, client } => {
3704 client
3705 .request(proto::Unstage {
3706 project_id: project_id.0,
3707 repository_id: id.to_proto(),
3708 paths: entries
3709 .into_iter()
3710 .map(|repo_path| repo_path.to_proto())
3711 .collect(),
3712 })
3713 .await
3714 .context("sending unstage request")?;
3715
3716 Ok(())
3717 }
3718 }
3719 })
3720 })?
3721 .await??;
3722
3723 Ok(())
3724 })
3725 }
3726
3727 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3728 let to_stage = self
3729 .cached_status()
3730 .filter(|entry| !entry.status.staging().is_fully_staged())
3731 .map(|entry| entry.repo_path)
3732 .collect();
3733 self.stage_entries(to_stage, cx)
3734 }
3735
3736 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3737 let to_unstage = self
3738 .cached_status()
3739 .filter(|entry| entry.status.staging().has_staged())
3740 .map(|entry| entry.repo_path)
3741 .collect();
3742 self.unstage_entries(to_unstage, cx)
3743 }
3744
3745 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3746 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3747
3748 self.stash_entries(to_stash, cx)
3749 }
3750
3751 pub fn stash_entries(
3752 &mut self,
3753 entries: Vec<RepoPath>,
3754 cx: &mut Context<Self>,
3755 ) -> Task<anyhow::Result<()>> {
3756 let id = self.id;
3757
3758 cx.spawn(async move |this, cx| {
3759 this.update(cx, |this, _| {
3760 this.send_job(None, move |git_repo, _cx| async move {
3761 match git_repo {
3762 RepositoryState::Local {
3763 backend,
3764 environment,
3765 ..
3766 } => backend.stash_paths(entries, environment).await,
3767 RepositoryState::Remote { project_id, client } => {
3768 client
3769 .request(proto::Stash {
3770 project_id: project_id.0,
3771 repository_id: id.to_proto(),
3772 paths: entries
3773 .into_iter()
3774 .map(|repo_path| repo_path.to_proto())
3775 .collect(),
3776 })
3777 .await
3778 .context("sending stash request")?;
3779 Ok(())
3780 }
3781 }
3782 })
3783 })?
3784 .await??;
3785 Ok(())
3786 })
3787 }
3788
3789 pub fn stash_pop(
3790 &mut self,
3791 index: Option<usize>,
3792 cx: &mut Context<Self>,
3793 ) -> Task<anyhow::Result<()>> {
3794 let id = self.id;
3795 cx.spawn(async move |this, cx| {
3796 this.update(cx, |this, _| {
3797 this.send_job(None, move |git_repo, _cx| async move {
3798 match git_repo {
3799 RepositoryState::Local {
3800 backend,
3801 environment,
3802 ..
3803 } => backend.stash_pop(index, environment).await,
3804 RepositoryState::Remote { project_id, client } => {
3805 client
3806 .request(proto::StashPop {
3807 project_id: project_id.0,
3808 repository_id: id.to_proto(),
3809 stash_index: index.map(|i| i as u64),
3810 })
3811 .await
3812 .context("sending stash pop request")?;
3813 Ok(())
3814 }
3815 }
3816 })
3817 })?
3818 .await??;
3819 Ok(())
3820 })
3821 }
3822
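/// Applies a stash entry without removing it from the stash (the latest entry when `index` is `None`).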
3823 pub fn stash_apply(
3824 &mut self,
3825 index: Option<usize>,
3826 cx: &mut Context<Self>,
3827 ) -> Task<anyhow::Result<()>> {
3828 let id = self.id;
3829 cx.spawn(async move |this, cx| {
3830 this.update(cx, |this, _| {
3831 this.send_job(None, move |git_repo, _cx| async move {
3832 match git_repo {
3833 RepositoryState::Local {
3834 backend,
3835 environment,
3836 ..
3837 } => backend.stash_apply(index, environment).await,
3838 RepositoryState::Remote { project_id, client } => {
3839 client
3840 .request(proto::StashApply {
3841 project_id: project_id.0,
3842 repository_id: id.to_proto(),
3843 stash_index: index.map(|i| i as u64),
3844 })
3845 .await
3846 .context("sending stash apply request")?;
3847 Ok(())
3848 }
3849 }
3850 })
3851 })?
3852 .await??;
3853 Ok(())
3854 })
3855 }
3856
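/// Drops a stash entry (the latest when `index` is `None`). For local repositories,
/// the cached stash entries are refreshed on success and the updated snapshot is
/// sent to any downstream clients.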
3857 pub fn stash_drop(
3858 &mut self,
3859 index: Option<usize>,
3860 cx: &mut Context<Self>,
3861 ) -> oneshot::Receiver<anyhow::Result<()>> {
3862 let id = self.id;
3863 let updates_tx = self
3864 .git_store()
3865 .and_then(|git_store| match &git_store.read(cx).state {
3866 GitStoreState::Local { downstream, .. } => downstream
3867 .as_ref()
3868 .map(|downstream| downstream.updates_tx.clone()),
3869 _ => None,
3870 });
3871 let this = cx.weak_entity();
3872 self.send_job(None, move |git_repo, mut cx| async move {
3873 match git_repo {
3874 RepositoryState::Local {
3875 backend,
3876 environment,
3877 ..
3878 } => {
3879 let result = backend.stash_drop(index, environment).await;
3880 if result.is_ok()
3881 && let Ok(stash_entries) = backend.stash_entries().await
3882 {
3883 let snapshot = this.update(&mut cx, |this, cx| {
3884 this.snapshot.stash_entries = stash_entries;
3885 let snapshot = this.snapshot.clone();
3886 cx.emit(RepositoryEvent::Updated {
3887 full_scan: false,
3888 new_instance: false,
3889 });
3890 snapshot
3891 })?;
3892 if let Some(updates_tx) = updates_tx {
3893 updates_tx
3894 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3895 .ok();
3896 }
3897 }
3898
3899 result
3900 }
3901 RepositoryState::Remote { project_id, client } => {
3902 client
3903 .request(proto::StashDrop {
3904 project_id: project_id.0,
3905 repository_id: id.to_proto(),
3906 stash_index: index.map(|i| i as u64),
3907 })
3908 .await
3909 .context("sending stash drop request")?;
3910 Ok(())
3911 }
3912 }
3913 })
3914 }
3915
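/// Creates a commit with the given message. `name_and_email` overrides the author
/// when provided, and `options` carries the amend and signoff flags.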
3916 pub fn commit(
3917 &mut self,
3918 message: SharedString,
3919 name_and_email: Option<(SharedString, SharedString)>,
3920 options: CommitOptions,
3921 _cx: &mut App,
3922 ) -> oneshot::Receiver<Result<()>> {
3923 let id = self.id;
3924
3925 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3926 match git_repo {
3927 RepositoryState::Local {
3928 backend,
3929 environment,
3930 ..
3931 } => {
3932 backend
3933 .commit(message, name_and_email, options, environment)
3934 .await
3935 }
3936 RepositoryState::Remote { project_id, client } => {
3937 let (name, email) = name_and_email.unzip();
3938 client
3939 .request(proto::Commit {
3940 project_id: project_id.0,
3941 repository_id: id.to_proto(),
3942 message: String::from(message),
3943 name: name.map(String::from),
3944 email: email.map(String::from),
3945 options: Some(proto::commit::CommitOptions {
3946 amend: options.amend,
3947 signoff: options.signoff,
3948 }),
3949 })
3950 .await
3951 .context("sending commit request")?;
3952
3953 Ok(())
3954 }
3955 }
3956 })
3957 }
3958
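/// Fetches from the remote described by `fetch_options`, using `askpass` to answer
/// any credential prompts.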
3959 pub fn fetch(
3960 &mut self,
3961 fetch_options: FetchOptions,
3962 askpass: AskPassDelegate,
3963 _cx: &mut App,
3964 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3965 let askpass_delegates = self.askpass_delegates.clone();
3966 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3967 let id = self.id;
3968
3969 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3970 match git_repo {
3971 RepositoryState::Local {
3972 backend,
3973 environment,
3974 ..
3975 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3976 RepositoryState::Remote { project_id, client } => {
3977 askpass_delegates.lock().insert(askpass_id, askpass);
3978 let _defer = util::defer(|| {
3979 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3980 debug_assert!(askpass_delegate.is_some());
3981 });
3982
3983 let response = client
3984 .request(proto::Fetch {
3985 project_id: project_id.0,
3986 repository_id: id.to_proto(),
3987 askpass_id,
3988 remote: fetch_options.to_proto(),
3989 })
3990 .await
3991 .context("sending fetch request")?;
3992
3993 Ok(RemoteCommandOutput {
3994 stdout: response.stdout,
3995 stderr: response.stderr,
3996 })
3997 }
3998 }
3999 })
4000 }
4001
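/// Pushes `branch` to `remote`. For local repositories, the cached head branch is
/// refreshed on success and the updated snapshot is sent to any downstream clients.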
4002 pub fn push(
4003 &mut self,
4004 branch: SharedString,
4005 remote: SharedString,
4006 options: Option<PushOptions>,
4007 askpass: AskPassDelegate,
4008 cx: &mut Context<Self>,
4009 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4010 let askpass_delegates = self.askpass_delegates.clone();
4011 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4012 let id = self.id;
4013
4014 let args = options
4015 .map(|option| match option {
4016 PushOptions::SetUpstream => " --set-upstream",
4017 PushOptions::Force => " --force-with-lease",
4018 })
4019 .unwrap_or("");
4020
4021 let updates_tx = self
4022 .git_store()
4023 .and_then(|git_store| match &git_store.read(cx).state {
4024 GitStoreState::Local { downstream, .. } => downstream
4025 .as_ref()
4026 .map(|downstream| downstream.updates_tx.clone()),
4027 _ => None,
4028 });
4029
4030 let this = cx.weak_entity();
4031 self.send_job(
4032 Some(format!("git push{} {} {}", args, remote, branch).into()),
4033 move |git_repo, mut cx| async move {
4034 match git_repo {
4035 RepositoryState::Local {
4036 backend,
4037 environment,
4038 ..
4039 } => {
4040 let result = backend
4041 .push(
4042 branch.to_string(),
4043 remote.to_string(),
4044 options,
4045 askpass,
4046 environment.clone(),
4047 cx.clone(),
4048 )
4049 .await;
4050 if result.is_ok() {
4051 let branches = backend.branches().await?;
4052 let branch = branches.into_iter().find(|branch| branch.is_head);
4053 log::info!("head branch after push is {branch:?}");
4054 let snapshot = this.update(&mut cx, |this, cx| {
4055 this.snapshot.branch = branch;
4056 let snapshot = this.snapshot.clone();
4057 cx.emit(RepositoryEvent::Updated {
4058 full_scan: false,
4059 new_instance: false,
4060 });
4061 snapshot
4062 })?;
4063 if let Some(updates_tx) = updates_tx {
4064 updates_tx
4065 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4066 .ok();
4067 }
4068 }
4069 result
4070 }
4071 RepositoryState::Remote { project_id, client } => {
4072 askpass_delegates.lock().insert(askpass_id, askpass);
4073 let _defer = util::defer(|| {
4074 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4075 debug_assert!(askpass_delegate.is_some());
4076 });
4077 let response = client
4078 .request(proto::Push {
4079 project_id: project_id.0,
4080 repository_id: id.to_proto(),
4081 askpass_id,
4082 branch_name: branch.to_string(),
4083 remote_name: remote.to_string(),
4084 options: options.map(|options| match options {
4085 PushOptions::Force => proto::push::PushOptions::Force,
4086 PushOptions::SetUpstream => {
4087 proto::push::PushOptions::SetUpstream
4088 }
4089 }
4090 as i32),
4091 })
4092 .await
4093 .context("sending push request")?;
4094
4095 Ok(RemoteCommandOutput {
4096 stdout: response.stdout,
4097 stderr: response.stderr,
4098 })
4099 }
4100 }
4101 },
4102 )
4103 }
4104
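/// Pulls `branch` from `remote`, using `askpass` to answer any credential prompts.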
4105 pub fn pull(
4106 &mut self,
4107 branch: SharedString,
4108 remote: SharedString,
4109 askpass: AskPassDelegate,
4110 _cx: &mut App,
4111 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4112 let askpass_delegates = self.askpass_delegates.clone();
4113 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4114 let id = self.id;
4115
4116 self.send_job(
4117 Some(format!("git pull {} {}", remote, branch).into()),
4118 move |git_repo, cx| async move {
4119 match git_repo {
4120 RepositoryState::Local {
4121 backend,
4122 environment,
4123 ..
4124 } => {
4125 backend
4126 .pull(
4127 branch.to_string(),
4128 remote.to_string(),
4129 askpass,
4130 environment.clone(),
4131 cx,
4132 )
4133 .await
4134 }
4135 RepositoryState::Remote { project_id, client } => {
4136 askpass_delegates.lock().insert(askpass_id, askpass);
4137 let _defer = util::defer(|| {
4138 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4139 debug_assert!(askpass_delegate.is_some());
4140 });
4141 let response = client
4142 .request(proto::Pull {
4143 project_id: project_id.0,
4144 repository_id: id.to_proto(),
4145 askpass_id,
4146 branch_name: branch.to_string(),
4147 remote_name: remote.to_string(),
4148 })
4149 .await
4150 .context("sending pull request")?;
4151
4152 Ok(RemoteCommandOutput {
4153 stdout: response.stdout,
4154 stderr: response.stderr,
4155 })
4156 }
4157 }
4158 },
4159 )
4160 }
4161
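/// Enqueues a keyed job that writes `content` to the index entry for `path`. When a
/// `hunk_staging_operation_count` is supplied, it is recorded on the buffer's diff
/// state after the write so that later index reads can be ordered against hunk
/// staging operations.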
4162 fn spawn_set_index_text_job(
4163 &mut self,
4164 path: RepoPath,
4165 content: Option<String>,
4166 hunk_staging_operation_count: Option<usize>,
4167 cx: &mut Context<Self>,
4168 ) -> oneshot::Receiver<anyhow::Result<()>> {
4169 let id = self.id;
4170 let this = cx.weak_entity();
4171 let git_store = self.git_store.clone();
4172 self.send_keyed_job(
4173 Some(GitJobKey::WriteIndex(path.clone())),
4174 None,
4175 move |git_repo, mut cx| async move {
4176 log::debug!(
4177 "start updating index text for buffer {}",
4178 path.as_unix_str()
4179 );
4180 match git_repo {
4181 RepositoryState::Local {
4182 backend,
4183 environment,
4184 ..
4185 } => {
4186 backend
4187 .set_index_text(path.clone(), content, environment.clone())
4188 .await?;
4189 }
4190 RepositoryState::Remote { project_id, client } => {
4191 client
4192 .request(proto::SetIndexText {
4193 project_id: project_id.0,
4194 repository_id: id.to_proto(),
4195 path: path.to_proto(),
4196 text: content,
4197 })
4198 .await?;
4199 }
4200 }
4201 log::debug!(
4202 "finish updating index text for buffer {}",
4203 path.as_unix_str()
4204 );
4205
4206 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4207 let project_path = this
4208 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4209 .ok()
4210 .flatten();
4211 git_store.update(&mut cx, |git_store, cx| {
4212 let buffer_id = git_store
4213 .buffer_store
4214 .read(cx)
4215 .get_by_path(&project_path?)?
4216 .read(cx)
4217 .remote_id();
4218 let diff_state = git_store.diffs.get(&buffer_id)?;
4219 diff_state.update(cx, |diff_state, _| {
4220 diff_state.hunk_staging_operation_count_as_of_write =
4221 hunk_staging_operation_count;
4222 });
4223 Some(())
4224 })?;
4225 }
4226 Ok(())
4227 },
4228 )
4229 }
4230
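/// Lists the repository's remotes; `branch_name` is forwarded to the backend or to
/// the remote host.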
4231 pub fn get_remotes(
4232 &mut self,
4233 branch_name: Option<String>,
4234 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4235 let id = self.id;
4236 self.send_job(None, move |repo, _cx| async move {
4237 match repo {
4238 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4239 RepositoryState::Remote { project_id, client } => {
4240 let response = client
4241 .request(proto::GetRemotes {
4242 project_id: project_id.0,
4243 repository_id: id.to_proto(),
4244 branch_name,
4245 })
4246 .await?;
4247
4248 let remotes = response
4249 .remotes
4250 .into_iter()
4251 .map(|remote| git::repository::Remote {
4252 name: remote.name.into(),
4253 })
4254 .collect();
4255
4256 Ok(remotes)
4257 }
4258 }
4259 })
4260 }
4261
4262 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4263 let id = self.id;
4264 self.send_job(None, move |repo, _| async move {
4265 match repo {
4266 RepositoryState::Local { backend, .. } => backend.branches().await,
4267 RepositoryState::Remote { project_id, client } => {
4268 let response = client
4269 .request(proto::GitGetBranches {
4270 project_id: project_id.0,
4271 repository_id: id.to_proto(),
4272 })
4273 .await?;
4274
4275 let branches = response
4276 .branches
4277 .into_iter()
4278 .map(|branch| proto_to_branch(&branch))
4279 .collect();
4280
4281 Ok(branches)
4282 }
4283 }
4284 })
4285 }
4286
4287 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4288 let id = self.id;
4289 self.send_job(None, move |repo, _| async move {
4290 match repo {
4291 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4292 RepositoryState::Remote { project_id, client } => {
4293 let response = client
4294 .request(proto::GetDefaultBranch {
4295 project_id: project_id.0,
4296 repository_id: id.to_proto(),
4297 })
4298 .await?;
4299
4300 anyhow::Ok(response.branch.map(SharedString::from))
4301 }
4302 }
4303 })
4304 }
4305
4306 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4307 let id = self.id;
4308 self.send_job(None, move |repo, _cx| async move {
4309 match repo {
4310 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4311 RepositoryState::Remote { project_id, client } => {
4312 let response = client
4313 .request(proto::GitDiff {
4314 project_id: project_id.0,
4315 repository_id: id.to_proto(),
4316 diff_type: match diff_type {
4317 DiffType::HeadToIndex => {
4318 proto::git_diff::DiffType::HeadToIndex.into()
4319 }
4320 DiffType::HeadToWorktree => {
4321 proto::git_diff::DiffType::HeadToWorktree.into()
4322 }
4323 },
4324 })
4325 .await?;
4326
4327 Ok(response.diff)
4328 }
4329 }
4330 })
4331 }
4332
4333 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4334 let id = self.id;
4335 self.send_job(
4336 Some(format!("git switch -c {branch_name}").into()),
4337 move |repo, _cx| async move {
4338 match repo {
4339 RepositoryState::Local { backend, .. } => {
4340 backend.create_branch(branch_name).await
4341 }
4342 RepositoryState::Remote { project_id, client } => {
4343 client
4344 .request(proto::GitCreateBranch {
4345 project_id: project_id.0,
4346 repository_id: id.to_proto(),
4347 branch_name,
4348 })
4349 .await?;
4350
4351 Ok(())
4352 }
4353 }
4354 },
4355 )
4356 }
4357
4358 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4359 let id = self.id;
4360 self.send_job(
4361 Some(format!("git switch {branch_name}").into()),
4362 move |repo, _cx| async move {
4363 match repo {
4364 RepositoryState::Local { backend, .. } => {
4365 backend.change_branch(branch_name).await
4366 }
4367 RepositoryState::Remote { project_id, client } => {
4368 client
4369 .request(proto::GitChangeBranch {
4370 project_id: project_id.0,
4371 repository_id: id.to_proto(),
4372 branch_name,
4373 })
4374 .await?;
4375
4376 Ok(())
4377 }
4378 }
4379 },
4380 )
4381 }
4382
4383 pub fn rename_branch(
4384 &mut self,
4385 branch: String,
4386 new_name: String,
4387 ) -> oneshot::Receiver<Result<()>> {
4388 let id = self.id;
4389 self.send_job(
4390 Some(format!("git branch -m {branch} {new_name}").into()),
4391 move |repo, _cx| async move {
4392 match repo {
4393 RepositoryState::Local { backend, .. } => {
4394 backend.rename_branch(branch, new_name).await
4395 }
4396 RepositoryState::Remote { project_id, client } => {
4397 client
4398 .request(proto::GitRenameBranch {
4399 project_id: project_id.0,
4400 repository_id: id.to_proto(),
4401 branch,
4402 new_name,
4403 })
4404 .await?;
4405
4406 Ok(())
4407 }
4408 }
4409 },
4410 )
4411 }
4412
4413 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4414 let id = self.id;
4415 self.send_job(None, move |repo, _cx| async move {
4416 match repo {
4417 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4418 RepositoryState::Remote { project_id, client } => {
4419 let response = client
4420 .request(proto::CheckForPushedCommits {
4421 project_id: project_id.0,
4422 repository_id: id.to_proto(),
4423 })
4424 .await?;
4425
4426 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4427
4428 Ok(branches)
4429 }
4430 }
4431 })
4432 }
4433
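/// Captures a checkpoint of the repository's current state. Only implemented for
/// local repositories.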
4434 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4435 self.send_job(None, |repo, _cx| async move {
4436 match repo {
4437 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4438 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4439 }
4440 })
4441 }
4442
4443 pub fn restore_checkpoint(
4444 &mut self,
4445 checkpoint: GitRepositoryCheckpoint,
4446 ) -> oneshot::Receiver<Result<()>> {
4447 self.send_job(None, move |repo, _cx| async move {
4448 match repo {
4449 RepositoryState::Local { backend, .. } => {
4450 backend.restore_checkpoint(checkpoint).await
4451 }
4452 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4453 }
4454 })
4455 }
4456
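/// Applies an `UpdateRepository` message received from the host, replacing the
/// branch, head commit, merge state, and stash entries, and patching the cached
/// status entries with the removed and updated paths it carries.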
4457 pub(crate) fn apply_remote_update(
4458 &mut self,
4459 update: proto::UpdateRepository,
4460 is_new: bool,
4461 cx: &mut Context<Self>,
4462 ) -> Result<()> {
4463 let conflicted_paths = TreeSet::from_ordered_entries(
4464 update
4465 .current_merge_conflicts
4466 .into_iter()
4467 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4468 );
4469 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4470 self.snapshot.head_commit = update
4471 .head_commit_details
4472 .as_ref()
4473 .map(proto_to_commit_details);
4474
4475 self.snapshot.merge.conflicted_paths = conflicted_paths;
4476 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4477 self.snapshot.stash_entries = GitStash {
4478 entries: update
4479 .stash_entries
4480 .iter()
4481 .filter_map(|entry| proto_to_stash(entry).ok())
4482 .collect(),
4483 };
4484
4485 let edits = update
4486 .removed_statuses
4487 .into_iter()
4488 .filter_map(|path| {
4489 Some(sum_tree::Edit::Remove(PathKey(
4490 RelPath::from_proto(&path).log_err()?,
4491 )))
4492 })
4493 .chain(
4494 update
4495 .updated_statuses
4496 .into_iter()
4497 .filter_map(|updated_status| {
4498 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4499 }),
4500 )
4501 .collect::<Vec<_>>();
4502 self.snapshot.statuses_by_path.edit(edits, ());
4503 if update.is_last_update {
4504 self.snapshot.scan_id = update.scan_id;
4505 }
4506 cx.emit(RepositoryEvent::Updated {
4507 full_scan: true,
4508 new_instance: is_new,
4509 });
4510 Ok(())
4511 }
4512
4513 pub fn compare_checkpoints(
4514 &mut self,
4515 left: GitRepositoryCheckpoint,
4516 right: GitRepositoryCheckpoint,
4517 ) -> oneshot::Receiver<Result<bool>> {
4518 self.send_job(None, move |repo, _cx| async move {
4519 match repo {
4520 RepositoryState::Local { backend, .. } => {
4521 backend.compare_checkpoints(left, right).await
4522 }
4523 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4524 }
4525 })
4526 }
4527
4528 pub fn diff_checkpoints(
4529 &mut self,
4530 base_checkpoint: GitRepositoryCheckpoint,
4531 target_checkpoint: GitRepositoryCheckpoint,
4532 ) -> oneshot::Receiver<Result<String>> {
4533 self.send_job(None, move |repo, _cx| async move {
4534 match repo {
4535 RepositoryState::Local { backend, .. } => {
4536 backend
4537 .diff_checkpoints(base_checkpoint, target_checkpoint)
4538 .await
4539 }
4540 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4541 }
4542 })
4543 }
4544
4545 fn schedule_scan(
4546 &mut self,
4547 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4548 cx: &mut Context<Self>,
4549 ) {
4550 let this = cx.weak_entity();
4551 let _ = self.send_keyed_job(
4552 Some(GitJobKey::ReloadGitState),
4553 None,
4554 |state, mut cx| async move {
4555 log::debug!("run scheduled git status scan");
4556
4557 let Some(this) = this.upgrade() else {
4558 return Ok(());
4559 };
4560 let RepositoryState::Local { backend, .. } = state else {
4561 bail!("not a local repository")
4562 };
4563 let (snapshot, events) = this
4564 .update(&mut cx, |this, _| {
4565 this.paths_needing_status_update.clear();
4566 compute_snapshot(
4567 this.id,
4568 this.work_directory_abs_path.clone(),
4569 this.snapshot.clone(),
4570 backend.clone(),
4571 )
4572 })?
4573 .await?;
4574 this.update(&mut cx, |this, cx| {
4575 this.snapshot = snapshot.clone();
4576 for event in events {
4577 cx.emit(event);
4578 }
4579 })?;
4580 if let Some(updates_tx) = updates_tx {
4581 updates_tx
4582 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4583 .ok();
4584 }
4585 Ok(())
4586 },
4587 );
4588 }
4589
4590 fn spawn_local_git_worker(
4591 work_directory_abs_path: Arc<Path>,
4592 dot_git_abs_path: Arc<Path>,
4593 _repository_dir_abs_path: Arc<Path>,
4594 _common_dir_abs_path: Arc<Path>,
4595 project_environment: WeakEntity<ProjectEnvironment>,
4596 fs: Arc<dyn Fs>,
4597 cx: &mut Context<Self>,
4598 ) -> mpsc::UnboundedSender<GitJob> {
4599 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4600
4601 cx.spawn(async move |_, cx| {
4602 let environment = project_environment
4603 .upgrade()
4604 .context("missing project environment")?
4605 .update(cx, |project_environment, cx| {
4606 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4607 })?
4608 .await
4609 .unwrap_or_else(|| {
4610 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4611 HashMap::default()
4612 });
4613 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4614 let backend = cx
4615 .background_spawn(async move {
4616 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4617 .or_else(|| which::which("git").ok());
4618 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4619 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4620 })
4621 .await?;
4622
4623 if let Some(git_hosting_provider_registry) =
4624 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4625 {
4626 git_hosting_providers::register_additional_providers(
4627 git_hosting_provider_registry,
4628 backend.clone(),
4629 );
4630 }
4631
4632 let state = RepositoryState::Local {
4633 backend,
4634 environment: Arc::new(environment),
4635 };
4636 let mut jobs = VecDeque::new();
4637 loop {
4638 while let Ok(Some(next_job)) = job_rx.try_next() {
4639 jobs.push_back(next_job);
4640 }
4641
4642 if let Some(job) = jobs.pop_front() {
4643 if let Some(current_key) = &job.key
4644 && jobs
4645 .iter()
4646 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4647 {
4648 continue;
4649 }
4650 (job.job)(state.clone(), cx).await;
4651 } else if let Some(job) = job_rx.next().await {
4652 jobs.push_back(job);
4653 } else {
4654 break;
4655 }
4656 }
4657 anyhow::Ok(())
4658 })
4659 .detach_and_log_err(cx);
4660
4661 job_tx
4662 }
4663
4664 fn spawn_remote_git_worker(
4665 project_id: ProjectId,
4666 client: AnyProtoClient,
4667 cx: &mut Context<Self>,
4668 ) -> mpsc::UnboundedSender<GitJob> {
4669 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4670
4671 cx.spawn(async move |_, cx| {
4672 let state = RepositoryState::Remote { project_id, client };
4673 let mut jobs = VecDeque::new();
4674 loop {
4675 while let Ok(Some(next_job)) = job_rx.try_next() {
4676 jobs.push_back(next_job);
4677 }
4678
4679 if let Some(job) = jobs.pop_front() {
4680 if let Some(current_key) = &job.key
4681 && jobs
4682 .iter()
4683 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4684 {
4685 continue;
4686 }
4687 (job.job)(state.clone(), cx).await;
4688 } else if let Some(job) = job_rx.next().await {
4689 jobs.push_back(job);
4690 } else {
4691 break;
4692 }
4693 }
4694 anyhow::Ok(())
4695 })
4696 .detach_and_log_err(cx);
4697
4698 job_tx
4699 }
4700
4701 fn load_staged_text(
4702 &mut self,
4703 buffer_id: BufferId,
4704 repo_path: RepoPath,
4705 cx: &App,
4706 ) -> Task<Result<Option<String>>> {
4707 let rx = self.send_job(None, move |state, _| async move {
4708 match state {
4709 RepositoryState::Local { backend, .. } => {
4710 anyhow::Ok(backend.load_index_text(repo_path).await)
4711 }
4712 RepositoryState::Remote { project_id, client } => {
4713 let response = client
4714 .request(proto::OpenUnstagedDiff {
4715 project_id: project_id.to_proto(),
4716 buffer_id: buffer_id.to_proto(),
4717 })
4718 .await?;
4719 Ok(response.staged_text)
4720 }
4721 }
4722 });
4723 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4724 }
4725
4726 fn load_committed_text(
4727 &mut self,
4728 buffer_id: BufferId,
4729 repo_path: RepoPath,
4730 cx: &App,
4731 ) -> Task<Result<DiffBasesChange>> {
4732 let rx = self.send_job(None, move |state, _| async move {
4733 match state {
4734 RepositoryState::Local { backend, .. } => {
4735 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4736 let staged_text = backend.load_index_text(repo_path).await;
4737 let diff_bases_change = if committed_text == staged_text {
4738 DiffBasesChange::SetBoth(committed_text)
4739 } else {
4740 DiffBasesChange::SetEach {
4741 index: staged_text,
4742 head: committed_text,
4743 }
4744 };
4745 anyhow::Ok(diff_bases_change)
4746 }
4747 RepositoryState::Remote { project_id, client } => {
4748 use proto::open_uncommitted_diff_response::Mode;
4749
4750 let response = client
4751 .request(proto::OpenUncommittedDiff {
4752 project_id: project_id.to_proto(),
4753 buffer_id: buffer_id.to_proto(),
4754 })
4755 .await?;
4756 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4757 let bases = match mode {
4758 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4759 Mode::IndexAndHead => DiffBasesChange::SetEach {
4760 head: response.committed_text,
4761 index: response.staged_text,
4762 },
4763 };
4764 Ok(bases)
4765 }
4766 }
4767 });
4768
4769 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4770 }
4771
4772 fn paths_changed(
4773 &mut self,
4774 paths: Vec<RepoPath>,
4775 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4776 cx: &mut Context<Self>,
4777 ) {
4778 self.paths_needing_status_update.extend(paths);
4779
4780 let this = cx.weak_entity();
4781 let _ = self.send_keyed_job(
4782 Some(GitJobKey::RefreshStatuses),
4783 None,
4784 |state, mut cx| async move {
4785 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4786 (
4787 this.snapshot.clone(),
4788 mem::take(&mut this.paths_needing_status_update),
4789 )
4790 })?;
4791 let RepositoryState::Local { backend, .. } = state else {
4792 bail!("not a local repository")
4793 };
4794
4795 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4796 if paths.is_empty() {
4797 return Ok(());
4798 }
4799 let statuses = backend.status(&paths).await?;
4800 let stash_entries = backend.stash_entries().await?;
4801
4802 let changed_path_statuses = cx
4803 .background_spawn(async move {
4804 let mut changed_path_statuses = Vec::new();
4805 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4806 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4807
4808 for (repo_path, status) in &*statuses.entries {
4809 changed_paths.remove(repo_path);
4810 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4811 && cursor.item().is_some_and(|entry| entry.status == *status)
4812 {
4813 continue;
4814 }
4815
4816 changed_path_statuses.push(Edit::Insert(StatusEntry {
4817 repo_path: repo_path.clone(),
4818 status: *status,
4819 }));
4820 }
4821 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4822 for path in changed_paths.into_iter() {
4823 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4824 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4825 }
4826 }
4827 changed_path_statuses
4828 })
4829 .await;
4830
4831 this.update(&mut cx, |this, cx| {
4832 let needs_update = !changed_path_statuses.is_empty()
4833 || this.snapshot.stash_entries != stash_entries;
4834 this.snapshot.stash_entries = stash_entries;
4835 if !changed_path_statuses.is_empty() {
4836 this.snapshot
4837 .statuses_by_path
4838 .edit(changed_path_statuses, ());
4839 this.snapshot.scan_id += 1;
4840 }
4841
4842 if needs_update {
4843 if let Some(updates_tx) = updates_tx {
4844 updates_tx
4845 .unbounded_send(DownstreamUpdate::UpdateRepository(
4846 this.snapshot.clone(),
4847 ))
4848 .ok();
4849 }
4850
4851 cx.emit(RepositoryEvent::Updated {
4852 full_scan: false,
4853 new_instance: false,
4854 });
4855 }
4856 })
4857 },
4858 );
4859 }
4860
4861 /// Returns the currently running git command, if any, along with when it started.
4862 pub fn current_job(&self) -> Option<JobInfo> {
4863 self.active_jobs.values().next().cloned()
4864 }
4865
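/// Enqueues a no-op job; the returned receiver resolves once every job queued
/// before it has completed.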
4866 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4867 self.send_job(None, |_, _| async {})
4868 }
4869}
4870
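/// Builds a permalink into a crate's upstream repository for a file that lives in the
/// local Cargo registry source, using the `.cargo_vcs_info.json` and `Cargo.toml`
/// that are published alongside the crate.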
4871fn get_permalink_in_rust_registry_src(
4872 provider_registry: Arc<GitHostingProviderRegistry>,
4873 path: PathBuf,
4874 selection: Range<u32>,
4875) -> Result<url::Url> {
4876 #[derive(Deserialize)]
4877 struct CargoVcsGit {
4878 sha1: String,
4879 }
4880
4881 #[derive(Deserialize)]
4882 struct CargoVcsInfo {
4883 git: CargoVcsGit,
4884 path_in_vcs: String,
4885 }
4886
4887 #[derive(Deserialize)]
4888 struct CargoPackage {
4889 repository: String,
4890 }
4891
4892 #[derive(Deserialize)]
4893 struct CargoToml {
4894 package: CargoPackage,
4895 }
4896
4897 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4898 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4899 Some((dir, json))
4900 }) else {
4901 bail!("No .cargo_vcs_info.json found in parent directories")
4902 };
4903 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4904 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4905 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4906 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4907 .context("parsing package.repository field of manifest")?;
4908 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4909 let permalink = provider.build_permalink(
4910 remote,
4911 BuildPermalinkParams {
4912 sha: &cargo_vcs_info.git.sha1,
4913 path: &path.to_string_lossy(),
4914 selection: Some(selection),
4915 },
4916 );
4917 Ok(permalink)
4918}
4919
4920fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4921 let Some(blame) = blame else {
4922 return proto::BlameBufferResponse {
4923 blame_response: None,
4924 };
4925 };
4926
4927 let entries = blame
4928 .entries
4929 .into_iter()
4930 .map(|entry| proto::BlameEntry {
4931 sha: entry.sha.as_bytes().into(),
4932 start_line: entry.range.start,
4933 end_line: entry.range.end,
4934 original_line_number: entry.original_line_number,
4935 author: entry.author,
4936 author_mail: entry.author_mail,
4937 author_time: entry.author_time,
4938 author_tz: entry.author_tz,
4939 committer: entry.committer_name,
4940 committer_mail: entry.committer_email,
4941 committer_time: entry.committer_time,
4942 committer_tz: entry.committer_tz,
4943 summary: entry.summary,
4944 previous: entry.previous,
4945 filename: entry.filename,
4946 })
4947 .collect::<Vec<_>>();
4948
4949 let messages = blame
4950 .messages
4951 .into_iter()
4952 .map(|(oid, message)| proto::CommitMessage {
4953 oid: oid.as_bytes().into(),
4954 message,
4955 })
4956 .collect::<Vec<_>>();
4957
4958 proto::BlameBufferResponse {
4959 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4960 entries,
4961 messages,
4962 remote_url: blame.remote_url,
4963 }),
4964 }
4965}
4966
4967fn deserialize_blame_buffer_response(
4968 response: proto::BlameBufferResponse,
4969) -> Option<git::blame::Blame> {
4970 let response = response.blame_response?;
4971 let entries = response
4972 .entries
4973 .into_iter()
4974 .filter_map(|entry| {
4975 Some(git::blame::BlameEntry {
4976 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4977 range: entry.start_line..entry.end_line,
4978 original_line_number: entry.original_line_number,
4979 committer_name: entry.committer,
4980 committer_time: entry.committer_time,
4981 committer_tz: entry.committer_tz,
4982 committer_email: entry.committer_mail,
4983 author: entry.author,
4984 author_mail: entry.author_mail,
4985 author_time: entry.author_time,
4986 author_tz: entry.author_tz,
4987 summary: entry.summary,
4988 previous: entry.previous,
4989 filename: entry.filename,
4990 })
4991 })
4992 .collect::<Vec<_>>();
4993
4994 let messages = response
4995 .messages
4996 .into_iter()
4997 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4998 .collect::<HashMap<_, _>>();
4999
5000 Some(Blame {
5001 entries,
5002 messages,
5003 remote_url: response.remote_url,
5004 })
5005}
5006
5007fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5008 proto::Branch {
5009 is_head: branch.is_head,
5010 ref_name: branch.ref_name.to_string(),
5011 unix_timestamp: branch
5012 .most_recent_commit
5013 .as_ref()
5014 .map(|commit| commit.commit_timestamp as u64),
5015 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5016 ref_name: upstream.ref_name.to_string(),
5017 tracking: upstream
5018 .tracking
5019 .status()
5020 .map(|upstream| proto::UpstreamTracking {
5021 ahead: upstream.ahead as u64,
5022 behind: upstream.behind as u64,
5023 }),
5024 }),
5025 most_recent_commit: branch
5026 .most_recent_commit
5027 .as_ref()
5028 .map(|commit| proto::CommitSummary {
5029 sha: commit.sha.to_string(),
5030 subject: commit.subject.to_string(),
5031 commit_timestamp: commit.commit_timestamp,
5032 author_name: commit.author_name.to_string(),
5033 }),
5034 }
5035}
5036
5037fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5038 git::repository::Branch {
5039 is_head: proto.is_head,
5040 ref_name: proto.ref_name.clone().into(),
5041 upstream: proto
5042 .upstream
5043 .as_ref()
5044 .map(|upstream| git::repository::Upstream {
5045 ref_name: upstream.ref_name.to_string().into(),
5046 tracking: upstream
5047 .tracking
5048 .as_ref()
5049 .map(|tracking| {
5050 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5051 ahead: tracking.ahead as u32,
5052 behind: tracking.behind as u32,
5053 })
5054 })
5055 .unwrap_or(git::repository::UpstreamTracking::Gone),
5056 }),
5057 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5058 git::repository::CommitSummary {
5059 sha: commit.sha.to_string().into(),
5060 subject: commit.subject.to_string().into(),
5061 commit_timestamp: commit.commit_timestamp,
5062 author_name: commit.author_name.to_string().into(),
5063 has_parent: true,
5064 }
5065 }),
5066 }
5067}
5068
5069fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5070 proto::GitCommitDetails {
5071 sha: commit.sha.to_string(),
5072 message: commit.message.to_string(),
5073 commit_timestamp: commit.commit_timestamp,
5074 author_email: commit.author_email.to_string(),
5075 author_name: commit.author_name.to_string(),
5076 }
5077}
5078
5079fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5080 CommitDetails {
5081 sha: proto.sha.clone().into(),
5082 message: proto.message.clone().into(),
5083 commit_timestamp: proto.commit_timestamp,
5084 author_email: proto.author_email.clone().into(),
5085 author_name: proto.author_name.clone().into(),
5086 }
5087}
5088
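/// Recomputes the repository snapshot (branch, head commit, statuses, stash entries,
/// and merge details) from the git backend, returning it along with the events that
/// should be emitted to describe what changed relative to `prev_snapshot`.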
5089async fn compute_snapshot(
5090 id: RepositoryId,
5091 work_directory_abs_path: Arc<Path>,
5092 prev_snapshot: RepositorySnapshot,
5093 backend: Arc<dyn GitRepository>,
5094) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5095 let mut events = Vec::new();
5096 let branches = backend.branches().await?;
5097 let branch = branches.into_iter().find(|branch| branch.is_head);
5098 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5099 let stash_entries = backend.stash_entries().await?;
5100 let statuses_by_path = SumTree::from_iter(
5101 statuses
5102 .entries
5103 .iter()
5104 .map(|(repo_path, status)| StatusEntry {
5105 repo_path: repo_path.clone(),
5106 status: *status,
5107 }),
5108 (),
5109 );
5110 let (merge_details, merge_heads_changed) =
5111 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5112 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5113
5114 if merge_heads_changed
5115 || branch != prev_snapshot.branch
5116 || statuses_by_path != prev_snapshot.statuses_by_path
5117 {
5118 events.push(RepositoryEvent::Updated {
5119 full_scan: true,
5120 new_instance: false,
5121 });
5122 }
5123
5124 // Cache merge conflict paths so they don't change from staging/unstaging,
5125 // until the merge heads change (at commit time, etc.).
5126 if merge_heads_changed {
5127 events.push(RepositoryEvent::MergeHeadsChanged);
5128 }
5129
5130 // Useful when `branch` is `None`, as in a detached HEAD state
5131 let head_commit = match backend.head_sha().await {
5132 Some(head_sha) => backend.show(head_sha).await.log_err(),
5133 None => None,
5134 };
5135
5136 // Used by edit prediction data collection
5137 let remote_origin_url = backend.remote_url("origin");
5138 let remote_upstream_url = backend.remote_url("upstream");
5139
5140 let snapshot = RepositorySnapshot {
5141 id,
5142 statuses_by_path,
5143 work_directory_abs_path,
5144 path_style: prev_snapshot.path_style,
5145 scan_id: prev_snapshot.scan_id + 1,
5146 branch,
5147 head_commit,
5148 merge: merge_details,
5149 remote_origin_url,
5150 remote_upstream_url,
5151 stash_entries,
5152 };
5153
5154 Ok((snapshot, events))
5155}
5156
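/// Converts a protobuf file status into a `FileStatus`, falling back to the legacy
/// `simple_status` code when no detailed variant is present.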
5157fn status_from_proto(
5158 simple_status: i32,
5159 status: Option<proto::GitFileStatus>,
5160) -> anyhow::Result<FileStatus> {
5161 use proto::git_file_status::Variant;
5162
5163 let Some(variant) = status.and_then(|status| status.variant) else {
5164 let code = proto::GitStatus::from_i32(simple_status)
5165 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5166 let result = match code {
5167 proto::GitStatus::Added => TrackedStatus {
5168 worktree_status: StatusCode::Added,
5169 index_status: StatusCode::Unmodified,
5170 }
5171 .into(),
5172 proto::GitStatus::Modified => TrackedStatus {
5173 worktree_status: StatusCode::Modified,
5174 index_status: StatusCode::Unmodified,
5175 }
5176 .into(),
5177 proto::GitStatus::Conflict => UnmergedStatus {
5178 first_head: UnmergedStatusCode::Updated,
5179 second_head: UnmergedStatusCode::Updated,
5180 }
5181 .into(),
5182 proto::GitStatus::Deleted => TrackedStatus {
5183 worktree_status: StatusCode::Deleted,
5184 index_status: StatusCode::Unmodified,
5185 }
5186 .into(),
5187 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5188 };
5189 return Ok(result);
5190 };
5191
5192 let result = match variant {
5193 Variant::Untracked(_) => FileStatus::Untracked,
5194 Variant::Ignored(_) => FileStatus::Ignored,
5195 Variant::Unmerged(unmerged) => {
5196 let [first_head, second_head] =
5197 [unmerged.first_head, unmerged.second_head].map(|head| {
5198 let code = proto::GitStatus::from_i32(head)
5199 .with_context(|| format!("Invalid git status code: {head}"))?;
5200 let result = match code {
5201 proto::GitStatus::Added => UnmergedStatusCode::Added,
5202 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5203 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5204 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5205 };
5206 Ok(result)
5207 });
5208 let [first_head, second_head] = [first_head?, second_head?];
5209 UnmergedStatus {
5210 first_head,
5211 second_head,
5212 }
5213 .into()
5214 }
5215 Variant::Tracked(tracked) => {
5216 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5217 .map(|status| {
5218 let code = proto::GitStatus::from_i32(status)
5219 .with_context(|| format!("Invalid git status code: {status}"))?;
5220 let result = match code {
5221 proto::GitStatus::Modified => StatusCode::Modified,
5222 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5223 proto::GitStatus::Added => StatusCode::Added,
5224 proto::GitStatus::Deleted => StatusCode::Deleted,
5225 proto::GitStatus::Renamed => StatusCode::Renamed,
5226 proto::GitStatus::Copied => StatusCode::Copied,
5227 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5228 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5229 };
5230 Ok(result)
5231 });
5232 let [index_status, worktree_status] = [index_status?, worktree_status?];
5233 TrackedStatus {
5234 index_status,
5235 worktree_status,
5236 }
5237 .into()
5238 }
5239 };
5240 Ok(result)
5241}
5242
5243fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5244 use proto::git_file_status::{Tracked, Unmerged, Variant};
5245
5246 let variant = match status {
5247 FileStatus::Untracked => Variant::Untracked(Default::default()),
5248 FileStatus::Ignored => Variant::Ignored(Default::default()),
5249 FileStatus::Unmerged(UnmergedStatus {
5250 first_head,
5251 second_head,
5252 }) => Variant::Unmerged(Unmerged {
5253 first_head: unmerged_status_to_proto(first_head),
5254 second_head: unmerged_status_to_proto(second_head),
5255 }),
5256 FileStatus::Tracked(TrackedStatus {
5257 index_status,
5258 worktree_status,
5259 }) => Variant::Tracked(Tracked {
5260 index_status: tracked_status_to_proto(index_status),
5261 worktree_status: tracked_status_to_proto(worktree_status),
5262 }),
5263 };
5264 proto::GitFileStatus {
5265 variant: Some(variant),
5266 }
5267}
5268
5269fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5270 match code {
5271 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5272 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5273 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5274 }
5275}
5276
5277fn tracked_status_to_proto(code: StatusCode) -> i32 {
5278 match code {
5279 StatusCode::Added => proto::GitStatus::Added as _,
5280 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5281 StatusCode::Modified => proto::GitStatus::Modified as _,
5282 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5283 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5284 StatusCode::Copied => proto::GitStatus::Copied as _,
5285 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5286 }
5287}