1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
109 /// These operation counts are used to ensure that head and index text
110 /// values read from the git repository are up-to-date with any hunk staging
111 /// operations that have been performed on the BufferDiff.
112 ///
113 /// The operation count is incremented immediately when the user initiates a
114 /// hunk stage/unstage operation. Then, upon finishing writing the new index
115 /// text to disk, the `operation count as of write` is updated to reflect
116 /// the operation count that prompted the write.
117 hunk_staging_operation_count: usize,
118 hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
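/// Where this [`GitStore`]'s git state lives.
///
/// `Local` owns repositories directly, discovering them through worktree events and talking to
/// git via the filesystem and project environment, while `Remote` forwards git operations to an
/// upstream peer over `upstream_client`. Either variant may additionally replicate repository
/// state to a downstream client while the project is shared.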
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305}
306
307#[derive(Clone, Debug)]
308pub struct JobsUpdated;
309
310#[derive(Debug)]
311pub enum GitStoreEvent {
312 ActiveRepositoryChanged(Option<RepositoryId>),
313 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
314 RepositoryAdded(RepositoryId),
315 RepositoryRemoved(RepositoryId),
316 IndexWriteError(anyhow::Error),
317 JobsUpdated,
318 ConflictsUpdated,
319}
320
321impl EventEmitter<RepositoryEvent> for Repository {}
322impl EventEmitter<JobsUpdated> for Repository {}
323impl EventEmitter<GitStoreEvent> for GitStore {}
324
325pub struct GitJob {
326 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
327 key: Option<GitJobKey>,
328}
329
330#[derive(PartialEq, Eq)]
331enum GitJobKey {
332 WriteIndex(RepoPath),
333 ReloadBufferDiffBases,
334 RefreshStatuses,
335 ReloadGitState,
336}
337
338impl GitStore {
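    /// Creates a [`GitStore`] that manages repositories for a local project.
    ///
    /// A minimal construction sketch (illustrative only; it assumes the surrounding project
    /// code already owns `worktree_store`, `buffer_store`, `environment`, and `fs` handles):
    ///
    /// ```ignore
    /// let git_store = cx.new(|cx| {
    ///     // All of these handles are assumed to already exist in the caller's state.
    ///     GitStore::local(&worktree_store, buffer_store.clone(), environment.clone(), fs.clone(), cx)
    /// });
    /// ```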
339 pub fn local(
340 worktree_store: &Entity<WorktreeStore>,
341 buffer_store: Entity<BufferStore>,
342 environment: Entity<ProjectEnvironment>,
343 fs: Arc<dyn Fs>,
344 cx: &mut Context<Self>,
345 ) -> Self {
346 Self::new(
347 worktree_store.clone(),
348 buffer_store,
349 GitStoreState::Local {
350 next_repository_id: Arc::new(AtomicU64::new(1)),
351 downstream: None,
352 project_environment: environment,
353 fs,
354 },
355 cx,
356 )
357 }
358
359 pub fn remote(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 upstream_client: AnyProtoClient,
363 project_id: u64,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Remote {
370 upstream_client,
371 upstream_project_id: project_id,
372 downstream: None,
373 },
374 cx,
375 )
376 }
377
378 fn new(
379 worktree_store: Entity<WorktreeStore>,
380 buffer_store: Entity<BufferStore>,
381 state: GitStoreState,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 let _subscriptions = vec![
385 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
386 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
387 ];
388
389 GitStore {
390 state,
391 buffer_store,
392 worktree_store,
393 repositories: HashMap::default(),
394 active_repo_id: None,
395 _subscriptions,
396 loading_diffs: HashMap::default(),
397 shared_diffs: HashMap::default(),
398 diffs: HashMap::default(),
399 }
400 }
401
402 pub fn init(client: &AnyProtoClient) {
403 client.add_entity_request_handler(Self::handle_get_remotes);
404 client.add_entity_request_handler(Self::handle_get_branches);
405 client.add_entity_request_handler(Self::handle_get_default_branch);
406 client.add_entity_request_handler(Self::handle_change_branch);
407 client.add_entity_request_handler(Self::handle_create_branch);
408 client.add_entity_request_handler(Self::handle_rename_branch);
409 client.add_entity_request_handler(Self::handle_git_init);
410 client.add_entity_request_handler(Self::handle_push);
411 client.add_entity_request_handler(Self::handle_pull);
412 client.add_entity_request_handler(Self::handle_fetch);
413 client.add_entity_request_handler(Self::handle_stage);
414 client.add_entity_request_handler(Self::handle_unstage);
415 client.add_entity_request_handler(Self::handle_stash);
416 client.add_entity_request_handler(Self::handle_stash_pop);
417 client.add_entity_request_handler(Self::handle_stash_apply);
418 client.add_entity_request_handler(Self::handle_stash_drop);
419 client.add_entity_request_handler(Self::handle_commit);
420 client.add_entity_request_handler(Self::handle_reset);
421 client.add_entity_request_handler(Self::handle_show);
422 client.add_entity_request_handler(Self::handle_load_commit_diff);
423 client.add_entity_request_handler(Self::handle_checkout_files);
424 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
425 client.add_entity_request_handler(Self::handle_set_index_text);
426 client.add_entity_request_handler(Self::handle_askpass);
427 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
428 client.add_entity_request_handler(Self::handle_git_diff);
429 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
430 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
431 client.add_entity_message_handler(Self::handle_update_diff_bases);
432 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
433 client.add_entity_request_handler(Self::handle_blame_buffer);
434 client.add_entity_message_handler(Self::handle_update_repository);
435 client.add_entity_message_handler(Self::handle_remove_repository);
436 client.add_entity_request_handler(Self::handle_git_clone);
437 }
438
439 pub fn is_local(&self) -> bool {
440 matches!(self.state, GitStoreState::Local { .. })
441 }

442 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
443 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
444 let id = repo.read(cx).id;
445 if self.active_repo_id != Some(id) {
446 self.active_repo_id = Some(id);
447 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
448 }
449 }
450 }
451
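    /// Starts replicating this store's repository state to a downstream client under the given
    /// `project_id`: current repository snapshots are sent up front and later changes are
    /// streamed as they occur. [`Self::unshared`] stops the replication.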
452 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
453 match &mut self.state {
454 GitStoreState::Remote {
455 downstream: downstream_client,
456 ..
457 } => {
458 for repo in self.repositories.values() {
459 let update = repo.read(cx).snapshot.initial_update(project_id);
460 for update in split_repository_update(update) {
461 client.send(update).log_err();
462 }
463 }
464 *downstream_client = Some((client, ProjectId(project_id)));
465 }
466 GitStoreState::Local {
467 downstream: downstream_client,
468 ..
469 } => {
470 let mut snapshots = HashMap::default();
471 let (updates_tx, mut updates_rx) = mpsc::unbounded();
472 for repo in self.repositories.values() {
473 updates_tx
474 .unbounded_send(DownstreamUpdate::UpdateRepository(
475 repo.read(cx).snapshot.clone(),
476 ))
477 .ok();
478 }
479 *downstream_client = Some(LocalDownstreamState {
480 client: client.clone(),
481 project_id: ProjectId(project_id),
482 updates_tx,
483 _task: cx.spawn(async move |this, cx| {
484 cx.background_spawn(async move {
485 while let Some(update) = updates_rx.next().await {
486 match update {
487 DownstreamUpdate::UpdateRepository(snapshot) => {
488 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
489 {
490 let update =
491 snapshot.build_update(old_snapshot, project_id);
492 *old_snapshot = snapshot;
493 for update in split_repository_update(update) {
494 client.send(update)?;
495 }
496 } else {
497 let update = snapshot.initial_update(project_id);
498 for update in split_repository_update(update) {
499 client.send(update)?;
500 }
501 snapshots.insert(snapshot.id, snapshot);
502 }
503 }
504 DownstreamUpdate::RemoveRepository(id) => {
505 client.send(proto::RemoveRepository {
506 project_id,
507 id: id.to_proto(),
508 })?;
509 }
510 }
511 }
512 anyhow::Ok(())
513 })
514 .await
515 .ok();
516 this.update(cx, |this, _| {
517 if let GitStoreState::Local {
518 downstream: downstream_client,
519 ..
520 } = &mut this.state
521 {
522 downstream_client.take();
523 } else {
524 unreachable!("unshared called on remote store");
525 }
526 })
527 }),
528 });
529 }
530 }
531 }
532
533 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
534 match &mut self.state {
535 GitStoreState::Local {
536 downstream: downstream_client,
537 ..
538 } => {
539 downstream_client.take();
540 }
541 GitStoreState::Remote {
542 downstream: downstream_client,
543 ..
544 } => {
545 downstream_client.take();
546 }
547 }
548 self.shared_diffs.clear();
549 }
550
551 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
552 self.shared_diffs.remove(peer_id);
553 }
554
555 pub fn active_repository(&self) -> Option<Entity<Repository>> {
556 self.active_repo_id
557 .as_ref()
558 .map(|id| self.repositories[id].clone())
559 }
560
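    /// Returns a diff of the buffer's contents against its staged (index) text.
    ///
    /// If an unstaged diff is already being tracked for this buffer it is reused, waiting for
    /// any in-flight recalculation; otherwise the staged text is loaded from the buffer's
    /// repository and a new [`BufferDiff`] is created.
    ///
    /// A usage sketch (illustrative only; `git_store: Entity<GitStore>`, `buffer: Entity<Buffer>`,
    /// and `cx: &mut App` are assumed):
    ///
    /// ```ignore
    /// let diff_task = git_store.update(cx, |store, cx| store.open_unstaged_diff(buffer.clone(), cx));
    /// // Awaiting the task in an async context yields an `Entity<BufferDiff>`:
    /// // let unstaged_diff = diff_task.await?;
    /// ```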
561 pub fn open_unstaged_diff(
562 &mut self,
563 buffer: Entity<Buffer>,
564 cx: &mut Context<Self>,
565 ) -> Task<Result<Entity<BufferDiff>>> {
566 let buffer_id = buffer.read(cx).remote_id();
567 if let Some(diff_state) = self.diffs.get(&buffer_id)
568 && let Some(unstaged_diff) = diff_state
569 .read(cx)
570 .unstaged_diff
571 .as_ref()
572 .and_then(|weak| weak.upgrade())
573 {
574 if let Some(task) =
575 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
576 {
577 return cx.background_executor().spawn(async move {
578 task.await;
579 Ok(unstaged_diff)
580 });
581 }
582 return Task::ready(Ok(unstaged_diff));
583 }
584
585 let Some((repo, repo_path)) =
586 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
587 else {
588 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
589 };
590
591 let task = self
592 .loading_diffs
593 .entry((buffer_id, DiffKind::Unstaged))
594 .or_insert_with(|| {
595 let staged_text = repo.update(cx, |repo, cx| {
596 repo.load_staged_text(buffer_id, repo_path, cx)
597 });
598 cx.spawn(async move |this, cx| {
599 Self::open_diff_internal(
600 this,
601 DiffKind::Unstaged,
602 staged_text.await.map(DiffBasesChange::SetIndex),
603 buffer,
604 cx,
605 )
606 .await
607 .map_err(Arc::new)
608 })
609 .shared()
610 })
611 .clone();
612
613 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
614 }
615
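    /// Returns a diff of the buffer's contents against its committed (HEAD) text.
    ///
    /// The returned diff keeps the corresponding unstaged diff as its secondary diff, creating
    /// one if necessary, so callers can distinguish staged from unstaged changes.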
616 pub fn open_uncommitted_diff(
617 &mut self,
618 buffer: Entity<Buffer>,
619 cx: &mut Context<Self>,
620 ) -> Task<Result<Entity<BufferDiff>>> {
621 let buffer_id = buffer.read(cx).remote_id();
622
623 if let Some(diff_state) = self.diffs.get(&buffer_id)
624 && let Some(uncommitted_diff) = diff_state
625 .read(cx)
626 .uncommitted_diff
627 .as_ref()
628 .and_then(|weak| weak.upgrade())
629 {
630 if let Some(task) =
631 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
632 {
633 return cx.background_executor().spawn(async move {
634 task.await;
635 Ok(uncommitted_diff)
636 });
637 }
638 return Task::ready(Ok(uncommitted_diff));
639 }
640
641 let Some((repo, repo_path)) =
642 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
643 else {
644 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
645 };
646
647 let task = self
648 .loading_diffs
649 .entry((buffer_id, DiffKind::Uncommitted))
650 .or_insert_with(|| {
651 let changes = repo.update(cx, |repo, cx| {
652 repo.load_committed_text(buffer_id, repo_path, cx)
653 });
654
655 cx.spawn(async move |this, cx| {
656 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
657 .await
658 .map_err(Arc::new)
659 })
660 .shared()
661 })
662 .clone();
663
664 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
665 }
666
667 async fn open_diff_internal(
668 this: WeakEntity<Self>,
669 kind: DiffKind,
670 texts: Result<DiffBasesChange>,
671 buffer_entity: Entity<Buffer>,
672 cx: &mut AsyncApp,
673 ) -> Result<Entity<BufferDiff>> {
674 let diff_bases_change = match texts {
675 Err(e) => {
676 this.update(cx, |this, cx| {
677 let buffer = buffer_entity.read(cx);
678 let buffer_id = buffer.remote_id();
679 this.loading_diffs.remove(&(buffer_id, kind));
680 })?;
681 return Err(e);
682 }
683 Ok(change) => change,
684 };
685
686 this.update(cx, |this, cx| {
687 let buffer = buffer_entity.read(cx);
688 let buffer_id = buffer.remote_id();
689 let language = buffer.language().cloned();
690 let language_registry = buffer.language_registry();
691 let text_snapshot = buffer.text_snapshot();
692 this.loading_diffs.remove(&(buffer_id, kind));
693
694 let git_store = cx.weak_entity();
695 let diff_state = this
696 .diffs
697 .entry(buffer_id)
698 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
699
700 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
701
702 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
703 diff_state.update(cx, |diff_state, cx| {
704 diff_state.language = language;
705 diff_state.language_registry = language_registry;
706
707 match kind {
708 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
709 DiffKind::Uncommitted => {
710 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
711 diff
712 } else {
713 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
714 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
715 unstaged_diff
716 };
717
718 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
719 diff_state.uncommitted_diff = Some(diff.downgrade())
720 }
721 }
722
723 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
724 let rx = diff_state.wait_for_recalculation();
725
726 anyhow::Ok(async move {
727 if let Some(rx) = rx {
728 rx.await;
729 }
730 Ok(diff)
731 })
732 })
733 })??
734 .await
735 }
736
737 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
738 let diff_state = self.diffs.get(&buffer_id)?;
739 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
740 }
741
742 pub fn get_uncommitted_diff(
743 &self,
744 buffer_id: BufferId,
745 cx: &App,
746 ) -> Option<Entity<BufferDiff>> {
747 let diff_state = self.diffs.get(&buffer_id)?;
748 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
749 }
750
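    /// Returns a [`ConflictSet`] that tracks merge-conflict regions in the given buffer.
    ///
    /// An existing conflict set is reused when one is already being tracked; otherwise a new one
    /// is created, seeded with whether the file is currently unmerged in its repository, and the
    /// buffer is scanned for conflict markers.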
751 pub fn open_conflict_set(
752 &mut self,
753 buffer: Entity<Buffer>,
754 cx: &mut Context<Self>,
755 ) -> Entity<ConflictSet> {
756 log::debug!("open conflict set");
757 let buffer_id = buffer.read(cx).remote_id();
758
759 if let Some(git_state) = self.diffs.get(&buffer_id)
760 && let Some(conflict_set) = git_state
761 .read(cx)
762 .conflict_set
763 .as_ref()
764 .and_then(|weak| weak.upgrade())
765 {
766 let conflict_set = conflict_set;
767 let buffer_snapshot = buffer.read(cx).text_snapshot();
768
769 git_state.update(cx, |state, cx| {
770 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
771 });
772
773 return conflict_set;
774 }
775
776 let is_unmerged = self
777 .repository_and_path_for_buffer_id(buffer_id, cx)
778 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
779 let git_store = cx.weak_entity();
780 let buffer_git_state = self
781 .diffs
782 .entry(buffer_id)
783 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
784 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
785
786 self._subscriptions
787 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
788 cx.emit(GitStoreEvent::ConflictsUpdated);
789 }));
790
791 buffer_git_state.update(cx, |state, cx| {
792 state.conflict_set = Some(conflict_set.downgrade());
793 let buffer_snapshot = buffer.read(cx).text_snapshot();
794 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
795 });
796
797 conflict_set
798 }
799
800 pub fn project_path_git_status(
801 &self,
802 project_path: &ProjectPath,
803 cx: &App,
804 ) -> Option<FileStatus> {
805 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
806 Some(repo.read(cx).status_for_path(&repo_path)?.status)
807 }
808
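    /// Takes a checkpoint of every repository in the store, keyed by work directory path.
    ///
    /// A sketch of pairing this with [`Self::restore_checkpoint`] (illustrative only; it assumes
    /// an async context where `cx` is an `&mut AsyncApp`):
    ///
    /// ```ignore
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...mutate the working copies...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```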
809 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
810 let mut work_directory_abs_paths = Vec::new();
811 let mut checkpoints = Vec::new();
812 for repository in self.repositories.values() {
813 repository.update(cx, |repository, _| {
814 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
815 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
816 });
817 }
818
819 cx.background_executor().spawn(async move {
820 let checkpoints = future::try_join_all(checkpoints).await?;
821 Ok(GitStoreCheckpoint {
822 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
823 .into_iter()
824 .zip(checkpoints)
825 .collect(),
826 })
827 })
828 }
829
830 pub fn restore_checkpoint(
831 &self,
832 checkpoint: GitStoreCheckpoint,
833 cx: &mut App,
834 ) -> Task<Result<()>> {
835 let repositories_by_work_dir_abs_path = self
836 .repositories
837 .values()
838 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
839 .collect::<HashMap<_, _>>();
840
841 let mut tasks = Vec::new();
842 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
843 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
844 let restore = repository.update(cx, |repository, _| {
845 repository.restore_checkpoint(checkpoint)
846 });
847 tasks.push(async move { restore.await? });
848 }
849 }
850 cx.background_spawn(async move {
851 future::try_join_all(tasks).await?;
852 Ok(())
853 })
854 }
855
856 /// Compares two checkpoints, returning true if they are equal.
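    ///
    /// A repository that appears in `left` but is missing from `right` makes the result `false`;
    /// repositories that appear only in `right` are ignored.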
857 pub fn compare_checkpoints(
858 &self,
859 left: GitStoreCheckpoint,
860 mut right: GitStoreCheckpoint,
861 cx: &mut App,
862 ) -> Task<Result<bool>> {
863 let repositories_by_work_dir_abs_path = self
864 .repositories
865 .values()
866 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
867 .collect::<HashMap<_, _>>();
868
869 let mut tasks = Vec::new();
870 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
871 if let Some(right_checkpoint) = right
872 .checkpoints_by_work_dir_abs_path
873 .remove(&work_dir_abs_path)
874 {
875 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
876 {
877 let compare = repository.update(cx, |repository, _| {
878 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
879 });
880
881 tasks.push(async move { compare.await? });
882 }
883 } else {
884 return Task::ready(Ok(false));
885 }
886 }
887 cx.background_spawn(async move {
888 Ok(future::try_join_all(tasks)
889 .await?
890 .into_iter()
891 .all(|result| result))
892 })
893 }
894
895 /// Blames a buffer.
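    ///
    /// The blame is computed for the buffer's file within its repository, against the buffer's
    /// current contents or against `version` when one is provided. For remote projects the
    /// request is forwarded upstream.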
896 pub fn blame_buffer(
897 &self,
898 buffer: &Entity<Buffer>,
899 version: Option<clock::Global>,
900 cx: &mut App,
901 ) -> Task<Result<Option<Blame>>> {
902 let buffer = buffer.read(cx);
903 let Some((repo, repo_path)) =
904 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
905 else {
906 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
907 };
908 let content = match &version {
909 Some(version) => buffer.rope_for_version(version),
910 None => buffer.as_rope().clone(),
911 };
912 let version = version.unwrap_or(buffer.version());
913 let buffer_id = buffer.remote_id();
914
915 let rx = repo.update(cx, |repo, _| {
916 repo.send_job(None, move |state, _| async move {
917 match state {
918 RepositoryState::Local { backend, .. } => backend
919 .blame(repo_path.clone(), content)
920 .await
921 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
922 .map(Some),
923 RepositoryState::Remote { project_id, client } => {
924 let response = client
925 .request(proto::BlameBuffer {
926 project_id: project_id.to_proto(),
927 buffer_id: buffer_id.into(),
928 version: serialize_version(&version),
929 })
930 .await?;
931 Ok(deserialize_blame_buffer_response(response))
932 }
933 }
934 })
935 });
936
937 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
938 }
939
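    /// Builds a permalink URL to the selected line range of the buffer on its git hosting
    /// provider.
    ///
    /// Locally, the URL is derived from the branch's upstream remote (falling back to `origin`)
    /// and the current HEAD SHA; for remote projects the request is forwarded upstream. As a
    /// special case, Rust files that live in the Cargo registry outside any repository fall back
    /// to a permalink built from crate metadata.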
940 pub fn get_permalink_to_line(
941 &self,
942 buffer: &Entity<Buffer>,
943 selection: Range<u32>,
944 cx: &mut App,
945 ) -> Task<Result<url::Url>> {
946 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
947 return Task::ready(Err(anyhow!("buffer has no file")));
948 };
949
950 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
951 &(file.worktree.read(cx).id(), file.path.clone()).into(),
952 cx,
953 ) else {
954 // If we're not in a Git repo, check whether this is a Rust source
955 // file in the Cargo registry (presumably opened with go-to-definition
956 // from a normal Rust file). If so, we can put together a permalink
957 // using crate metadata.
958 if buffer
959 .read(cx)
960 .language()
961 .is_none_or(|lang| lang.name() != "Rust".into())
962 {
963 return Task::ready(Err(anyhow!("no permalink available")));
964 }
965 let file_path = file.worktree.read(cx).absolutize(&file.path);
966 return cx.spawn(async move |cx| {
967 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
968 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
969 .context("no permalink available")
970 });
971
972 // TODO remote case
973 };
974
975 let buffer_id = buffer.read(cx).remote_id();
976 let branch = repo.read(cx).branch.clone();
977 let remote = branch
978 .as_ref()
979 .and_then(|b| b.upstream.as_ref())
980 .and_then(|b| b.remote_name())
981 .unwrap_or("origin")
982 .to_string();
983
984 let rx = repo.update(cx, |repo, _| {
985 repo.send_job(None, move |state, cx| async move {
986 match state {
987 RepositoryState::Local { backend, .. } => {
988 let origin_url = backend
989 .remote_url(&remote)
990 .with_context(|| format!("remote \"{remote}\" not found"))?;
991
992 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
993
994 let provider_registry =
995 cx.update(GitHostingProviderRegistry::default_global)?;
996
997 let (provider, remote) =
998 parse_git_remote_url(provider_registry, &origin_url)
999 .context("parsing Git remote URL")?;
1000
1001 let path = repo_path.as_unix_str();
1002
1003 Ok(provider.build_permalink(
1004 remote,
1005 BuildPermalinkParams {
1006 sha: &sha,
1007 path,
1008 selection: Some(selection),
1009 },
1010 ))
1011 }
1012 RepositoryState::Remote { project_id, client } => {
1013 let response = client
1014 .request(proto::GetPermalinkToLine {
1015 project_id: project_id.to_proto(),
1016 buffer_id: buffer_id.into(),
1017 selection: Some(proto::Range {
1018 start: selection.start as u64,
1019 end: selection.end as u64,
1020 }),
1021 })
1022 .await?;
1023
1024 url::Url::parse(&response.permalink).context("failed to parse permalink")
1025 }
1026 }
1027 })
1028 });
1029 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1030 }
1031
1032 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1033 match &self.state {
1034 GitStoreState::Local {
1035 downstream: downstream_client,
1036 ..
1037 } => downstream_client
1038 .as_ref()
1039 .map(|state| (state.client.clone(), state.project_id)),
1040 GitStoreState::Remote {
1041 downstream: downstream_client,
1042 ..
1043 } => downstream_client.clone(),
1044 }
1045 }
1046
1047 fn upstream_client(&self) -> Option<AnyProtoClient> {
1048 match &self.state {
1049 GitStoreState::Local { .. } => None,
1050 GitStoreState::Remote {
1051 upstream_client, ..
1052 } => Some(upstream_client.clone()),
1053 }
1054 }
1055
1056 fn on_worktree_store_event(
1057 &mut self,
1058 worktree_store: Entity<WorktreeStore>,
1059 event: &WorktreeStoreEvent,
1060 cx: &mut Context<Self>,
1061 ) {
1062 let GitStoreState::Local {
1063 project_environment,
1064 downstream,
1065 next_repository_id,
1066 fs,
1067 } = &self.state
1068 else {
1069 return;
1070 };
1071
1072 match event {
1073 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1074 if let Some(worktree) = self
1075 .worktree_store
1076 .read(cx)
1077 .worktree_for_id(*worktree_id, cx)
1078 {
1079 let paths_by_git_repo =
1080 self.process_updated_entries(&worktree, updated_entries, cx);
1081 let downstream = downstream
1082 .as_ref()
1083 .map(|downstream| downstream.updates_tx.clone());
1084 cx.spawn(async move |_, cx| {
1085 let paths_by_git_repo = paths_by_git_repo.await;
1086 for (repo, paths) in paths_by_git_repo {
1087 repo.update(cx, |repo, cx| {
1088 repo.paths_changed(paths, downstream.clone(), cx);
1089 })
1090 .ok();
1091 }
1092 })
1093 .detach();
1094 }
1095 }
1096 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1097 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1098 else {
1099 return;
1100 };
1101 if !worktree.read(cx).is_visible() {
1102 log::debug!(
1103 "not adding repositories for local worktree {:?} because it's not visible",
1104 worktree.read(cx).abs_path()
1105 );
1106 return;
1107 }
1108 self.update_repositories_from_worktree(
1109 project_environment.clone(),
1110 next_repository_id.clone(),
1111 downstream
1112 .as_ref()
1113 .map(|downstream| downstream.updates_tx.clone()),
1114 changed_repos.clone(),
1115 fs.clone(),
1116 cx,
1117 );
1118 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1119 }
1120 _ => {}
1121 }
1122 }

1123 fn on_repository_event(
1124 &mut self,
1125 repo: Entity<Repository>,
1126 event: &RepositoryEvent,
1127 cx: &mut Context<Self>,
1128 ) {
1129 let id = repo.read(cx).id;
1130 let repo_snapshot = repo.read(cx).snapshot.clone();
1131 for (buffer_id, diff) in self.diffs.iter() {
1132 if let Some((buffer_repo, repo_path)) =
1133 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1134 && buffer_repo == repo
1135 {
1136 diff.update(cx, |diff, cx| {
1137 if let Some(conflict_set) = &diff.conflict_set {
1138 let conflict_status_changed =
1139 conflict_set.update(cx, |conflict_set, cx| {
1140 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1141 conflict_set.set_has_conflict(has_conflict, cx)
1142 })?;
1143 if conflict_status_changed {
1144 let buffer_store = self.buffer_store.read(cx);
1145 if let Some(buffer) = buffer_store.get(*buffer_id) {
1146 let _ = diff
1147 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1148 }
1149 }
1150 }
1151 anyhow::Ok(())
1152 })
1153 .ok();
1154 }
1155 }
1156 cx.emit(GitStoreEvent::RepositoryUpdated(
1157 id,
1158 event.clone(),
1159 self.active_repo_id == Some(id),
1160 ))
1161 }
1162
1163 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1164 cx.emit(GitStoreEvent::JobsUpdated)
1165 }
1166
1167 /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1168 fn update_repositories_from_worktree(
1169 &mut self,
1170 project_environment: Entity<ProjectEnvironment>,
1171 next_repository_id: Arc<AtomicU64>,
1172 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1173 updated_git_repositories: UpdatedGitRepositoriesSet,
1174 fs: Arc<dyn Fs>,
1175 cx: &mut Context<Self>,
1176 ) {
1177 let mut removed_ids = Vec::new();
1178 for update in updated_git_repositories.iter() {
1179 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1180 let existing_work_directory_abs_path =
1181 repo.read(cx).work_directory_abs_path.clone();
1182 Some(&existing_work_directory_abs_path)
1183 == update.old_work_directory_abs_path.as_ref()
1184 || Some(&existing_work_directory_abs_path)
1185 == update.new_work_directory_abs_path.as_ref()
1186 }) {
1187 if let Some(new_work_directory_abs_path) =
1188 update.new_work_directory_abs_path.clone()
1189 {
1190 existing.update(cx, |existing, cx| {
1191 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1192 existing.schedule_scan(updates_tx.clone(), cx);
1193 });
1194 } else {
1195 removed_ids.push(*id);
1196 }
1197 } else if let UpdatedGitRepository {
1198 new_work_directory_abs_path: Some(work_directory_abs_path),
1199 dot_git_abs_path: Some(dot_git_abs_path),
1200 repository_dir_abs_path: Some(repository_dir_abs_path),
1201 common_dir_abs_path: Some(common_dir_abs_path),
1202 ..
1203 } = update
1204 {
1205 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1206 let git_store = cx.weak_entity();
1207 let repo = cx.new(|cx| {
1208 let mut repo = Repository::local(
1209 id,
1210 work_directory_abs_path.clone(),
1211 dot_git_abs_path.clone(),
1212 repository_dir_abs_path.clone(),
1213 common_dir_abs_path.clone(),
1214 project_environment.downgrade(),
1215 fs.clone(),
1216 git_store,
1217 cx,
1218 );
1219 repo.schedule_scan(updates_tx.clone(), cx);
1220 repo
1221 });
1222 self._subscriptions
1223 .push(cx.subscribe(&repo, Self::on_repository_event));
1224 self._subscriptions
1225 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1226 self.repositories.insert(id, repo);
1227 cx.emit(GitStoreEvent::RepositoryAdded(id));
1228 self.active_repo_id.get_or_insert_with(|| {
1229 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1230 id
1231 });
1232 }
1233 }
1234
1235 for id in removed_ids {
1236 if self.active_repo_id == Some(id) {
1237 self.active_repo_id = None;
1238 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1239 }
1240 self.repositories.remove(&id);
1241 if let Some(updates_tx) = updates_tx.as_ref() {
1242 updates_tx
1243 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1244 .ok();
1245 }
1246 }
1247 }
1248
1249 fn on_buffer_store_event(
1250 &mut self,
1251 _: Entity<BufferStore>,
1252 event: &BufferStoreEvent,
1253 cx: &mut Context<Self>,
1254 ) {
1255 match event {
1256 BufferStoreEvent::BufferAdded(buffer) => {
1257 cx.subscribe(buffer, |this, buffer, event, cx| {
1258 if let BufferEvent::LanguageChanged = event {
1259 let buffer_id = buffer.read(cx).remote_id();
1260 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1261 diff_state.update(cx, |diff_state, cx| {
1262 diff_state.buffer_language_changed(buffer, cx);
1263 });
1264 }
1265 }
1266 })
1267 .detach();
1268 }
1269 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1270 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1271 diffs.remove(buffer_id);
1272 }
1273 }
1274 BufferStoreEvent::BufferDropped(buffer_id) => {
1275 self.diffs.remove(buffer_id);
1276 for diffs in self.shared_diffs.values_mut() {
1277 diffs.remove(buffer_id);
1278 }
1279 }
1280
1281 _ => {}
1282 }
1283 }
1284
1285 pub fn recalculate_buffer_diffs(
1286 &mut self,
1287 buffers: Vec<Entity<Buffer>>,
1288 cx: &mut Context<Self>,
1289 ) -> impl Future<Output = ()> + use<> {
1290 let mut futures = Vec::new();
1291 for buffer in buffers {
1292 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1293 let buffer = buffer.read(cx).text_snapshot();
1294 diff_state.update(cx, |diff_state, cx| {
1295 diff_state.recalculate_diffs(buffer.clone(), cx);
1296 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1297 });
1298 futures.push(diff_state.update(cx, |diff_state, cx| {
1299 diff_state
1300 .reparse_conflict_markers(buffer, cx)
1301 .map(|_| {})
1302 .boxed()
1303 }));
1304 }
1305 }
1306 async move {
1307 futures::future::join_all(futures).await;
1308 }
1309 }
1310
1311 fn on_buffer_diff_event(
1312 &mut self,
1313 diff: Entity<buffer_diff::BufferDiff>,
1314 event: &BufferDiffEvent,
1315 cx: &mut Context<Self>,
1316 ) {
1317 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1318 let buffer_id = diff.read(cx).buffer_id;
1319 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1320 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1321 diff_state.hunk_staging_operation_count += 1;
1322 diff_state.hunk_staging_operation_count
1323 });
1324 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1325 let recv = repo.update(cx, |repo, cx| {
1326 log::debug!("hunks changed for {}", path.as_unix_str());
1327 repo.spawn_set_index_text_job(
1328 path,
1329 new_index_text.as_ref().map(|rope| rope.to_string()),
1330 Some(hunk_staging_operation_count),
1331 cx,
1332 )
1333 });
1334 let diff = diff.downgrade();
1335 cx.spawn(async move |this, cx| {
1336 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1337 diff.update(cx, |diff, cx| {
1338 diff.clear_pending_hunks(cx);
1339 })
1340 .ok();
1341 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1342 .ok();
1343 }
1344 })
1345 .detach();
1346 }
1347 }
1348 }
1349 }
1350
1351 fn local_worktree_git_repos_changed(
1352 &mut self,
1353 worktree: Entity<Worktree>,
1354 changed_repos: &UpdatedGitRepositoriesSet,
1355 cx: &mut Context<Self>,
1356 ) {
1357 log::debug!("local worktree repos changed");
1358 debug_assert!(worktree.read(cx).is_local());
1359
1360 for repository in self.repositories.values() {
1361 repository.update(cx, |repository, cx| {
1362 let repo_abs_path = &repository.work_directory_abs_path;
1363 if changed_repos.iter().any(|update| {
1364 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1365 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1366 }) {
1367 repository.reload_buffer_diff_bases(cx);
1368 }
1369 });
1370 }
1371 }
1372
1373 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1374 &self.repositories
1375 }
1376
1377 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1378 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1379 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1380 Some(status.status)
1381 }
1382
1383 pub fn repository_and_path_for_buffer_id(
1384 &self,
1385 buffer_id: BufferId,
1386 cx: &App,
1387 ) -> Option<(Entity<Repository>, RepoPath)> {
1388 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1389 let project_path = buffer.read(cx).project_path(cx)?;
1390 self.repository_and_path_for_project_path(&project_path, cx)
1391 }
1392
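    /// Resolves a project path to the repository containing it, along with the path relative to
    /// that repository's work directory. When repositories are nested, the innermost containing
    /// repository wins.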
1393 pub fn repository_and_path_for_project_path(
1394 &self,
1395 path: &ProjectPath,
1396 cx: &App,
1397 ) -> Option<(Entity<Repository>, RepoPath)> {
1398 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1399 self.repositories
1400 .values()
1401 .filter_map(|repo| {
1402 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1403 Some((repo.clone(), repo_path))
1404 })
1405 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1406 }
1407
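    /// Initializes a new git repository at `path`, using `fallback_branch_name` for the initial
    /// branch. Local projects call into the filesystem directly; remote projects forward a
    /// `GitInit` request upstream.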
1408 pub fn git_init(
1409 &self,
1410 path: Arc<Path>,
1411 fallback_branch_name: String,
1412 cx: &App,
1413 ) -> Task<Result<()>> {
1414 match &self.state {
1415 GitStoreState::Local { fs, .. } => {
1416 let fs = fs.clone();
1417 cx.background_executor()
1418 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1419 }
1420 GitStoreState::Remote {
1421 upstream_client,
1422 upstream_project_id: project_id,
1423 ..
1424 } => {
1425 let client = upstream_client.clone();
1426 let project_id = *project_id;
1427 cx.background_executor().spawn(async move {
1428 client
1429 .request(proto::GitInit {
1430 project_id,
1431 abs_path: path.to_string_lossy().into_owned(),
1432 fallback_branch_name,
1433 })
1434 .await?;
1435 Ok(())
1436 })
1437 }
1438 }
1439 }
1440
1441 pub fn git_clone(
1442 &self,
1443 repo: String,
1444 path: impl Into<Arc<std::path::Path>>,
1445 cx: &App,
1446 ) -> Task<Result<()>> {
1447 let path = path.into();
1448 match &self.state {
1449 GitStoreState::Local { fs, .. } => {
1450 let fs = fs.clone();
1451 cx.background_executor()
1452 .spawn(async move { fs.git_clone(&repo, &path).await })
1453 }
1454 GitStoreState::Remote {
1455 upstream_client,
1456 upstream_project_id,
1457 ..
1458 } => {
1459 if upstream_client.is_via_collab() {
1460 return Task::ready(Err(anyhow!(
1461 "Git Clone isn't supported for project guests"
1462 )));
1463 }
1464 let request = upstream_client.request(proto::GitClone {
1465 project_id: *upstream_project_id,
1466 abs_path: path.to_string_lossy().into_owned(),
1467 remote_repo: repo,
1468 });
1469
1470 cx.background_spawn(async move {
1471 let result = request.await?;
1472
1473 match result.success {
1474 true => Ok(()),
1475 false => Err(anyhow!("Git Clone failed")),
1476 }
1477 })
1478 }
1479 }
1480 }
1481
1482 async fn handle_update_repository(
1483 this: Entity<Self>,
1484 envelope: TypedEnvelope<proto::UpdateRepository>,
1485 mut cx: AsyncApp,
1486 ) -> Result<()> {
1487 this.update(&mut cx, |this, cx| {
1488 let path_style = this.worktree_store.read(cx).path_style();
1489 let mut update = envelope.payload;
1490
1491 let id = RepositoryId::from_proto(update.id);
1492 let client = this.upstream_client().context("no upstream client")?;
1493
1494 let mut is_new = false;
1495 let repo = this.repositories.entry(id).or_insert_with(|| {
1496 is_new = true;
1497 let git_store = cx.weak_entity();
1498 cx.new(|cx| {
1499 Repository::remote(
1500 id,
1501 Path::new(&update.abs_path).into(),
1502 path_style,
1503 ProjectId(update.project_id),
1504 client,
1505 git_store,
1506 cx,
1507 )
1508 })
1509 });
1510 if is_new {
1511 this._subscriptions
1512 .push(cx.subscribe(repo, Self::on_repository_event))
1513 }
1514
1515 repo.update(cx, {
1516 let update = update.clone();
1517 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1518 })?;
1519
1520 this.active_repo_id.get_or_insert_with(|| {
1521 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1522 id
1523 });
1524
1525 if let Some((client, project_id)) = this.downstream_client() {
1526 update.project_id = project_id.to_proto();
1527 client.send(update).log_err();
1528 }
1529 Ok(())
1530 })?
1531 }
1532
1533 async fn handle_remove_repository(
1534 this: Entity<Self>,
1535 envelope: TypedEnvelope<proto::RemoveRepository>,
1536 mut cx: AsyncApp,
1537 ) -> Result<()> {
1538 this.update(&mut cx, |this, cx| {
1539 let mut update = envelope.payload;
1540 let id = RepositoryId::from_proto(update.id);
1541 this.repositories.remove(&id);
1542 if let Some((client, project_id)) = this.downstream_client() {
1543 update.project_id = project_id.to_proto();
1544 client.send(update).log_err();
1545 }
1546 if this.active_repo_id == Some(id) {
1547 this.active_repo_id = None;
1548 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1549 }
1550 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1551 })
1552 }
1553
1554 async fn handle_git_init(
1555 this: Entity<Self>,
1556 envelope: TypedEnvelope<proto::GitInit>,
1557 cx: AsyncApp,
1558 ) -> Result<proto::Ack> {
1559 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1560 let name = envelope.payload.fallback_branch_name;
1561 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1562 .await?;
1563
1564 Ok(proto::Ack {})
1565 }
1566
1567 async fn handle_git_clone(
1568 this: Entity<Self>,
1569 envelope: TypedEnvelope<proto::GitClone>,
1570 cx: AsyncApp,
1571 ) -> Result<proto::GitCloneResponse> {
1572 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1573 let repo_name = envelope.payload.remote_repo;
1574 let result = cx
1575 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1576 .await;
1577
1578 Ok(proto::GitCloneResponse {
1579 success: result.is_ok(),
1580 })
1581 }
1582
1583 async fn handle_fetch(
1584 this: Entity<Self>,
1585 envelope: TypedEnvelope<proto::Fetch>,
1586 mut cx: AsyncApp,
1587 ) -> Result<proto::RemoteMessageResponse> {
1588 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1589 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1590 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1591 let askpass_id = envelope.payload.askpass_id;
1592
1593 let askpass = make_remote_delegate(
1594 this,
1595 envelope.payload.project_id,
1596 repository_id,
1597 askpass_id,
1598 &mut cx,
1599 );
1600
1601 let remote_output = repository_handle
1602 .update(&mut cx, |repository_handle, cx| {
1603 repository_handle.fetch(fetch_options, askpass, cx)
1604 })?
1605 .await??;
1606
1607 Ok(proto::RemoteMessageResponse {
1608 stdout: remote_output.stdout,
1609 stderr: remote_output.stderr,
1610 })
1611 }
1612
1613 async fn handle_push(
1614 this: Entity<Self>,
1615 envelope: TypedEnvelope<proto::Push>,
1616 mut cx: AsyncApp,
1617 ) -> Result<proto::RemoteMessageResponse> {
1618 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1619 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1620
1621 let askpass_id = envelope.payload.askpass_id;
1622 let askpass = make_remote_delegate(
1623 this,
1624 envelope.payload.project_id,
1625 repository_id,
1626 askpass_id,
1627 &mut cx,
1628 );
1629
1630 let options = envelope
1631 .payload
1632 .options
1633 .as_ref()
1634 .map(|_| match envelope.payload.options() {
1635 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1636 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1637 });
1638
1639 let branch_name = envelope.payload.branch_name.into();
1640 let remote_name = envelope.payload.remote_name.into();
1641
1642 let remote_output = repository_handle
1643 .update(&mut cx, |repository_handle, cx| {
1644 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1645 })?
1646 .await??;
1647 Ok(proto::RemoteMessageResponse {
1648 stdout: remote_output.stdout,
1649 stderr: remote_output.stderr,
1650 })
1651 }
1652
1653 async fn handle_pull(
1654 this: Entity<Self>,
1655 envelope: TypedEnvelope<proto::Pull>,
1656 mut cx: AsyncApp,
1657 ) -> Result<proto::RemoteMessageResponse> {
1658 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1659 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1660 let askpass_id = envelope.payload.askpass_id;
1661 let askpass = make_remote_delegate(
1662 this,
1663 envelope.payload.project_id,
1664 repository_id,
1665 askpass_id,
1666 &mut cx,
1667 );
1668
1669 let branch_name = envelope.payload.branch_name.into();
1670 let remote_name = envelope.payload.remote_name.into();
1671
1672 let remote_message = repository_handle
1673 .update(&mut cx, |repository_handle, cx| {
1674 repository_handle.pull(branch_name, remote_name, askpass, cx)
1675 })?
1676 .await??;
1677
1678 Ok(proto::RemoteMessageResponse {
1679 stdout: remote_message.stdout,
1680 stderr: remote_message.stderr,
1681 })
1682 }
1683
1684 async fn handle_stage(
1685 this: Entity<Self>,
1686 envelope: TypedEnvelope<proto::Stage>,
1687 mut cx: AsyncApp,
1688 ) -> Result<proto::Ack> {
1689 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1690 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1691
1692 let entries = envelope
1693 .payload
1694 .paths
1695 .into_iter()
1696 .map(|path| RepoPath::new(&path))
1697 .collect::<Result<Vec<_>>>()?;
1698
1699 repository_handle
1700 .update(&mut cx, |repository_handle, cx| {
1701 repository_handle.stage_entries(entries, cx)
1702 })?
1703 .await?;
1704 Ok(proto::Ack {})
1705 }
1706
1707 async fn handle_unstage(
1708 this: Entity<Self>,
1709 envelope: TypedEnvelope<proto::Unstage>,
1710 mut cx: AsyncApp,
1711 ) -> Result<proto::Ack> {
1712 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1713 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1714
1715 let entries = envelope
1716 .payload
1717 .paths
1718 .into_iter()
1719 .map(|path| RepoPath::new(&path))
1720 .collect::<Result<Vec<_>>>()?;
1721
1722 repository_handle
1723 .update(&mut cx, |repository_handle, cx| {
1724 repository_handle.unstage_entries(entries, cx)
1725 })?
1726 .await?;
1727
1728 Ok(proto::Ack {})
1729 }
1730
1731 async fn handle_stash(
1732 this: Entity<Self>,
1733 envelope: TypedEnvelope<proto::Stash>,
1734 mut cx: AsyncApp,
1735 ) -> Result<proto::Ack> {
1736 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1737 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1738
1739 let entries = envelope
1740 .payload
1741 .paths
1742 .into_iter()
1743 .map(|path| RepoPath::new(&path))
1744 .collect::<Result<Vec<_>>>()?;
1745
1746 repository_handle
1747 .update(&mut cx, |repository_handle, cx| {
1748 repository_handle.stash_entries(entries, cx)
1749 })?
1750 .await?;
1751
1752 Ok(proto::Ack {})
1753 }
1754
1755 async fn handle_stash_pop(
1756 this: Entity<Self>,
1757 envelope: TypedEnvelope<proto::StashPop>,
1758 mut cx: AsyncApp,
1759 ) -> Result<proto::Ack> {
1760 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1761 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1762 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1763
1764 repository_handle
1765 .update(&mut cx, |repository_handle, cx| {
1766 repository_handle.stash_pop(stash_index, cx)
1767 })?
1768 .await?;
1769
1770 Ok(proto::Ack {})
1771 }
1772
1773 async fn handle_stash_apply(
1774 this: Entity<Self>,
1775 envelope: TypedEnvelope<proto::StashApply>,
1776 mut cx: AsyncApp,
1777 ) -> Result<proto::Ack> {
1778 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1779 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1780 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1781
1782 repository_handle
1783 .update(&mut cx, |repository_handle, cx| {
1784 repository_handle.stash_apply(stash_index, cx)
1785 })?
1786 .await?;
1787
1788 Ok(proto::Ack {})
1789 }
1790
1791 async fn handle_stash_drop(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::StashDrop>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::Ack> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1799
1800 repository_handle
1801 .update(&mut cx, |repository_handle, cx| {
1802 repository_handle.stash_drop(stash_index, cx)
1803 })?
1804 .await??;
1805
1806 Ok(proto::Ack {})
1807 }
1808
1809 async fn handle_set_index_text(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::SetIndexText>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::Ack> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1817
1818 repository_handle
1819 .update(&mut cx, |repository_handle, cx| {
1820 repository_handle.spawn_set_index_text_job(
1821 repo_path,
1822 envelope.payload.text,
1823 None,
1824 cx,
1825 )
1826 })?
1827 .await??;
1828 Ok(proto::Ack {})
1829 }
1830
1831 async fn handle_commit(
1832 this: Entity<Self>,
1833 envelope: TypedEnvelope<proto::Commit>,
1834 mut cx: AsyncApp,
1835 ) -> Result<proto::Ack> {
1836 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1837 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1838
1839 let message = SharedString::from(envelope.payload.message);
1840 let name = envelope.payload.name.map(SharedString::from);
1841 let email = envelope.payload.email.map(SharedString::from);
1842 let options = envelope.payload.options.unwrap_or_default();
1843
1844 repository_handle
1845 .update(&mut cx, |repository_handle, cx| {
1846 repository_handle.commit(
1847 message,
1848 name.zip(email),
1849 CommitOptions {
1850 amend: options.amend,
1851 signoff: options.signoff,
1852 },
1853 cx,
1854 )
1855 })?
1856 .await??;
1857 Ok(proto::Ack {})
1858 }
1859
1860 async fn handle_get_remotes(
1861 this: Entity<Self>,
1862 envelope: TypedEnvelope<proto::GetRemotes>,
1863 mut cx: AsyncApp,
1864 ) -> Result<proto::GetRemotesResponse> {
1865 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1866 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1867
1868 let branch_name = envelope.payload.branch_name;
1869
1870 let remotes = repository_handle
1871 .update(&mut cx, |repository_handle, _| {
1872 repository_handle.get_remotes(branch_name)
1873 })?
1874 .await??;
1875
1876 Ok(proto::GetRemotesResponse {
1877 remotes: remotes
1878 .into_iter()
1879 .map(|remotes| proto::get_remotes_response::Remote {
1880 name: remotes.name.to_string(),
1881 })
1882 .collect::<Vec<_>>(),
1883 })
1884 }
1885
1886 async fn handle_get_branches(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::GitGetBranches>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::GitBranchesResponse> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893
1894 let branches = repository_handle
1895 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1896 .await??;
1897
1898 Ok(proto::GitBranchesResponse {
1899 branches: branches
1900 .into_iter()
1901 .map(|branch| branch_to_proto(&branch))
1902 .collect::<Vec<_>>(),
1903 })
    }

1905 async fn handle_get_default_branch(
1906 this: Entity<Self>,
1907 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1908 mut cx: AsyncApp,
1909 ) -> Result<proto::GetDefaultBranchResponse> {
1910 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1911 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1912
1913 let branch = repository_handle
1914 .update(&mut cx, |repository_handle, _| {
1915 repository_handle.default_branch()
1916 })?
1917 .await??
1918 .map(Into::into);
1919
1920 Ok(proto::GetDefaultBranchResponse { branch })
    }

1922 async fn handle_create_branch(
1923 this: Entity<Self>,
1924 envelope: TypedEnvelope<proto::GitCreateBranch>,
1925 mut cx: AsyncApp,
1926 ) -> Result<proto::Ack> {
1927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1928 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1929 let branch_name = envelope.payload.branch_name;
1930
1931 repository_handle
1932 .update(&mut cx, |repository_handle, _| {
1933 repository_handle.create_branch(branch_name)
1934 })?
1935 .await??;
1936
1937 Ok(proto::Ack {})
1938 }
1939
1940 async fn handle_change_branch(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::GitChangeBranch>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947 let branch_name = envelope.payload.branch_name;
1948
1949 repository_handle
1950 .update(&mut cx, |repository_handle, _| {
1951 repository_handle.change_branch(branch_name)
1952 })?
1953 .await??;
1954
1955 Ok(proto::Ack {})
1956 }
1957
1958 async fn handle_rename_branch(
1959 this: Entity<Self>,
1960 envelope: TypedEnvelope<proto::GitRenameBranch>,
1961 mut cx: AsyncApp,
1962 ) -> Result<proto::Ack> {
1963 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1964 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1965 let branch = envelope.payload.branch;
1966 let new_name = envelope.payload.new_name;
1967
1968 repository_handle
1969 .update(&mut cx, |repository_handle, _| {
1970 repository_handle.rename_branch(branch, new_name)
1971 })?
1972 .await??;
1973
1974 Ok(proto::Ack {})
1975 }
1976
1977 async fn handle_show(
1978 this: Entity<Self>,
1979 envelope: TypedEnvelope<proto::GitShow>,
1980 mut cx: AsyncApp,
1981 ) -> Result<proto::GitCommitDetails> {
1982 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1983 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1984
1985 let commit = repository_handle
1986 .update(&mut cx, |repository_handle, _| {
1987 repository_handle.show(envelope.payload.commit)
1988 })?
1989 .await??;
1990 Ok(proto::GitCommitDetails {
1991 sha: commit.sha.into(),
1992 message: commit.message.into(),
1993 commit_timestamp: commit.commit_timestamp,
1994 author_email: commit.author_email.into(),
1995 author_name: commit.author_name.into(),
1996 })
1997 }
1998
1999 async fn handle_load_commit_diff(
2000 this: Entity<Self>,
2001 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2002 mut cx: AsyncApp,
2003 ) -> Result<proto::LoadCommitDiffResponse> {
2004 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2005 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2006
2007 let commit_diff = repository_handle
2008 .update(&mut cx, |repository_handle, _| {
2009 repository_handle.load_commit_diff(envelope.payload.commit)
2010 })?
2011 .await??;
2012 Ok(proto::LoadCommitDiffResponse {
2013 files: commit_diff
2014 .files
2015 .into_iter()
2016 .map(|file| proto::CommitFile {
2017 path: file.path.to_proto(),
2018 old_text: file.old_text,
2019 new_text: file.new_text,
2020 })
2021 .collect(),
2022 })
2023 }
2024
2025 async fn handle_reset(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::GitReset>,
2028 mut cx: AsyncApp,
2029 ) -> Result<proto::Ack> {
2030 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2031 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2032
2033 let mode = match envelope.payload.mode() {
2034 git_reset::ResetMode::Soft => ResetMode::Soft,
2035 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2036 };
2037
2038 repository_handle
2039 .update(&mut cx, |repository_handle, cx| {
2040 repository_handle.reset(envelope.payload.commit, mode, cx)
2041 })?
2042 .await??;
2043 Ok(proto::Ack {})
2044 }
2045
2046 async fn handle_checkout_files(
2047 this: Entity<Self>,
2048 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2049 mut cx: AsyncApp,
2050 ) -> Result<proto::Ack> {
2051 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2052 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2053 let paths = envelope
2054 .payload
2055 .paths
2056 .iter()
2057 .map(|s| RepoPath::from_proto(s))
2058 .collect::<Result<Vec<_>>>()?;
2059
2060 repository_handle
2061 .update(&mut cx, |repository_handle, cx| {
2062 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2063 })?
2064 .await??;
2065 Ok(proto::Ack {})
2066 }
2067
2068 async fn handle_open_commit_message_buffer(
2069 this: Entity<Self>,
2070 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2071 mut cx: AsyncApp,
2072 ) -> Result<proto::OpenBufferResponse> {
2073 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2074 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2075 let buffer = repository
2076 .update(&mut cx, |repository, cx| {
2077 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2078 })?
2079 .await?;
2080
2081 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2082 this.update(&mut cx, |this, cx| {
2083 this.buffer_store.update(cx, |buffer_store, cx| {
2084 buffer_store
2085 .create_buffer_for_peer(
2086 &buffer,
2087 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2088 cx,
2089 )
2090 .detach_and_log_err(cx);
2091 })
2092 })?;
2093
2094 Ok(proto::OpenBufferResponse {
2095 buffer_id: buffer_id.to_proto(),
2096 })
2097 }
2098
2099 async fn handle_askpass(
2100 this: Entity<Self>,
2101 envelope: TypedEnvelope<proto::AskPassRequest>,
2102 mut cx: AsyncApp,
2103 ) -> Result<proto::AskPassResponse> {
2104 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2105 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2106
2107 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2108 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2109 debug_panic!("no askpass found");
2110 anyhow::bail!("no askpass found");
2111 };
2112
2113 let response = askpass.ask_password(envelope.payload.prompt).await?;
2114
2115 delegates
2116 .lock()
2117 .insert(envelope.payload.askpass_id, askpass);
2118
2119 response.try_into()
2120 }
2121
2122 async fn handle_check_for_pushed_commits(
2123 this: Entity<Self>,
2124 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2125 mut cx: AsyncApp,
2126 ) -> Result<proto::CheckForPushedCommitsResponse> {
2127 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2128 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2129
2130 let branches = repository_handle
2131 .update(&mut cx, |repository_handle, _| {
2132 repository_handle.check_for_pushed_commits()
2133 })?
2134 .await??;
2135 Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|branch| branch.to_string())
                .collect(),
2140 })
2141 }
2142
2143 async fn handle_git_diff(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::GitDiff>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::GitDiffResponse> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150 let diff_type = match envelope.payload.diff_type() {
2151 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2152 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2153 };
2154
2155 let mut diff = repository_handle
2156 .update(&mut cx, |repository_handle, cx| {
2157 repository_handle.diff(diff_type, cx)
2158 })?
2159 .await??;
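        // Cap the diff sent back over RPC at roughly one million characters.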
2160 const ONE_MB: usize = 1_000_000;
2161 if diff.len() > ONE_MB {
2162 diff = diff.chars().take(ONE_MB).collect()
2163 }
2164
2165 Ok(proto::GitDiffResponse { diff })
2166 }
2167
2168 async fn handle_open_unstaged_diff(
2169 this: Entity<Self>,
2170 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2171 mut cx: AsyncApp,
2172 ) -> Result<proto::OpenUnstagedDiffResponse> {
2173 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2174 let diff = this
2175 .update(&mut cx, |this, cx| {
2176 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2177 Some(this.open_unstaged_diff(buffer, cx))
2178 })?
2179 .context("missing buffer")?
2180 .await?;
2181 this.update(&mut cx, |this, _| {
2182 let shared_diffs = this
2183 .shared_diffs
2184 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2185 .or_default();
2186 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2187 })?;
2188 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2189 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2190 }
2191
2192 async fn handle_open_uncommitted_diff(
2193 this: Entity<Self>,
2194 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2195 mut cx: AsyncApp,
2196 ) -> Result<proto::OpenUncommittedDiffResponse> {
2197 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2198 let diff = this
2199 .update(&mut cx, |this, cx| {
2200 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2201 Some(this.open_uncommitted_diff(buffer, cx))
2202 })?
2203 .context("missing buffer")?
2204 .await?;
2205 this.update(&mut cx, |this, _| {
2206 let shared_diffs = this
2207 .shared_diffs
2208 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2209 .or_default();
2210 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2211 })?;
2212 diff.read_with(&cx, |diff, cx| {
2213 use proto::open_uncommitted_diff_response::Mode;
2214
2215 let unstaged_diff = diff.secondary_diff();
2216 let index_snapshot = unstaged_diff.and_then(|diff| {
2217 let diff = diff.read(cx);
2218 diff.base_text_exists().then(|| diff.base_text())
2219 });
2220
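            // Decide which base texts to send: when the index base text is the very same
            // snapshot as the committed (HEAD) text, only the committed text is sent and
            // the mode records that the index matches HEAD.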
2221 let mode;
2222 let staged_text;
2223 let committed_text;
2224 if diff.base_text_exists() {
2225 let committed_snapshot = diff.base_text();
2226 committed_text = Some(committed_snapshot.text());
2227 if let Some(index_text) = index_snapshot {
2228 if index_text.remote_id() == committed_snapshot.remote_id() {
2229 mode = Mode::IndexMatchesHead;
2230 staged_text = None;
2231 } else {
2232 mode = Mode::IndexAndHead;
2233 staged_text = Some(index_text.text());
2234 }
2235 } else {
2236 mode = Mode::IndexAndHead;
2237 staged_text = None;
2238 }
2239 } else {
2240 mode = Mode::IndexAndHead;
2241 committed_text = None;
2242 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2243 }
2244
2245 proto::OpenUncommittedDiffResponse {
2246 committed_text,
2247 staged_text,
2248 mode: mode.into(),
2249 }
2250 })
2251 }
2252
2253 async fn handle_update_diff_bases(
2254 this: Entity<Self>,
2255 request: TypedEnvelope<proto::UpdateDiffBases>,
2256 mut cx: AsyncApp,
2257 ) -> Result<()> {
2258 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2259 this.update(&mut cx, |this, cx| {
2260 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2261 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2262 {
2263 let buffer = buffer.read(cx).text_snapshot();
2264 diff_state.update(cx, |diff_state, cx| {
2265 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2266 })
2267 }
2268 })
2269 }
2270
2271 async fn handle_blame_buffer(
2272 this: Entity<Self>,
2273 envelope: TypedEnvelope<proto::BlameBuffer>,
2274 mut cx: AsyncApp,
2275 ) -> Result<proto::BlameBufferResponse> {
2276 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2277 let version = deserialize_version(&envelope.payload.version);
2278 let buffer = this.read_with(&cx, |this, cx| {
2279 this.buffer_store.read(cx).get_existing(buffer_id)
2280 })??;
2281 buffer
2282 .update(&mut cx, |buffer, _| {
2283 buffer.wait_for_version(version.clone())
2284 })?
2285 .await?;
2286 let blame = this
2287 .update(&mut cx, |this, cx| {
2288 this.blame_buffer(&buffer, Some(version), cx)
2289 })?
2290 .await?;
2291 Ok(serialize_blame_buffer_response(blame))
2292 }
2293
2294 async fn handle_get_permalink_to_line(
2295 this: Entity<Self>,
2296 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2297 mut cx: AsyncApp,
2298 ) -> Result<proto::GetPermalinkToLineResponse> {
2299 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2300 // let version = deserialize_version(&envelope.payload.version);
2301 let selection = {
2302 let proto_selection = envelope
2303 .payload
2304 .selection
                .context("no selection provided to get permalink for")?;
2306 proto_selection.start as u32..proto_selection.end as u32
2307 };
2308 let buffer = this.read_with(&cx, |this, cx| {
2309 this.buffer_store.read(cx).get_existing(buffer_id)
2310 })??;
2311 let permalink = this
2312 .update(&mut cx, |this, cx| {
2313 this.get_permalink_to_line(&buffer, selection, cx)
2314 })?
2315 .await?;
2316 Ok(proto::GetPermalinkToLineResponse {
2317 permalink: permalink.to_string(),
2318 })
2319 }
2320
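    /// Looks up the repository entity referenced by an incoming RPC request,
    /// returning an error if no repository with the given id exists in this store.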
2321 fn repository_for_request(
2322 this: &Entity<Self>,
2323 id: RepositoryId,
2324 cx: &mut AsyncApp,
2325 ) -> Result<Entity<Repository>> {
2326 this.read_with(cx, |this, _| {
2327 this.repositories
2328 .get(&id)
2329 .context("missing repository handle")
2330 .cloned()
2331 })?
2332 }
2333
2334 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2335 self.repositories
2336 .iter()
2337 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2338 .collect()
2339 }
2340
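    /// Groups the updated worktree entries by the repository whose work directory
    /// contains them. Repositories are matched from the most deeply nested work
    /// directory outwards, so every path is attributed to its innermost repository
    /// exactly once.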
2341 fn process_updated_entries(
2342 &self,
2343 worktree: &Entity<Worktree>,
2344 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2345 cx: &mut App,
2346 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2347 let path_style = worktree.read(cx).path_style();
2348 let mut repo_paths = self
2349 .repositories
2350 .values()
2351 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2352 .collect::<Vec<_>>();
2353 let mut entries: Vec<_> = updated_entries
2354 .iter()
2355 .map(|(path, _, _)| path.clone())
2356 .collect();
2357 entries.sort();
2358 let worktree = worktree.read(cx);
2359
2360 let entries = entries
2361 .into_iter()
2362 .map(|path| worktree.absolutize(&path))
2363 .collect::<Arc<[_]>>();
2364
2365 let executor = cx.background_executor().clone();
2366 cx.background_executor().spawn(async move {
2367 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2368 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2369 let mut tasks = FuturesOrdered::new();
2370 for (repo_path, repo) in repo_paths.into_iter().rev() {
2371 let entries = entries.clone();
2372 let task = executor.spawn(async move {
2373 // Find all repository paths that belong to this repo
2374 let mut ix = entries.partition_point(|path| path < &*repo_path);
2375 if ix == entries.len() {
2376 return None;
2377 };
2378
2379 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form a contiguous range.
2381 while let Some(path) = entries.get(ix)
2382 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2383 &repo_path, path, path_style,
2384 )
2385 {
2386 paths.push((repo_path, ix));
2387 ix += 1;
2388 }
2389 if paths.is_empty() {
2390 None
2391 } else {
2392 Some((repo, paths))
2393 }
2394 });
2395 tasks.push_back(task);
2396 }
2397
2398 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2399 let mut path_was_used = vec![false; entries.len()];
2400 let tasks = tasks.collect::<Vec<_>>().await;
2401 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
2403 for t in tasks {
2404 let Some((repo, paths)) = t else {
2405 continue;
2406 };
2407 let entry = paths_by_git_repo.entry(repo).or_default();
2408 for (repo_path, ix) in paths {
2409 if path_was_used[ix] {
2410 continue;
2411 }
2412 path_was_used[ix] = true;
2413 entry.push(repo_path);
2414 }
2415 }
2416
2417 paths_by_git_repo
2418 })
2419 }
2420}
2421
2422impl BufferGitState {
2423 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2424 Self {
2425 unstaged_diff: Default::default(),
2426 uncommitted_diff: Default::default(),
2427 recalculate_diff_task: Default::default(),
2428 language: Default::default(),
2429 language_registry: Default::default(),
2430 recalculating_tx: postage::watch::channel_with(false).0,
2431 hunk_staging_operation_count: 0,
2432 hunk_staging_operation_count_as_of_write: 0,
2433 head_text: Default::default(),
2434 index_text: Default::default(),
2435 head_changed: Default::default(),
2436 index_changed: Default::default(),
2437 language_changed: Default::default(),
2438 conflict_updated_futures: Default::default(),
2439 conflict_set: Default::default(),
2440 reparse_conflict_markers_task: Default::default(),
2441 }
2442 }
2443
2444 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2445 self.language = buffer.read(cx).language().cloned();
2446 self.language_changed = true;
2447 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2448 }
2449
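    /// Re-parses the buffer's merge conflict markers on a background task and applies
    /// the new snapshot to the associated `ConflictSet`. The returned receiver fires
    /// once the conflict set has been updated; when there is no conflict set to update
    /// (or it currently holds no conflicts) the sender is dropped without firing.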
2450 fn reparse_conflict_markers(
2451 &mut self,
2452 buffer: text::BufferSnapshot,
2453 cx: &mut Context<Self>,
2454 ) -> oneshot::Receiver<()> {
2455 let (tx, rx) = oneshot::channel();
2456
2457 let Some(conflict_set) = self
2458 .conflict_set
2459 .as_ref()
2460 .and_then(|conflict_set| conflict_set.upgrade())
2461 else {
2462 return rx;
2463 };
2464
2465 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2466 if conflict_set.has_conflict {
2467 Some(conflict_set.snapshot())
2468 } else {
2469 None
2470 }
2471 });
2472
2473 if let Some(old_snapshot) = old_snapshot {
2474 self.conflict_updated_futures.push(tx);
2475 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2476 let (snapshot, changed_range) = cx
2477 .background_spawn(async move {
2478 let new_snapshot = ConflictSet::parse(&buffer);
2479 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2480 (new_snapshot, changed_range)
2481 })
2482 .await;
2483 this.update(cx, |this, cx| {
2484 if let Some(conflict_set) = &this.conflict_set {
2485 conflict_set
2486 .update(cx, |conflict_set, cx| {
2487 conflict_set.set_snapshot(snapshot, changed_range, cx);
2488 })
2489 .ok();
2490 }
2491 let futures = std::mem::take(&mut this.conflict_updated_futures);
2492 for tx in futures {
2493 tx.send(()).ok();
2494 }
2495 })
2496 }))
2497 }
2498
2499 rx
2500 }
2501
2502 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2503 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2504 }
2505
2506 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2507 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2508 }
2509
2510 fn handle_base_texts_updated(
2511 &mut self,
2512 buffer: text::BufferSnapshot,
2513 message: proto::UpdateDiffBases,
2514 cx: &mut Context<Self>,
2515 ) {
2516 use proto::update_diff_bases::Mode;
2517
2518 let Some(mode) = Mode::from_i32(message.mode) else {
2519 return;
2520 };
2521
2522 let diff_bases_change = match mode {
2523 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2524 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2525 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2526 Mode::IndexAndHead => DiffBasesChange::SetEach {
2527 index: message.staged_text,
2528 head: message.committed_text,
2529 },
2530 };
2531
2532 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2533 }
2534
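    /// Returns a future that resolves once the in-flight diff recalculation finishes,
    /// or `None` if no recalculation is currently running.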
2535 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2536 if *self.recalculating_tx.borrow() {
2537 let mut rx = self.recalculating_tx.subscribe();
2538 Some(async move {
2539 loop {
2540 let is_recalculating = rx.recv().await;
2541 if is_recalculating != Some(true) {
2542 break;
2543 }
2544 }
2545 })
2546 } else {
2547 None
2548 }
2549 }
2550
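    /// Applies new head and/or index base texts (normalizing their line endings) and
    /// then kicks off a diff recalculation against the given buffer snapshot.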
2551 fn diff_bases_changed(
2552 &mut self,
2553 buffer: text::BufferSnapshot,
2554 diff_bases_change: Option<DiffBasesChange>,
2555 cx: &mut Context<Self>,
2556 ) {
2557 match diff_bases_change {
2558 Some(DiffBasesChange::SetIndex(index)) => {
2559 self.index_text = index.map(|mut index| {
2560 text::LineEnding::normalize(&mut index);
2561 Arc::new(index)
2562 });
2563 self.index_changed = true;
2564 }
2565 Some(DiffBasesChange::SetHead(head)) => {
2566 self.head_text = head.map(|mut head| {
2567 text::LineEnding::normalize(&mut head);
2568 Arc::new(head)
2569 });
2570 self.head_changed = true;
2571 }
2572 Some(DiffBasesChange::SetBoth(text)) => {
2573 let text = text.map(|mut text| {
2574 text::LineEnding::normalize(&mut text);
2575 Arc::new(text)
2576 });
2577 self.head_text = text.clone();
2578 self.index_text = text;
2579 self.head_changed = true;
2580 self.index_changed = true;
2581 }
2582 Some(DiffBasesChange::SetEach { index, head }) => {
2583 self.index_text = index.map(|mut index| {
2584 text::LineEnding::normalize(&mut index);
2585 Arc::new(index)
2586 });
2587 self.index_changed = true;
2588 self.head_text = head.map(|mut head| {
2589 text::LineEnding::normalize(&mut head);
2590 Arc::new(head)
2591 });
2592 self.head_changed = true;
2593 }
2594 None => {}
2595 }
2596
2597 self.recalculate_diffs(buffer, cx)
2598 }
2599
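    /// Spawns a task that recomputes the unstaged and uncommitted diffs for this buffer.
    /// When the index text matches HEAD, the unstaged diff is reused as the uncommitted
    /// diff. The recalculation is abandoned if further hunk staging operations were
    /// started after the index write it is based on, so pending state is not invalidated
    /// prematurely.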
2600 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2601 *self.recalculating_tx.borrow_mut() = true;
2602
2603 let language = self.language.clone();
2604 let language_registry = self.language_registry.clone();
2605 let unstaged_diff = self.unstaged_diff();
2606 let uncommitted_diff = self.uncommitted_diff();
2607 let head = self.head_text.clone();
2608 let index = self.index_text.clone();
2609 let index_changed = self.index_changed;
2610 let head_changed = self.head_changed;
2611 let language_changed = self.language_changed;
2612 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2613 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2614 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2615 (None, None) => true,
2616 _ => false,
2617 };
2618 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2619 log::debug!(
2620 "start recalculating diffs for buffer {}",
2621 buffer.remote_id()
2622 );
2623
2624 let mut new_unstaged_diff = None;
2625 if let Some(unstaged_diff) = &unstaged_diff {
2626 new_unstaged_diff = Some(
2627 BufferDiff::update_diff(
2628 unstaged_diff.clone(),
2629 buffer.clone(),
2630 index,
2631 index_changed,
2632 language_changed,
2633 language.clone(),
2634 language_registry.clone(),
2635 cx,
2636 )
2637 .await?,
2638 );
2639 }
2640
2641 let mut new_uncommitted_diff = None;
2642 if let Some(uncommitted_diff) = &uncommitted_diff {
2643 new_uncommitted_diff = if index_matches_head {
2644 new_unstaged_diff.clone()
2645 } else {
2646 Some(
2647 BufferDiff::update_diff(
2648 uncommitted_diff.clone(),
2649 buffer.clone(),
2650 head,
2651 head_changed,
2652 language_changed,
2653 language.clone(),
2654 language_registry.clone(),
2655 cx,
2656 )
2657 .await?,
2658 )
2659 }
2660 }
2661
2662 let cancel = this.update(cx, |this, _| {
2663 // This checks whether all pending stage/unstage operations
2664 // have quiesced (i.e. both the corresponding write and the
2665 // read of that write have completed). If not, then we cancel
2666 // this recalculation attempt to avoid invalidating pending
2667 // state too quickly; another recalculation will come along
2668 // later and clear the pending state once the state of the index has settled.
2669 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2670 *this.recalculating_tx.borrow_mut() = false;
2671 true
2672 } else {
2673 false
2674 }
2675 })?;
2676 if cancel {
                log::debug!(
                    "aborting diff recalculation for buffer {} due to subsequent hunk operations",
                    buffer.remote_id()
                );
2684 return Ok(());
2685 }
2686
2687 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2688 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2689 {
2690 unstaged_diff.update(cx, |diff, cx| {
2691 if language_changed {
2692 diff.language_changed(cx);
2693 }
2694 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2695 })?
2696 } else {
2697 None
2698 };
2699
2700 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2701 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2702 {
2703 uncommitted_diff.update(cx, |diff, cx| {
2704 if language_changed {
2705 diff.language_changed(cx);
2706 }
2707 diff.set_snapshot_with_secondary(
2708 new_uncommitted_diff,
2709 &buffer,
2710 unstaged_changed_range,
2711 true,
2712 cx,
2713 );
2714 })?;
2715 }
2716
2717 log::debug!(
2718 "finished recalculating diffs for buffer {}",
2719 buffer.remote_id()
2720 );
2721
2722 if let Some(this) = this.upgrade() {
2723 this.update(cx, |this, _| {
2724 this.index_changed = false;
2725 this.head_changed = false;
2726 this.language_changed = false;
2727 *this.recalculating_tx.borrow_mut() = false;
2728 })?;
2729 }
2730
2731 Ok(())
2732 }));
2733 }
2734}
2735
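/// Builds an `AskPassDelegate` that forwards credential prompts from a local git
/// operation to the downstream client over RPC and relays the encrypted response back,
/// zeroizing the plaintext reply once it has been converted.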
2736fn make_remote_delegate(
2737 this: Entity<GitStore>,
2738 project_id: u64,
2739 repository_id: RepositoryId,
2740 askpass_id: u64,
2741 cx: &mut AsyncApp,
2742) -> AskPassDelegate {
2743 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2744 this.update(cx, |this, cx| {
2745 let Some((client, _)) = this.downstream_client() else {
2746 return;
2747 };
2748 let response = client.request(proto::AskPassRequest {
2749 project_id,
2750 repository_id: repository_id.to_proto(),
2751 askpass_id,
2752 prompt,
2753 });
2754 cx.spawn(async move |_, _| {
2755 let mut response = response.await?.response;
2756 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2757 .ok();
2758 response.zeroize();
2759 anyhow::Ok(())
2760 })
2761 .detach_and_log_err(cx);
2762 })
2763 .log_err();
2764 })
2765}
2766
2767impl RepositoryId {
2768 pub fn to_proto(self) -> u64 {
2769 self.0
2770 }
2771
2772 pub fn from_proto(id: u64) -> Self {
2773 RepositoryId(id)
2774 }
2775}
2776
2777impl RepositorySnapshot {
2778 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2779 Self {
2780 id,
2781 statuses_by_path: Default::default(),
2782 work_directory_abs_path,
2783 branch: None,
2784 head_commit: None,
2785 scan_id: 0,
2786 merge: Default::default(),
2787 remote_origin_url: None,
2788 remote_upstream_url: None,
2789 stash_entries: Default::default(),
2790 path_style,
2791 }
2792 }
2793
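    /// Builds the initial `UpdateRepository` message for this snapshot, containing every
    /// status entry plus the current branch, head commit, merge conflicts, and stash entries.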
2794 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2795 proto::UpdateRepository {
2796 branch_summary: self.branch.as_ref().map(branch_to_proto),
2797 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2798 updated_statuses: self
2799 .statuses_by_path
2800 .iter()
2801 .map(|entry| entry.to_proto())
2802 .collect(),
2803 removed_statuses: Default::default(),
2804 current_merge_conflicts: self
2805 .merge
2806 .conflicted_paths
2807 .iter()
2808 .map(|repo_path| repo_path.to_proto())
2809 .collect(),
2810 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2811 project_id,
2812 id: self.id.to_proto(),
2813 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2814 entry_ids: vec![self.id.to_proto()],
2815 scan_id: self.scan_id,
2816 is_last_update: true,
2817 stash_entries: self
2818 .stash_entries
2819 .entries
2820 .iter()
2821 .map(stash_to_proto)
2822 .collect(),
2823 }
2824 }
2825
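    /// Builds an incremental `UpdateRepository` message by walking the old and new status
    /// trees in lockstep and emitting only the entries that were added, changed, or removed
    /// relative to `old`.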
2826 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2827 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2828 let mut removed_statuses: Vec<String> = Vec::new();
2829
2830 let mut new_statuses = self.statuses_by_path.iter().peekable();
2831 let mut old_statuses = old.statuses_by_path.iter().peekable();
2832
2833 let mut current_new_entry = new_statuses.next();
2834 let mut current_old_entry = old_statuses.next();
2835 loop {
2836 match (current_new_entry, current_old_entry) {
2837 (Some(new_entry), Some(old_entry)) => {
2838 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2839 Ordering::Less => {
2840 updated_statuses.push(new_entry.to_proto());
2841 current_new_entry = new_statuses.next();
2842 }
2843 Ordering::Equal => {
2844 if new_entry.status != old_entry.status {
2845 updated_statuses.push(new_entry.to_proto());
2846 }
2847 current_old_entry = old_statuses.next();
2848 current_new_entry = new_statuses.next();
2849 }
2850 Ordering::Greater => {
2851 removed_statuses.push(old_entry.repo_path.to_proto());
2852 current_old_entry = old_statuses.next();
2853 }
2854 }
2855 }
2856 (None, Some(old_entry)) => {
2857 removed_statuses.push(old_entry.repo_path.to_proto());
2858 current_old_entry = old_statuses.next();
2859 }
2860 (Some(new_entry), None) => {
2861 updated_statuses.push(new_entry.to_proto());
2862 current_new_entry = new_statuses.next();
2863 }
2864 (None, None) => break,
2865 }
2866 }
2867
2868 proto::UpdateRepository {
2869 branch_summary: self.branch.as_ref().map(branch_to_proto),
2870 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2871 updated_statuses,
2872 removed_statuses,
2873 current_merge_conflicts: self
2874 .merge
2875 .conflicted_paths
2876 .iter()
2877 .map(|path| path.to_proto())
2878 .collect(),
2879 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2880 project_id,
2881 id: self.id.to_proto(),
2882 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2883 entry_ids: vec![],
2884 scan_id: self.scan_id,
2885 is_last_update: true,
2886 stash_entries: self
2887 .stash_entries
2888 .entries
2889 .iter()
2890 .map(stash_to_proto)
2891 .collect(),
2892 }
2893 }
2894
2895 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2896 self.statuses_by_path.iter().cloned()
2897 }
2898
2899 pub fn status_summary(&self) -> GitSummary {
2900 self.statuses_by_path.summary().item_summary
2901 }
2902
2903 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2904 self.statuses_by_path
2905 .get(&PathKey(path.0.clone()), ())
2906 .cloned()
2907 }
2908
2909 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2910 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2911 }
2912
2913 #[inline]
2914 fn abs_path_to_repo_path_inner(
2915 work_directory_abs_path: &Path,
2916 abs_path: &Path,
2917 path_style: PathStyle,
2918 ) -> Option<RepoPath> {
2919 abs_path
2920 .strip_prefix(&work_directory_abs_path)
2921 .ok()
2922 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2923 }
2924
2925 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2926 self.merge.conflicted_paths.contains(repo_path)
2927 }
2928
2929 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2930 let had_conflict_on_last_merge_head_change =
2931 self.merge.conflicted_paths.contains(repo_path);
2932 let has_conflict_currently = self
2933 .status_for_path(repo_path)
2934 .is_some_and(|entry| entry.status.is_conflicted());
2935 had_conflict_on_last_merge_head_change || has_conflict_currently
2936 }
2937
2938 /// This is the name that will be displayed in the repository selector for this repository.
2939 pub fn display_name(&self) -> SharedString {
2940 self.work_directory_abs_path
2941 .file_name()
2942 .unwrap_or_default()
2943 .to_string_lossy()
2944 .to_string()
2945 .into()
2946 }
2947}
2948
2949pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2950 proto::StashEntry {
2951 oid: entry.oid.as_bytes().to_vec(),
2952 message: entry.message.clone(),
2953 branch: entry.branch.clone(),
2954 index: entry.index as u64,
2955 timestamp: entry.timestamp,
2956 }
2957}
2958
2959pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2960 Ok(StashEntry {
2961 oid: Oid::from_bytes(&entry.oid)?,
2962 message: entry.message.clone(),
2963 index: entry.index as usize,
2964 branch: entry.branch.clone(),
2965 timestamp: entry.timestamp,
2966 })
2967}
2968
2969impl MergeDetails {
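    /// Reads the repository's merge-related state: the merge message, the heads of any
    /// in-progress operation (merge, cherry-pick, rebase, revert, apply), and the set of
    /// conflicted paths. Returns the new details and whether the tracked merge heads were
    /// updated.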
2970 async fn load(
2971 backend: &Arc<dyn GitRepository>,
2972 status: &SumTree<StatusEntry>,
2973 prev_snapshot: &RepositorySnapshot,
2974 ) -> Result<(MergeDetails, bool)> {
2975 log::debug!("load merge details");
2976 let message = backend.merge_message().await;
2977 let heads = backend
2978 .revparse_batch(vec![
2979 "MERGE_HEAD".into(),
2980 "CHERRY_PICK_HEAD".into(),
2981 "REBASE_HEAD".into(),
2982 "REVERT_HEAD".into(),
2983 "APPLY_HEAD".into(),
2984 ])
2985 .await
2986 .log_err()
2987 .unwrap_or_default()
2988 .into_iter()
2989 .map(|opt| opt.map(SharedString::from))
2990 .collect::<Vec<_>>();
2991 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2992 let conflicted_paths = if merge_heads_changed {
2993 let current_conflicted_paths = TreeSet::from_ordered_entries(
2994 status
2995 .iter()
2996 .filter(|entry| entry.status.is_conflicted())
2997 .map(|entry| entry.repo_path.clone()),
2998 );
2999
3000 // It can happen that we run a scan while a lengthy merge is in progress
3001 // that will eventually result in conflicts, but before those conflicts
3002 // are reported by `git status`. Since for the moment we only care about
3003 // the merge heads state for the purposes of tracking conflicts, don't update
3004 // this state until we see some conflicts.
3005 if heads.iter().any(Option::is_some)
3006 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3007 && current_conflicted_paths.is_empty()
3008 {
3009 log::debug!("not updating merge heads because no conflicts found");
3010 return Ok((
3011 MergeDetails {
3012 message: message.map(SharedString::from),
3013 ..prev_snapshot.merge.clone()
3014 },
3015 false,
3016 ));
3017 }
3018
3019 current_conflicted_paths
3020 } else {
3021 prev_snapshot.merge.conflicted_paths.clone()
3022 };
3023 let details = MergeDetails {
3024 conflicted_paths,
3025 message: message.map(SharedString::from),
3026 heads,
3027 };
3028 Ok((details, merge_heads_changed))
3029 }
3030}
3031
3032impl Repository {
3033 pub fn snapshot(&self) -> RepositorySnapshot {
3034 self.snapshot.clone()
3035 }
3036
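    /// Creates a repository backed by a local git working copy, spawning a local worker
    /// that runs git jobs against the given `.git` and common directories.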
3037 fn local(
3038 id: RepositoryId,
3039 work_directory_abs_path: Arc<Path>,
3040 dot_git_abs_path: Arc<Path>,
3041 repository_dir_abs_path: Arc<Path>,
3042 common_dir_abs_path: Arc<Path>,
3043 project_environment: WeakEntity<ProjectEnvironment>,
3044 fs: Arc<dyn Fs>,
3045 git_store: WeakEntity<GitStore>,
3046 cx: &mut Context<Self>,
3047 ) -> Self {
3048 let snapshot =
3049 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3050 Repository {
3051 this: cx.weak_entity(),
3052 git_store,
3053 snapshot,
3054 commit_message_buffer: None,
3055 askpass_delegates: Default::default(),
3056 paths_needing_status_update: Default::default(),
3057 latest_askpass_id: 0,
3058 job_sender: Repository::spawn_local_git_worker(
3059 work_directory_abs_path,
3060 dot_git_abs_path,
3061 repository_dir_abs_path,
3062 common_dir_abs_path,
3063 project_environment,
3064 fs,
3065 cx,
3066 ),
3067 job_id: 0,
3068 active_jobs: Default::default(),
3069 }
3070 }
3071
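    /// Creates a repository that forwards all git operations to the remote project over
    /// the given RPC client.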
3072 fn remote(
3073 id: RepositoryId,
3074 work_directory_abs_path: Arc<Path>,
3075 path_style: PathStyle,
3076 project_id: ProjectId,
3077 client: AnyProtoClient,
3078 git_store: WeakEntity<GitStore>,
3079 cx: &mut Context<Self>,
3080 ) -> Self {
3081 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3082 Self {
3083 this: cx.weak_entity(),
3084 snapshot,
3085 commit_message_buffer: None,
3086 git_store,
3087 paths_needing_status_update: Default::default(),
3088 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3089 askpass_delegates: Default::default(),
3090 latest_askpass_id: 0,
3091 active_jobs: Default::default(),
3092 job_id: 0,
3093 }
3094 }
3095
3096 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3097 self.git_store.upgrade()
3098 }
3099
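    /// Re-reads the index and HEAD texts for every open buffer belonging to this repository
    /// and, where they differ from the cached values, applies the new diff bases locally and
    /// forwards them to any downstream client.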
3100 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3101 let this = cx.weak_entity();
3102 let git_store = self.git_store.clone();
3103 let _ = self.send_keyed_job(
3104 Some(GitJobKey::ReloadBufferDiffBases),
3105 None,
3106 |state, mut cx| async move {
3107 let RepositoryState::Local { backend, .. } = state else {
3108 log::error!("tried to recompute diffs for a non-local repository");
3109 return Ok(());
3110 };
3111
3112 let Some(this) = this.upgrade() else {
3113 return Ok(());
3114 };
3115
3116 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3117 git_store.update(cx, |git_store, cx| {
3118 git_store
3119 .diffs
3120 .iter()
3121 .filter_map(|(buffer_id, diff_state)| {
3122 let buffer_store = git_store.buffer_store.read(cx);
3123 let buffer = buffer_store.get(*buffer_id)?;
3124 let file = File::from_dyn(buffer.read(cx).file())?;
3125 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3126 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3127 log::debug!(
3128 "start reload diff bases for repo path {}",
3129 repo_path.as_unix_str()
3130 );
3131 diff_state.update(cx, |diff_state, _| {
3132 let has_unstaged_diff = diff_state
3133 .unstaged_diff
3134 .as_ref()
3135 .is_some_and(|diff| diff.is_upgradable());
3136 let has_uncommitted_diff = diff_state
3137 .uncommitted_diff
3138 .as_ref()
3139 .is_some_and(|set| set.is_upgradable());
3140
3141 Some((
3142 buffer,
3143 repo_path,
3144 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3145 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3146 ))
3147 })
3148 })
3149 .collect::<Vec<_>>()
3150 })
3151 })??;
3152
3153 let buffer_diff_base_changes = cx
3154 .background_spawn(async move {
3155 let mut changes = Vec::new();
3156 for (buffer, repo_path, current_index_text, current_head_text) in
3157 &repo_diff_state_updates
3158 {
3159 let index_text = if current_index_text.is_some() {
3160 backend.load_index_text(repo_path.clone()).await
3161 } else {
3162 None
3163 };
3164 let head_text = if current_head_text.is_some() {
3165 backend.load_committed_text(repo_path.clone()).await
3166 } else {
3167 None
3168 };
3169
3170 let change =
3171 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3172 (Some(current_index), Some(current_head)) => {
3173 let index_changed =
3174 index_text.as_ref() != current_index.as_deref();
3175 let head_changed =
3176 head_text.as_ref() != current_head.as_deref();
3177 if index_changed && head_changed {
3178 if index_text == head_text {
3179 Some(DiffBasesChange::SetBoth(head_text))
3180 } else {
3181 Some(DiffBasesChange::SetEach {
3182 index: index_text,
3183 head: head_text,
3184 })
3185 }
3186 } else if index_changed {
3187 Some(DiffBasesChange::SetIndex(index_text))
3188 } else if head_changed {
3189 Some(DiffBasesChange::SetHead(head_text))
3190 } else {
3191 None
3192 }
3193 }
3194 (Some(current_index), None) => {
3195 let index_changed =
3196 index_text.as_ref() != current_index.as_deref();
3197 index_changed
3198 .then_some(DiffBasesChange::SetIndex(index_text))
3199 }
3200 (None, Some(current_head)) => {
3201 let head_changed =
3202 head_text.as_ref() != current_head.as_deref();
3203 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3204 }
3205 (None, None) => None,
3206 };
3207
3208 changes.push((buffer.clone(), change))
3209 }
3210 changes
3211 })
3212 .await;
3213
3214 git_store.update(&mut cx, |git_store, cx| {
3215 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3216 let buffer_snapshot = buffer.read(cx).text_snapshot();
3217 let buffer_id = buffer_snapshot.remote_id();
3218 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3219 continue;
3220 };
3221
3222 let downstream_client = git_store.downstream_client();
3223 diff_state.update(cx, |diff_state, cx| {
3224 use proto::update_diff_bases::Mode;
3225
3226 if let Some((diff_bases_change, (client, project_id))) =
3227 diff_bases_change.clone().zip(downstream_client)
3228 {
3229 let (staged_text, committed_text, mode) = match diff_bases_change {
3230 DiffBasesChange::SetIndex(index) => {
3231 (index, None, Mode::IndexOnly)
3232 }
3233 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3234 DiffBasesChange::SetEach { index, head } => {
3235 (index, head, Mode::IndexAndHead)
3236 }
3237 DiffBasesChange::SetBoth(text) => {
3238 (None, text, Mode::IndexMatchesHead)
3239 }
3240 };
3241 client
3242 .send(proto::UpdateDiffBases {
3243 project_id: project_id.to_proto(),
3244 buffer_id: buffer_id.to_proto(),
3245 staged_text,
3246 committed_text,
3247 mode: mode as i32,
3248 })
3249 .log_err();
3250 }
3251
3252 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3253 });
3254 }
3255 })
3256 },
3257 );
3258 }
3259
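    /// Enqueues an un-keyed job on this repository's worker queue; see [`Self::send_keyed_job`].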
3260 pub fn send_job<F, Fut, R>(
3261 &mut self,
3262 status: Option<SharedString>,
3263 job: F,
3264 ) -> oneshot::Receiver<R>
3265 where
3266 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3267 Fut: Future<Output = R> + 'static,
3268 R: Send + 'static,
3269 {
3270 self.send_keyed_job(None, status, job)
3271 }
3272
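    /// Enqueues a job on this repository's worker queue. While the job runs, the optional
    /// `status` message is recorded in `active_jobs` (notifying observers), and the returned
    /// receiver yields the job's result. The optional `key` identifies equivalent jobs to
    /// the worker.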
3273 fn send_keyed_job<F, Fut, R>(
3274 &mut self,
3275 key: Option<GitJobKey>,
3276 status: Option<SharedString>,
3277 job: F,
3278 ) -> oneshot::Receiver<R>
3279 where
3280 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3281 Fut: Future<Output = R> + 'static,
3282 R: Send + 'static,
3283 {
3284 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3285 let job_id = post_inc(&mut self.job_id);
3286 let this = self.this.clone();
3287 self.job_sender
3288 .unbounded_send(GitJob {
3289 key,
3290 job: Box::new(move |state, cx: &mut AsyncApp| {
3291 let job = job(state, cx.clone());
3292 cx.spawn(async move |cx| {
3293 if let Some(s) = status.clone() {
3294 this.update(cx, |this, cx| {
3295 this.active_jobs.insert(
3296 job_id,
3297 JobInfo {
3298 start: Instant::now(),
3299 message: s.clone(),
3300 },
3301 );
3302
3303 cx.notify();
3304 })
3305 .ok();
3306 }
3307 let result = job.await;
3308
3309 this.update(cx, |this, cx| {
3310 this.active_jobs.remove(&job_id);
3311 cx.notify();
3312 })
3313 .ok();
3314
3315 result_tx.send(result).ok();
3316 })
3317 }),
3318 })
3319 .ok();
3320 result_rx
3321 }
3322
3323 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3324 let Some(git_store) = self.git_store.upgrade() else {
3325 return;
3326 };
3327 let entity = cx.entity();
3328 git_store.update(cx, |git_store, cx| {
3329 let Some((&id, _)) = git_store
3330 .repositories
3331 .iter()
3332 .find(|(_, handle)| *handle == &entity)
3333 else {
3334 return;
3335 };
3336 git_store.active_repo_id = Some(id);
3337 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3338 });
3339 }
3340
3341 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3342 self.snapshot.status()
3343 }
3344
3345 pub fn cached_stash(&self) -> GitStash {
3346 self.snapshot.stash_entries.clone()
3347 }
3348
3349 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3350 let git_store = self.git_store.upgrade()?;
3351 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3352 let abs_path = self
3353 .snapshot
3354 .work_directory_abs_path
3355 .join(path.as_std_path());
3356 let abs_path = SanitizedPath::new(&abs_path);
3357 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3358 Some(ProjectPath {
3359 worktree_id: worktree.read(cx).id(),
3360 path: relative_path,
3361 })
3362 }
3363
3364 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3365 let git_store = self.git_store.upgrade()?;
3366 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3367 let abs_path = worktree_store.absolutize(path, cx)?;
3368 self.snapshot.abs_path_to_repo_path(&abs_path)
3369 }
3370
3371 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3372 other
3373 .read(cx)
3374 .snapshot
3375 .work_directory_abs_path
3376 .starts_with(&self.snapshot.work_directory_abs_path)
3377 }
3378
3379 pub fn open_commit_buffer(
3380 &mut self,
3381 languages: Option<Arc<LanguageRegistry>>,
3382 buffer_store: Entity<BufferStore>,
3383 cx: &mut Context<Self>,
3384 ) -> Task<Result<Entity<Buffer>>> {
3385 let id = self.id;
3386 if let Some(buffer) = self.commit_message_buffer.clone() {
3387 return Task::ready(Ok(buffer));
3388 }
3389 let this = cx.weak_entity();
3390
3391 let rx = self.send_job(None, move |state, mut cx| async move {
3392 let Some(this) = this.upgrade() else {
3393 bail!("git store was dropped");
3394 };
3395 match state {
3396 RepositoryState::Local { .. } => {
3397 this.update(&mut cx, |_, cx| {
3398 Self::open_local_commit_buffer(languages, buffer_store, cx)
3399 })?
3400 .await
3401 }
3402 RepositoryState::Remote { project_id, client } => {
3403 let request = client.request(proto::OpenCommitMessageBuffer {
3404 project_id: project_id.0,
3405 repository_id: id.to_proto(),
3406 });
3407 let response = request.await.context("requesting to open commit buffer")?;
3408 let buffer_id = BufferId::new(response.buffer_id)?;
3409 let buffer = buffer_store
3410 .update(&mut cx, |buffer_store, cx| {
3411 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3412 })?
3413 .await?;
3414 if let Some(language_registry) = languages {
3415 let git_commit_language =
3416 language_registry.language_for_name("Git Commit").await?;
3417 buffer.update(&mut cx, |buffer, cx| {
3418 buffer.set_language(Some(git_commit_language), cx);
3419 })?;
3420 }
3421 this.update(&mut cx, |this, _| {
3422 this.commit_message_buffer = Some(buffer.clone());
3423 })?;
3424 Ok(buffer)
3425 }
3426 }
3427 });
3428
3429 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3430 }
3431
3432 fn open_local_commit_buffer(
3433 language_registry: Option<Arc<LanguageRegistry>>,
3434 buffer_store: Entity<BufferStore>,
3435 cx: &mut Context<Self>,
3436 ) -> Task<Result<Entity<Buffer>>> {
3437 cx.spawn(async move |repository, cx| {
3438 let buffer = buffer_store
3439 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3440 .await?;
3441
3442 if let Some(language_registry) = language_registry {
3443 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3444 buffer.update(cx, |buffer, cx| {
3445 buffer.set_language(Some(git_commit_language), cx);
3446 })?;
3447 }
3448
3449 repository.update(cx, |repository, _| {
3450 repository.commit_message_buffer = Some(buffer.clone());
3451 })?;
3452 Ok(buffer)
3453 })
3454 }
3455
3456 pub fn checkout_files(
3457 &mut self,
3458 commit: &str,
3459 paths: Vec<RepoPath>,
3460 _cx: &mut App,
3461 ) -> oneshot::Receiver<Result<()>> {
3462 let commit = commit.to_string();
3463 let id = self.id;
3464
3465 self.send_job(
3466 Some(format!("git checkout {}", commit).into()),
3467 move |git_repo, _| async move {
3468 match git_repo {
3469 RepositoryState::Local {
3470 backend,
3471 environment,
3472 ..
3473 } => {
3474 backend
3475 .checkout_files(commit, paths, environment.clone())
3476 .await
3477 }
3478 RepositoryState::Remote { project_id, client } => {
3479 client
3480 .request(proto::GitCheckoutFiles {
3481 project_id: project_id.0,
3482 repository_id: id.to_proto(),
3483 commit,
3484 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3485 })
3486 .await?;
3487
3488 Ok(())
3489 }
3490 }
3491 },
3492 )
3493 }
3494
3495 pub fn reset(
3496 &mut self,
3497 commit: String,
3498 reset_mode: ResetMode,
3499 _cx: &mut App,
3500 ) -> oneshot::Receiver<Result<()>> {
3501 let id = self.id;
3502
3503 self.send_job(None, move |git_repo, _| async move {
3504 match git_repo {
3505 RepositoryState::Local {
3506 backend,
3507 environment,
3508 ..
3509 } => backend.reset(commit, reset_mode, environment).await,
3510 RepositoryState::Remote { project_id, client } => {
3511 client
3512 .request(proto::GitReset {
3513 project_id: project_id.0,
3514 repository_id: id.to_proto(),
3515 commit,
3516 mode: match reset_mode {
3517 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3518 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3519 },
3520 })
3521 .await?;
3522
3523 Ok(())
3524 }
3525 }
3526 })
3527 }
3528
3529 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3530 let id = self.id;
3531 self.send_job(None, move |git_repo, _cx| async move {
3532 match git_repo {
3533 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3534 RepositoryState::Remote { project_id, client } => {
3535 let resp = client
3536 .request(proto::GitShow {
3537 project_id: project_id.0,
3538 repository_id: id.to_proto(),
3539 commit,
3540 })
3541 .await?;
3542
3543 Ok(CommitDetails {
3544 sha: resp.sha.into(),
3545 message: resp.message.into(),
3546 commit_timestamp: resp.commit_timestamp,
3547 author_email: resp.author_email.into(),
3548 author_name: resp.author_name.into(),
3549 })
3550 }
3551 }
3552 })
3553 }
3554
3555 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3556 let id = self.id;
3557 self.send_job(None, move |git_repo, cx| async move {
3558 match git_repo {
3559 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3560 RepositoryState::Remote {
3561 client, project_id, ..
3562 } => {
3563 let response = client
3564 .request(proto::LoadCommitDiff {
3565 project_id: project_id.0,
3566 repository_id: id.to_proto(),
3567 commit,
3568 })
3569 .await?;
3570 Ok(CommitDiff {
3571 files: response
3572 .files
3573 .into_iter()
3574 .map(|file| {
3575 Ok(CommitFile {
3576 path: RepoPath::from_proto(&file.path)?,
3577 old_text: file.old_text,
3578 new_text: file.new_text,
3579 })
3580 })
3581 .collect::<Result<Vec<_>>>()?,
3582 })
3583 }
3584 }
3585 })
3586 }
3587
3588 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3589 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3590 }
3591
3592 pub fn stage_entries(
3593 &self,
3594 entries: Vec<RepoPath>,
3595 cx: &mut Context<Self>,
3596 ) -> Task<anyhow::Result<()>> {
3597 if entries.is_empty() {
3598 return Task::ready(Ok(()));
3599 }
3600 let id = self.id;
3601
3602 let mut save_futures = Vec::new();
3603 if let Some(buffer_store) = self.buffer_store(cx) {
3604 buffer_store.update(cx, |buffer_store, cx| {
3605 for path in &entries {
3606 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3607 continue;
3608 };
3609 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3610 && buffer
3611 .read(cx)
3612 .file()
3613 .is_some_and(|file| file.disk_state().exists())
3614 {
3615 save_futures.push(buffer_store.save_buffer(buffer, cx));
3616 }
3617 }
3618 })
3619 }
3620
3621 cx.spawn(async move |this, cx| {
3622 for save_future in save_futures {
3623 save_future.await?;
3624 }
3625
3626 this.update(cx, |this, _| {
3627 this.send_job(None, move |git_repo, _cx| async move {
3628 match git_repo {
3629 RepositoryState::Local {
3630 backend,
3631 environment,
3632 ..
3633 } => backend.stage_paths(entries, environment.clone()).await,
3634 RepositoryState::Remote { project_id, client } => {
3635 client
3636 .request(proto::Stage {
3637 project_id: project_id.0,
3638 repository_id: id.to_proto(),
3639 paths: entries
3640 .into_iter()
3641 .map(|repo_path| repo_path.to_proto())
3642 .collect(),
3643 })
3644 .await
3645 .context("sending stage request")?;
3646
3647 Ok(())
3648 }
3649 }
3650 })
3651 })?
3652 .await??;
3653
3654 Ok(())
3655 })
3656 }
3657
3658 pub fn unstage_entries(
3659 &self,
3660 entries: Vec<RepoPath>,
3661 cx: &mut Context<Self>,
3662 ) -> Task<anyhow::Result<()>> {
3663 if entries.is_empty() {
3664 return Task::ready(Ok(()));
3665 }
3666 let id = self.id;
3667
3668 let mut save_futures = Vec::new();
3669 if let Some(buffer_store) = self.buffer_store(cx) {
3670 buffer_store.update(cx, |buffer_store, cx| {
3671 for path in &entries {
3672 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3673 continue;
3674 };
3675 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3676 && buffer
3677 .read(cx)
3678 .file()
3679 .is_some_and(|file| file.disk_state().exists())
3680 {
3681 save_futures.push(buffer_store.save_buffer(buffer, cx));
3682 }
3683 }
3684 })
3685 }
3686
3687 cx.spawn(async move |this, cx| {
3688 for save_future in save_futures {
3689 save_future.await?;
3690 }
3691
3692 this.update(cx, |this, _| {
3693 this.send_job(None, move |git_repo, _cx| async move {
3694 match git_repo {
3695 RepositoryState::Local {
3696 backend,
3697 environment,
3698 ..
3699 } => backend.unstage_paths(entries, environment).await,
3700 RepositoryState::Remote { project_id, client } => {
3701 client
3702 .request(proto::Unstage {
3703 project_id: project_id.0,
3704 repository_id: id.to_proto(),
3705 paths: entries
3706 .into_iter()
3707 .map(|repo_path| repo_path.to_proto())
3708 .collect(),
3709 })
3710 .await
3711 .context("sending unstage request")?;
3712
3713 Ok(())
3714 }
3715 }
3716 })
3717 })?
3718 .await??;
3719
3720 Ok(())
3721 })
3722 }
3723
3724 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3725 let to_stage = self
3726 .cached_status()
3727 .filter(|entry| !entry.status.staging().is_fully_staged())
3728 .map(|entry| entry.repo_path)
3729 .collect();
3730 self.stage_entries(to_stage, cx)
3731 }
3732
3733 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3734 let to_unstage = self
3735 .cached_status()
3736 .filter(|entry| entry.status.staging().has_staged())
3737 .map(|entry| entry.repo_path)
3738 .collect();
3739 self.unstage_entries(to_unstage, cx)
3740 }
3741
3742 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3743 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3744
3745 self.stash_entries(to_stash, cx)
3746 }
3747
3748 pub fn stash_entries(
3749 &mut self,
3750 entries: Vec<RepoPath>,
3751 cx: &mut Context<Self>,
3752 ) -> Task<anyhow::Result<()>> {
3753 let id = self.id;
3754
3755 cx.spawn(async move |this, cx| {
3756 this.update(cx, |this, _| {
3757 this.send_job(None, move |git_repo, _cx| async move {
3758 match git_repo {
3759 RepositoryState::Local {
3760 backend,
3761 environment,
3762 ..
3763 } => backend.stash_paths(entries, environment).await,
3764 RepositoryState::Remote { project_id, client } => {
3765 client
3766 .request(proto::Stash {
3767 project_id: project_id.0,
3768 repository_id: id.to_proto(),
3769 paths: entries
3770 .into_iter()
3771 .map(|repo_path| repo_path.to_proto())
3772 .collect(),
3773 })
3774 .await
3775 .context("sending stash request")?;
3776 Ok(())
3777 }
3778 }
3779 })
3780 })?
3781 .await??;
3782 Ok(())
3783 })
3784 }
3785
3786 pub fn stash_pop(
3787 &mut self,
3788 index: Option<usize>,
3789 cx: &mut Context<Self>,
3790 ) -> Task<anyhow::Result<()>> {
3791 let id = self.id;
3792 cx.spawn(async move |this, cx| {
3793 this.update(cx, |this, _| {
3794 this.send_job(None, move |git_repo, _cx| async move {
3795 match git_repo {
3796 RepositoryState::Local {
3797 backend,
3798 environment,
3799 ..
3800 } => backend.stash_pop(index, environment).await,
3801 RepositoryState::Remote { project_id, client } => {
3802 client
3803 .request(proto::StashPop {
3804 project_id: project_id.0,
3805 repository_id: id.to_proto(),
3806 stash_index: index.map(|i| i as u64),
3807 })
3808 .await
3809 .context("sending stash pop request")?;
3810 Ok(())
3811 }
3812 }
3813 })
3814 })?
3815 .await??;
3816 Ok(())
3817 })
3818 }
3819
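/// Applies a stash entry without removing it; `index` selects a specific entry, or the backend's default when `None`.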
3820 pub fn stash_apply(
3821 &mut self,
3822 index: Option<usize>,
3823 cx: &mut Context<Self>,
3824 ) -> Task<anyhow::Result<()>> {
3825 let id = self.id;
3826 cx.spawn(async move |this, cx| {
3827 this.update(cx, |this, _| {
3828 this.send_job(None, move |git_repo, _cx| async move {
3829 match git_repo {
3830 RepositoryState::Local {
3831 backend,
3832 environment,
3833 ..
3834 } => backend.stash_apply(index, environment).await,
3835 RepositoryState::Remote { project_id, client } => {
3836 client
3837 .request(proto::StashApply {
3838 project_id: project_id.0,
3839 repository_id: id.to_proto(),
3840 stash_index: index.map(|i| i as u64),
3841 })
3842 .await
3843 .context("sending stash apply request")?;
3844 Ok(())
3845 }
3846 }
3847 })
3848 })?
3849 .await??;
3850 Ok(())
3851 })
3852 }
3853
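/// Removes a stash entry; for local repositories the cached stash list is refreshed and downstream clients are notified.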
3854 pub fn stash_drop(
3855 &mut self,
3856 index: Option<usize>,
3857 cx: &mut Context<Self>,
3858 ) -> oneshot::Receiver<anyhow::Result<()>> {
3859 let id = self.id;
3860 let updates_tx = self
3861 .git_store()
3862 .and_then(|git_store| match &git_store.read(cx).state {
3863 GitStoreState::Local { downstream, .. } => downstream
3864 .as_ref()
3865 .map(|downstream| downstream.updates_tx.clone()),
3866 _ => None,
3867 });
3868 let this = cx.weak_entity();
3869 self.send_job(None, move |git_repo, mut cx| async move {
3870 match git_repo {
3871 RepositoryState::Local {
3872 backend,
3873 environment,
3874 ..
3875 } => {
3876 let result = backend.stash_drop(index, environment).await;
3877 if result.is_ok()
3878 && let Ok(stash_entries) = backend.stash_entries().await
3879 {
3880 let snapshot = this.update(&mut cx, |this, cx| {
3881 this.snapshot.stash_entries = stash_entries;
3882 let snapshot = this.snapshot.clone();
3883 cx.emit(RepositoryEvent::Updated {
3884 full_scan: false,
3885 new_instance: false,
3886 });
3887 snapshot
3888 })?;
3889 if let Some(updates_tx) = updates_tx {
3890 updates_tx
3891 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3892 .ok();
3893 }
3894 }
3895
3896 result
3897 }
3898 RepositoryState::Remote { project_id, client } => {
3899 client
3900 .request(proto::StashDrop {
3901 project_id: project_id.0,
3902 repository_id: id.to_proto(),
3903 stash_index: index.map(|i| i as u64),
3904 })
3905 .await
3906 .context("sending stash pop request")?;
3907 Ok(())
3908 }
3909 }
3910 })
3911 }
3912
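/// Creates a commit with the given message; `name_and_email`, when provided, sets the author, and `options` carries the amend/signoff flags.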
3913 pub fn commit(
3914 &mut self,
3915 message: SharedString,
3916 name_and_email: Option<(SharedString, SharedString)>,
3917 options: CommitOptions,
3918 _cx: &mut App,
3919 ) -> oneshot::Receiver<Result<()>> {
3920 let id = self.id;
3921
3922 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3923 match git_repo {
3924 RepositoryState::Local {
3925 backend,
3926 environment,
3927 ..
3928 } => {
3929 backend
3930 .commit(message, name_and_email, options, environment)
3931 .await
3932 }
3933 RepositoryState::Remote { project_id, client } => {
3934 let (name, email) = name_and_email.unzip();
3935 client
3936 .request(proto::Commit {
3937 project_id: project_id.0,
3938 repository_id: id.to_proto(),
3939 message: String::from(message),
3940 name: name.map(String::from),
3941 email: email.map(String::from),
3942 options: Some(proto::commit::CommitOptions {
3943 amend: options.amend,
3944 signoff: options.signoff,
3945 }),
3946 })
3947 .await
3948 .context("sending commit request")?;
3949
3950 Ok(())
3951 }
3952 }
3953 })
3954 }
3955
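/// Runs a fetch with the given options, using `askpass` to answer any credential prompts (locally, or proxied to the remote peer).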
3956 pub fn fetch(
3957 &mut self,
3958 fetch_options: FetchOptions,
3959 askpass: AskPassDelegate,
3960 _cx: &mut App,
3961 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3962 let askpass_delegates = self.askpass_delegates.clone();
3963 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3964 let id = self.id;
3965
3966 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3967 match git_repo {
3968 RepositoryState::Local {
3969 backend,
3970 environment,
3971 ..
3972 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3973 RepositoryState::Remote { project_id, client } => {
3974 askpass_delegates.lock().insert(askpass_id, askpass);
3975 let _defer = util::defer(|| {
3976 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3977 debug_assert!(askpass_delegate.is_some());
3978 });
3979
3980 let response = client
3981 .request(proto::Fetch {
3982 project_id: project_id.0,
3983 repository_id: id.to_proto(),
3984 askpass_id,
3985 remote: fetch_options.to_proto(),
3986 })
3987 .await
3988 .context("sending fetch request")?;
3989
3990 Ok(RemoteCommandOutput {
3991 stdout: response.stdout,
3992 stderr: response.stderr,
3993 })
3994 }
3995 }
3996 })
3997 }
3998
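/// Pushes `branch` to `remote`, then reloads the head branch so the cached upstream tracking information stays current.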
3999 pub fn push(
4000 &mut self,
4001 branch: SharedString,
4002 remote: SharedString,
4003 options: Option<PushOptions>,
4004 askpass: AskPassDelegate,
4005 cx: &mut Context<Self>,
4006 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4007 let askpass_delegates = self.askpass_delegates.clone();
4008 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4009 let id = self.id;
4010
4011 let args = options
4012 .map(|option| match option {
4013 PushOptions::SetUpstream => " --set-upstream",
4014 PushOptions::Force => " --force-with-lease",
4015 })
4016 .unwrap_or("");
4017
4018 let updates_tx = self
4019 .git_store()
4020 .and_then(|git_store| match &git_store.read(cx).state {
4021 GitStoreState::Local { downstream, .. } => downstream
4022 .as_ref()
4023 .map(|downstream| downstream.updates_tx.clone()),
4024 _ => None,
4025 });
4026
4027 let this = cx.weak_entity();
4028 self.send_job(
4029 Some(format!("git push{} {} {}", args, remote, branch).into()),
4030 move |git_repo, mut cx| async move {
4031 match git_repo {
4032 RepositoryState::Local {
4033 backend,
4034 environment,
4035 ..
4036 } => {
4037 let result = backend
4038 .push(
4039 branch.to_string(),
4040 remote.to_string(),
4041 options,
4042 askpass,
4043 environment.clone(),
4044 cx.clone(),
4045 )
4046 .await;
4047 if result.is_ok() {
4048 let branches = backend.branches().await?;
4049 let branch = branches.into_iter().find(|branch| branch.is_head);
4050 log::info!("head branch after push is {branch:?}");
4051 let snapshot = this.update(&mut cx, |this, cx| {
4052 this.snapshot.branch = branch;
4053 let snapshot = this.snapshot.clone();
4054 cx.emit(RepositoryEvent::Updated {
4055 full_scan: false,
4056 new_instance: false,
4057 });
4058 snapshot
4059 })?;
4060 if let Some(updates_tx) = updates_tx {
4061 updates_tx
4062 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4063 .ok();
4064 }
4065 }
4066 result
4067 }
4068 RepositoryState::Remote { project_id, client } => {
4069 askpass_delegates.lock().insert(askpass_id, askpass);
4070 let _defer = util::defer(|| {
4071 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4072 debug_assert!(askpass_delegate.is_some());
4073 });
4074 let response = client
4075 .request(proto::Push {
4076 project_id: project_id.0,
4077 repository_id: id.to_proto(),
4078 askpass_id,
4079 branch_name: branch.to_string(),
4080 remote_name: remote.to_string(),
4081 options: options.map(|options| match options {
4082 PushOptions::Force => proto::push::PushOptions::Force,
4083 PushOptions::SetUpstream => {
4084 proto::push::PushOptions::SetUpstream
4085 }
4086 }
4087 as i32),
4088 })
4089 .await
4090 .context("sending push request")?;
4091
4092 Ok(RemoteCommandOutput {
4093 stdout: response.stdout,
4094 stderr: response.stderr,
4095 })
4096 }
4097 }
4098 },
4099 )
4100 }
4101
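/// Pulls `branch` from `remote`, using `askpass` to answer any credential prompts.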
4102 pub fn pull(
4103 &mut self,
4104 branch: SharedString,
4105 remote: SharedString,
4106 askpass: AskPassDelegate,
4107 _cx: &mut App,
4108 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4109 let askpass_delegates = self.askpass_delegates.clone();
4110 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4111 let id = self.id;
4112
4113 self.send_job(
4114 Some(format!("git pull {} {}", remote, branch).into()),
4115 move |git_repo, cx| async move {
4116 match git_repo {
4117 RepositoryState::Local {
4118 backend,
4119 environment,
4120 ..
4121 } => {
4122 backend
4123 .pull(
4124 branch.to_string(),
4125 remote.to_string(),
4126 askpass,
4127 environment.clone(),
4128 cx,
4129 )
4130 .await
4131 }
4132 RepositoryState::Remote { project_id, client } => {
4133 askpass_delegates.lock().insert(askpass_id, askpass);
4134 let _defer = util::defer(|| {
4135 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4136 debug_assert!(askpass_delegate.is_some());
4137 });
4138 let response = client
4139 .request(proto::Pull {
4140 project_id: project_id.0,
4141 repository_id: id.to_proto(),
4142 askpass_id,
4143 branch_name: branch.to_string(),
4144 remote_name: remote.to_string(),
4145 })
4146 .await
4147 .context("sending pull request")?;
4148
4149 Ok(RemoteCommandOutput {
4150 stdout: response.stdout,
4151 stderr: response.stderr,
4152 })
4153 }
4154 }
4155 },
4156 )
4157 }
4158
4159 fn spawn_set_index_text_job(
4160 &mut self,
4161 path: RepoPath,
4162 content: Option<String>,
4163 hunk_staging_operation_count: Option<usize>,
4164 cx: &mut Context<Self>,
4165 ) -> oneshot::Receiver<anyhow::Result<()>> {
4166 let id = self.id;
4167 let this = cx.weak_entity();
4168 let git_store = self.git_store.clone();
4169 self.send_keyed_job(
4170 Some(GitJobKey::WriteIndex(path.clone())),
4171 None,
4172 move |git_repo, mut cx| async move {
4173 log::debug!(
4174 "start updating index text for buffer {}",
4175 path.as_unix_str()
4176 );
4177 match git_repo {
4178 RepositoryState::Local {
4179 backend,
4180 environment,
4181 ..
4182 } => {
4183 backend
4184 .set_index_text(path.clone(), content, environment.clone())
4185 .await?;
4186 }
4187 RepositoryState::Remote { project_id, client } => {
4188 client
4189 .request(proto::SetIndexText {
4190 project_id: project_id.0,
4191 repository_id: id.to_proto(),
4192 path: path.to_proto(),
4193 text: content,
4194 })
4195 .await?;
4196 }
4197 }
4198 log::debug!(
4199 "finish updating index text for buffer {}",
4200 path.as_unix_str()
4201 );
4202
4203 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4204 let project_path = this
4205 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4206 .ok()
4207 .flatten();
4208 git_store.update(&mut cx, |git_store, cx| {
4209 let buffer_id = git_store
4210 .buffer_store
4211 .read(cx)
4212 .get_by_path(&project_path?)?
4213 .read(cx)
4214 .remote_id();
4215 let diff_state = git_store.diffs.get(&buffer_id)?;
4216 diff_state.update(cx, |diff_state, _| {
4217 diff_state.hunk_staging_operation_count_as_of_write =
4218 hunk_staging_operation_count;
4219 });
4220 Some(())
4221 })?;
4222 }
4223 Ok(())
4224 },
4225 )
4226 }
4227
4228 pub fn get_remotes(
4229 &mut self,
4230 branch_name: Option<String>,
4231 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4232 let id = self.id;
4233 self.send_job(None, move |repo, _cx| async move {
4234 match repo {
4235 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4236 RepositoryState::Remote { project_id, client } => {
4237 let response = client
4238 .request(proto::GetRemotes {
4239 project_id: project_id.0,
4240 repository_id: id.to_proto(),
4241 branch_name,
4242 })
4243 .await?;
4244
4245 let remotes = response
4246 .remotes
4247 .into_iter()
4248 .map(|remote| git::repository::Remote {
4249 name: remote.name.into(),
4250 })
4251 .collect();
4252
4253 Ok(remotes)
4254 }
4255 }
4256 })
4257 }
4258
4259 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4260 let id = self.id;
4261 self.send_job(None, move |repo, _| async move {
4262 match repo {
4263 RepositoryState::Local { backend, .. } => backend.branches().await,
4264 RepositoryState::Remote { project_id, client } => {
4265 let response = client
4266 .request(proto::GitGetBranches {
4267 project_id: project_id.0,
4268 repository_id: id.to_proto(),
4269 })
4270 .await?;
4271
4272 let branches = response
4273 .branches
4274 .into_iter()
4275 .map(|branch| proto_to_branch(&branch))
4276 .collect();
4277
4278 Ok(branches)
4279 }
4280 }
4281 })
4282 }
4283
4284 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4285 let id = self.id;
4286 self.send_job(None, move |repo, _| async move {
4287 match repo {
4288 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4289 RepositoryState::Remote { project_id, client } => {
4290 let response = client
4291 .request(proto::GetDefaultBranch {
4292 project_id: project_id.0,
4293 repository_id: id.to_proto(),
4294 })
4295 .await?;
4296
4297 anyhow::Ok(response.branch.map(SharedString::from))
4298 }
4299 }
4300 })
4301 }
4302
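/// Returns a textual diff for the whole repository: HEAD vs. index or HEAD vs. working tree, depending on `diff_type`.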
4303 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4304 let id = self.id;
4305 self.send_job(None, move |repo, _cx| async move {
4306 match repo {
4307 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4308 RepositoryState::Remote { project_id, client } => {
4309 let response = client
4310 .request(proto::GitDiff {
4311 project_id: project_id.0,
4312 repository_id: id.to_proto(),
4313 diff_type: match diff_type {
4314 DiffType::HeadToIndex => {
4315 proto::git_diff::DiffType::HeadToIndex.into()
4316 }
4317 DiffType::HeadToWorktree => {
4318 proto::git_diff::DiffType::HeadToWorktree.into()
4319 }
4320 },
4321 })
4322 .await?;
4323
4324 Ok(response.diff)
4325 }
4326 }
4327 })
4328 }
4329
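/// Creates a branch named `branch_name` and switches to it, like `git switch -c`.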
4330 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4331 let id = self.id;
4332 self.send_job(
4333 Some(format!("git switch -c {branch_name}").into()),
4334 move |repo, _cx| async move {
4335 match repo {
4336 RepositoryState::Local { backend, .. } => {
4337 backend.create_branch(branch_name).await
4338 }
4339 RepositoryState::Remote { project_id, client } => {
4340 client
4341 .request(proto::GitCreateBranch {
4342 project_id: project_id.0,
4343 repository_id: id.to_proto(),
4344 branch_name,
4345 })
4346 .await?;
4347
4348 Ok(())
4349 }
4350 }
4351 },
4352 )
4353 }
4354
4355 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4356 let id = self.id;
4357 self.send_job(
4358 Some(format!("git switch {branch_name}").into()),
4359 move |repo, _cx| async move {
4360 match repo {
4361 RepositoryState::Local { backend, .. } => {
4362 backend.change_branch(branch_name).await
4363 }
4364 RepositoryState::Remote { project_id, client } => {
4365 client
4366 .request(proto::GitChangeBranch {
4367 project_id: project_id.0,
4368 repository_id: id.to_proto(),
4369 branch_name,
4370 })
4371 .await?;
4372
4373 Ok(())
4374 }
4375 }
4376 },
4377 )
4378 }
4379
4380 pub fn rename_branch(
4381 &mut self,
4382 branch: String,
4383 new_name: String,
4384 ) -> oneshot::Receiver<Result<()>> {
4385 let id = self.id;
4386 self.send_job(
4387 Some(format!("git branch -m {branch} {new_name}").into()),
4388 move |repo, _cx| async move {
4389 match repo {
4390 RepositoryState::Local { backend, .. } => {
4391 backend.rename_branch(branch, new_name).await
4392 }
4393 RepositoryState::Remote { project_id, client } => {
4394 client
4395 .request(proto::GitRenameBranch {
4396 project_id: project_id.0,
4397 repository_id: id.to_proto(),
4398 branch,
4399 new_name,
4400 })
4401 .await?;
4402
4403 Ok(())
4404 }
4405 }
4406 },
4407 )
4408 }
4409
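/// Asks the backend which branches the current commit has already been pushed to.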
4410 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4411 let id = self.id;
4412 self.send_job(None, move |repo, _cx| async move {
4413 match repo {
4414 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4415 RepositoryState::Remote { project_id, client } => {
4416 let response = client
4417 .request(proto::CheckForPushedCommits {
4418 project_id: project_id.0,
4419 repository_id: id.to_proto(),
4420 })
4421 .await?;
4422
4423 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4424
4425 Ok(branches)
4426 }
4427 }
4428 })
4429 }
4430
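/// Captures a checkpoint of the repository's current state; not yet implemented for remote repositories.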
4431 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4432 self.send_job(None, |repo, _cx| async move {
4433 match repo {
4434 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4435 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4436 }
4437 })
4438 }
4439
4440 pub fn restore_checkpoint(
4441 &mut self,
4442 checkpoint: GitRepositoryCheckpoint,
4443 ) -> oneshot::Receiver<Result<()>> {
4444 self.send_job(None, move |repo, _cx| async move {
4445 match repo {
4446 RepositoryState::Local { backend, .. } => {
4447 backend.restore_checkpoint(checkpoint).await
4448 }
4449 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4450 }
4451 })
4452 }
4453
4454 pub(crate) fn apply_remote_update(
4455 &mut self,
4456 update: proto::UpdateRepository,
4457 is_new: bool,
4458 cx: &mut Context<Self>,
4459 ) -> Result<()> {
4460 let conflicted_paths = TreeSet::from_ordered_entries(
4461 update
4462 .current_merge_conflicts
4463 .into_iter()
4464 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4465 );
4466 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4467 self.snapshot.head_commit = update
4468 .head_commit_details
4469 .as_ref()
4470 .map(proto_to_commit_details);
4471
4472 self.snapshot.merge.conflicted_paths = conflicted_paths;
4473 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4474 self.snapshot.stash_entries = GitStash {
4475 entries: update
4476 .stash_entries
4477 .iter()
4478 .filter_map(|entry| proto_to_stash(entry).ok())
4479 .collect(),
4480 };
4481
4482 let edits = update
4483 .removed_statuses
4484 .into_iter()
4485 .filter_map(|path| {
4486 Some(sum_tree::Edit::Remove(PathKey(
4487 RelPath::from_proto(&path).log_err()?,
4488 )))
4489 })
4490 .chain(
4491 update
4492 .updated_statuses
4493 .into_iter()
4494 .filter_map(|updated_status| {
4495 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4496 }),
4497 )
4498 .collect::<Vec<_>>();
4499 self.snapshot.statuses_by_path.edit(edits, ());
4500 if update.is_last_update {
4501 self.snapshot.scan_id = update.scan_id;
4502 }
4503 cx.emit(RepositoryEvent::Updated {
4504 full_scan: true,
4505 new_instance: is_new,
4506 });
4507 Ok(())
4508 }
4509
4510 pub fn compare_checkpoints(
4511 &mut self,
4512 left: GitRepositoryCheckpoint,
4513 right: GitRepositoryCheckpoint,
4514 ) -> oneshot::Receiver<Result<bool>> {
4515 self.send_job(None, move |repo, _cx| async move {
4516 match repo {
4517 RepositoryState::Local { backend, .. } => {
4518 backend.compare_checkpoints(left, right).await
4519 }
4520 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4521 }
4522 })
4523 }
4524
4525 pub fn diff_checkpoints(
4526 &mut self,
4527 base_checkpoint: GitRepositoryCheckpoint,
4528 target_checkpoint: GitRepositoryCheckpoint,
4529 ) -> oneshot::Receiver<Result<String>> {
4530 self.send_job(None, move |repo, _cx| async move {
4531 match repo {
4532 RepositoryState::Local { backend, .. } => {
4533 backend
4534 .diff_checkpoints(base_checkpoint, target_checkpoint)
4535 .await
4536 }
4537 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4538 }
4539 })
4540 }
4541
4542 fn schedule_scan(
4543 &mut self,
4544 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4545 cx: &mut Context<Self>,
4546 ) {
4547 let this = cx.weak_entity();
4548 let _ = self.send_keyed_job(
4549 Some(GitJobKey::ReloadGitState),
4550 None,
4551 |state, mut cx| async move {
4552 log::debug!("run scheduled git status scan");
4553
4554 let Some(this) = this.upgrade() else {
4555 return Ok(());
4556 };
4557 let RepositoryState::Local { backend, .. } = state else {
4558 bail!("not a local repository")
4559 };
4560 let (snapshot, events) = this
4561 .update(&mut cx, |this, _| {
4562 this.paths_needing_status_update.clear();
4563 compute_snapshot(
4564 this.id,
4565 this.work_directory_abs_path.clone(),
4566 this.snapshot.clone(),
4567 backend.clone(),
4568 )
4569 })?
4570 .await?;
4571 this.update(&mut cx, |this, cx| {
4572 this.snapshot = snapshot.clone();
4573 for event in events {
4574 cx.emit(event);
4575 }
4576 })?;
4577 if let Some(updates_tx) = updates_tx {
4578 updates_tx
4579 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4580 .ok();
4581 }
4582 Ok(())
4583 },
4584 );
4585 }
4586
4587 fn spawn_local_git_worker(
4588 work_directory_abs_path: Arc<Path>,
4589 dot_git_abs_path: Arc<Path>,
4590 _repository_dir_abs_path: Arc<Path>,
4591 _common_dir_abs_path: Arc<Path>,
4592 project_environment: WeakEntity<ProjectEnvironment>,
4593 fs: Arc<dyn Fs>,
4594 cx: &mut Context<Self>,
4595 ) -> mpsc::UnboundedSender<GitJob> {
4596 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4597
4598 cx.spawn(async move |_, cx| {
4599 let environment = project_environment
4600 .upgrade()
4601 .context("missing project environment")?
4602 .update(cx, |project_environment, cx| {
4603 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4604 })?
4605 .await
4606 .unwrap_or_else(|| {
4607 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4608 HashMap::default()
4609 });
4610 let backend = cx
4611 .background_spawn(async move {
4612 fs.open_repo(&dot_git_abs_path)
4613 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4614 })
4615 .await?;
4616
4617 if let Some(git_hosting_provider_registry) =
4618 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4619 {
4620 git_hosting_providers::register_additional_providers(
4621 git_hosting_provider_registry,
4622 backend.clone(),
4623 );
4624 }
4625
4626 let state = RepositoryState::Local {
4627 backend,
4628 environment: Arc::new(environment),
4629 };
4630 let mut jobs = VecDeque::new();
4631 loop {
4632 while let Ok(Some(next_job)) = job_rx.try_next() {
4633 jobs.push_back(next_job);
4634 }
4635
4636 if let Some(job) = jobs.pop_front() {
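// If a newer job with the same key is still waiting in the queue, skip this one;
// only the most recently enqueued keyed job needs to run.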
4637 if let Some(current_key) = &job.key
4638 && jobs
4639 .iter()
4640 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4641 {
4642 continue;
4643 }
4644 (job.job)(state.clone(), cx).await;
4645 } else if let Some(job) = job_rx.next().await {
4646 jobs.push_back(job);
4647 } else {
4648 break;
4649 }
4650 }
4651 anyhow::Ok(())
4652 })
4653 .detach_and_log_err(cx);
4654
4655 job_tx
4656 }
4657
4658 fn spawn_remote_git_worker(
4659 project_id: ProjectId,
4660 client: AnyProtoClient,
4661 cx: &mut Context<Self>,
4662 ) -> mpsc::UnboundedSender<GitJob> {
4663 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4664
4665 cx.spawn(async move |_, cx| {
4666 let state = RepositoryState::Remote { project_id, client };
4667 let mut jobs = VecDeque::new();
4668 loop {
4669 while let Ok(Some(next_job)) = job_rx.try_next() {
4670 jobs.push_back(next_job);
4671 }
4672
4673 if let Some(job) = jobs.pop_front() {
4674 if let Some(current_key) = &job.key
4675 && jobs
4676 .iter()
4677 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4678 {
4679 continue;
4680 }
4681 (job.job)(state.clone(), cx).await;
4682 } else if let Some(job) = job_rx.next().await {
4683 jobs.push_back(job);
4684 } else {
4685 break;
4686 }
4687 }
4688 anyhow::Ok(())
4689 })
4690 .detach_and_log_err(cx);
4691
4692 job_tx
4693 }
4694
4695 fn load_staged_text(
4696 &mut self,
4697 buffer_id: BufferId,
4698 repo_path: RepoPath,
4699 cx: &App,
4700 ) -> Task<Result<Option<String>>> {
4701 let rx = self.send_job(None, move |state, _| async move {
4702 match state {
4703 RepositoryState::Local { backend, .. } => {
4704 anyhow::Ok(backend.load_index_text(repo_path).await)
4705 }
4706 RepositoryState::Remote { project_id, client } => {
4707 let response = client
4708 .request(proto::OpenUnstagedDiff {
4709 project_id: project_id.to_proto(),
4710 buffer_id: buffer_id.to_proto(),
4711 })
4712 .await?;
4713 Ok(response.staged_text)
4714 }
4715 }
4716 });
4717 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4718 }
4719
4720 fn load_committed_text(
4721 &mut self,
4722 buffer_id: BufferId,
4723 repo_path: RepoPath,
4724 cx: &App,
4725 ) -> Task<Result<DiffBasesChange>> {
4726 let rx = self.send_job(None, move |state, _| async move {
4727 match state {
4728 RepositoryState::Local { backend, .. } => {
4729 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4730 let staged_text = backend.load_index_text(repo_path).await;
4731 let diff_bases_change = if committed_text == staged_text {
4732 DiffBasesChange::SetBoth(committed_text)
4733 } else {
4734 DiffBasesChange::SetEach {
4735 index: staged_text,
4736 head: committed_text,
4737 }
4738 };
4739 anyhow::Ok(diff_bases_change)
4740 }
4741 RepositoryState::Remote { project_id, client } => {
4742 use proto::open_uncommitted_diff_response::Mode;
4743
4744 let response = client
4745 .request(proto::OpenUncommittedDiff {
4746 project_id: project_id.to_proto(),
4747 buffer_id: buffer_id.to_proto(),
4748 })
4749 .await?;
4750 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4751 let bases = match mode {
4752 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4753 Mode::IndexAndHead => DiffBasesChange::SetEach {
4754 head: response.committed_text,
4755 index: response.staged_text,
4756 },
4757 };
4758 Ok(bases)
4759 }
4760 }
4761 });
4762
4763 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4764 }
4765
4766 fn paths_changed(
4767 &mut self,
4768 paths: Vec<RepoPath>,
4769 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4770 cx: &mut Context<Self>,
4771 ) {
4772 self.paths_needing_status_update.extend(paths);
4773
4774 let this = cx.weak_entity();
4775 let _ = self.send_keyed_job(
4776 Some(GitJobKey::RefreshStatuses),
4777 None,
4778 |state, mut cx| async move {
4779 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4780 (
4781 this.snapshot.clone(),
4782 mem::take(&mut this.paths_needing_status_update),
4783 )
4784 })?;
4785 let RepositoryState::Local { backend, .. } = state else {
4786 bail!("not a local repository")
4787 };
4788
4789 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4790 if paths.is_empty() {
4791 return Ok(());
4792 }
4793 let statuses = backend.status(&paths).await?;
4794 let stash_entries = backend.stash_entries().await?;
4795
4796 let changed_path_statuses = cx
4797 .background_spawn(async move {
4798 let mut changed_path_statuses = Vec::new();
4799 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4800 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4801
4802 for (repo_path, status) in &*statuses.entries {
4803 changed_paths.remove(repo_path);
4804 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4805 && cursor.item().is_some_and(|entry| entry.status == *status)
4806 {
4807 continue;
4808 }
4809
4810 changed_path_statuses.push(Edit::Insert(StatusEntry {
4811 repo_path: repo_path.clone(),
4812 status: *status,
4813 }));
4814 }
4815 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4816 for path in changed_paths.into_iter() {
4817 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4818 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4819 }
4820 }
4821 changed_path_statuses
4822 })
4823 .await;
4824
4825 this.update(&mut cx, |this, cx| {
4826 let needs_update = !changed_path_statuses.is_empty()
4827 || this.snapshot.stash_entries != stash_entries;
4828 this.snapshot.stash_entries = stash_entries;
4829 if !changed_path_statuses.is_empty() {
4830 this.snapshot
4831 .statuses_by_path
4832 .edit(changed_path_statuses, ());
4833 this.snapshot.scan_id += 1;
4834 }
4835
4836 if needs_update && let Some(updates_tx) = updates_tx {
4837 updates_tx
4838 .unbounded_send(DownstreamUpdate::UpdateRepository(
4839 this.snapshot.clone(),
4840 ))
4841 .ok();
4842 }
4843 cx.emit(RepositoryEvent::Updated {
4844 full_scan: false,
4845 new_instance: false,
4846 });
4847 })
4848 },
4849 );
4850 }
4851
4852 /// Returns the currently running git command, if any, along with when it started.
4853 pub fn current_job(&self) -> Option<JobInfo> {
4854 self.active_jobs.values().next().cloned()
4855 }
4856
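/// Enqueues a no-op job; the returned receiver resolves once every job enqueued before it has finished.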
4857 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4858 self.send_job(None, |_, _| async {})
4859 }
4860}
4861
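/// Builds a permalink into the upstream repository for a file inside the Cargo registry source cache, using the `.cargo_vcs_info.json` and `Cargo.toml` that are published alongside the crate.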
4862fn get_permalink_in_rust_registry_src(
4863 provider_registry: Arc<GitHostingProviderRegistry>,
4864 path: PathBuf,
4865 selection: Range<u32>,
4866) -> Result<url::Url> {
4867 #[derive(Deserialize)]
4868 struct CargoVcsGit {
4869 sha1: String,
4870 }
4871
4872 #[derive(Deserialize)]
4873 struct CargoVcsInfo {
4874 git: CargoVcsGit,
4875 path_in_vcs: String,
4876 }
4877
4878 #[derive(Deserialize)]
4879 struct CargoPackage {
4880 repository: String,
4881 }
4882
4883 #[derive(Deserialize)]
4884 struct CargoToml {
4885 package: CargoPackage,
4886 }
4887
4888 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4889 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4890 Some((dir, json))
4891 }) else {
4892 bail!("No .cargo_vcs_info.json found in parent directories")
4893 };
4894 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4895 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4896 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4897 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4898 .context("parsing package.repository field of manifest")?;
4899 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4900 let permalink = provider.build_permalink(
4901 remote,
4902 BuildPermalinkParams {
4903 sha: &cargo_vcs_info.git.sha1,
4904 path: &path.to_string_lossy(),
4905 selection: Some(selection),
4906 },
4907 );
4908 Ok(permalink)
4909}
4910
4911fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4912 let Some(blame) = blame else {
4913 return proto::BlameBufferResponse {
4914 blame_response: None,
4915 };
4916 };
4917
4918 let entries = blame
4919 .entries
4920 .into_iter()
4921 .map(|entry| proto::BlameEntry {
4922 sha: entry.sha.as_bytes().into(),
4923 start_line: entry.range.start,
4924 end_line: entry.range.end,
4925 original_line_number: entry.original_line_number,
4926 author: entry.author,
4927 author_mail: entry.author_mail,
4928 author_time: entry.author_time,
4929 author_tz: entry.author_tz,
4930 committer: entry.committer_name,
4931 committer_mail: entry.committer_email,
4932 committer_time: entry.committer_time,
4933 committer_tz: entry.committer_tz,
4934 summary: entry.summary,
4935 previous: entry.previous,
4936 filename: entry.filename,
4937 })
4938 .collect::<Vec<_>>();
4939
4940 let messages = blame
4941 .messages
4942 .into_iter()
4943 .map(|(oid, message)| proto::CommitMessage {
4944 oid: oid.as_bytes().into(),
4945 message,
4946 })
4947 .collect::<Vec<_>>();
4948
4949 proto::BlameBufferResponse {
4950 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4951 entries,
4952 messages,
4953 remote_url: blame.remote_url,
4954 }),
4955 }
4956}
4957
4958fn deserialize_blame_buffer_response(
4959 response: proto::BlameBufferResponse,
4960) -> Option<git::blame::Blame> {
4961 let response = response.blame_response?;
4962 let entries = response
4963 .entries
4964 .into_iter()
4965 .filter_map(|entry| {
4966 Some(git::blame::BlameEntry {
4967 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4968 range: entry.start_line..entry.end_line,
4969 original_line_number: entry.original_line_number,
4970 committer_name: entry.committer,
4971 committer_time: entry.committer_time,
4972 committer_tz: entry.committer_tz,
4973 committer_email: entry.committer_mail,
4974 author: entry.author,
4975 author_mail: entry.author_mail,
4976 author_time: entry.author_time,
4977 author_tz: entry.author_tz,
4978 summary: entry.summary,
4979 previous: entry.previous,
4980 filename: entry.filename,
4981 })
4982 })
4983 .collect::<Vec<_>>();
4984
4985 let messages = response
4986 .messages
4987 .into_iter()
4988 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4989 .collect::<HashMap<_, _>>();
4990
4991 Some(Blame {
4992 entries,
4993 messages,
4994 remote_url: response.remote_url,
4995 })
4996}
4997
4998fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4999 proto::Branch {
5000 is_head: branch.is_head,
5001 ref_name: branch.ref_name.to_string(),
5002 unix_timestamp: branch
5003 .most_recent_commit
5004 .as_ref()
5005 .map(|commit| commit.commit_timestamp as u64),
5006 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5007 ref_name: upstream.ref_name.to_string(),
5008 tracking: upstream
5009 .tracking
5010 .status()
5011 .map(|upstream| proto::UpstreamTracking {
5012 ahead: upstream.ahead as u64,
5013 behind: upstream.behind as u64,
5014 }),
5015 }),
5016 most_recent_commit: branch
5017 .most_recent_commit
5018 .as_ref()
5019 .map(|commit| proto::CommitSummary {
5020 sha: commit.sha.to_string(),
5021 subject: commit.subject.to_string(),
5022 commit_timestamp: commit.commit_timestamp,
5023 author_name: commit.author_name.to_string(),
5024 }),
5025 }
5026}
5027
5028fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5029 git::repository::Branch {
5030 is_head: proto.is_head,
5031 ref_name: proto.ref_name.clone().into(),
5032 upstream: proto
5033 .upstream
5034 .as_ref()
5035 .map(|upstream| git::repository::Upstream {
5036 ref_name: upstream.ref_name.to_string().into(),
5037 tracking: upstream
5038 .tracking
5039 .as_ref()
5040 .map(|tracking| {
5041 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5042 ahead: tracking.ahead as u32,
5043 behind: tracking.behind as u32,
5044 })
5045 })
5046 .unwrap_or(git::repository::UpstreamTracking::Gone),
5047 }),
5048 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5049 git::repository::CommitSummary {
5050 sha: commit.sha.to_string().into(),
5051 subject: commit.subject.to_string().into(),
5052 commit_timestamp: commit.commit_timestamp,
5053 author_name: commit.author_name.to_string().into(),
5054 has_parent: true,
5055 }
5056 }),
5057 }
5058}
5059
5060fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5061 proto::GitCommitDetails {
5062 sha: commit.sha.to_string(),
5063 message: commit.message.to_string(),
5064 commit_timestamp: commit.commit_timestamp,
5065 author_email: commit.author_email.to_string(),
5066 author_name: commit.author_name.to_string(),
5067 }
5068}
5069
5070fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5071 CommitDetails {
5072 sha: proto.sha.clone().into(),
5073 message: proto.message.clone().into(),
5074 commit_timestamp: proto.commit_timestamp,
5075 author_email: proto.author_email.clone().into(),
5076 author_name: proto.author_name.clone().into(),
5077 }
5078}
5079
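/// Queries the backend for branches, statuses, stash entries, and merge state, and builds a fresh `RepositorySnapshot`, along with the events implied by how it differs from `prev_snapshot`.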
5080async fn compute_snapshot(
5081 id: RepositoryId,
5082 work_directory_abs_path: Arc<Path>,
5083 prev_snapshot: RepositorySnapshot,
5084 backend: Arc<dyn GitRepository>,
5085) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5086 let mut events = Vec::new();
5087 let branches = backend.branches().await?;
5088 let branch = branches.into_iter().find(|branch| branch.is_head);
5089 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5090 let stash_entries = backend.stash_entries().await?;
5091 let statuses_by_path = SumTree::from_iter(
5092 statuses
5093 .entries
5094 .iter()
5095 .map(|(repo_path, status)| StatusEntry {
5096 repo_path: repo_path.clone(),
5097 status: *status,
5098 }),
5099 (),
5100 );
5101 let (merge_details, merge_heads_changed) =
5102 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5103 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5104
5105 if merge_heads_changed
5106 || branch != prev_snapshot.branch
5107 || statuses_by_path != prev_snapshot.statuses_by_path
5108 {
5109 events.push(RepositoryEvent::Updated {
5110 full_scan: true,
5111 new_instance: false,
5112 });
5113 }
5114
5115 // Cache merge conflict paths so that staging or unstaging files doesn't change them;
5116 // they only change when the merge heads change (at commit time, etc.).
5117 if merge_heads_changed {
5118 events.push(RepositoryEvent::MergeHeadsChanged);
5119 }
5120
5121 // The head commit is useful when `branch` is `None`, e.g. in a detached HEAD state.
5122 let head_commit = match backend.head_sha().await {
5123 Some(head_sha) => backend.show(head_sha).await.log_err(),
5124 None => None,
5125 };
5126
5127 // Used by edit prediction data collection
5128 let remote_origin_url = backend.remote_url("origin");
5129 let remote_upstream_url = backend.remote_url("upstream");
5130
5131 let snapshot = RepositorySnapshot {
5132 id,
5133 statuses_by_path,
5134 work_directory_abs_path,
5135 path_style: prev_snapshot.path_style,
5136 scan_id: prev_snapshot.scan_id + 1,
5137 branch,
5138 head_commit,
5139 merge: merge_details,
5140 remote_origin_url,
5141 remote_upstream_url,
5142 stash_entries,
5143 };
5144
5145 Ok((snapshot, events))
5146}
5147
5148fn status_from_proto(
5149 simple_status: i32,
5150 status: Option<proto::GitFileStatus>,
5151) -> anyhow::Result<FileStatus> {
5152 use proto::git_file_status::Variant;
5153
5154 let Some(variant) = status.and_then(|status| status.variant) else {
5155 let code = proto::GitStatus::from_i32(simple_status)
5156 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5157 let result = match code {
5158 proto::GitStatus::Added => TrackedStatus {
5159 worktree_status: StatusCode::Added,
5160 index_status: StatusCode::Unmodified,
5161 }
5162 .into(),
5163 proto::GitStatus::Modified => TrackedStatus {
5164 worktree_status: StatusCode::Modified,
5165 index_status: StatusCode::Unmodified,
5166 }
5167 .into(),
5168 proto::GitStatus::Conflict => UnmergedStatus {
5169 first_head: UnmergedStatusCode::Updated,
5170 second_head: UnmergedStatusCode::Updated,
5171 }
5172 .into(),
5173 proto::GitStatus::Deleted => TrackedStatus {
5174 worktree_status: StatusCode::Deleted,
5175 index_status: StatusCode::Unmodified,
5176 }
5177 .into(),
5178 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5179 };
5180 return Ok(result);
5181 };
5182
5183 let result = match variant {
5184 Variant::Untracked(_) => FileStatus::Untracked,
5185 Variant::Ignored(_) => FileStatus::Ignored,
5186 Variant::Unmerged(unmerged) => {
5187 let [first_head, second_head] =
5188 [unmerged.first_head, unmerged.second_head].map(|head| {
5189 let code = proto::GitStatus::from_i32(head)
5190 .with_context(|| format!("Invalid git status code: {head}"))?;
5191 let result = match code {
5192 proto::GitStatus::Added => UnmergedStatusCode::Added,
5193 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5194 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5195 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5196 };
5197 Ok(result)
5198 });
5199 let [first_head, second_head] = [first_head?, second_head?];
5200 UnmergedStatus {
5201 first_head,
5202 second_head,
5203 }
5204 .into()
5205 }
5206 Variant::Tracked(tracked) => {
5207 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5208 .map(|status| {
5209 let code = proto::GitStatus::from_i32(status)
5210 .with_context(|| format!("Invalid git status code: {status}"))?;
5211 let result = match code {
5212 proto::GitStatus::Modified => StatusCode::Modified,
5213 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5214 proto::GitStatus::Added => StatusCode::Added,
5215 proto::GitStatus::Deleted => StatusCode::Deleted,
5216 proto::GitStatus::Renamed => StatusCode::Renamed,
5217 proto::GitStatus::Copied => StatusCode::Copied,
5218 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5219 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5220 };
5221 Ok(result)
5222 });
5223 let [index_status, worktree_status] = [index_status?, worktree_status?];
5224 TrackedStatus {
5225 index_status,
5226 worktree_status,
5227 }
5228 .into()
5229 }
5230 };
5231 Ok(result)
5232}
5233
5234fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5235 use proto::git_file_status::{Tracked, Unmerged, Variant};
5236
5237 let variant = match status {
5238 FileStatus::Untracked => Variant::Untracked(Default::default()),
5239 FileStatus::Ignored => Variant::Ignored(Default::default()),
5240 FileStatus::Unmerged(UnmergedStatus {
5241 first_head,
5242 second_head,
5243 }) => Variant::Unmerged(Unmerged {
5244 first_head: unmerged_status_to_proto(first_head),
5245 second_head: unmerged_status_to_proto(second_head),
5246 }),
5247 FileStatus::Tracked(TrackedStatus {
5248 index_status,
5249 worktree_status,
5250 }) => Variant::Tracked(Tracked {
5251 index_status: tracked_status_to_proto(index_status),
5252 worktree_status: tracked_status_to_proto(worktree_status),
5253 }),
5254 };
5255 proto::GitFileStatus {
5256 variant: Some(variant),
5257 }
5258}
5259
5260fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5261 match code {
5262 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5263 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5264 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5265 }
5266}
5267
5268fn tracked_status_to_proto(code: StatusCode) -> i32 {
5269 match code {
5270 StatusCode::Added => proto::GitStatus::Added as _,
5271 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5272 StatusCode::Modified => proto::GitStatus::Modified as _,
5273 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5274 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5275 StatusCode::Copied => proto::GitStatus::Copied as _,
5276 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5277 }
5278}