1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
109 /// These operation counts are used to ensure that head and index text
110 /// values read from the git repository are up-to-date with any hunk staging
111 /// operations that have been performed on the BufferDiff.
112 ///
113 /// The operation count is incremented immediately when the user initiates a
114 /// hunk stage/unstage operation. Then, upon finishing writing the new index
115 /// text do disk, the `operation count as of write` is updated to reflect
116 /// the operation count that prompted the write.
117 hunk_staging_operation_count: usize,
118 hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug, PartialEq, Eq)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305 PathsChanged,
306}
307
308#[derive(Clone, Debug)]
309pub struct JobsUpdated;
310
311#[derive(Debug)]
312pub enum GitStoreEvent {
313 ActiveRepositoryChanged(Option<RepositoryId>),
314 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
315 RepositoryAdded(RepositoryId),
316 RepositoryRemoved(RepositoryId),
317 IndexWriteError(anyhow::Error),
318 JobsUpdated,
319 ConflictsUpdated,
320}
321
322impl EventEmitter<RepositoryEvent> for Repository {}
323impl EventEmitter<JobsUpdated> for Repository {}
324impl EventEmitter<GitStoreEvent> for GitStore {}
325
326pub struct GitJob {
327 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
328 key: Option<GitJobKey>,
329}
330
331#[derive(PartialEq, Eq)]
332enum GitJobKey {
333 WriteIndex(RepoPath),
334 ReloadBufferDiffBases,
335 RefreshStatuses,
336 ReloadGitState,
337}
338
339impl GitStore {
340 pub fn local(
341 worktree_store: &Entity<WorktreeStore>,
342 buffer_store: Entity<BufferStore>,
343 environment: Entity<ProjectEnvironment>,
344 fs: Arc<dyn Fs>,
345 cx: &mut Context<Self>,
346 ) -> Self {
347 Self::new(
348 worktree_store.clone(),
349 buffer_store,
350 GitStoreState::Local {
351 next_repository_id: Arc::new(AtomicU64::new(1)),
352 downstream: None,
353 project_environment: environment,
354 fs,
355 },
356 cx,
357 )
358 }
359
360 pub fn remote(
361 worktree_store: &Entity<WorktreeStore>,
362 buffer_store: Entity<BufferStore>,
363 upstream_client: AnyProtoClient,
364 project_id: u64,
365 cx: &mut Context<Self>,
366 ) -> Self {
367 Self::new(
368 worktree_store.clone(),
369 buffer_store,
370 GitStoreState::Remote {
371 upstream_client,
372 upstream_project_id: project_id,
373 downstream: None,
374 },
375 cx,
376 )
377 }
378
379 fn new(
380 worktree_store: Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 state: GitStoreState,
383 cx: &mut Context<Self>,
384 ) -> Self {
385 let _subscriptions = vec![
386 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
387 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
388 ];
389
390 GitStore {
391 state,
392 buffer_store,
393 worktree_store,
394 repositories: HashMap::default(),
395 active_repo_id: None,
396 _subscriptions,
397 loading_diffs: HashMap::default(),
398 shared_diffs: HashMap::default(),
399 diffs: HashMap::default(),
400 }
401 }
402
403 pub fn init(client: &AnyProtoClient) {
404 client.add_entity_request_handler(Self::handle_get_remotes);
405 client.add_entity_request_handler(Self::handle_get_branches);
406 client.add_entity_request_handler(Self::handle_get_default_branch);
407 client.add_entity_request_handler(Self::handle_change_branch);
408 client.add_entity_request_handler(Self::handle_create_branch);
409 client.add_entity_request_handler(Self::handle_rename_branch);
410 client.add_entity_request_handler(Self::handle_git_init);
411 client.add_entity_request_handler(Self::handle_push);
412 client.add_entity_request_handler(Self::handle_pull);
413 client.add_entity_request_handler(Self::handle_fetch);
414 client.add_entity_request_handler(Self::handle_stage);
415 client.add_entity_request_handler(Self::handle_unstage);
416 client.add_entity_request_handler(Self::handle_stash);
417 client.add_entity_request_handler(Self::handle_stash_pop);
418 client.add_entity_request_handler(Self::handle_stash_apply);
419 client.add_entity_request_handler(Self::handle_stash_drop);
420 client.add_entity_request_handler(Self::handle_commit);
421 client.add_entity_request_handler(Self::handle_reset);
422 client.add_entity_request_handler(Self::handle_show);
423 client.add_entity_request_handler(Self::handle_load_commit_diff);
424 client.add_entity_request_handler(Self::handle_checkout_files);
425 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
426 client.add_entity_request_handler(Self::handle_set_index_text);
427 client.add_entity_request_handler(Self::handle_askpass);
428 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
429 client.add_entity_request_handler(Self::handle_git_diff);
430 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
431 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
432 client.add_entity_message_handler(Self::handle_update_diff_bases);
433 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
434 client.add_entity_request_handler(Self::handle_blame_buffer);
435 client.add_entity_message_handler(Self::handle_update_repository);
436 client.add_entity_message_handler(Self::handle_remove_repository);
437 client.add_entity_request_handler(Self::handle_git_clone);
438 }
439
440 pub fn is_local(&self) -> bool {
441 matches!(self.state, GitStoreState::Local { .. })
442 }
443 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
444 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
445 let id = repo.read(cx).id;
446 if self.active_repo_id != Some(id) {
447 self.active_repo_id = Some(id);
448 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
449 }
450 }
451 }
452
453 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
454 match &mut self.state {
455 GitStoreState::Remote {
456 downstream: downstream_client,
457 ..
458 } => {
459 for repo in self.repositories.values() {
460 let update = repo.read(cx).snapshot.initial_update(project_id);
461 for update in split_repository_update(update) {
462 client.send(update).log_err();
463 }
464 }
465 *downstream_client = Some((client, ProjectId(project_id)));
466 }
467 GitStoreState::Local {
468 downstream: downstream_client,
469 ..
470 } => {
471 let mut snapshots = HashMap::default();
472 let (updates_tx, mut updates_rx) = mpsc::unbounded();
473 for repo in self.repositories.values() {
474 updates_tx
475 .unbounded_send(DownstreamUpdate::UpdateRepository(
476 repo.read(cx).snapshot.clone(),
477 ))
478 .ok();
479 }
480 *downstream_client = Some(LocalDownstreamState {
481 client: client.clone(),
482 project_id: ProjectId(project_id),
483 updates_tx,
484 _task: cx.spawn(async move |this, cx| {
485 cx.background_spawn(async move {
486 while let Some(update) = updates_rx.next().await {
487 match update {
488 DownstreamUpdate::UpdateRepository(snapshot) => {
489 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
490 {
491 let update =
492 snapshot.build_update(old_snapshot, project_id);
493 *old_snapshot = snapshot;
494 for update in split_repository_update(update) {
495 client.send(update)?;
496 }
497 } else {
498 let update = snapshot.initial_update(project_id);
499 for update in split_repository_update(update) {
500 client.send(update)?;
501 }
502 snapshots.insert(snapshot.id, snapshot);
503 }
504 }
505 DownstreamUpdate::RemoveRepository(id) => {
506 client.send(proto::RemoveRepository {
507 project_id,
508 id: id.to_proto(),
509 })?;
510 }
511 }
512 }
513 anyhow::Ok(())
514 })
515 .await
516 .ok();
517 this.update(cx, |this, _| {
518 if let GitStoreState::Local {
519 downstream: downstream_client,
520 ..
521 } = &mut this.state
522 {
523 downstream_client.take();
524 } else {
525 unreachable!("unshared called on remote store");
526 }
527 })
528 }),
529 });
530 }
531 }
532 }
533
534 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
535 match &mut self.state {
536 GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } => {
540 downstream_client.take();
541 }
542 GitStoreState::Remote {
543 downstream: downstream_client,
544 ..
545 } => {
546 downstream_client.take();
547 }
548 }
549 self.shared_diffs.clear();
550 }
551
552 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
553 self.shared_diffs.remove(peer_id);
554 }
555
556 pub fn active_repository(&self) -> Option<Entity<Repository>> {
557 self.active_repo_id
558 .as_ref()
559 .map(|id| self.repositories[id].clone())
560 }
561
562 pub fn open_unstaged_diff(
563 &mut self,
564 buffer: Entity<Buffer>,
565 cx: &mut Context<Self>,
566 ) -> Task<Result<Entity<BufferDiff>>> {
567 let buffer_id = buffer.read(cx).remote_id();
568 if let Some(diff_state) = self.diffs.get(&buffer_id)
569 && let Some(unstaged_diff) = diff_state
570 .read(cx)
571 .unstaged_diff
572 .as_ref()
573 .and_then(|weak| weak.upgrade())
574 {
575 if let Some(task) =
576 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
577 {
578 return cx.background_executor().spawn(async move {
579 task.await;
580 Ok(unstaged_diff)
581 });
582 }
583 return Task::ready(Ok(unstaged_diff));
584 }
585
586 let Some((repo, repo_path)) =
587 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
588 else {
589 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
590 };
591
592 let task = self
593 .loading_diffs
594 .entry((buffer_id, DiffKind::Unstaged))
595 .or_insert_with(|| {
596 let staged_text = repo.update(cx, |repo, cx| {
597 repo.load_staged_text(buffer_id, repo_path, cx)
598 });
599 cx.spawn(async move |this, cx| {
600 Self::open_diff_internal(
601 this,
602 DiffKind::Unstaged,
603 staged_text.await.map(DiffBasesChange::SetIndex),
604 buffer,
605 cx,
606 )
607 .await
608 .map_err(Arc::new)
609 })
610 .shared()
611 })
612 .clone();
613
614 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
615 }
616
617 pub fn open_uncommitted_diff(
618 &mut self,
619 buffer: Entity<Buffer>,
620 cx: &mut Context<Self>,
621 ) -> Task<Result<Entity<BufferDiff>>> {
622 let buffer_id = buffer.read(cx).remote_id();
623
624 if let Some(diff_state) = self.diffs.get(&buffer_id)
625 && let Some(uncommitted_diff) = diff_state
626 .read(cx)
627 .uncommitted_diff
628 .as_ref()
629 .and_then(|weak| weak.upgrade())
630 {
631 if let Some(task) =
632 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
633 {
634 return cx.background_executor().spawn(async move {
635 task.await;
636 Ok(uncommitted_diff)
637 });
638 }
639 return Task::ready(Ok(uncommitted_diff));
640 }
641
642 let Some((repo, repo_path)) =
643 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
644 else {
645 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
646 };
647
648 let task = self
649 .loading_diffs
650 .entry((buffer_id, DiffKind::Uncommitted))
651 .or_insert_with(|| {
652 let changes = repo.update(cx, |repo, cx| {
653 repo.load_committed_text(buffer_id, repo_path, cx)
654 });
655
656 cx.spawn(async move |this, cx| {
657 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
658 .await
659 .map_err(Arc::new)
660 })
661 .shared()
662 })
663 .clone();
664
665 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
666 }
667
668 async fn open_diff_internal(
669 this: WeakEntity<Self>,
670 kind: DiffKind,
671 texts: Result<DiffBasesChange>,
672 buffer_entity: Entity<Buffer>,
673 cx: &mut AsyncApp,
674 ) -> Result<Entity<BufferDiff>> {
675 let diff_bases_change = match texts {
676 Err(e) => {
677 this.update(cx, |this, cx| {
678 let buffer = buffer_entity.read(cx);
679 let buffer_id = buffer.remote_id();
680 this.loading_diffs.remove(&(buffer_id, kind));
681 })?;
682 return Err(e);
683 }
684 Ok(change) => change,
685 };
686
687 this.update(cx, |this, cx| {
688 let buffer = buffer_entity.read(cx);
689 let buffer_id = buffer.remote_id();
690 let language = buffer.language().cloned();
691 let language_registry = buffer.language_registry();
692 let text_snapshot = buffer.text_snapshot();
693 this.loading_diffs.remove(&(buffer_id, kind));
694
695 let git_store = cx.weak_entity();
696 let diff_state = this
697 .diffs
698 .entry(buffer_id)
699 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
700
701 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
702
703 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
704 diff_state.update(cx, |diff_state, cx| {
705 diff_state.language = language;
706 diff_state.language_registry = language_registry;
707
708 match kind {
709 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
710 DiffKind::Uncommitted => {
711 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
712 diff
713 } else {
714 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
715 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
716 unstaged_diff
717 };
718
719 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
720 diff_state.uncommitted_diff = Some(diff.downgrade())
721 }
722 }
723
724 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
725 let rx = diff_state.wait_for_recalculation();
726
727 anyhow::Ok(async move {
728 if let Some(rx) = rx {
729 rx.await;
730 }
731 Ok(diff)
732 })
733 })
734 })??
735 .await
736 }
737
738 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
739 let diff_state = self.diffs.get(&buffer_id)?;
740 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
741 }
742
743 pub fn get_uncommitted_diff(
744 &self,
745 buffer_id: BufferId,
746 cx: &App,
747 ) -> Option<Entity<BufferDiff>> {
748 let diff_state = self.diffs.get(&buffer_id)?;
749 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
750 }
751
752 pub fn open_conflict_set(
753 &mut self,
754 buffer: Entity<Buffer>,
755 cx: &mut Context<Self>,
756 ) -> Entity<ConflictSet> {
757 log::debug!("open conflict set");
758 let buffer_id = buffer.read(cx).remote_id();
759
760 if let Some(git_state) = self.diffs.get(&buffer_id)
761 && let Some(conflict_set) = git_state
762 .read(cx)
763 .conflict_set
764 .as_ref()
765 .and_then(|weak| weak.upgrade())
766 {
767 let conflict_set = conflict_set;
768 let buffer_snapshot = buffer.read(cx).text_snapshot();
769
770 git_state.update(cx, |state, cx| {
771 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
772 });
773
774 return conflict_set;
775 }
776
777 let is_unmerged = self
778 .repository_and_path_for_buffer_id(buffer_id, cx)
779 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
780 let git_store = cx.weak_entity();
781 let buffer_git_state = self
782 .diffs
783 .entry(buffer_id)
784 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
785 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
786
787 self._subscriptions
788 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
789 cx.emit(GitStoreEvent::ConflictsUpdated);
790 }));
791
792 buffer_git_state.update(cx, |state, cx| {
793 state.conflict_set = Some(conflict_set.downgrade());
794 let buffer_snapshot = buffer.read(cx).text_snapshot();
795 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
796 });
797
798 conflict_set
799 }
800
801 pub fn project_path_git_status(
802 &self,
803 project_path: &ProjectPath,
804 cx: &App,
805 ) -> Option<FileStatus> {
806 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
807 Some(repo.read(cx).status_for_path(&repo_path)?.status)
808 }
809
810 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
811 let mut work_directory_abs_paths = Vec::new();
812 let mut checkpoints = Vec::new();
813 for repository in self.repositories.values() {
814 repository.update(cx, |repository, _| {
815 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
816 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
817 });
818 }
819
820 cx.background_executor().spawn(async move {
821 let checkpoints = future::try_join_all(checkpoints).await?;
822 Ok(GitStoreCheckpoint {
823 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
824 .into_iter()
825 .zip(checkpoints)
826 .collect(),
827 })
828 })
829 }
830
831 pub fn restore_checkpoint(
832 &self,
833 checkpoint: GitStoreCheckpoint,
834 cx: &mut App,
835 ) -> Task<Result<()>> {
836 let repositories_by_work_dir_abs_path = self
837 .repositories
838 .values()
839 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
840 .collect::<HashMap<_, _>>();
841
842 let mut tasks = Vec::new();
843 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
844 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
845 let restore = repository.update(cx, |repository, _| {
846 repository.restore_checkpoint(checkpoint)
847 });
848 tasks.push(async move { restore.await? });
849 }
850 }
851 cx.background_spawn(async move {
852 future::try_join_all(tasks).await?;
853 Ok(())
854 })
855 }
856
857 /// Compares two checkpoints, returning true if they are equal.
858 pub fn compare_checkpoints(
859 &self,
860 left: GitStoreCheckpoint,
861 mut right: GitStoreCheckpoint,
862 cx: &mut App,
863 ) -> Task<Result<bool>> {
864 let repositories_by_work_dir_abs_path = self
865 .repositories
866 .values()
867 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
868 .collect::<HashMap<_, _>>();
869
870 let mut tasks = Vec::new();
871 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
872 if let Some(right_checkpoint) = right
873 .checkpoints_by_work_dir_abs_path
874 .remove(&work_dir_abs_path)
875 {
876 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
877 {
878 let compare = repository.update(cx, |repository, _| {
879 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
880 });
881
882 tasks.push(async move { compare.await? });
883 }
884 } else {
885 return Task::ready(Ok(false));
886 }
887 }
888 cx.background_spawn(async move {
889 Ok(future::try_join_all(tasks)
890 .await?
891 .into_iter()
892 .all(|result| result))
893 })
894 }
895
896 /// Blames a buffer.
897 pub fn blame_buffer(
898 &self,
899 buffer: &Entity<Buffer>,
900 version: Option<clock::Global>,
901 cx: &mut App,
902 ) -> Task<Result<Option<Blame>>> {
903 let buffer = buffer.read(cx);
904 let Some((repo, repo_path)) =
905 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
906 else {
907 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
908 };
909 let content = match &version {
910 Some(version) => buffer.rope_for_version(version),
911 None => buffer.as_rope().clone(),
912 };
913 let version = version.unwrap_or(buffer.version());
914 let buffer_id = buffer.remote_id();
915
916 let rx = repo.update(cx, |repo, _| {
917 repo.send_job(None, move |state, _| async move {
918 match state {
919 RepositoryState::Local { backend, .. } => backend
920 .blame(repo_path.clone(), content)
921 .await
922 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
923 .map(Some),
924 RepositoryState::Remote { project_id, client } => {
925 let response = client
926 .request(proto::BlameBuffer {
927 project_id: project_id.to_proto(),
928 buffer_id: buffer_id.into(),
929 version: serialize_version(&version),
930 })
931 .await?;
932 Ok(deserialize_blame_buffer_response(response))
933 }
934 }
935 })
936 });
937
938 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
939 }
940
941 pub fn get_permalink_to_line(
942 &self,
943 buffer: &Entity<Buffer>,
944 selection: Range<u32>,
945 cx: &mut App,
946 ) -> Task<Result<url::Url>> {
947 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
948 return Task::ready(Err(anyhow!("buffer has no file")));
949 };
950
951 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
952 &(file.worktree.read(cx).id(), file.path.clone()).into(),
953 cx,
954 ) else {
955 // If we're not in a Git repo, check whether this is a Rust source
956 // file in the Cargo registry (presumably opened with go-to-definition
957 // from a normal Rust file). If so, we can put together a permalink
958 // using crate metadata.
959 if buffer
960 .read(cx)
961 .language()
962 .is_none_or(|lang| lang.name() != "Rust".into())
963 {
964 return Task::ready(Err(anyhow!("no permalink available")));
965 }
966 let file_path = file.worktree.read(cx).absolutize(&file.path);
967 return cx.spawn(async move |cx| {
968 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
969 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
970 .context("no permalink available")
971 });
972
973 // TODO remote case
974 };
975
976 let buffer_id = buffer.read(cx).remote_id();
977 let branch = repo.read(cx).branch.clone();
978 let remote = branch
979 .as_ref()
980 .and_then(|b| b.upstream.as_ref())
981 .and_then(|b| b.remote_name())
982 .unwrap_or("origin")
983 .to_string();
984
985 let rx = repo.update(cx, |repo, _| {
986 repo.send_job(None, move |state, cx| async move {
987 match state {
988 RepositoryState::Local { backend, .. } => {
989 let origin_url = backend
990 .remote_url(&remote)
991 .with_context(|| format!("remote \"{remote}\" not found"))?;
992
993 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
994
995 let provider_registry =
996 cx.update(GitHostingProviderRegistry::default_global)?;
997
998 let (provider, remote) =
999 parse_git_remote_url(provider_registry, &origin_url)
1000 .context("parsing Git remote URL")?;
1001
1002 let path = repo_path.as_unix_str();
1003
1004 Ok(provider.build_permalink(
1005 remote,
1006 BuildPermalinkParams {
1007 sha: &sha,
1008 path,
1009 selection: Some(selection),
1010 },
1011 ))
1012 }
1013 RepositoryState::Remote { project_id, client } => {
1014 let response = client
1015 .request(proto::GetPermalinkToLine {
1016 project_id: project_id.to_proto(),
1017 buffer_id: buffer_id.into(),
1018 selection: Some(proto::Range {
1019 start: selection.start as u64,
1020 end: selection.end as u64,
1021 }),
1022 })
1023 .await?;
1024
1025 url::Url::parse(&response.permalink).context("failed to parse permalink")
1026 }
1027 }
1028 })
1029 });
1030 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1031 }
1032
1033 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1034 match &self.state {
1035 GitStoreState::Local {
1036 downstream: downstream_client,
1037 ..
1038 } => downstream_client
1039 .as_ref()
1040 .map(|state| (state.client.clone(), state.project_id)),
1041 GitStoreState::Remote {
1042 downstream: downstream_client,
1043 ..
1044 } => downstream_client.clone(),
1045 }
1046 }
1047
1048 fn upstream_client(&self) -> Option<AnyProtoClient> {
1049 match &self.state {
1050 GitStoreState::Local { .. } => None,
1051 GitStoreState::Remote {
1052 upstream_client, ..
1053 } => Some(upstream_client.clone()),
1054 }
1055 }
1056
1057 fn on_worktree_store_event(
1058 &mut self,
1059 worktree_store: Entity<WorktreeStore>,
1060 event: &WorktreeStoreEvent,
1061 cx: &mut Context<Self>,
1062 ) {
1063 let GitStoreState::Local {
1064 project_environment,
1065 downstream,
1066 next_repository_id,
1067 fs,
1068 } = &self.state
1069 else {
1070 return;
1071 };
1072
1073 match event {
1074 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1075 if let Some(worktree) = self
1076 .worktree_store
1077 .read(cx)
1078 .worktree_for_id(*worktree_id, cx)
1079 {
1080 let paths_by_git_repo =
1081 self.process_updated_entries(&worktree, updated_entries, cx);
1082 let downstream = downstream
1083 .as_ref()
1084 .map(|downstream| downstream.updates_tx.clone());
1085 cx.spawn(async move |_, cx| {
1086 let paths_by_git_repo = paths_by_git_repo.await;
1087 for (repo, paths) in paths_by_git_repo {
1088 repo.update(cx, |repo, cx| {
1089 repo.paths_changed(paths, downstream.clone(), cx);
1090 })
1091 .ok();
1092 }
1093 })
1094 .detach();
1095 }
1096 }
1097 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1098 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1099 else {
1100 return;
1101 };
1102 if !worktree.read(cx).is_visible() {
1103 log::debug!(
1104 "not adding repositories for local worktree {:?} because it's not visible",
1105 worktree.read(cx).abs_path()
1106 );
1107 return;
1108 }
1109 self.update_repositories_from_worktree(
1110 project_environment.clone(),
1111 next_repository_id.clone(),
1112 downstream
1113 .as_ref()
1114 .map(|downstream| downstream.updates_tx.clone()),
1115 changed_repos.clone(),
1116 fs.clone(),
1117 cx,
1118 );
1119 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1120 }
1121 _ => {}
1122 }
1123 }
1124 fn on_repository_event(
1125 &mut self,
1126 repo: Entity<Repository>,
1127 event: &RepositoryEvent,
1128 cx: &mut Context<Self>,
1129 ) {
1130 let id = repo.read(cx).id;
1131 let repo_snapshot = repo.read(cx).snapshot.clone();
1132 for (buffer_id, diff) in self.diffs.iter() {
1133 if let Some((buffer_repo, repo_path)) =
1134 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1135 && buffer_repo == repo
1136 {
1137 diff.update(cx, |diff, cx| {
1138 if let Some(conflict_set) = &diff.conflict_set {
1139 let conflict_status_changed =
1140 conflict_set.update(cx, |conflict_set, cx| {
1141 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1142 conflict_set.set_has_conflict(has_conflict, cx)
1143 })?;
1144 if conflict_status_changed {
1145 let buffer_store = self.buffer_store.read(cx);
1146 if let Some(buffer) = buffer_store.get(*buffer_id) {
1147 let _ = diff
1148 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1149 }
1150 }
1151 }
1152 anyhow::Ok(())
1153 })
1154 .ok();
1155 }
1156 }
1157 cx.emit(GitStoreEvent::RepositoryUpdated(
1158 id,
1159 event.clone(),
1160 self.active_repo_id == Some(id),
1161 ))
1162 }
1163
1164 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1165 cx.emit(GitStoreEvent::JobsUpdated)
1166 }
1167
1168 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1169 fn update_repositories_from_worktree(
1170 &mut self,
1171 project_environment: Entity<ProjectEnvironment>,
1172 next_repository_id: Arc<AtomicU64>,
1173 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1174 updated_git_repositories: UpdatedGitRepositoriesSet,
1175 fs: Arc<dyn Fs>,
1176 cx: &mut Context<Self>,
1177 ) {
1178 let mut removed_ids = Vec::new();
1179 for update in updated_git_repositories.iter() {
1180 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1181 let existing_work_directory_abs_path =
1182 repo.read(cx).work_directory_abs_path.clone();
1183 Some(&existing_work_directory_abs_path)
1184 == update.old_work_directory_abs_path.as_ref()
1185 || Some(&existing_work_directory_abs_path)
1186 == update.new_work_directory_abs_path.as_ref()
1187 }) {
1188 if let Some(new_work_directory_abs_path) =
1189 update.new_work_directory_abs_path.clone()
1190 {
1191 existing.update(cx, |existing, cx| {
1192 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1193 existing.schedule_scan(updates_tx.clone(), cx);
1194 });
1195 } else {
1196 removed_ids.push(*id);
1197 }
1198 } else if let UpdatedGitRepository {
1199 new_work_directory_abs_path: Some(work_directory_abs_path),
1200 dot_git_abs_path: Some(dot_git_abs_path),
1201 repository_dir_abs_path: Some(repository_dir_abs_path),
1202 common_dir_abs_path: Some(common_dir_abs_path),
1203 ..
1204 } = update
1205 {
1206 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1207 let git_store = cx.weak_entity();
1208 let repo = cx.new(|cx| {
1209 let mut repo = Repository::local(
1210 id,
1211 work_directory_abs_path.clone(),
1212 dot_git_abs_path.clone(),
1213 repository_dir_abs_path.clone(),
1214 common_dir_abs_path.clone(),
1215 project_environment.downgrade(),
1216 fs.clone(),
1217 git_store,
1218 cx,
1219 );
1220 repo.schedule_scan(updates_tx.clone(), cx);
1221 repo
1222 });
1223 self._subscriptions
1224 .push(cx.subscribe(&repo, Self::on_repository_event));
1225 self._subscriptions
1226 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1227 self.repositories.insert(id, repo);
1228 cx.emit(GitStoreEvent::RepositoryAdded(id));
1229 self.active_repo_id.get_or_insert_with(|| {
1230 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1231 id
1232 });
1233 }
1234 }
1235
1236 for id in removed_ids {
1237 if self.active_repo_id == Some(id) {
1238 self.active_repo_id = None;
1239 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1240 }
1241 self.repositories.remove(&id);
1242 if let Some(updates_tx) = updates_tx.as_ref() {
1243 updates_tx
1244 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1245 .ok();
1246 }
1247 }
1248 }
1249
1250 fn on_buffer_store_event(
1251 &mut self,
1252 _: Entity<BufferStore>,
1253 event: &BufferStoreEvent,
1254 cx: &mut Context<Self>,
1255 ) {
1256 match event {
1257 BufferStoreEvent::BufferAdded(buffer) => {
1258 cx.subscribe(buffer, |this, buffer, event, cx| {
1259 if let BufferEvent::LanguageChanged = event {
1260 let buffer_id = buffer.read(cx).remote_id();
1261 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1262 diff_state.update(cx, |diff_state, cx| {
1263 diff_state.buffer_language_changed(buffer, cx);
1264 });
1265 }
1266 }
1267 })
1268 .detach();
1269 }
1270 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1271 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1272 diffs.remove(buffer_id);
1273 }
1274 }
1275 BufferStoreEvent::BufferDropped(buffer_id) => {
1276 self.diffs.remove(buffer_id);
1277 for diffs in self.shared_diffs.values_mut() {
1278 diffs.remove(buffer_id);
1279 }
1280 }
1281
1282 _ => {}
1283 }
1284 }
1285
1286 pub fn recalculate_buffer_diffs(
1287 &mut self,
1288 buffers: Vec<Entity<Buffer>>,
1289 cx: &mut Context<Self>,
1290 ) -> impl Future<Output = ()> + use<> {
1291 let mut futures = Vec::new();
1292 for buffer in buffers {
1293 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1294 let buffer = buffer.read(cx).text_snapshot();
1295 diff_state.update(cx, |diff_state, cx| {
1296 diff_state.recalculate_diffs(buffer.clone(), cx);
1297 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1298 });
1299 futures.push(diff_state.update(cx, |diff_state, cx| {
1300 diff_state
1301 .reparse_conflict_markers(buffer, cx)
1302 .map(|_| {})
1303 .boxed()
1304 }));
1305 }
1306 }
1307 async move {
1308 futures::future::join_all(futures).await;
1309 }
1310 }
1311
1312 fn on_buffer_diff_event(
1313 &mut self,
1314 diff: Entity<buffer_diff::BufferDiff>,
1315 event: &BufferDiffEvent,
1316 cx: &mut Context<Self>,
1317 ) {
1318 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1319 let buffer_id = diff.read(cx).buffer_id;
1320 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1321 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1322 diff_state.hunk_staging_operation_count += 1;
1323 diff_state.hunk_staging_operation_count
1324 });
1325 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1326 let recv = repo.update(cx, |repo, cx| {
1327 log::debug!("hunks changed for {}", path.as_unix_str());
1328 repo.spawn_set_index_text_job(
1329 path,
1330 new_index_text.as_ref().map(|rope| rope.to_string()),
1331 Some(hunk_staging_operation_count),
1332 cx,
1333 )
1334 });
1335 let diff = diff.downgrade();
1336 cx.spawn(async move |this, cx| {
1337 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1338 diff.update(cx, |diff, cx| {
1339 diff.clear_pending_hunks(cx);
1340 })
1341 .ok();
1342 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1343 .ok();
1344 }
1345 })
1346 .detach();
1347 }
1348 }
1349 }
1350 }
1351
1352 fn local_worktree_git_repos_changed(
1353 &mut self,
1354 worktree: Entity<Worktree>,
1355 changed_repos: &UpdatedGitRepositoriesSet,
1356 cx: &mut Context<Self>,
1357 ) {
1358 log::debug!("local worktree repos changed");
1359 debug_assert!(worktree.read(cx).is_local());
1360
1361 for repository in self.repositories.values() {
1362 repository.update(cx, |repository, cx| {
1363 let repo_abs_path = &repository.work_directory_abs_path;
1364 if changed_repos.iter().any(|update| {
1365 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1366 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1367 }) {
1368 repository.reload_buffer_diff_bases(cx);
1369 }
1370 });
1371 }
1372 }
1373
1374 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1375 &self.repositories
1376 }
1377
1378 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1379 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1380 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1381 Some(status.status)
1382 }
1383
1384 pub fn repository_and_path_for_buffer_id(
1385 &self,
1386 buffer_id: BufferId,
1387 cx: &App,
1388 ) -> Option<(Entity<Repository>, RepoPath)> {
1389 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1390 let project_path = buffer.read(cx).project_path(cx)?;
1391 self.repository_and_path_for_project_path(&project_path, cx)
1392 }
1393
1394 pub fn repository_and_path_for_project_path(
1395 &self,
1396 path: &ProjectPath,
1397 cx: &App,
1398 ) -> Option<(Entity<Repository>, RepoPath)> {
1399 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1400 self.repositories
1401 .values()
1402 .filter_map(|repo| {
1403 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1404 Some((repo.clone(), repo_path))
1405 })
1406 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1407 }
1408
1409 pub fn git_init(
1410 &self,
1411 path: Arc<Path>,
1412 fallback_branch_name: String,
1413 cx: &App,
1414 ) -> Task<Result<()>> {
1415 match &self.state {
1416 GitStoreState::Local { fs, .. } => {
1417 let fs = fs.clone();
1418 cx.background_executor()
1419 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1420 }
1421 GitStoreState::Remote {
1422 upstream_client,
1423 upstream_project_id: project_id,
1424 ..
1425 } => {
1426 let client = upstream_client.clone();
1427 let project_id = *project_id;
1428 cx.background_executor().spawn(async move {
1429 client
1430 .request(proto::GitInit {
1431 project_id: project_id,
1432 abs_path: path.to_string_lossy().into_owned(),
1433 fallback_branch_name,
1434 })
1435 .await?;
1436 Ok(())
1437 })
1438 }
1439 }
1440 }
1441
1442 pub fn git_clone(
1443 &self,
1444 repo: String,
1445 path: impl Into<Arc<std::path::Path>>,
1446 cx: &App,
1447 ) -> Task<Result<()>> {
1448 let path = path.into();
1449 match &self.state {
1450 GitStoreState::Local { fs, .. } => {
1451 let fs = fs.clone();
1452 cx.background_executor()
1453 .spawn(async move { fs.git_clone(&repo, &path).await })
1454 }
1455 GitStoreState::Remote {
1456 upstream_client,
1457 upstream_project_id,
1458 ..
1459 } => {
1460 if upstream_client.is_via_collab() {
1461 return Task::ready(Err(anyhow!(
1462 "Git Clone isn't supported for project guests"
1463 )));
1464 }
1465 let request = upstream_client.request(proto::GitClone {
1466 project_id: *upstream_project_id,
1467 abs_path: path.to_string_lossy().into_owned(),
1468 remote_repo: repo,
1469 });
1470
1471 cx.background_spawn(async move {
1472 let result = request.await?;
1473
1474 match result.success {
1475 true => Ok(()),
1476 false => Err(anyhow!("Git Clone failed")),
1477 }
1478 })
1479 }
1480 }
1481 }
1482
1483 async fn handle_update_repository(
1484 this: Entity<Self>,
1485 envelope: TypedEnvelope<proto::UpdateRepository>,
1486 mut cx: AsyncApp,
1487 ) -> Result<()> {
1488 this.update(&mut cx, |this, cx| {
1489 let path_style = this.worktree_store.read(cx).path_style();
1490 let mut update = envelope.payload;
1491
1492 let id = RepositoryId::from_proto(update.id);
1493 let client = this.upstream_client().context("no upstream client")?;
1494
1495 let mut is_new = false;
1496 let repo = this.repositories.entry(id).or_insert_with(|| {
1497 is_new = true;
1498 let git_store = cx.weak_entity();
1499 cx.new(|cx| {
1500 Repository::remote(
1501 id,
1502 Path::new(&update.abs_path).into(),
1503 path_style,
1504 ProjectId(update.project_id),
1505 client,
1506 git_store,
1507 cx,
1508 )
1509 })
1510 });
1511 if is_new {
1512 this._subscriptions
1513 .push(cx.subscribe(repo, Self::on_repository_event))
1514 }
1515
1516 repo.update(cx, {
1517 let update = update.clone();
1518 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1519 })?;
1520
1521 this.active_repo_id.get_or_insert_with(|| {
1522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1523 id
1524 });
1525
1526 if let Some((client, project_id)) = this.downstream_client() {
1527 update.project_id = project_id.to_proto();
1528 client.send(update).log_err();
1529 }
1530 Ok(())
1531 })?
1532 }
1533
1534 async fn handle_remove_repository(
1535 this: Entity<Self>,
1536 envelope: TypedEnvelope<proto::RemoveRepository>,
1537 mut cx: AsyncApp,
1538 ) -> Result<()> {
1539 this.update(&mut cx, |this, cx| {
1540 let mut update = envelope.payload;
1541 let id = RepositoryId::from_proto(update.id);
1542 this.repositories.remove(&id);
1543 if let Some((client, project_id)) = this.downstream_client() {
1544 update.project_id = project_id.to_proto();
1545 client.send(update).log_err();
1546 }
1547 if this.active_repo_id == Some(id) {
1548 this.active_repo_id = None;
1549 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1550 }
1551 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1552 })
1553 }
1554
1555 async fn handle_git_init(
1556 this: Entity<Self>,
1557 envelope: TypedEnvelope<proto::GitInit>,
1558 cx: AsyncApp,
1559 ) -> Result<proto::Ack> {
1560 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1561 let name = envelope.payload.fallback_branch_name;
1562 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1563 .await?;
1564
1565 Ok(proto::Ack {})
1566 }
1567
1568 async fn handle_git_clone(
1569 this: Entity<Self>,
1570 envelope: TypedEnvelope<proto::GitClone>,
1571 cx: AsyncApp,
1572 ) -> Result<proto::GitCloneResponse> {
1573 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1574 let repo_name = envelope.payload.remote_repo;
1575 let result = cx
1576 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1577 .await;
1578
1579 Ok(proto::GitCloneResponse {
1580 success: result.is_ok(),
1581 })
1582 }
1583
1584 async fn handle_fetch(
1585 this: Entity<Self>,
1586 envelope: TypedEnvelope<proto::Fetch>,
1587 mut cx: AsyncApp,
1588 ) -> Result<proto::RemoteMessageResponse> {
1589 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1590 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1591 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1592 let askpass_id = envelope.payload.askpass_id;
1593
1594 let askpass = make_remote_delegate(
1595 this,
1596 envelope.payload.project_id,
1597 repository_id,
1598 askpass_id,
1599 &mut cx,
1600 );
1601
1602 let remote_output = repository_handle
1603 .update(&mut cx, |repository_handle, cx| {
1604 repository_handle.fetch(fetch_options, askpass, cx)
1605 })?
1606 .await??;
1607
1608 Ok(proto::RemoteMessageResponse {
1609 stdout: remote_output.stdout,
1610 stderr: remote_output.stderr,
1611 })
1612 }
1613
1614 async fn handle_push(
1615 this: Entity<Self>,
1616 envelope: TypedEnvelope<proto::Push>,
1617 mut cx: AsyncApp,
1618 ) -> Result<proto::RemoteMessageResponse> {
1619 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1620 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1621
1622 let askpass_id = envelope.payload.askpass_id;
1623 let askpass = make_remote_delegate(
1624 this,
1625 envelope.payload.project_id,
1626 repository_id,
1627 askpass_id,
1628 &mut cx,
1629 );
1630
1631 let options = envelope
1632 .payload
1633 .options
1634 .as_ref()
1635 .map(|_| match envelope.payload.options() {
1636 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1637 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1638 });
1639
1640 let branch_name = envelope.payload.branch_name.into();
1641 let remote_name = envelope.payload.remote_name.into();
1642
1643 let remote_output = repository_handle
1644 .update(&mut cx, |repository_handle, cx| {
1645 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1646 })?
1647 .await??;
1648 Ok(proto::RemoteMessageResponse {
1649 stdout: remote_output.stdout,
1650 stderr: remote_output.stderr,
1651 })
1652 }
1653
1654 async fn handle_pull(
1655 this: Entity<Self>,
1656 envelope: TypedEnvelope<proto::Pull>,
1657 mut cx: AsyncApp,
1658 ) -> Result<proto::RemoteMessageResponse> {
1659 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1660 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1661 let askpass_id = envelope.payload.askpass_id;
1662 let askpass = make_remote_delegate(
1663 this,
1664 envelope.payload.project_id,
1665 repository_id,
1666 askpass_id,
1667 &mut cx,
1668 );
1669
1670 let branch_name = envelope.payload.branch_name.into();
1671 let remote_name = envelope.payload.remote_name.into();
1672
1673 let remote_message = repository_handle
1674 .update(&mut cx, |repository_handle, cx| {
1675 repository_handle.pull(branch_name, remote_name, askpass, cx)
1676 })?
1677 .await??;
1678
1679 Ok(proto::RemoteMessageResponse {
1680 stdout: remote_message.stdout,
1681 stderr: remote_message.stderr,
1682 })
1683 }
1684
1685 async fn handle_stage(
1686 this: Entity<Self>,
1687 envelope: TypedEnvelope<proto::Stage>,
1688 mut cx: AsyncApp,
1689 ) -> Result<proto::Ack> {
1690 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1691 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1692
1693 let entries = envelope
1694 .payload
1695 .paths
1696 .into_iter()
1697 .map(|path| RepoPath::new(&path))
1698 .collect::<Result<Vec<_>>>()?;
1699
1700 repository_handle
1701 .update(&mut cx, |repository_handle, cx| {
1702 repository_handle.stage_entries(entries, cx)
1703 })?
1704 .await?;
1705 Ok(proto::Ack {})
1706 }
1707
1708 async fn handle_unstage(
1709 this: Entity<Self>,
1710 envelope: TypedEnvelope<proto::Unstage>,
1711 mut cx: AsyncApp,
1712 ) -> Result<proto::Ack> {
1713 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1714 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1715
1716 let entries = envelope
1717 .payload
1718 .paths
1719 .into_iter()
1720 .map(|path| RepoPath::new(&path))
1721 .collect::<Result<Vec<_>>>()?;
1722
1723 repository_handle
1724 .update(&mut cx, |repository_handle, cx| {
1725 repository_handle.unstage_entries(entries, cx)
1726 })?
1727 .await?;
1728
1729 Ok(proto::Ack {})
1730 }
1731
1732 async fn handle_stash(
1733 this: Entity<Self>,
1734 envelope: TypedEnvelope<proto::Stash>,
1735 mut cx: AsyncApp,
1736 ) -> Result<proto::Ack> {
1737 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1738 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1739
1740 let entries = envelope
1741 .payload
1742 .paths
1743 .into_iter()
1744 .map(|path| RepoPath::new(&path))
1745 .collect::<Result<Vec<_>>>()?;
1746
1747 repository_handle
1748 .update(&mut cx, |repository_handle, cx| {
1749 repository_handle.stash_entries(entries, cx)
1750 })?
1751 .await?;
1752
1753 Ok(proto::Ack {})
1754 }
1755
1756 async fn handle_stash_pop(
1757 this: Entity<Self>,
1758 envelope: TypedEnvelope<proto::StashPop>,
1759 mut cx: AsyncApp,
1760 ) -> Result<proto::Ack> {
1761 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1762 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1763 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1764
1765 repository_handle
1766 .update(&mut cx, |repository_handle, cx| {
1767 repository_handle.stash_pop(stash_index, cx)
1768 })?
1769 .await?;
1770
1771 Ok(proto::Ack {})
1772 }
1773
1774 async fn handle_stash_apply(
1775 this: Entity<Self>,
1776 envelope: TypedEnvelope<proto::StashApply>,
1777 mut cx: AsyncApp,
1778 ) -> Result<proto::Ack> {
1779 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1780 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1781 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1782
1783 repository_handle
1784 .update(&mut cx, |repository_handle, cx| {
1785 repository_handle.stash_apply(stash_index, cx)
1786 })?
1787 .await?;
1788
1789 Ok(proto::Ack {})
1790 }
1791
1792 async fn handle_stash_drop(
1793 this: Entity<Self>,
1794 envelope: TypedEnvelope<proto::StashDrop>,
1795 mut cx: AsyncApp,
1796 ) -> Result<proto::Ack> {
1797 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1798 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1799 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1800
1801 repository_handle
1802 .update(&mut cx, |repository_handle, cx| {
1803 repository_handle.stash_drop(stash_index, cx)
1804 })?
1805 .await??;
1806
1807 Ok(proto::Ack {})
1808 }
1809
1810 async fn handle_set_index_text(
1811 this: Entity<Self>,
1812 envelope: TypedEnvelope<proto::SetIndexText>,
1813 mut cx: AsyncApp,
1814 ) -> Result<proto::Ack> {
1815 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1816 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1817 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1818
1819 repository_handle
1820 .update(&mut cx, |repository_handle, cx| {
1821 repository_handle.spawn_set_index_text_job(
1822 repo_path,
1823 envelope.payload.text,
1824 None,
1825 cx,
1826 )
1827 })?
1828 .await??;
1829 Ok(proto::Ack {})
1830 }
1831
1832 async fn handle_commit(
1833 this: Entity<Self>,
1834 envelope: TypedEnvelope<proto::Commit>,
1835 mut cx: AsyncApp,
1836 ) -> Result<proto::Ack> {
1837 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1838 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1839
1840 let message = SharedString::from(envelope.payload.message);
1841 let name = envelope.payload.name.map(SharedString::from);
1842 let email = envelope.payload.email.map(SharedString::from);
1843 let options = envelope.payload.options.unwrap_or_default();
1844
1845 repository_handle
1846 .update(&mut cx, |repository_handle, cx| {
1847 repository_handle.commit(
1848 message,
1849 name.zip(email),
1850 CommitOptions {
1851 amend: options.amend,
1852 signoff: options.signoff,
1853 },
1854 cx,
1855 )
1856 })?
1857 .await??;
1858 Ok(proto::Ack {})
1859 }
1860
1861 async fn handle_get_remotes(
1862 this: Entity<Self>,
1863 envelope: TypedEnvelope<proto::GetRemotes>,
1864 mut cx: AsyncApp,
1865 ) -> Result<proto::GetRemotesResponse> {
1866 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1867 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1868
1869 let branch_name = envelope.payload.branch_name;
1870
1871 let remotes = repository_handle
1872 .update(&mut cx, |repository_handle, _| {
1873 repository_handle.get_remotes(branch_name)
1874 })?
1875 .await??;
1876
1877 Ok(proto::GetRemotesResponse {
1878 remotes: remotes
1879 .into_iter()
1880 .map(|remotes| proto::get_remotes_response::Remote {
1881 name: remotes.name.to_string(),
1882 })
1883 .collect::<Vec<_>>(),
1884 })
1885 }
1886
1887 async fn handle_get_branches(
1888 this: Entity<Self>,
1889 envelope: TypedEnvelope<proto::GitGetBranches>,
1890 mut cx: AsyncApp,
1891 ) -> Result<proto::GitBranchesResponse> {
1892 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1893 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1894
1895 let branches = repository_handle
1896 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1897 .await??;
1898
1899 Ok(proto::GitBranchesResponse {
1900 branches: branches
1901 .into_iter()
1902 .map(|branch| branch_to_proto(&branch))
1903 .collect::<Vec<_>>(),
1904 })
1905    }

1906    async fn handle_get_default_branch(
1907 this: Entity<Self>,
1908 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1909 mut cx: AsyncApp,
1910 ) -> Result<proto::GetDefaultBranchResponse> {
1911 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1912 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1913
1914 let branch = repository_handle
1915 .update(&mut cx, |repository_handle, _| {
1916 repository_handle.default_branch()
1917 })?
1918 .await??
1919 .map(Into::into);
1920
1921 Ok(proto::GetDefaultBranchResponse { branch })
1922    }

1923    async fn handle_create_branch(
1924 this: Entity<Self>,
1925 envelope: TypedEnvelope<proto::GitCreateBranch>,
1926 mut cx: AsyncApp,
1927 ) -> Result<proto::Ack> {
1928 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1929 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1930 let branch_name = envelope.payload.branch_name;
1931
1932 repository_handle
1933 .update(&mut cx, |repository_handle, _| {
1934 repository_handle.create_branch(branch_name)
1935 })?
1936 .await??;
1937
1938 Ok(proto::Ack {})
1939 }
1940
1941 async fn handle_change_branch(
1942 this: Entity<Self>,
1943 envelope: TypedEnvelope<proto::GitChangeBranch>,
1944 mut cx: AsyncApp,
1945 ) -> Result<proto::Ack> {
1946 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1947 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1948 let branch_name = envelope.payload.branch_name;
1949
1950 repository_handle
1951 .update(&mut cx, |repository_handle, _| {
1952 repository_handle.change_branch(branch_name)
1953 })?
1954 .await??;
1955
1956 Ok(proto::Ack {})
1957 }
1958
1959 async fn handle_rename_branch(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::GitRenameBranch>,
1962 mut cx: AsyncApp,
1963 ) -> Result<proto::Ack> {
1964 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1965 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1966 let branch = envelope.payload.branch;
1967 let new_name = envelope.payload.new_name;
1968
1969 repository_handle
1970 .update(&mut cx, |repository_handle, _| {
1971 repository_handle.rename_branch(branch, new_name)
1972 })?
1973 .await??;
1974
1975 Ok(proto::Ack {})
1976 }
1977
1978 async fn handle_show(
1979 this: Entity<Self>,
1980 envelope: TypedEnvelope<proto::GitShow>,
1981 mut cx: AsyncApp,
1982 ) -> Result<proto::GitCommitDetails> {
1983 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1984 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1985
1986 let commit = repository_handle
1987 .update(&mut cx, |repository_handle, _| {
1988 repository_handle.show(envelope.payload.commit)
1989 })?
1990 .await??;
1991 Ok(proto::GitCommitDetails {
1992 sha: commit.sha.into(),
1993 message: commit.message.into(),
1994 commit_timestamp: commit.commit_timestamp,
1995 author_email: commit.author_email.into(),
1996 author_name: commit.author_name.into(),
1997 })
1998 }
1999
2000 async fn handle_load_commit_diff(
2001 this: Entity<Self>,
2002 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2003 mut cx: AsyncApp,
2004 ) -> Result<proto::LoadCommitDiffResponse> {
2005 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2006 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2007
2008 let commit_diff = repository_handle
2009 .update(&mut cx, |repository_handle, _| {
2010 repository_handle.load_commit_diff(envelope.payload.commit)
2011 })?
2012 .await??;
2013 Ok(proto::LoadCommitDiffResponse {
2014 files: commit_diff
2015 .files
2016 .into_iter()
2017 .map(|file| proto::CommitFile {
2018 path: file.path.to_proto(),
2019 old_text: file.old_text,
2020 new_text: file.new_text,
2021 })
2022 .collect(),
2023 })
2024 }
2025
2026 async fn handle_reset(
2027 this: Entity<Self>,
2028 envelope: TypedEnvelope<proto::GitReset>,
2029 mut cx: AsyncApp,
2030 ) -> Result<proto::Ack> {
2031 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2032 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2033
2034 let mode = match envelope.payload.mode() {
2035 git_reset::ResetMode::Soft => ResetMode::Soft,
2036 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2037 };
2038
2039 repository_handle
2040 .update(&mut cx, |repository_handle, cx| {
2041 repository_handle.reset(envelope.payload.commit, mode, cx)
2042 })?
2043 .await??;
2044 Ok(proto::Ack {})
2045 }
2046
2047 async fn handle_checkout_files(
2048 this: Entity<Self>,
2049 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2050 mut cx: AsyncApp,
2051 ) -> Result<proto::Ack> {
2052 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2053 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2054 let paths = envelope
2055 .payload
2056 .paths
2057 .iter()
2058 .map(|s| RepoPath::from_proto(s))
2059 .collect::<Result<Vec<_>>>()?;
2060
2061 repository_handle
2062 .update(&mut cx, |repository_handle, cx| {
2063 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2064 })?
2065 .await??;
2066 Ok(proto::Ack {})
2067 }
2068
2069 async fn handle_open_commit_message_buffer(
2070 this: Entity<Self>,
2071 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2072 mut cx: AsyncApp,
2073 ) -> Result<proto::OpenBufferResponse> {
2074 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2075 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2076 let buffer = repository
2077 .update(&mut cx, |repository, cx| {
2078 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2079 })?
2080 .await?;
2081
2082 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2083 this.update(&mut cx, |this, cx| {
2084 this.buffer_store.update(cx, |buffer_store, cx| {
2085 buffer_store
2086 .create_buffer_for_peer(
2087 &buffer,
2088 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2089 cx,
2090 )
2091 .detach_and_log_err(cx);
2092 })
2093 })?;
2094
2095 Ok(proto::OpenBufferResponse {
2096 buffer_id: buffer_id.to_proto(),
2097 })
2098 }
2099
2100 async fn handle_askpass(
2101 this: Entity<Self>,
2102 envelope: TypedEnvelope<proto::AskPassRequest>,
2103 mut cx: AsyncApp,
2104 ) -> Result<proto::AskPassResponse> {
2105 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2106 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2107
2108 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2109 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2110 debug_panic!("no askpass found");
2111 anyhow::bail!("no askpass found");
2112 };
2113
2114 let response = askpass
2115 .ask_password(envelope.payload.prompt)
2116 .await
2117 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2118
2119 delegates
2120 .lock()
2121 .insert(envelope.payload.askpass_id, askpass);
2122
2123 response.try_into()
2124 }
2125
2126 async fn handle_check_for_pushed_commits(
2127 this: Entity<Self>,
2128 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2129 mut cx: AsyncApp,
2130 ) -> Result<proto::CheckForPushedCommitsResponse> {
2131 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2132 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2133
2134 let branches = repository_handle
2135 .update(&mut cx, |repository_handle, _| {
2136 repository_handle.check_for_pushed_commits()
2137 })?
2138 .await??;
2139 Ok(proto::CheckForPushedCommitsResponse {
2140 pushed_to: branches
2141 .into_iter()
2142 .map(|commit| commit.to_string())
2143 .collect(),
2144 })
2145 }
2146
2147 async fn handle_git_diff(
2148 this: Entity<Self>,
2149 envelope: TypedEnvelope<proto::GitDiff>,
2150 mut cx: AsyncApp,
2151 ) -> Result<proto::GitDiffResponse> {
2152 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2153 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2154 let diff_type = match envelope.payload.diff_type() {
2155 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2156 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2157 };
2158
2159 let mut diff = repository_handle
2160 .update(&mut cx, |repository_handle, cx| {
2161 repository_handle.diff(diff_type, cx)
2162 })?
2163 .await??;
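        // Cap the size of the diff returned over RPC. Truncating by characters rather than
        // bytes avoids splitting a UTF-8 code point, so the result may slightly exceed
        // ONE_MB bytes for non-ASCII content.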
2164 const ONE_MB: usize = 1_000_000;
2165 if diff.len() > ONE_MB {
2166 diff = diff.chars().take(ONE_MB).collect()
2167 }
2168
2169 Ok(proto::GitDiffResponse { diff })
2170 }
2171
2172 async fn handle_open_unstaged_diff(
2173 this: Entity<Self>,
2174 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2175 mut cx: AsyncApp,
2176 ) -> Result<proto::OpenUnstagedDiffResponse> {
2177 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2178 let diff = this
2179 .update(&mut cx, |this, cx| {
2180 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2181 Some(this.open_unstaged_diff(buffer, cx))
2182 })?
2183 .context("missing buffer")?
2184 .await?;
2185 this.update(&mut cx, |this, _| {
2186 let shared_diffs = this
2187 .shared_diffs
2188 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2189 .or_default();
2190 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2191 })?;
2192 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2193 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2194 }
2195
2196 async fn handle_open_uncommitted_diff(
2197 this: Entity<Self>,
2198 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2199 mut cx: AsyncApp,
2200 ) -> Result<proto::OpenUncommittedDiffResponse> {
2201 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2202 let diff = this
2203 .update(&mut cx, |this, cx| {
2204 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2205 Some(this.open_uncommitted_diff(buffer, cx))
2206 })?
2207 .context("missing buffer")?
2208 .await?;
2209 this.update(&mut cx, |this, _| {
2210 let shared_diffs = this
2211 .shared_diffs
2212 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2213 .or_default();
2214 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2215 })?;
2216 diff.read_with(&cx, |diff, cx| {
2217 use proto::open_uncommitted_diff_response::Mode;
2218
2219 let unstaged_diff = diff.secondary_diff();
2220 let index_snapshot = unstaged_diff.and_then(|diff| {
2221 let diff = diff.read(cx);
2222 diff.base_text_exists().then(|| diff.base_text())
2223 });
2224
2225 let mode;
2226 let staged_text;
2227 let committed_text;
2228 if diff.base_text_exists() {
2229 let committed_snapshot = diff.base_text();
2230 committed_text = Some(committed_snapshot.text());
2231 if let Some(index_text) = index_snapshot {
2232 if index_text.remote_id() == committed_snapshot.remote_id() {
2233 mode = Mode::IndexMatchesHead;
2234 staged_text = None;
2235 } else {
2236 mode = Mode::IndexAndHead;
2237 staged_text = Some(index_text.text());
2238 }
2239 } else {
2240 mode = Mode::IndexAndHead;
2241 staged_text = None;
2242 }
2243 } else {
2244 mode = Mode::IndexAndHead;
2245 committed_text = None;
2246 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2247 }
2248
2249 proto::OpenUncommittedDiffResponse {
2250 committed_text,
2251 staged_text,
2252 mode: mode.into(),
2253 }
2254 })
2255 }
2256
2257 async fn handle_update_diff_bases(
2258 this: Entity<Self>,
2259 request: TypedEnvelope<proto::UpdateDiffBases>,
2260 mut cx: AsyncApp,
2261 ) -> Result<()> {
2262 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2263 this.update(&mut cx, |this, cx| {
2264 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2265 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2266 {
2267 let buffer = buffer.read(cx).text_snapshot();
2268 diff_state.update(cx, |diff_state, cx| {
2269 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2270 })
2271 }
2272 })
2273 }
2274
2275 async fn handle_blame_buffer(
2276 this: Entity<Self>,
2277 envelope: TypedEnvelope<proto::BlameBuffer>,
2278 mut cx: AsyncApp,
2279 ) -> Result<proto::BlameBufferResponse> {
2280 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2281 let version = deserialize_version(&envelope.payload.version);
2282 let buffer = this.read_with(&cx, |this, cx| {
2283 this.buffer_store.read(cx).get_existing(buffer_id)
2284 })??;
2285 buffer
2286 .update(&mut cx, |buffer, _| {
2287 buffer.wait_for_version(version.clone())
2288 })?
2289 .await?;
2290 let blame = this
2291 .update(&mut cx, |this, cx| {
2292 this.blame_buffer(&buffer, Some(version), cx)
2293 })?
2294 .await?;
2295 Ok(serialize_blame_buffer_response(blame))
2296 }
2297
2298 async fn handle_get_permalink_to_line(
2299 this: Entity<Self>,
2300 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2301 mut cx: AsyncApp,
2302 ) -> Result<proto::GetPermalinkToLineResponse> {
2303 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2304        // Note: unlike `handle_blame_buffer`, this handler does not wait for the buffer to reach the payload's version.
2305 let selection = {
2306 let proto_selection = envelope
2307 .payload
2308 .selection
2309                .context("no selection provided for permalink")?;
2310 proto_selection.start as u32..proto_selection.end as u32
2311 };
2312 let buffer = this.read_with(&cx, |this, cx| {
2313 this.buffer_store.read(cx).get_existing(buffer_id)
2314 })??;
2315 let permalink = this
2316 .update(&mut cx, |this, cx| {
2317 this.get_permalink_to_line(&buffer, selection, cx)
2318 })?
2319 .await?;
2320 Ok(proto::GetPermalinkToLineResponse {
2321 permalink: permalink.to_string(),
2322 })
2323 }
2324
2325 fn repository_for_request(
2326 this: &Entity<Self>,
2327 id: RepositoryId,
2328 cx: &mut AsyncApp,
2329 ) -> Result<Entity<Repository>> {
2330 this.read_with(cx, |this, _| {
2331 this.repositories
2332 .get(&id)
2333 .context("missing repository handle")
2334 .cloned()
2335 })?
2336 }
2337
2338 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2339 self.repositories
2340 .iter()
2341 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2342 .collect()
2343 }
2344
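    /// Maps a batch of updated worktree entries to the repositories that contain them,
    /// returning the repo-relative paths grouped by repository. When repositories are
    /// nested, each path is attributed to its innermost containing repository.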
2345 fn process_updated_entries(
2346 &self,
2347 worktree: &Entity<Worktree>,
2348 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2349 cx: &mut App,
2350 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2351 let path_style = worktree.read(cx).path_style();
2352 let mut repo_paths = self
2353 .repositories
2354 .values()
2355 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2356 .collect::<Vec<_>>();
2357 let mut entries: Vec<_> = updated_entries
2358 .iter()
2359 .map(|(path, _, _)| path.clone())
2360 .collect();
2361 entries.sort();
2362 let worktree = worktree.read(cx);
2363
2364 let entries = entries
2365 .into_iter()
2366 .map(|path| worktree.absolutize(&path))
2367 .collect::<Arc<[_]>>();
2368
2369 let executor = cx.background_executor().clone();
2370 cx.background_executor().spawn(async move {
2371 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2372 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2373 let mut tasks = FuturesOrdered::new();
2374 for (repo_path, repo) in repo_paths.into_iter().rev() {
2375 let entries = entries.clone();
2376 let task = executor.spawn(async move {
2377 // Find all repository paths that belong to this repo
2378 let mut ix = entries.partition_point(|path| path < &*repo_path);
2379 if ix == entries.len() {
2380 return None;
2381 };
2382
2383 let mut paths = Vec::new();
2384                    // All paths prefixed by a given repo's work directory form a contiguous range.
2385 while let Some(path) = entries.get(ix)
2386 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2387 &repo_path, path, path_style,
2388 )
2389 {
2390 paths.push((repo_path, ix));
2391 ix += 1;
2392 }
2393 if paths.is_empty() {
2394 None
2395 } else {
2396 Some((repo, paths))
2397 }
2398 });
2399 tasks.push_back(task);
2400 }
2401
2402 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2403 let mut path_was_used = vec![false; entries.len()];
2404 let tasks = tasks.collect::<Vec<_>>().await;
2405            // Tasks were queued for repositories in reverse path order, so nested (more specific)
2406            // repositories are processed first; each path is assigned to its innermost repository.
2407 for t in tasks {
2408 let Some((repo, paths)) = t else {
2409 continue;
2410 };
2411 let entry = paths_by_git_repo.entry(repo).or_default();
2412 for (repo_path, ix) in paths {
2413 if path_was_used[ix] {
2414 continue;
2415 }
2416 path_was_used[ix] = true;
2417 entry.push(repo_path);
2418 }
2419 }
2420
2421 paths_by_git_repo
2422 })
2423 }
2424}
2425
2426impl BufferGitState {
2427 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2428 Self {
2429 unstaged_diff: Default::default(),
2430 uncommitted_diff: Default::default(),
2431 recalculate_diff_task: Default::default(),
2432 language: Default::default(),
2433 language_registry: Default::default(),
2434 recalculating_tx: postage::watch::channel_with(false).0,
2435 hunk_staging_operation_count: 0,
2436 hunk_staging_operation_count_as_of_write: 0,
2437 head_text: Default::default(),
2438 index_text: Default::default(),
2439 head_changed: Default::default(),
2440 index_changed: Default::default(),
2441 language_changed: Default::default(),
2442 conflict_updated_futures: Default::default(),
2443 conflict_set: Default::default(),
2444 reparse_conflict_markers_task: Default::default(),
2445 }
2446 }
2447
2448 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2449 self.language = buffer.read(cx).language().cloned();
2450 self.language_changed = true;
2451 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2452 }
2453
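    /// Re-parses conflict markers in the given buffer snapshot on a background task and
    /// updates the associated `ConflictSet`. The returned receiver fires once the conflict
    /// set has been updated; if there is no live conflict set, or it previously had no
    /// conflicts, no reparse is scheduled and the sender is simply dropped.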
2454 fn reparse_conflict_markers(
2455 &mut self,
2456 buffer: text::BufferSnapshot,
2457 cx: &mut Context<Self>,
2458 ) -> oneshot::Receiver<()> {
2459 let (tx, rx) = oneshot::channel();
2460
2461 let Some(conflict_set) = self
2462 .conflict_set
2463 .as_ref()
2464 .and_then(|conflict_set| conflict_set.upgrade())
2465 else {
2466 return rx;
2467 };
2468
2469 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2470 if conflict_set.has_conflict {
2471 Some(conflict_set.snapshot())
2472 } else {
2473 None
2474 }
2475 });
2476
2477 if let Some(old_snapshot) = old_snapshot {
2478 self.conflict_updated_futures.push(tx);
2479 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2480 let (snapshot, changed_range) = cx
2481 .background_spawn(async move {
2482 let new_snapshot = ConflictSet::parse(&buffer);
2483 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2484 (new_snapshot, changed_range)
2485 })
2486 .await;
2487 this.update(cx, |this, cx| {
2488 if let Some(conflict_set) = &this.conflict_set {
2489 conflict_set
2490 .update(cx, |conflict_set, cx| {
2491 conflict_set.set_snapshot(snapshot, changed_range, cx);
2492 })
2493 .ok();
2494 }
2495 let futures = std::mem::take(&mut this.conflict_updated_futures);
2496 for tx in futures {
2497 tx.send(()).ok();
2498 }
2499 })
2500 }))
2501 }
2502
2503 rx
2504 }
2505
2506 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2507 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2508 }
2509
2510 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2511 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2512 }
2513
2514 fn handle_base_texts_updated(
2515 &mut self,
2516 buffer: text::BufferSnapshot,
2517 message: proto::UpdateDiffBases,
2518 cx: &mut Context<Self>,
2519 ) {
2520 use proto::update_diff_bases::Mode;
2521
2522 let Some(mode) = Mode::from_i32(message.mode) else {
2523 return;
2524 };
2525
2526 let diff_bases_change = match mode {
2527 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2528 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2529 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2530 Mode::IndexAndHead => DiffBasesChange::SetEach {
2531 index: message.staged_text,
2532 head: message.committed_text,
2533 },
2534 };
2535
2536 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2537 }
2538
2539 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2540 if *self.recalculating_tx.borrow() {
2541 let mut rx = self.recalculating_tx.subscribe();
2542 Some(async move {
2543 loop {
2544 let is_recalculating = rx.recv().await;
2545 if is_recalculating != Some(true) {
2546 break;
2547 }
2548 }
2549 })
2550 } else {
2551 None
2552 }
2553 }
2554
2555 fn diff_bases_changed(
2556 &mut self,
2557 buffer: text::BufferSnapshot,
2558 diff_bases_change: Option<DiffBasesChange>,
2559 cx: &mut Context<Self>,
2560 ) {
2561 match diff_bases_change {
2562 Some(DiffBasesChange::SetIndex(index)) => {
2563 self.index_text = index.map(|mut index| {
2564 text::LineEnding::normalize(&mut index);
2565 Arc::new(index)
2566 });
2567 self.index_changed = true;
2568 }
2569 Some(DiffBasesChange::SetHead(head)) => {
2570 self.head_text = head.map(|mut head| {
2571 text::LineEnding::normalize(&mut head);
2572 Arc::new(head)
2573 });
2574 self.head_changed = true;
2575 }
2576 Some(DiffBasesChange::SetBoth(text)) => {
2577 let text = text.map(|mut text| {
2578 text::LineEnding::normalize(&mut text);
2579 Arc::new(text)
2580 });
2581 self.head_text = text.clone();
2582 self.index_text = text;
2583 self.head_changed = true;
2584 self.index_changed = true;
2585 }
2586 Some(DiffBasesChange::SetEach { index, head }) => {
2587 self.index_text = index.map(|mut index| {
2588 text::LineEnding::normalize(&mut index);
2589 Arc::new(index)
2590 });
2591 self.index_changed = true;
2592 self.head_text = head.map(|mut head| {
2593 text::LineEnding::normalize(&mut head);
2594 Arc::new(head)
2595 });
2596 self.head_changed = true;
2597 }
2598 None => {}
2599 }
2600
2601 self.recalculate_diffs(buffer, cx)
2602 }
2603
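    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background task.
    /// When the index and HEAD texts are the same, the unstaged diff snapshot is reused for
    /// the uncommitted diff. The recalculation is abandoned if further hunk staging
    /// operations occurred after the index was last read, so that pending state is not
    /// invalidated prematurely.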
2604 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2605 *self.recalculating_tx.borrow_mut() = true;
2606
2607 let language = self.language.clone();
2608 let language_registry = self.language_registry.clone();
2609 let unstaged_diff = self.unstaged_diff();
2610 let uncommitted_diff = self.uncommitted_diff();
2611 let head = self.head_text.clone();
2612 let index = self.index_text.clone();
2613 let index_changed = self.index_changed;
2614 let head_changed = self.head_changed;
2615 let language_changed = self.language_changed;
2616 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2617 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2618 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2619 (None, None) => true,
2620 _ => false,
2621 };
2622 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2623 log::debug!(
2624 "start recalculating diffs for buffer {}",
2625 buffer.remote_id()
2626 );
2627
2628 let mut new_unstaged_diff = None;
2629 if let Some(unstaged_diff) = &unstaged_diff {
2630 new_unstaged_diff = Some(
2631 BufferDiff::update_diff(
2632 unstaged_diff.clone(),
2633 buffer.clone(),
2634 index,
2635 index_changed,
2636 language_changed,
2637 language.clone(),
2638 language_registry.clone(),
2639 cx,
2640 )
2641 .await?,
2642 );
2643 }
2644
2645 let mut new_uncommitted_diff = None;
2646 if let Some(uncommitted_diff) = &uncommitted_diff {
2647 new_uncommitted_diff = if index_matches_head {
2648 new_unstaged_diff.clone()
2649 } else {
2650 Some(
2651 BufferDiff::update_diff(
2652 uncommitted_diff.clone(),
2653 buffer.clone(),
2654 head,
2655 head_changed,
2656 language_changed,
2657 language.clone(),
2658 language_registry.clone(),
2659 cx,
2660 )
2661 .await?,
2662 )
2663 }
2664 }
2665
2666 let cancel = this.update(cx, |this, _| {
2667 // This checks whether all pending stage/unstage operations
2668 // have quiesced (i.e. both the corresponding write and the
2669 // read of that write have completed). If not, then we cancel
2670 // this recalculation attempt to avoid invalidating pending
2671 // state too quickly; another recalculation will come along
2672 // later and clear the pending state once the state of the index has settled.
2673 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2674 *this.recalculating_tx.borrow_mut() = false;
2675 true
2676 } else {
2677 false
2678 }
2679 })?;
2680 if cancel {
2681 log::debug!(
2682 concat!(
2683                        "aborting recalculating diffs for buffer {} ",
2684                        "due to subsequent hunk operations",
2685 ),
2686 buffer.remote_id()
2687 );
2688 return Ok(());
2689 }
2690
2691 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2692 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2693 {
2694 unstaged_diff.update(cx, |diff, cx| {
2695 if language_changed {
2696 diff.language_changed(cx);
2697 }
2698 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2699 })?
2700 } else {
2701 None
2702 };
2703
2704 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2705 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2706 {
2707 uncommitted_diff.update(cx, |diff, cx| {
2708 if language_changed {
2709 diff.language_changed(cx);
2710 }
2711 diff.set_snapshot_with_secondary(
2712 new_uncommitted_diff,
2713 &buffer,
2714 unstaged_changed_range,
2715 true,
2716 cx,
2717 );
2718 })?;
2719 }
2720
2721 log::debug!(
2722 "finished recalculating diffs for buffer {}",
2723 buffer.remote_id()
2724 );
2725
2726 if let Some(this) = this.upgrade() {
2727 this.update(cx, |this, _| {
2728 this.index_changed = false;
2729 this.head_changed = false;
2730 this.language_changed = false;
2731 *this.recalculating_tx.borrow_mut() = false;
2732 })?;
2733 }
2734
2735 Ok(())
2736 }));
2737 }
2738}
2739
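/// Builds an `AskPassDelegate` that forwards authentication prompts to the downstream
/// client over RPC and relays the encrypted response back to the waiting git operation,
/// zeroizing the raw response once it has been converted.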
2740fn make_remote_delegate(
2741 this: Entity<GitStore>,
2742 project_id: u64,
2743 repository_id: RepositoryId,
2744 askpass_id: u64,
2745 cx: &mut AsyncApp,
2746) -> AskPassDelegate {
2747 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2748 this.update(cx, |this, cx| {
2749 let Some((client, _)) = this.downstream_client() else {
2750 return;
2751 };
2752 let response = client.request(proto::AskPassRequest {
2753 project_id,
2754 repository_id: repository_id.to_proto(),
2755 askpass_id,
2756 prompt,
2757 });
2758 cx.spawn(async move |_, _| {
2759 let mut response = response.await?.response;
2760 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2761 .ok();
2762 response.zeroize();
2763 anyhow::Ok(())
2764 })
2765 .detach_and_log_err(cx);
2766 })
2767 .log_err();
2768 })
2769}
2770
2771impl RepositoryId {
2772 pub fn to_proto(self) -> u64 {
2773 self.0
2774 }
2775
2776 pub fn from_proto(id: u64) -> Self {
2777 RepositoryId(id)
2778 }
2779}
2780
2781impl RepositorySnapshot {
2782 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2783 Self {
2784 id,
2785 statuses_by_path: Default::default(),
2786 work_directory_abs_path,
2787 branch: None,
2788 head_commit: None,
2789 scan_id: 0,
2790 merge: Default::default(),
2791 remote_origin_url: None,
2792 remote_upstream_url: None,
2793 stash_entries: Default::default(),
2794 path_style,
2795 }
2796 }
2797
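    /// Builds the full `UpdateRepository` message sent when this repository is first shared
    /// with a downstream client: it contains every status entry, conflicted path, and stash
    /// entry in the snapshot.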
2798 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2799 proto::UpdateRepository {
2800 branch_summary: self.branch.as_ref().map(branch_to_proto),
2801 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2802 updated_statuses: self
2803 .statuses_by_path
2804 .iter()
2805 .map(|entry| entry.to_proto())
2806 .collect(),
2807 removed_statuses: Default::default(),
2808 current_merge_conflicts: self
2809 .merge
2810 .conflicted_paths
2811 .iter()
2812 .map(|repo_path| repo_path.to_proto())
2813 .collect(),
2814 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2815 project_id,
2816 id: self.id.to_proto(),
2817 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2818 entry_ids: vec![self.id.to_proto()],
2819 scan_id: self.scan_id,
2820 is_last_update: true,
2821 stash_entries: self
2822 .stash_entries
2823 .entries
2824 .iter()
2825 .map(stash_to_proto)
2826 .collect(),
2827 }
2828 }
2829
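    /// Builds an incremental `UpdateRepository` message by comparing this snapshot with
    /// `old`: the two status lists are merged in path order to compute updated and removed
    /// entries, while branch, merge, and stash state are always sent in full.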
2830 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2831 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2832 let mut removed_statuses: Vec<String> = Vec::new();
2833
2834 let mut new_statuses = self.statuses_by_path.iter().peekable();
2835 let mut old_statuses = old.statuses_by_path.iter().peekable();
2836
2837 let mut current_new_entry = new_statuses.next();
2838 let mut current_old_entry = old_statuses.next();
2839 loop {
2840 match (current_new_entry, current_old_entry) {
2841 (Some(new_entry), Some(old_entry)) => {
2842 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2843 Ordering::Less => {
2844 updated_statuses.push(new_entry.to_proto());
2845 current_new_entry = new_statuses.next();
2846 }
2847 Ordering::Equal => {
2848 if new_entry.status != old_entry.status {
2849 updated_statuses.push(new_entry.to_proto());
2850 }
2851 current_old_entry = old_statuses.next();
2852 current_new_entry = new_statuses.next();
2853 }
2854 Ordering::Greater => {
2855 removed_statuses.push(old_entry.repo_path.to_proto());
2856 current_old_entry = old_statuses.next();
2857 }
2858 }
2859 }
2860 (None, Some(old_entry)) => {
2861 removed_statuses.push(old_entry.repo_path.to_proto());
2862 current_old_entry = old_statuses.next();
2863 }
2864 (Some(new_entry), None) => {
2865 updated_statuses.push(new_entry.to_proto());
2866 current_new_entry = new_statuses.next();
2867 }
2868 (None, None) => break,
2869 }
2870 }
2871
2872 proto::UpdateRepository {
2873 branch_summary: self.branch.as_ref().map(branch_to_proto),
2874 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2875 updated_statuses,
2876 removed_statuses,
2877 current_merge_conflicts: self
2878 .merge
2879 .conflicted_paths
2880 .iter()
2881 .map(|path| path.to_proto())
2882 .collect(),
2883 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2884 project_id,
2885 id: self.id.to_proto(),
2886 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2887 entry_ids: vec![],
2888 scan_id: self.scan_id,
2889 is_last_update: true,
2890 stash_entries: self
2891 .stash_entries
2892 .entries
2893 .iter()
2894 .map(stash_to_proto)
2895 .collect(),
2896 }
2897 }
2898
2899 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2900 self.statuses_by_path.iter().cloned()
2901 }
2902
2903 pub fn status_summary(&self) -> GitSummary {
2904 self.statuses_by_path.summary().item_summary
2905 }
2906
2907 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2908 self.statuses_by_path
2909 .get(&PathKey(path.0.clone()), ())
2910 .cloned()
2911 }
2912
2913 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2914 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2915 }
2916
2917 #[inline]
2918 fn abs_path_to_repo_path_inner(
2919 work_directory_abs_path: &Path,
2920 abs_path: &Path,
2921 path_style: PathStyle,
2922 ) -> Option<RepoPath> {
2923 abs_path
2924 .strip_prefix(&work_directory_abs_path)
2925 .ok()
2926 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2927 }
2928
2929 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2930 self.merge.conflicted_paths.contains(repo_path)
2931 }
2932
2933 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2934 let had_conflict_on_last_merge_head_change =
2935 self.merge.conflicted_paths.contains(repo_path);
2936 let has_conflict_currently = self
2937 .status_for_path(repo_path)
2938 .is_some_and(|entry| entry.status.is_conflicted());
2939 had_conflict_on_last_merge_head_change || has_conflict_currently
2940 }
2941
2942 /// This is the name that will be displayed in the repository selector for this repository.
2943 pub fn display_name(&self) -> SharedString {
2944 self.work_directory_abs_path
2945 .file_name()
2946 .unwrap_or_default()
2947 .to_string_lossy()
2948 .to_string()
2949 .into()
2950 }
2951}
2952
2953pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2954 proto::StashEntry {
2955 oid: entry.oid.as_bytes().to_vec(),
2956 message: entry.message.clone(),
2957 branch: entry.branch.clone(),
2958 index: entry.index as u64,
2959 timestamp: entry.timestamp,
2960 }
2961}
2962
2963pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2964 Ok(StashEntry {
2965 oid: Oid::from_bytes(&entry.oid)?,
2966 message: entry.message.clone(),
2967 index: entry.index as usize,
2968 branch: entry.branch.clone(),
2969 timestamp: entry.timestamp,
2970 })
2971}
2972
2973impl MergeDetails {
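    /// Reloads merge state (merge message, merge/cherry-pick/rebase/revert/apply heads, and
    /// conflicted paths) from the repository, returning the new details and whether the
    /// recorded merge heads changed. If new merge heads appear before `git status` reports
    /// any conflicts, the previous merge state is preserved and the flag remains `false`.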
2974 async fn load(
2975 backend: &Arc<dyn GitRepository>,
2976 status: &SumTree<StatusEntry>,
2977 prev_snapshot: &RepositorySnapshot,
2978 ) -> Result<(MergeDetails, bool)> {
2979 log::debug!("load merge details");
2980 let message = backend.merge_message().await;
2981 let heads = backend
2982 .revparse_batch(vec![
2983 "MERGE_HEAD".into(),
2984 "CHERRY_PICK_HEAD".into(),
2985 "REBASE_HEAD".into(),
2986 "REVERT_HEAD".into(),
2987 "APPLY_HEAD".into(),
2988 ])
2989 .await
2990 .log_err()
2991 .unwrap_or_default()
2992 .into_iter()
2993 .map(|opt| opt.map(SharedString::from))
2994 .collect::<Vec<_>>();
2995 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2996 let conflicted_paths = if merge_heads_changed {
2997 let current_conflicted_paths = TreeSet::from_ordered_entries(
2998 status
2999 .iter()
3000 .filter(|entry| entry.status.is_conflicted())
3001 .map(|entry| entry.repo_path.clone()),
3002 );
3003
3004 // It can happen that we run a scan while a lengthy merge is in progress
3005 // that will eventually result in conflicts, but before those conflicts
3006 // are reported by `git status`. Since for the moment we only care about
3007 // the merge heads state for the purposes of tracking conflicts, don't update
3008 // this state until we see some conflicts.
3009 if heads.iter().any(Option::is_some)
3010 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3011 && current_conflicted_paths.is_empty()
3012 {
3013 log::debug!("not updating merge heads because no conflicts found");
3014 return Ok((
3015 MergeDetails {
3016 message: message.map(SharedString::from),
3017 ..prev_snapshot.merge.clone()
3018 },
3019 false,
3020 ));
3021 }
3022
3023 current_conflicted_paths
3024 } else {
3025 prev_snapshot.merge.conflicted_paths.clone()
3026 };
3027 let details = MergeDetails {
3028 conflicted_paths,
3029 message: message.map(SharedString::from),
3030 heads,
3031 };
3032 Ok((details, merge_heads_changed))
3033 }
3034}
3035
3036impl Repository {
3037 pub fn snapshot(&self) -> RepositorySnapshot {
3038 self.snapshot.clone()
3039 }
3040
3041 fn local(
3042 id: RepositoryId,
3043 work_directory_abs_path: Arc<Path>,
3044 dot_git_abs_path: Arc<Path>,
3045 repository_dir_abs_path: Arc<Path>,
3046 common_dir_abs_path: Arc<Path>,
3047 project_environment: WeakEntity<ProjectEnvironment>,
3048 fs: Arc<dyn Fs>,
3049 git_store: WeakEntity<GitStore>,
3050 cx: &mut Context<Self>,
3051 ) -> Self {
3052 let snapshot =
3053 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3054 Repository {
3055 this: cx.weak_entity(),
3056 git_store,
3057 snapshot,
3058 commit_message_buffer: None,
3059 askpass_delegates: Default::default(),
3060 paths_needing_status_update: Default::default(),
3061 latest_askpass_id: 0,
3062 job_sender: Repository::spawn_local_git_worker(
3063 work_directory_abs_path,
3064 dot_git_abs_path,
3065 repository_dir_abs_path,
3066 common_dir_abs_path,
3067 project_environment,
3068 fs,
3069 cx,
3070 ),
3071 job_id: 0,
3072 active_jobs: Default::default(),
3073 }
3074 }
3075
3076 fn remote(
3077 id: RepositoryId,
3078 work_directory_abs_path: Arc<Path>,
3079 path_style: PathStyle,
3080 project_id: ProjectId,
3081 client: AnyProtoClient,
3082 git_store: WeakEntity<GitStore>,
3083 cx: &mut Context<Self>,
3084 ) -> Self {
3085 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3086 Self {
3087 this: cx.weak_entity(),
3088 snapshot,
3089 commit_message_buffer: None,
3090 git_store,
3091 paths_needing_status_update: Default::default(),
3092 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3093 askpass_delegates: Default::default(),
3094 latest_askpass_id: 0,
3095 active_jobs: Default::default(),
3096 job_id: 0,
3097 }
3098 }
3099
3100 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3101 self.git_store.upgrade()
3102 }
3103
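    /// Reloads index and HEAD texts for every open buffer in this repository that has a
    /// live unstaged or uncommitted diff, then notifies the corresponding buffer git state
    /// (and any downstream client) of the bases that actually changed. Local repositories
    /// only; for remote repositories this job logs an error and returns.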
3104 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3105 let this = cx.weak_entity();
3106 let git_store = self.git_store.clone();
3107 let _ = self.send_keyed_job(
3108 Some(GitJobKey::ReloadBufferDiffBases),
3109 None,
3110 |state, mut cx| async move {
3111 let RepositoryState::Local { backend, .. } = state else {
3112 log::error!("tried to recompute diffs for a non-local repository");
3113 return Ok(());
3114 };
3115
3116 let Some(this) = this.upgrade() else {
3117 return Ok(());
3118 };
3119
3120 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3121 git_store.update(cx, |git_store, cx| {
3122 git_store
3123 .diffs
3124 .iter()
3125 .filter_map(|(buffer_id, diff_state)| {
3126 let buffer_store = git_store.buffer_store.read(cx);
3127 let buffer = buffer_store.get(*buffer_id)?;
3128 let file = File::from_dyn(buffer.read(cx).file())?;
3129 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3130 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3131 log::debug!(
3132 "start reload diff bases for repo path {}",
3133 repo_path.as_unix_str()
3134 );
3135 diff_state.update(cx, |diff_state, _| {
3136 let has_unstaged_diff = diff_state
3137 .unstaged_diff
3138 .as_ref()
3139 .is_some_and(|diff| diff.is_upgradable());
3140 let has_uncommitted_diff = diff_state
3141 .uncommitted_diff
3142 .as_ref()
3143 .is_some_and(|set| set.is_upgradable());
3144
3145 Some((
3146 buffer,
3147 repo_path,
3148 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3149 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3150 ))
3151 })
3152 })
3153 .collect::<Vec<_>>()
3154 })
3155 })??;
3156
3157 let buffer_diff_base_changes = cx
3158 .background_spawn(async move {
3159 let mut changes = Vec::new();
3160 for (buffer, repo_path, current_index_text, current_head_text) in
3161 &repo_diff_state_updates
3162 {
3163 let index_text = if current_index_text.is_some() {
3164 backend.load_index_text(repo_path.clone()).await
3165 } else {
3166 None
3167 };
3168 let head_text = if current_head_text.is_some() {
3169 backend.load_committed_text(repo_path.clone()).await
3170 } else {
3171 None
3172 };
3173
3174 let change =
3175 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3176 (Some(current_index), Some(current_head)) => {
3177 let index_changed =
3178 index_text.as_ref() != current_index.as_deref();
3179 let head_changed =
3180 head_text.as_ref() != current_head.as_deref();
3181 if index_changed && head_changed {
3182 if index_text == head_text {
3183 Some(DiffBasesChange::SetBoth(head_text))
3184 } else {
3185 Some(DiffBasesChange::SetEach {
3186 index: index_text,
3187 head: head_text,
3188 })
3189 }
3190 } else if index_changed {
3191 Some(DiffBasesChange::SetIndex(index_text))
3192 } else if head_changed {
3193 Some(DiffBasesChange::SetHead(head_text))
3194 } else {
3195 None
3196 }
3197 }
3198 (Some(current_index), None) => {
3199 let index_changed =
3200 index_text.as_ref() != current_index.as_deref();
3201 index_changed
3202 .then_some(DiffBasesChange::SetIndex(index_text))
3203 }
3204 (None, Some(current_head)) => {
3205 let head_changed =
3206 head_text.as_ref() != current_head.as_deref();
3207 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3208 }
3209 (None, None) => None,
3210 };
3211
3212 changes.push((buffer.clone(), change))
3213 }
3214 changes
3215 })
3216 .await;
3217
3218 git_store.update(&mut cx, |git_store, cx| {
3219 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3220 let buffer_snapshot = buffer.read(cx).text_snapshot();
3221 let buffer_id = buffer_snapshot.remote_id();
3222 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3223 continue;
3224 };
3225
3226 let downstream_client = git_store.downstream_client();
3227 diff_state.update(cx, |diff_state, cx| {
3228 use proto::update_diff_bases::Mode;
3229
3230 if let Some((diff_bases_change, (client, project_id))) =
3231 diff_bases_change.clone().zip(downstream_client)
3232 {
3233 let (staged_text, committed_text, mode) = match diff_bases_change {
3234 DiffBasesChange::SetIndex(index) => {
3235 (index, None, Mode::IndexOnly)
3236 }
3237 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3238 DiffBasesChange::SetEach { index, head } => {
3239 (index, head, Mode::IndexAndHead)
3240 }
3241 DiffBasesChange::SetBoth(text) => {
3242 (None, text, Mode::IndexMatchesHead)
3243 }
3244 };
3245 client
3246 .send(proto::UpdateDiffBases {
3247 project_id: project_id.to_proto(),
3248 buffer_id: buffer_id.to_proto(),
3249 staged_text,
3250 committed_text,
3251 mode: mode as i32,
3252 })
3253 .log_err();
3254 }
3255
3256 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3257 });
3258 }
3259 })
3260 },
3261 );
3262 }
3263
3264 pub fn send_job<F, Fut, R>(
3265 &mut self,
3266 status: Option<SharedString>,
3267 job: F,
3268 ) -> oneshot::Receiver<R>
3269 where
3270 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3271 Fut: Future<Output = R> + 'static,
3272 R: Send + 'static,
3273 {
3274 self.send_keyed_job(None, status, job)
3275 }
3276
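    /// Enqueues a job on the repository's git worker, optionally tagged with a `GitJobKey`
    /// so the worker can recognize related requests. When `status` is provided, the job is
    /// recorded in `active_jobs` with its start time and message until it completes.
    /// Returns a receiver for the job's result.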
3277 fn send_keyed_job<F, Fut, R>(
3278 &mut self,
3279 key: Option<GitJobKey>,
3280 status: Option<SharedString>,
3281 job: F,
3282 ) -> oneshot::Receiver<R>
3283 where
3284 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3285 Fut: Future<Output = R> + 'static,
3286 R: Send + 'static,
3287 {
3288 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3289 let job_id = post_inc(&mut self.job_id);
3290 let this = self.this.clone();
3291 self.job_sender
3292 .unbounded_send(GitJob {
3293 key,
3294 job: Box::new(move |state, cx: &mut AsyncApp| {
3295 let job = job(state, cx.clone());
3296 cx.spawn(async move |cx| {
3297 if let Some(s) = status.clone() {
3298 this.update(cx, |this, cx| {
3299 this.active_jobs.insert(
3300 job_id,
3301 JobInfo {
3302 start: Instant::now(),
3303 message: s.clone(),
3304 },
3305 );
3306
3307 cx.notify();
3308 })
3309 .ok();
3310 }
3311 let result = job.await;
3312
3313 this.update(cx, |this, cx| {
3314 this.active_jobs.remove(&job_id);
3315 cx.notify();
3316 })
3317 .ok();
3318
3319 result_tx.send(result).ok();
3320 })
3321 }),
3322 })
3323 .ok();
3324 result_rx
3325 }
3326
3327 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3328 let Some(git_store) = self.git_store.upgrade() else {
3329 return;
3330 };
3331 let entity = cx.entity();
3332 git_store.update(cx, |git_store, cx| {
3333 let Some((&id, _)) = git_store
3334 .repositories
3335 .iter()
3336 .find(|(_, handle)| *handle == &entity)
3337 else {
3338 return;
3339 };
3340 git_store.active_repo_id = Some(id);
3341 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3342 });
3343 }
3344
3345 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3346 self.snapshot.status()
3347 }
3348
3349 pub fn cached_stash(&self) -> GitStash {
3350 self.snapshot.stash_entries.clone()
3351 }
3352
3353 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3354 let git_store = self.git_store.upgrade()?;
3355 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3356 let abs_path = self
3357 .snapshot
3358 .work_directory_abs_path
3359 .join(path.as_std_path());
3360 let abs_path = SanitizedPath::new(&abs_path);
3361 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3362 Some(ProjectPath {
3363 worktree_id: worktree.read(cx).id(),
3364 path: relative_path,
3365 })
3366 }
3367
3368 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3369 let git_store = self.git_store.upgrade()?;
3370 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3371 let abs_path = worktree_store.absolutize(path, cx)?;
3372 self.snapshot.abs_path_to_repo_path(&abs_path)
3373 }
3374
3375 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3376 other
3377 .read(cx)
3378 .snapshot
3379 .work_directory_abs_path
3380 .starts_with(&self.snapshot.work_directory_abs_path)
3381 }
3382
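    /// Returns the buffer used for composing commit messages in this repository, creating
    /// it on first use. Local repositories create the buffer directly; remote repositories
    /// ask the host to open one and wait for it to be replicated. The buffer is cached for
    /// subsequent calls.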
3383 pub fn open_commit_buffer(
3384 &mut self,
3385 languages: Option<Arc<LanguageRegistry>>,
3386 buffer_store: Entity<BufferStore>,
3387 cx: &mut Context<Self>,
3388 ) -> Task<Result<Entity<Buffer>>> {
3389 let id = self.id;
3390 if let Some(buffer) = self.commit_message_buffer.clone() {
3391 return Task::ready(Ok(buffer));
3392 }
3393 let this = cx.weak_entity();
3394
3395 let rx = self.send_job(None, move |state, mut cx| async move {
3396 let Some(this) = this.upgrade() else {
3397 bail!("git store was dropped");
3398 };
3399 match state {
3400 RepositoryState::Local { .. } => {
3401 this.update(&mut cx, |_, cx| {
3402 Self::open_local_commit_buffer(languages, buffer_store, cx)
3403 })?
3404 .await
3405 }
3406 RepositoryState::Remote { project_id, client } => {
3407 let request = client.request(proto::OpenCommitMessageBuffer {
3408 project_id: project_id.0,
3409 repository_id: id.to_proto(),
3410 });
3411 let response = request.await.context("requesting to open commit buffer")?;
3412 let buffer_id = BufferId::new(response.buffer_id)?;
3413 let buffer = buffer_store
3414 .update(&mut cx, |buffer_store, cx| {
3415 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3416 })?
3417 .await?;
3418 if let Some(language_registry) = languages {
3419 let git_commit_language =
3420 language_registry.language_for_name("Git Commit").await?;
3421 buffer.update(&mut cx, |buffer, cx| {
3422 buffer.set_language(Some(git_commit_language), cx);
3423 })?;
3424 }
3425 this.update(&mut cx, |this, _| {
3426 this.commit_message_buffer = Some(buffer.clone());
3427 })?;
3428 Ok(buffer)
3429 }
3430 }
3431 });
3432
3433 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3434 }
3435
3436 fn open_local_commit_buffer(
3437 language_registry: Option<Arc<LanguageRegistry>>,
3438 buffer_store: Entity<BufferStore>,
3439 cx: &mut Context<Self>,
3440 ) -> Task<Result<Entity<Buffer>>> {
3441 cx.spawn(async move |repository, cx| {
3442 let buffer = buffer_store
3443 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3444 .await?;
3445
3446 if let Some(language_registry) = language_registry {
3447 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3448 buffer.update(cx, |buffer, cx| {
3449 buffer.set_language(Some(git_commit_language), cx);
3450 })?;
3451 }
3452
3453 repository.update(cx, |repository, _| {
3454 repository.commit_message_buffer = Some(buffer.clone());
3455 })?;
3456 Ok(buffer)
3457 })
3458 }
3459
3460 pub fn checkout_files(
3461 &mut self,
3462 commit: &str,
3463 paths: Vec<RepoPath>,
3464 _cx: &mut App,
3465 ) -> oneshot::Receiver<Result<()>> {
3466 let commit = commit.to_string();
3467 let id = self.id;
3468
3469 self.send_job(
3470 Some(format!("git checkout {}", commit).into()),
3471 move |git_repo, _| async move {
3472 match git_repo {
3473 RepositoryState::Local {
3474 backend,
3475 environment,
3476 ..
3477 } => {
3478 backend
3479 .checkout_files(commit, paths, environment.clone())
3480 .await
3481 }
3482 RepositoryState::Remote { project_id, client } => {
3483 client
3484 .request(proto::GitCheckoutFiles {
3485 project_id: project_id.0,
3486 repository_id: id.to_proto(),
3487 commit,
3488 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3489 })
3490 .await?;
3491
3492 Ok(())
3493 }
3494 }
3495 },
3496 )
3497 }
3498
3499 pub fn reset(
3500 &mut self,
3501 commit: String,
3502 reset_mode: ResetMode,
3503 _cx: &mut App,
3504 ) -> oneshot::Receiver<Result<()>> {
3505 let id = self.id;
3506
3507 self.send_job(None, move |git_repo, _| async move {
3508 match git_repo {
3509 RepositoryState::Local {
3510 backend,
3511 environment,
3512 ..
3513 } => backend.reset(commit, reset_mode, environment).await,
3514 RepositoryState::Remote { project_id, client } => {
3515 client
3516 .request(proto::GitReset {
3517 project_id: project_id.0,
3518 repository_id: id.to_proto(),
3519 commit,
3520 mode: match reset_mode {
3521 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3522 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3523 },
3524 })
3525 .await?;
3526
3527 Ok(())
3528 }
3529 }
3530 })
3531 }
3532
3533 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3534 let id = self.id;
3535 self.send_job(None, move |git_repo, _cx| async move {
3536 match git_repo {
3537 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3538 RepositoryState::Remote { project_id, client } => {
3539 let resp = client
3540 .request(proto::GitShow {
3541 project_id: project_id.0,
3542 repository_id: id.to_proto(),
3543 commit,
3544 })
3545 .await?;
3546
3547 Ok(CommitDetails {
3548 sha: resp.sha.into(),
3549 message: resp.message.into(),
3550 commit_timestamp: resp.commit_timestamp,
3551 author_email: resp.author_email.into(),
3552 author_name: resp.author_name.into(),
3553 })
3554 }
3555 }
3556 })
3557 }
3558
3559 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3560 let id = self.id;
3561 self.send_job(None, move |git_repo, cx| async move {
3562 match git_repo {
3563 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3564 RepositoryState::Remote {
3565 client, project_id, ..
3566 } => {
3567 let response = client
3568 .request(proto::LoadCommitDiff {
3569 project_id: project_id.0,
3570 repository_id: id.to_proto(),
3571 commit,
3572 })
3573 .await?;
3574 Ok(CommitDiff {
3575 files: response
3576 .files
3577 .into_iter()
3578 .map(|file| {
3579 Ok(CommitFile {
3580 path: RepoPath::from_proto(&file.path)?,
3581 old_text: file.old_text,
3582 new_text: file.new_text,
3583 })
3584 })
3585 .collect::<Result<Vec<_>>>()?,
3586 })
3587 }
3588 }
3589 })
3590 }
3591
3592 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3593 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3594 }
3595
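    /// Stages the given paths. Open buffers for these paths whose files still exist on disk
    /// are saved first, so that the content added to the index reflects unsaved edits.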
3596 pub fn stage_entries(
3597 &self,
3598 entries: Vec<RepoPath>,
3599 cx: &mut Context<Self>,
3600 ) -> Task<anyhow::Result<()>> {
3601 if entries.is_empty() {
3602 return Task::ready(Ok(()));
3603 }
3604 let id = self.id;
3605
3606 let mut save_futures = Vec::new();
3607 if let Some(buffer_store) = self.buffer_store(cx) {
3608 buffer_store.update(cx, |buffer_store, cx| {
3609 for path in &entries {
3610 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3611 continue;
3612 };
3613 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3614 && buffer
3615 .read(cx)
3616 .file()
3617 .is_some_and(|file| file.disk_state().exists())
3618 {
3619 save_futures.push(buffer_store.save_buffer(buffer, cx));
3620 }
3621 }
3622 })
3623 }
3624
3625 cx.spawn(async move |this, cx| {
3626 for save_future in save_futures {
3627 save_future.await?;
3628 }
3629
3630 this.update(cx, |this, _| {
3631 this.send_job(None, move |git_repo, _cx| async move {
3632 match git_repo {
3633 RepositoryState::Local {
3634 backend,
3635 environment,
3636 ..
3637 } => backend.stage_paths(entries, environment.clone()).await,
3638 RepositoryState::Remote { project_id, client } => {
3639 client
3640 .request(proto::Stage {
3641 project_id: project_id.0,
3642 repository_id: id.to_proto(),
3643 paths: entries
3644 .into_iter()
3645 .map(|repo_path| repo_path.to_proto())
3646 .collect(),
3647 })
3648 .await
3649 .context("sending stage request")?;
3650
3651 Ok(())
3652 }
3653 }
3654 })
3655 })?
3656 .await??;
3657
3658 Ok(())
3659 })
3660 }
3661
3662 pub fn unstage_entries(
3663 &self,
3664 entries: Vec<RepoPath>,
3665 cx: &mut Context<Self>,
3666 ) -> Task<anyhow::Result<()>> {
3667 if entries.is_empty() {
3668 return Task::ready(Ok(()));
3669 }
3670 let id = self.id;
3671
3672 let mut save_futures = Vec::new();
3673 if let Some(buffer_store) = self.buffer_store(cx) {
3674 buffer_store.update(cx, |buffer_store, cx| {
3675 for path in &entries {
3676 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3677 continue;
3678 };
3679 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3680 && buffer
3681 .read(cx)
3682 .file()
3683 .is_some_and(|file| file.disk_state().exists())
3684 {
3685 save_futures.push(buffer_store.save_buffer(buffer, cx));
3686 }
3687 }
3688 })
3689 }
3690
3691 cx.spawn(async move |this, cx| {
3692 for save_future in save_futures {
3693 save_future.await?;
3694 }
3695
3696 this.update(cx, |this, _| {
3697 this.send_job(None, move |git_repo, _cx| async move {
3698 match git_repo {
3699 RepositoryState::Local {
3700 backend,
3701 environment,
3702 ..
3703 } => backend.unstage_paths(entries, environment).await,
3704 RepositoryState::Remote { project_id, client } => {
3705 client
3706 .request(proto::Unstage {
3707 project_id: project_id.0,
3708 repository_id: id.to_proto(),
3709 paths: entries
3710 .into_iter()
3711 .map(|repo_path| repo_path.to_proto())
3712 .collect(),
3713 })
3714 .await
3715 .context("sending unstage request")?;
3716
3717 Ok(())
3718 }
3719 }
3720 })
3721 })?
3722 .await??;
3723
3724 Ok(())
3725 })
3726 }
3727
3728 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3729 let to_stage = self
3730 .cached_status()
3731 .filter(|entry| !entry.status.staging().is_fully_staged())
3732 .map(|entry| entry.repo_path)
3733 .collect();
3734 self.stage_entries(to_stage, cx)
3735 }
3736
3737 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3738 let to_unstage = self
3739 .cached_status()
3740 .filter(|entry| entry.status.staging().has_staged())
3741 .map(|entry| entry.repo_path)
3742 .collect();
3743 self.unstage_entries(to_unstage, cx)
3744 }
3745
3746 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3747 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3748
3749 self.stash_entries(to_stash, cx)
3750 }
3751
3752 pub fn stash_entries(
3753 &mut self,
3754 entries: Vec<RepoPath>,
3755 cx: &mut Context<Self>,
3756 ) -> Task<anyhow::Result<()>> {
3757 let id = self.id;
3758
3759 cx.spawn(async move |this, cx| {
3760 this.update(cx, |this, _| {
3761 this.send_job(None, move |git_repo, _cx| async move {
3762 match git_repo {
3763 RepositoryState::Local {
3764 backend,
3765 environment,
3766 ..
3767 } => backend.stash_paths(entries, environment).await,
3768 RepositoryState::Remote { project_id, client } => {
3769 client
3770 .request(proto::Stash {
3771 project_id: project_id.0,
3772 repository_id: id.to_proto(),
3773 paths: entries
3774 .into_iter()
3775 .map(|repo_path| repo_path.to_proto())
3776 .collect(),
3777 })
3778 .await
3779 .context("sending stash request")?;
3780 Ok(())
3781 }
3782 }
3783 })
3784 })?
3785 .await??;
3786 Ok(())
3787 })
3788 }
3789
3790 pub fn stash_pop(
3791 &mut self,
3792 index: Option<usize>,
3793 cx: &mut Context<Self>,
3794 ) -> Task<anyhow::Result<()>> {
3795 let id = self.id;
3796 cx.spawn(async move |this, cx| {
3797 this.update(cx, |this, _| {
3798 this.send_job(None, move |git_repo, _cx| async move {
3799 match git_repo {
3800 RepositoryState::Local {
3801 backend,
3802 environment,
3803 ..
3804 } => backend.stash_pop(index, environment).await,
3805 RepositoryState::Remote { project_id, client } => {
3806 client
3807 .request(proto::StashPop {
3808 project_id: project_id.0,
3809 repository_id: id.to_proto(),
3810 stash_index: index.map(|i| i as u64),
3811 })
3812 .await
3813 .context("sending stash pop request")?;
3814 Ok(())
3815 }
3816 }
3817 })
3818 })?
3819 .await??;
3820 Ok(())
3821 })
3822 }
3823
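    /// Applies, rather than pops, the stash entry at `index` (or the backend's default
    /// entry when `index` is `None`).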
3824 pub fn stash_apply(
3825 &mut self,
3826 index: Option<usize>,
3827 cx: &mut Context<Self>,
3828 ) -> Task<anyhow::Result<()>> {
3829 let id = self.id;
3830 cx.spawn(async move |this, cx| {
3831 this.update(cx, |this, _| {
3832 this.send_job(None, move |git_repo, _cx| async move {
3833 match git_repo {
3834 RepositoryState::Local {
3835 backend,
3836 environment,
3837 ..
3838 } => backend.stash_apply(index, environment).await,
3839 RepositoryState::Remote { project_id, client } => {
3840 client
3841 .request(proto::StashApply {
3842 project_id: project_id.0,
3843 repository_id: id.to_proto(),
3844 stash_index: index.map(|i| i as u64),
3845 })
3846 .await
3847 .context("sending stash apply request")?;
3848 Ok(())
3849 }
3850 }
3851 })
3852 })?
3853 .await??;
3854 Ok(())
3855 })
3856 }
3857
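    /// Drops the stash entry at `index`. For local repositories, the cached stash
    /// entries are refreshed afterwards and downstream clients are notified.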
3858 pub fn stash_drop(
3859 &mut self,
3860 index: Option<usize>,
3861 cx: &mut Context<Self>,
3862 ) -> oneshot::Receiver<anyhow::Result<()>> {
3863 let id = self.id;
3864 let updates_tx = self
3865 .git_store()
3866 .and_then(|git_store| match &git_store.read(cx).state {
3867 GitStoreState::Local { downstream, .. } => downstream
3868 .as_ref()
3869 .map(|downstream| downstream.updates_tx.clone()),
3870 _ => None,
3871 });
3872 let this = cx.weak_entity();
3873 self.send_job(None, move |git_repo, mut cx| async move {
3874 match git_repo {
3875 RepositoryState::Local {
3876 backend,
3877 environment,
3878 ..
3879 } => {
3880 let result = backend.stash_drop(index, environment).await;
3881 if result.is_ok()
3882 && let Ok(stash_entries) = backend.stash_entries().await
3883 {
3884 let snapshot = this.update(&mut cx, |this, cx| {
3885 this.snapshot.stash_entries = stash_entries;
3886 let snapshot = this.snapshot.clone();
3887 cx.emit(RepositoryEvent::Updated {
3888 full_scan: false,
3889 new_instance: false,
3890 });
3891 snapshot
3892 })?;
3893 if let Some(updates_tx) = updates_tx {
3894 updates_tx
3895 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3896 .ok();
3897 }
3898 }
3899
3900 result
3901 }
3902 RepositoryState::Remote { project_id, client } => {
3903 client
3904 .request(proto::StashDrop {
3905 project_id: project_id.0,
3906 repository_id: id.to_proto(),
3907 stash_index: index.map(|i| i as u64),
3908 })
3909 .await
3910 .context("sending stash pop request")?;
3911 Ok(())
3912 }
3913 }
3914 })
3915 }
3916
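    /// Commits the staged changes with the given message, optionally overriding the
    /// author via `name_and_email` and amending or signing off per `options`.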
3917 pub fn commit(
3918 &mut self,
3919 message: SharedString,
3920 name_and_email: Option<(SharedString, SharedString)>,
3921 options: CommitOptions,
3922 _cx: &mut App,
3923 ) -> oneshot::Receiver<Result<()>> {
3924 let id = self.id;
3925
3926 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3927 match git_repo {
3928 RepositoryState::Local {
3929 backend,
3930 environment,
3931 ..
3932 } => {
3933 backend
3934 .commit(message, name_and_email, options, environment)
3935 .await
3936 }
3937 RepositoryState::Remote { project_id, client } => {
3938 let (name, email) = name_and_email.unzip();
3939 client
3940 .request(proto::Commit {
3941 project_id: project_id.0,
3942 repository_id: id.to_proto(),
3943 message: String::from(message),
3944 name: name.map(String::from),
3945 email: email.map(String::from),
3946 options: Some(proto::commit::CommitOptions {
3947 amend: options.amend,
3948 signoff: options.signoff,
3949 }),
3950 })
3951 .await
3952 .context("sending commit request")?;
3953
3954 Ok(())
3955 }
3956 }
3957 })
3958 }
3959
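    /// Runs `git fetch` with the given options, using `askpass` to answer any
    /// credential prompts.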
3960 pub fn fetch(
3961 &mut self,
3962 fetch_options: FetchOptions,
3963 askpass: AskPassDelegate,
3964 _cx: &mut App,
3965 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3966 let askpass_delegates = self.askpass_delegates.clone();
3967 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3968 let id = self.id;
3969
3970 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3971 match git_repo {
3972 RepositoryState::Local {
3973 backend,
3974 environment,
3975 ..
3976 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3977 RepositoryState::Remote { project_id, client } => {
3978 askpass_delegates.lock().insert(askpass_id, askpass);
3979 let _defer = util::defer(|| {
3980 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3981 debug_assert!(askpass_delegate.is_some());
3982 });
3983
3984 let response = client
3985 .request(proto::Fetch {
3986 project_id: project_id.0,
3987 repository_id: id.to_proto(),
3988 askpass_id,
3989 remote: fetch_options.to_proto(),
3990 })
3991 .await
3992 .context("sending fetch request")?;
3993
3994 Ok(RemoteCommandOutput {
3995 stdout: response.stdout,
3996 stderr: response.stderr,
3997 })
3998 }
3999 }
4000 })
4001 }
4002
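    /// Pushes `branch` to `remote`. For local repositories, the head branch is
    /// re-read on success and downstream clients are notified of the update.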
4003 pub fn push(
4004 &mut self,
4005 branch: SharedString,
4006 remote: SharedString,
4007 options: Option<PushOptions>,
4008 askpass: AskPassDelegate,
4009 cx: &mut Context<Self>,
4010 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4011 let askpass_delegates = self.askpass_delegates.clone();
4012 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4013 let id = self.id;
4014
4015 let args = options
4016 .map(|option| match option {
4017 PushOptions::SetUpstream => " --set-upstream",
4018 PushOptions::Force => " --force-with-lease",
4019 })
4020 .unwrap_or("");
4021
4022 let updates_tx = self
4023 .git_store()
4024 .and_then(|git_store| match &git_store.read(cx).state {
4025 GitStoreState::Local { downstream, .. } => downstream
4026 .as_ref()
4027 .map(|downstream| downstream.updates_tx.clone()),
4028 _ => None,
4029 });
4030
4031 let this = cx.weak_entity();
4032 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4034 move |git_repo, mut cx| async move {
4035 match git_repo {
4036 RepositoryState::Local {
4037 backend,
4038 environment,
4039 ..
4040 } => {
4041 let result = backend
4042 .push(
4043 branch.to_string(),
4044 remote.to_string(),
4045 options,
4046 askpass,
4047 environment.clone(),
4048 cx.clone(),
4049 )
4050 .await;
4051 if result.is_ok() {
4052 let branches = backend.branches().await?;
4053 let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after push is {branch:?}");
4055 let snapshot = this.update(&mut cx, |this, cx| {
4056 this.snapshot.branch = branch;
4057 let snapshot = this.snapshot.clone();
4058 cx.emit(RepositoryEvent::Updated {
4059 full_scan: false,
4060 new_instance: false,
4061 });
4062 snapshot
4063 })?;
4064 if let Some(updates_tx) = updates_tx {
4065 updates_tx
4066 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4067 .ok();
4068 }
4069 }
4070 result
4071 }
4072 RepositoryState::Remote { project_id, client } => {
4073 askpass_delegates.lock().insert(askpass_id, askpass);
4074 let _defer = util::defer(|| {
4075 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4076 debug_assert!(askpass_delegate.is_some());
4077 });
4078 let response = client
4079 .request(proto::Push {
4080 project_id: project_id.0,
4081 repository_id: id.to_proto(),
4082 askpass_id,
4083 branch_name: branch.to_string(),
4084 remote_name: remote.to_string(),
4085 options: options.map(|options| match options {
4086 PushOptions::Force => proto::push::PushOptions::Force,
4087 PushOptions::SetUpstream => {
4088 proto::push::PushOptions::SetUpstream
4089 }
4090 }
4091 as i32),
4092 })
4093 .await
4094 .context("sending push request")?;
4095
4096 Ok(RemoteCommandOutput {
4097 stdout: response.stdout,
4098 stderr: response.stderr,
4099 })
4100 }
4101 }
4102 },
4103 )
4104 }
4105
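    /// Pulls `branch` from `remote`, using `askpass` to answer any credential prompts.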
4106 pub fn pull(
4107 &mut self,
4108 branch: SharedString,
4109 remote: SharedString,
4110 askpass: AskPassDelegate,
4111 _cx: &mut App,
4112 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4113 let askpass_delegates = self.askpass_delegates.clone();
4114 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4115 let id = self.id;
4116
4117 self.send_job(
4118 Some(format!("git pull {} {}", remote, branch).into()),
4119 move |git_repo, cx| async move {
4120 match git_repo {
4121 RepositoryState::Local {
4122 backend,
4123 environment,
4124 ..
4125 } => {
4126 backend
4127 .pull(
4128 branch.to_string(),
4129 remote.to_string(),
4130 askpass,
4131 environment.clone(),
4132 cx,
4133 )
4134 .await
4135 }
4136 RepositoryState::Remote { project_id, client } => {
4137 askpass_delegates.lock().insert(askpass_id, askpass);
4138 let _defer = util::defer(|| {
4139 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4140 debug_assert!(askpass_delegate.is_some());
4141 });
4142 let response = client
4143 .request(proto::Pull {
4144 project_id: project_id.0,
4145 repository_id: id.to_proto(),
4146 askpass_id,
4147 branch_name: branch.to_string(),
4148 remote_name: remote.to_string(),
4149 })
4150 .await
4151 .context("sending pull request")?;
4152
4153 Ok(RemoteCommandOutput {
4154 stdout: response.stdout,
4155 stderr: response.stderr,
4156 })
4157 }
4158 }
4159 },
4160 )
4161 }
4162
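    /// Queues a keyed job that writes `content` to the index entry for `path`;
    /// if a newer write for the same path is queued behind it, the older job is skipped.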
4163 fn spawn_set_index_text_job(
4164 &mut self,
4165 path: RepoPath,
4166 content: Option<String>,
4167 hunk_staging_operation_count: Option<usize>,
4168 cx: &mut Context<Self>,
4169 ) -> oneshot::Receiver<anyhow::Result<()>> {
4170 let id = self.id;
4171 let this = cx.weak_entity();
4172 let git_store = self.git_store.clone();
4173 self.send_keyed_job(
4174 Some(GitJobKey::WriteIndex(path.clone())),
4175 None,
4176 move |git_repo, mut cx| async move {
4177 log::debug!(
4178 "start updating index text for buffer {}",
4179 path.as_unix_str()
4180 );
4181 match git_repo {
4182 RepositoryState::Local {
4183 backend,
4184 environment,
4185 ..
4186 } => {
4187 backend
4188 .set_index_text(path.clone(), content, environment.clone())
4189 .await?;
4190 }
4191 RepositoryState::Remote { project_id, client } => {
4192 client
4193 .request(proto::SetIndexText {
4194 project_id: project_id.0,
4195 repository_id: id.to_proto(),
4196 path: path.to_proto(),
4197 text: content,
4198 })
4199 .await?;
4200 }
4201 }
4202 log::debug!(
4203 "finish updating index text for buffer {}",
4204 path.as_unix_str()
4205 );
4206
4207 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4208 let project_path = this
4209 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4210 .ok()
4211 .flatten();
4212 git_store.update(&mut cx, |git_store, cx| {
4213 let buffer_id = git_store
4214 .buffer_store
4215 .read(cx)
4216 .get_by_path(&project_path?)?
4217 .read(cx)
4218 .remote_id();
4219 let diff_state = git_store.diffs.get(&buffer_id)?;
4220 diff_state.update(cx, |diff_state, _| {
4221 diff_state.hunk_staging_operation_count_as_of_write =
4222 hunk_staging_operation_count;
4223 });
4224 Some(())
4225 })?;
4226 }
4227 Ok(())
4228 },
4229 )
4230 }
4231
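    /// Returns the repository's remotes; `branch_name`, when provided, is passed along
    /// to the backend or host to scope the lookup.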
4232 pub fn get_remotes(
4233 &mut self,
4234 branch_name: Option<String>,
4235 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4236 let id = self.id;
4237 self.send_job(None, move |repo, _cx| async move {
4238 match repo {
4239 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4240 RepositoryState::Remote { project_id, client } => {
4241 let response = client
4242 .request(proto::GetRemotes {
4243 project_id: project_id.0,
4244 repository_id: id.to_proto(),
4245 branch_name,
4246 })
4247 .await?;
4248
4249 let remotes = response
4250 .remotes
4251 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4255 .collect();
4256
4257 Ok(remotes)
4258 }
4259 }
4260 })
4261 }
4262
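    /// Lists all branches in the repository.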
4263 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4264 let id = self.id;
4265 self.send_job(None, move |repo, _| async move {
4266 match repo {
4267 RepositoryState::Local { backend, .. } => backend.branches().await,
4268 RepositoryState::Remote { project_id, client } => {
4269 let response = client
4270 .request(proto::GitGetBranches {
4271 project_id: project_id.0,
4272 repository_id: id.to_proto(),
4273 })
4274 .await?;
4275
4276 let branches = response
4277 .branches
4278 .into_iter()
4279 .map(|branch| proto_to_branch(&branch))
4280 .collect();
4281
4282 Ok(branches)
4283 }
4284 }
4285 })
4286 }
4287
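    /// Returns the repository's default branch, if one can be determined.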
4288 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4289 let id = self.id;
4290 self.send_job(None, move |repo, _| async move {
4291 match repo {
4292 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4293 RepositoryState::Remote { project_id, client } => {
4294 let response = client
4295 .request(proto::GetDefaultBranch {
4296 project_id: project_id.0,
4297 repository_id: id.to_proto(),
4298 })
4299 .await?;
4300
4301 anyhow::Ok(response.branch.map(SharedString::from))
4302 }
4303 }
4304 })
4305 }
4306
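    /// Produces a textual diff of HEAD against either the index or the working tree,
    /// depending on `diff_type`.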
4307 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4308 let id = self.id;
4309 self.send_job(None, move |repo, _cx| async move {
4310 match repo {
4311 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4312 RepositoryState::Remote { project_id, client } => {
4313 let response = client
4314 .request(proto::GitDiff {
4315 project_id: project_id.0,
4316 repository_id: id.to_proto(),
4317 diff_type: match diff_type {
4318 DiffType::HeadToIndex => {
4319 proto::git_diff::DiffType::HeadToIndex.into()
4320 }
4321 DiffType::HeadToWorktree => {
4322 proto::git_diff::DiffType::HeadToWorktree.into()
4323 }
4324 },
4325 })
4326 .await?;
4327
4328 Ok(response.diff)
4329 }
4330 }
4331 })
4332 }
4333
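    /// Creates a new branch named `branch_name`.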
4334 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4335 let id = self.id;
4336 self.send_job(
4337 Some(format!("git switch -c {branch_name}").into()),
4338 move |repo, _cx| async move {
4339 match repo {
4340 RepositoryState::Local { backend, .. } => {
4341 backend.create_branch(branch_name).await
4342 }
4343 RepositoryState::Remote { project_id, client } => {
4344 client
4345 .request(proto::GitCreateBranch {
4346 project_id: project_id.0,
4347 repository_id: id.to_proto(),
4348 branch_name,
4349 })
4350 .await?;
4351
4352 Ok(())
4353 }
4354 }
4355 },
4356 )
4357 }
4358
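    /// Switches to the branch named `branch_name`.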
4359 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4360 let id = self.id;
4361 self.send_job(
4362 Some(format!("git switch {branch_name}").into()),
4363 move |repo, _cx| async move {
4364 match repo {
4365 RepositoryState::Local { backend, .. } => {
4366 backend.change_branch(branch_name).await
4367 }
4368 RepositoryState::Remote { project_id, client } => {
4369 client
4370 .request(proto::GitChangeBranch {
4371 project_id: project_id.0,
4372 repository_id: id.to_proto(),
4373 branch_name,
4374 })
4375 .await?;
4376
4377 Ok(())
4378 }
4379 }
4380 },
4381 )
4382 }
4383
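    /// Renames the branch `branch` to `new_name`.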
4384 pub fn rename_branch(
4385 &mut self,
4386 branch: String,
4387 new_name: String,
4388 ) -> oneshot::Receiver<Result<()>> {
4389 let id = self.id;
4390 self.send_job(
4391 Some(format!("git branch -m {branch} {new_name}").into()),
4392 move |repo, _cx| async move {
4393 match repo {
4394 RepositoryState::Local { backend, .. } => {
4395 backend.rename_branch(branch, new_name).await
4396 }
4397 RepositoryState::Remote { project_id, client } => {
4398 client
4399 .request(proto::GitRenameBranch {
4400 project_id: project_id.0,
4401 repository_id: id.to_proto(),
4402 branch,
4403 new_name,
4404 })
4405 .await?;
4406
4407 Ok(())
4408 }
4409 }
4410 },
4411 )
4412 }
4413
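    /// Returns the branches that the current commit has already been pushed to.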
4414 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4415 let id = self.id;
4416 self.send_job(None, move |repo, _cx| async move {
4417 match repo {
4418 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4419 RepositoryState::Remote { project_id, client } => {
4420 let response = client
4421 .request(proto::CheckForPushedCommits {
4422 project_id: project_id.0,
4423 repository_id: id.to_proto(),
4424 })
4425 .await?;
4426
4427 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4428
4429 Ok(branches)
4430 }
4431 }
4432 })
4433 }
4434
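    /// Captures a checkpoint of the repository's current state. Only implemented for
    /// local repositories.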
4435 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4436 self.send_job(None, |repo, _cx| async move {
4437 match repo {
4438 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4439 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4440 }
4441 })
4442 }
4443
4444 pub fn restore_checkpoint(
4445 &mut self,
4446 checkpoint: GitRepositoryCheckpoint,
4447 ) -> oneshot::Receiver<Result<()>> {
4448 self.send_job(None, move |repo, _cx| async move {
4449 match repo {
4450 RepositoryState::Local { backend, .. } => {
4451 backend.restore_checkpoint(checkpoint).await
4452 }
4453 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4454 }
4455 })
4456 }
4457
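    /// Applies an `UpdateRepository` message received from the host, replacing the
    /// cached branch, merge, stash, and status information.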
4458 pub(crate) fn apply_remote_update(
4459 &mut self,
4460 update: proto::UpdateRepository,
4461 is_new: bool,
4462 cx: &mut Context<Self>,
4463 ) -> Result<()> {
4464 let conflicted_paths = TreeSet::from_ordered_entries(
4465 update
4466 .current_merge_conflicts
4467 .into_iter()
4468 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4469 );
4470 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4471 self.snapshot.head_commit = update
4472 .head_commit_details
4473 .as_ref()
4474 .map(proto_to_commit_details);
4475
4476 self.snapshot.merge.conflicted_paths = conflicted_paths;
4477 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4478 self.snapshot.stash_entries = GitStash {
4479 entries: update
4480 .stash_entries
4481 .iter()
4482 .filter_map(|entry| proto_to_stash(entry).ok())
4483 .collect(),
4484 };
4485
4486 let edits = update
4487 .removed_statuses
4488 .into_iter()
4489 .filter_map(|path| {
4490 Some(sum_tree::Edit::Remove(PathKey(
4491 RelPath::from_proto(&path).log_err()?,
4492 )))
4493 })
4494 .chain(
4495 update
4496 .updated_statuses
4497 .into_iter()
4498 .filter_map(|updated_status| {
4499 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4500 }),
4501 )
4502 .collect::<Vec<_>>();
4503 self.snapshot.statuses_by_path.edit(edits, ());
4504 if update.is_last_update {
4505 self.snapshot.scan_id = update.scan_id;
4506 }
4507 cx.emit(RepositoryEvent::Updated {
4508 full_scan: true,
4509 new_instance: is_new,
4510 });
4511 Ok(())
4512 }
4513
4514 pub fn compare_checkpoints(
4515 &mut self,
4516 left: GitRepositoryCheckpoint,
4517 right: GitRepositoryCheckpoint,
4518 ) -> oneshot::Receiver<Result<bool>> {
4519 self.send_job(None, move |repo, _cx| async move {
4520 match repo {
4521 RepositoryState::Local { backend, .. } => {
4522 backend.compare_checkpoints(left, right).await
4523 }
4524 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4525 }
4526 })
4527 }
4528
4529 pub fn diff_checkpoints(
4530 &mut self,
4531 base_checkpoint: GitRepositoryCheckpoint,
4532 target_checkpoint: GitRepositoryCheckpoint,
4533 ) -> oneshot::Receiver<Result<String>> {
4534 self.send_job(None, move |repo, _cx| async move {
4535 match repo {
4536 RepositoryState::Local { backend, .. } => {
4537 backend
4538 .diff_checkpoints(base_checkpoint, target_checkpoint)
4539 .await
4540 }
4541 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4542 }
4543 })
4544 }
4545
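    /// Schedules a full git status scan as a keyed job, coalescing with any scan
    /// already queued, and forwards the resulting snapshot to downstream clients.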
4546 fn schedule_scan(
4547 &mut self,
4548 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4549 cx: &mut Context<Self>,
4550 ) {
4551 let this = cx.weak_entity();
4552 let _ = self.send_keyed_job(
4553 Some(GitJobKey::ReloadGitState),
4554 None,
4555 |state, mut cx| async move {
4556 log::debug!("run scheduled git status scan");
4557
4558 let Some(this) = this.upgrade() else {
4559 return Ok(());
4560 };
4561 let RepositoryState::Local { backend, .. } = state else {
4562 bail!("not a local repository")
4563 };
4564 let (snapshot, events) = this
4565 .update(&mut cx, |this, _| {
4566 this.paths_needing_status_update.clear();
4567 compute_snapshot(
4568 this.id,
4569 this.work_directory_abs_path.clone(),
4570 this.snapshot.clone(),
4571 backend.clone(),
4572 )
4573 })?
4574 .await?;
4575 this.update(&mut cx, |this, cx| {
4576 this.snapshot = snapshot.clone();
4577 for event in events {
4578 cx.emit(event);
4579 }
4580 })?;
4581 if let Some(updates_tx) = updates_tx {
4582 updates_tx
4583 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4584 .ok();
4585 }
4586 Ok(())
4587 },
4588 );
4589 }
4590
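    /// Spawns the background worker that executes git jobs against a local repository,
    /// first resolving the directory environment and locating a system git binary.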
4591 fn spawn_local_git_worker(
4592 work_directory_abs_path: Arc<Path>,
4593 dot_git_abs_path: Arc<Path>,
4594 _repository_dir_abs_path: Arc<Path>,
4595 _common_dir_abs_path: Arc<Path>,
4596 project_environment: WeakEntity<ProjectEnvironment>,
4597 fs: Arc<dyn Fs>,
4598 cx: &mut Context<Self>,
4599 ) -> mpsc::UnboundedSender<GitJob> {
4600 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4601
4602 cx.spawn(async move |_, cx| {
4603 let environment = project_environment
4604 .upgrade()
4605 .context("missing project environment")?
4606 .update(cx, |project_environment, cx| {
4607 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4608 })?
4609 .await
4610 .unwrap_or_else(|| {
4611 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4612 HashMap::default()
4613 });
4614 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4615 let backend = cx
4616 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
4619 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4620 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4621 })
4622 .await?;
4623
4624 if let Some(git_hosting_provider_registry) =
4625 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4626 {
4627 git_hosting_providers::register_additional_providers(
4628 git_hosting_provider_registry,
4629 backend.clone(),
4630 );
4631 }
4632
4633 let state = RepositoryState::Local {
4634 backend,
4635 environment: Arc::new(environment),
4636 };
4637 let mut jobs = VecDeque::new();
4638 loop {
4639 while let Ok(Some(next_job)) = job_rx.try_next() {
4640 jobs.push_back(next_job);
4641 }
4642
4643 if let Some(job) = jobs.pop_front() {
4644 if let Some(current_key) = &job.key
4645 && jobs
4646 .iter()
4647 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4648 {
4649 continue;
4650 }
4651 (job.job)(state.clone(), cx).await;
4652 } else if let Some(job) = job_rx.next().await {
4653 jobs.push_back(job);
4654 } else {
4655 break;
4656 }
4657 }
4658 anyhow::Ok(())
4659 })
4660 .detach_and_log_err(cx);
4661
4662 job_tx
4663 }
4664
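    /// Spawns the background worker that forwards git jobs to a remote project host.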
4665 fn spawn_remote_git_worker(
4666 project_id: ProjectId,
4667 client: AnyProtoClient,
4668 cx: &mut Context<Self>,
4669 ) -> mpsc::UnboundedSender<GitJob> {
4670 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4671
4672 cx.spawn(async move |_, cx| {
4673 let state = RepositoryState::Remote { project_id, client };
4674 let mut jobs = VecDeque::new();
4675 loop {
4676 while let Ok(Some(next_job)) = job_rx.try_next() {
4677 jobs.push_back(next_job);
4678 }
4679
4680 if let Some(job) = jobs.pop_front() {
4681 if let Some(current_key) = &job.key
4682 && jobs
4683 .iter()
4684 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4685 {
4686 continue;
4687 }
4688 (job.job)(state.clone(), cx).await;
4689 } else if let Some(job) = job_rx.next().await {
4690 jobs.push_back(job);
4691 } else {
4692 break;
4693 }
4694 }
4695 anyhow::Ok(())
4696 })
4697 .detach_and_log_err(cx);
4698
4699 job_tx
4700 }
4701
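    /// Loads the text of `repo_path` as it appears in the index, if any.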
4702 fn load_staged_text(
4703 &mut self,
4704 buffer_id: BufferId,
4705 repo_path: RepoPath,
4706 cx: &App,
4707 ) -> Task<Result<Option<String>>> {
4708 let rx = self.send_job(None, move |state, _| async move {
4709 match state {
4710 RepositoryState::Local { backend, .. } => {
4711 anyhow::Ok(backend.load_index_text(repo_path).await)
4712 }
4713 RepositoryState::Remote { project_id, client } => {
4714 let response = client
4715 .request(proto::OpenUnstagedDiff {
4716 project_id: project_id.to_proto(),
4717 buffer_id: buffer_id.to_proto(),
4718 })
4719 .await?;
4720 Ok(response.staged_text)
4721 }
4722 }
4723 });
4724 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4725 }
4726
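    /// Loads the HEAD and index texts for `repo_path`, collapsing them into a single
    /// value when the index matches HEAD.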
4727 fn load_committed_text(
4728 &mut self,
4729 buffer_id: BufferId,
4730 repo_path: RepoPath,
4731 cx: &App,
4732 ) -> Task<Result<DiffBasesChange>> {
4733 let rx = self.send_job(None, move |state, _| async move {
4734 match state {
4735 RepositoryState::Local { backend, .. } => {
4736 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4737 let staged_text = backend.load_index_text(repo_path).await;
4738 let diff_bases_change = if committed_text == staged_text {
4739 DiffBasesChange::SetBoth(committed_text)
4740 } else {
4741 DiffBasesChange::SetEach {
4742 index: staged_text,
4743 head: committed_text,
4744 }
4745 };
4746 anyhow::Ok(diff_bases_change)
4747 }
4748 RepositoryState::Remote { project_id, client } => {
4749 use proto::open_uncommitted_diff_response::Mode;
4750
4751 let response = client
4752 .request(proto::OpenUncommittedDiff {
4753 project_id: project_id.to_proto(),
4754 buffer_id: buffer_id.to_proto(),
4755 })
4756 .await?;
4757 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4758 let bases = match mode {
4759 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4760 Mode::IndexAndHead => DiffBasesChange::SetEach {
4761 head: response.committed_text,
4762 index: response.staged_text,
4763 },
4764 };
4765 Ok(bases)
4766 }
4767 }
4768 });
4769
4770 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4771 }
4772
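    /// Recomputes git statuses for the given paths, updating the snapshot and
    /// notifying downstream clients when anything changed.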
4773 fn paths_changed(
4774 &mut self,
4775 paths: Vec<RepoPath>,
4776 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4777 cx: &mut Context<Self>,
4778 ) {
4779 self.paths_needing_status_update.extend(paths);
4780
4781 let this = cx.weak_entity();
4782 let _ = self.send_keyed_job(
4783 Some(GitJobKey::RefreshStatuses),
4784 None,
4785 |state, mut cx| async move {
4786 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4787 (
4788 this.snapshot.clone(),
4789 mem::take(&mut this.paths_needing_status_update),
4790 )
4791 })?;
4792 let RepositoryState::Local { backend, .. } = state else {
4793 bail!("not a local repository")
4794 };
4795
4796 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4797 if paths.is_empty() {
4798 return Ok(());
4799 }
4800 let statuses = backend.status(&paths).await?;
4801 let stash_entries = backend.stash_entries().await?;
4802
4803 let changed_path_statuses = cx
4804 .background_spawn(async move {
4805 let mut changed_path_statuses = Vec::new();
4806 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4807 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4808
4809 for (repo_path, status) in &*statuses.entries {
4810 changed_paths.remove(repo_path);
4811 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4812 && cursor.item().is_some_and(|entry| entry.status == *status)
4813 {
4814 continue;
4815 }
4816
4817 changed_path_statuses.push(Edit::Insert(StatusEntry {
4818 repo_path: repo_path.clone(),
4819 status: *status,
4820 }));
4821 }
4822 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4823 for path in changed_paths.into_iter() {
4824 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4825 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4826 }
4827 }
4828 changed_path_statuses
4829 })
4830 .await;
4831
4832 this.update(&mut cx, |this, cx| {
4833 let needs_update = !changed_path_statuses.is_empty()
4834 || this.snapshot.stash_entries != stash_entries;
4835 this.snapshot.stash_entries = stash_entries;
4836 if !changed_path_statuses.is_empty() {
4837 this.snapshot
4838 .statuses_by_path
4839 .edit(changed_path_statuses, ());
4840 this.snapshot.scan_id += 1;
4841 }
4842
4843 if needs_update {
4844 cx.emit(RepositoryEvent::Updated {
4845 full_scan: false,
4846 new_instance: false,
4847 });
4848 }
4849
4850 if let Some(updates_tx) = updates_tx {
4851 updates_tx
4852 .unbounded_send(DownstreamUpdate::UpdateRepository(
4853 this.snapshot.clone(),
4854 ))
4855 .ok();
4856 }
4857 cx.emit(RepositoryEvent::PathsChanged);
4858 })
4859 },
4860 );
4861 }
4862
    /// Returns the currently running git command and when it started, if any.
4864 pub fn current_job(&self) -> Option<JobInfo> {
4865 self.active_jobs.values().next().cloned()
4866 }
4867
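    /// Enqueues a no-op job; the returned receiver resolves once the job queue has
    /// been processed up to and including it.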
4868 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4869 self.send_job(None, |_, _| async {})
4870 }
4871}
4872
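/// Builds a permalink to a file vendored in the Rust registry cache by reading the
/// crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the upstream repository
/// and commit.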
4873fn get_permalink_in_rust_registry_src(
4874 provider_registry: Arc<GitHostingProviderRegistry>,
4875 path: PathBuf,
4876 selection: Range<u32>,
4877) -> Result<url::Url> {
4878 #[derive(Deserialize)]
4879 struct CargoVcsGit {
4880 sha1: String,
4881 }
4882
4883 #[derive(Deserialize)]
4884 struct CargoVcsInfo {
4885 git: CargoVcsGit,
4886 path_in_vcs: String,
4887 }
4888
4889 #[derive(Deserialize)]
4890 struct CargoPackage {
4891 repository: String,
4892 }
4893
4894 #[derive(Deserialize)]
4895 struct CargoToml {
4896 package: CargoPackage,
4897 }
4898
4899 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4900 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4901 Some((dir, json))
4902 }) else {
4903 bail!("No .cargo_vcs_info.json found in parent directories")
4904 };
4905 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4906 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4907 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4908 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4909 .context("parsing package.repository field of manifest")?;
4910 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4911 let permalink = provider.build_permalink(
4912 remote,
4913 BuildPermalinkParams {
4914 sha: &cargo_vcs_info.git.sha1,
4915 path: &path.to_string_lossy(),
4916 selection: Some(selection),
4917 },
4918 );
4919 Ok(permalink)
4920}
4921
4922fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4923 let Some(blame) = blame else {
4924 return proto::BlameBufferResponse {
4925 blame_response: None,
4926 };
4927 };
4928
4929 let entries = blame
4930 .entries
4931 .into_iter()
4932 .map(|entry| proto::BlameEntry {
4933 sha: entry.sha.as_bytes().into(),
4934 start_line: entry.range.start,
4935 end_line: entry.range.end,
4936 original_line_number: entry.original_line_number,
4937 author: entry.author,
4938 author_mail: entry.author_mail,
4939 author_time: entry.author_time,
4940 author_tz: entry.author_tz,
4941 committer: entry.committer_name,
4942 committer_mail: entry.committer_email,
4943 committer_time: entry.committer_time,
4944 committer_tz: entry.committer_tz,
4945 summary: entry.summary,
4946 previous: entry.previous,
4947 filename: entry.filename,
4948 })
4949 .collect::<Vec<_>>();
4950
4951 let messages = blame
4952 .messages
4953 .into_iter()
4954 .map(|(oid, message)| proto::CommitMessage {
4955 oid: oid.as_bytes().into(),
4956 message,
4957 })
4958 .collect::<Vec<_>>();
4959
4960 proto::BlameBufferResponse {
4961 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4962 entries,
4963 messages,
4964 remote_url: blame.remote_url,
4965 }),
4966 }
4967}
4968
4969fn deserialize_blame_buffer_response(
4970 response: proto::BlameBufferResponse,
4971) -> Option<git::blame::Blame> {
4972 let response = response.blame_response?;
4973 let entries = response
4974 .entries
4975 .into_iter()
4976 .filter_map(|entry| {
4977 Some(git::blame::BlameEntry {
4978 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4979 range: entry.start_line..entry.end_line,
4980 original_line_number: entry.original_line_number,
4981 committer_name: entry.committer,
4982 committer_time: entry.committer_time,
4983 committer_tz: entry.committer_tz,
4984 committer_email: entry.committer_mail,
4985 author: entry.author,
4986 author_mail: entry.author_mail,
4987 author_time: entry.author_time,
4988 author_tz: entry.author_tz,
4989 summary: entry.summary,
4990 previous: entry.previous,
4991 filename: entry.filename,
4992 })
4993 })
4994 .collect::<Vec<_>>();
4995
4996 let messages = response
4997 .messages
4998 .into_iter()
4999 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5000 .collect::<HashMap<_, _>>();
5001
5002 Some(Blame {
5003 entries,
5004 messages,
5005 remote_url: response.remote_url,
5006 })
5007}
5008
5009fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5010 proto::Branch {
5011 is_head: branch.is_head,
5012 ref_name: branch.ref_name.to_string(),
5013 unix_timestamp: branch
5014 .most_recent_commit
5015 .as_ref()
5016 .map(|commit| commit.commit_timestamp as u64),
5017 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5018 ref_name: upstream.ref_name.to_string(),
5019 tracking: upstream
5020 .tracking
5021 .status()
5022 .map(|upstream| proto::UpstreamTracking {
5023 ahead: upstream.ahead as u64,
5024 behind: upstream.behind as u64,
5025 }),
5026 }),
5027 most_recent_commit: branch
5028 .most_recent_commit
5029 .as_ref()
5030 .map(|commit| proto::CommitSummary {
5031 sha: commit.sha.to_string(),
5032 subject: commit.subject.to_string(),
5033 commit_timestamp: commit.commit_timestamp,
5034 author_name: commit.author_name.to_string(),
5035 }),
5036 }
5037}
5038
5039fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5040 git::repository::Branch {
5041 is_head: proto.is_head,
5042 ref_name: proto.ref_name.clone().into(),
5043 upstream: proto
5044 .upstream
5045 .as_ref()
5046 .map(|upstream| git::repository::Upstream {
5047 ref_name: upstream.ref_name.to_string().into(),
5048 tracking: upstream
5049 .tracking
5050 .as_ref()
5051 .map(|tracking| {
5052 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5053 ahead: tracking.ahead as u32,
5054 behind: tracking.behind as u32,
5055 })
5056 })
5057 .unwrap_or(git::repository::UpstreamTracking::Gone),
5058 }),
5059 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5060 git::repository::CommitSummary {
5061 sha: commit.sha.to_string().into(),
5062 subject: commit.subject.to_string().into(),
5063 commit_timestamp: commit.commit_timestamp,
5064 author_name: commit.author_name.to_string().into(),
5065 has_parent: true,
5066 }
5067 }),
5068 }
5069}
5070
5071fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5072 proto::GitCommitDetails {
5073 sha: commit.sha.to_string(),
5074 message: commit.message.to_string(),
5075 commit_timestamp: commit.commit_timestamp,
5076 author_email: commit.author_email.to_string(),
5077 author_name: commit.author_name.to_string(),
5078 }
5079}
5080
5081fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5082 CommitDetails {
5083 sha: proto.sha.clone().into(),
5084 message: proto.message.clone().into(),
5085 commit_timestamp: proto.commit_timestamp,
5086 author_email: proto.author_email.clone().into(),
5087 author_name: proto.author_name.clone().into(),
5088 }
5089}
5090
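/// Reads branches, statuses, stash entries, and merge state from the backend and
/// assembles a new `RepositorySnapshot`, along with the repository events it implies.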
5091async fn compute_snapshot(
5092 id: RepositoryId,
5093 work_directory_abs_path: Arc<Path>,
5094 prev_snapshot: RepositorySnapshot,
5095 backend: Arc<dyn GitRepository>,
5096) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5097 let mut events = Vec::new();
5098 let branches = backend.branches().await?;
5099 let branch = branches.into_iter().find(|branch| branch.is_head);
5100 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5101 let stash_entries = backend.stash_entries().await?;
5102 let statuses_by_path = SumTree::from_iter(
5103 statuses
5104 .entries
5105 .iter()
5106 .map(|(repo_path, status)| StatusEntry {
5107 repo_path: repo_path.clone(),
5108 status: *status,
5109 }),
5110 (),
5111 );
5112 let (merge_details, merge_heads_changed) =
5113 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5114 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5115
5116 if merge_heads_changed
5117 || branch != prev_snapshot.branch
5118 || statuses_by_path != prev_snapshot.statuses_by_path
5119 {
5120 events.push(RepositoryEvent::Updated {
5121 full_scan: true,
5122 new_instance: false,
5123 });
5124 }
5125
    // Cache merge conflict paths so that staging or unstaging files doesn't change them;
    // they are only refreshed when the merge heads change (at commit time, etc.).
5128 if merge_heads_changed {
5129 events.push(RepositoryEvent::MergeHeadsChanged);
5130 }
5131
    // Useful when `branch` is `None`, e.g. in a detached HEAD state.
5133 let head_commit = match backend.head_sha().await {
5134 Some(head_sha) => backend.show(head_sha).await.log_err(),
5135 None => None,
5136 };
5137
5138 // Used by edit prediction data collection
5139 let remote_origin_url = backend.remote_url("origin");
5140 let remote_upstream_url = backend.remote_url("upstream");
5141
5142 let snapshot = RepositorySnapshot {
5143 id,
5144 statuses_by_path,
5145 work_directory_abs_path,
5146 path_style: prev_snapshot.path_style,
5147 scan_id: prev_snapshot.scan_id + 1,
5148 branch,
5149 head_commit,
5150 merge: merge_details,
5151 remote_origin_url,
5152 remote_upstream_url,
5153 stash_entries,
5154 };
5155
5156 Ok((snapshot, events))
5157}
5158
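/// Converts a protobuf file status (or the legacy simple status code) into a `FileStatus`.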
5159fn status_from_proto(
5160 simple_status: i32,
5161 status: Option<proto::GitFileStatus>,
5162) -> anyhow::Result<FileStatus> {
5163 use proto::git_file_status::Variant;
5164
5165 let Some(variant) = status.and_then(|status| status.variant) else {
5166 let code = proto::GitStatus::from_i32(simple_status)
5167 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5168 let result = match code {
5169 proto::GitStatus::Added => TrackedStatus {
5170 worktree_status: StatusCode::Added,
5171 index_status: StatusCode::Unmodified,
5172 }
5173 .into(),
5174 proto::GitStatus::Modified => TrackedStatus {
5175 worktree_status: StatusCode::Modified,
5176 index_status: StatusCode::Unmodified,
5177 }
5178 .into(),
5179 proto::GitStatus::Conflict => UnmergedStatus {
5180 first_head: UnmergedStatusCode::Updated,
5181 second_head: UnmergedStatusCode::Updated,
5182 }
5183 .into(),
5184 proto::GitStatus::Deleted => TrackedStatus {
5185 worktree_status: StatusCode::Deleted,
5186 index_status: StatusCode::Unmodified,
5187 }
5188 .into(),
5189 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5190 };
5191 return Ok(result);
5192 };
5193
5194 let result = match variant {
5195 Variant::Untracked(_) => FileStatus::Untracked,
5196 Variant::Ignored(_) => FileStatus::Ignored,
5197 Variant::Unmerged(unmerged) => {
5198 let [first_head, second_head] =
5199 [unmerged.first_head, unmerged.second_head].map(|head| {
5200 let code = proto::GitStatus::from_i32(head)
5201 .with_context(|| format!("Invalid git status code: {head}"))?;
5202 let result = match code {
5203 proto::GitStatus::Added => UnmergedStatusCode::Added,
5204 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5205 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5206 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5207 };
5208 Ok(result)
5209 });
5210 let [first_head, second_head] = [first_head?, second_head?];
5211 UnmergedStatus {
5212 first_head,
5213 second_head,
5214 }
5215 .into()
5216 }
5217 Variant::Tracked(tracked) => {
5218 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5219 .map(|status| {
5220 let code = proto::GitStatus::from_i32(status)
5221 .with_context(|| format!("Invalid git status code: {status}"))?;
5222 let result = match code {
5223 proto::GitStatus::Modified => StatusCode::Modified,
5224 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5225 proto::GitStatus::Added => StatusCode::Added,
5226 proto::GitStatus::Deleted => StatusCode::Deleted,
5227 proto::GitStatus::Renamed => StatusCode::Renamed,
5228 proto::GitStatus::Copied => StatusCode::Copied,
5229 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5230 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5231 };
5232 Ok(result)
5233 });
5234 let [index_status, worktree_status] = [index_status?, worktree_status?];
5235 TrackedStatus {
5236 index_status,
5237 worktree_status,
5238 }
5239 .into()
5240 }
5241 };
5242 Ok(result)
5243}
5244
5245fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5246 use proto::git_file_status::{Tracked, Unmerged, Variant};
5247
5248 let variant = match status {
5249 FileStatus::Untracked => Variant::Untracked(Default::default()),
5250 FileStatus::Ignored => Variant::Ignored(Default::default()),
5251 FileStatus::Unmerged(UnmergedStatus {
5252 first_head,
5253 second_head,
5254 }) => Variant::Unmerged(Unmerged {
5255 first_head: unmerged_status_to_proto(first_head),
5256 second_head: unmerged_status_to_proto(second_head),
5257 }),
5258 FileStatus::Tracked(TrackedStatus {
5259 index_status,
5260 worktree_status,
5261 }) => Variant::Tracked(Tracked {
5262 index_status: tracked_status_to_proto(index_status),
5263 worktree_status: tracked_status_to_proto(worktree_status),
5264 }),
5265 };
5266 proto::GitFileStatus {
5267 variant: Some(variant),
5268 }
5269}
5270
5271fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5272 match code {
5273 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5274 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5275 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5276 }
5277}
5278
5279fn tracked_status_to_proto(code: StatusCode) -> i32 {
5280 match code {
5281 StatusCode::Added => proto::GitStatus::Added as _,
5282 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5283 StatusCode::Modified => proto::GitStatus::Modified as _,
5284 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5285 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5286 StatusCode::Copied => proto::GitStatus::Copied as _,
5287 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5288 }
5289}