1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{
67 ResultExt, debug_panic,
68 paths::{PathStyle, SanitizedPath},
69 post_inc,
70 rel_path::RelPath,
71};
72use worktree::{
73 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
74 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
75};
76use zeroize::Zeroize;
77
78pub struct GitStore {
79 state: GitStoreState,
80 buffer_store: Entity<BufferStore>,
81 worktree_store: Entity<WorktreeStore>,
82 repositories: HashMap<RepositoryId, Entity<Repository>>,
83 active_repo_id: Option<RepositoryId>,
84 #[allow(clippy::type_complexity)]
85 loading_diffs:
86 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
87 diffs: HashMap<BufferId, Entity<BufferGitState>>,
88 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
89 _subscriptions: Vec<Subscription>,
90}
91
92#[derive(Default)]
93struct SharedDiffs {
94 unstaged: Option<Entity<BufferDiff>>,
95 uncommitted: Option<Entity<BufferDiff>>,
96}
97
98struct BufferGitState {
99 unstaged_diff: Option<WeakEntity<BufferDiff>>,
100 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
101 conflict_set: Option<WeakEntity<ConflictSet>>,
102 recalculate_diff_task: Option<Task<Result<()>>>,
103 reparse_conflict_markers_task: Option<Task<Result<()>>>,
104 language: Option<Arc<Language>>,
105 language_registry: Option<Arc<LanguageRegistry>>,
106 conflict_updated_futures: Vec<oneshot::Sender<()>>,
107 recalculating_tx: postage::watch::Sender<bool>,
108
109 /// These operation counts are used to ensure that head and index text
110 /// values read from the git repository are up-to-date with any hunk staging
111 /// operations that have been performed on the BufferDiff.
112 ///
113 /// The operation count is incremented immediately when the user initiates a
114 /// hunk stage/unstage operation. Then, upon finishing writing the new index
115 /// text do disk, the `operation count as of write` is updated to reflect
116 /// the operation count that prompted the write.
117 hunk_staging_operation_count: usize,
118 hunk_staging_operation_count_as_of_write: usize,
119
120 head_text: Option<Arc<String>>,
121 index_text: Option<Arc<String>>,
122 head_changed: bool,
123 index_changed: bool,
124 language_changed: bool,
125}
126
127#[derive(Clone, Debug)]
128enum DiffBasesChange {
129 SetIndex(Option<String>),
130 SetHead(Option<String>),
131 SetEach {
132 index: Option<String>,
133 head: Option<String>,
134 },
135 SetBoth(Option<String>),
136}
137
138#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
139enum DiffKind {
140 Unstaged,
141 Uncommitted,
142}
143
144enum GitStoreState {
145 Local {
146 next_repository_id: Arc<AtomicU64>,
147 downstream: Option<LocalDownstreamState>,
148 project_environment: Entity<ProjectEnvironment>,
149 fs: Arc<dyn Fs>,
150 },
151 Remote {
152 upstream_client: AnyProtoClient,
153 upstream_project_id: u64,
154 downstream: Option<(AnyProtoClient, ProjectId)>,
155 },
156}
157
158enum DownstreamUpdate {
159 UpdateRepository(RepositorySnapshot),
160 RemoveRepository(RepositoryId),
161}
162
163struct LocalDownstreamState {
164 client: AnyProtoClient,
165 project_id: ProjectId,
166 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
167 _task: Task<Result<()>>,
168}
169
170#[derive(Clone, Debug)]
171pub struct GitStoreCheckpoint {
172 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
173}
174
175#[derive(Clone, Debug, PartialEq, Eq)]
176pub struct StatusEntry {
177 pub repo_path: RepoPath,
178 pub status: FileStatus,
179}
180
181impl StatusEntry {
182 fn to_proto(&self) -> proto::StatusEntry {
183 let simple_status = match self.status {
184 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
185 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
186 FileStatus::Tracked(TrackedStatus {
187 index_status,
188 worktree_status,
189 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
190 worktree_status
191 } else {
192 index_status
193 }),
194 };
195
196 proto::StatusEntry {
197 repo_path: self.repo_path.to_proto(),
198 simple_status,
199 status: Some(status_to_proto(self.status)),
200 }
201 }
202}
203
204impl TryFrom<proto::StatusEntry> for StatusEntry {
205 type Error = anyhow::Error;
206
207 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
208 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
209 let status = status_from_proto(value.simple_status, value.status)?;
210 Ok(Self { repo_path, status })
211 }
212}
213
214impl sum_tree::Item for StatusEntry {
215 type Summary = PathSummary<GitSummary>;
216
217 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
218 PathSummary {
219 max_path: self.repo_path.0.clone(),
220 item_summary: self.status.summary(),
221 }
222 }
223}
224
225impl sum_tree::KeyedItem for StatusEntry {
226 type Key = PathKey;
227
228 fn key(&self) -> Self::Key {
229 PathKey(self.repo_path.0.clone())
230 }
231}
232
233#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
234pub struct RepositoryId(pub u64);
235
236#[derive(Clone, Debug, Default, PartialEq, Eq)]
237pub struct MergeDetails {
238 pub conflicted_paths: TreeSet<RepoPath>,
239 pub message: Option<SharedString>,
240 pub heads: Vec<Option<SharedString>>,
241}
242
243#[derive(Clone, Debug, PartialEq, Eq)]
244pub struct RepositorySnapshot {
245 pub id: RepositoryId,
246 pub statuses_by_path: SumTree<StatusEntry>,
247 pub work_directory_abs_path: Arc<Path>,
248 pub path_style: PathStyle,
249 pub branch: Option<Branch>,
250 pub head_commit: Option<CommitDetails>,
251 pub scan_id: u64,
252 pub merge: MergeDetails,
253 pub remote_origin_url: Option<String>,
254 pub remote_upstream_url: Option<String>,
255 pub stash_entries: GitStash,
256}
257
258type JobId = u64;
259
260#[derive(Clone, Debug, PartialEq, Eq)]
261pub struct JobInfo {
262 pub start: Instant,
263 pub message: SharedString,
264}
265
266pub struct Repository {
267 this: WeakEntity<Self>,
268 snapshot: RepositorySnapshot,
269 commit_message_buffer: Option<Entity<Buffer>>,
270 git_store: WeakEntity<GitStore>,
271 // For a local repository, holds paths that have had worktree events since the last status scan completed,
272 // and that should be examined during the next status scan.
273 paths_needing_status_update: BTreeSet<RepoPath>,
274 job_sender: mpsc::UnboundedSender<GitJob>,
275 active_jobs: HashMap<JobId, JobInfo>,
276 job_id: JobId,
277 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
278 latest_askpass_id: u64,
279}
280
281impl std::ops::Deref for Repository {
282 type Target = RepositorySnapshot;
283
284 fn deref(&self) -> &Self::Target {
285 &self.snapshot
286 }
287}
288
289#[derive(Clone)]
290pub enum RepositoryState {
291 Local {
292 backend: Arc<dyn GitRepository>,
293 environment: Arc<HashMap<String, String>>,
294 },
295 Remote {
296 project_id: ProjectId,
297 client: AnyProtoClient,
298 },
299}
300
301#[derive(Clone, Debug, PartialEq, Eq)]
302pub enum RepositoryEvent {
303 Updated { full_scan: bool, new_instance: bool },
304 MergeHeadsChanged,
305 PathsChanged,
306}
307
308#[derive(Clone, Debug)]
309pub struct JobsUpdated;
310
311#[derive(Debug)]
312pub enum GitStoreEvent {
313 ActiveRepositoryChanged(Option<RepositoryId>),
314 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
315 RepositoryAdded(RepositoryId),
316 RepositoryRemoved(RepositoryId),
317 IndexWriteError(anyhow::Error),
318 JobsUpdated,
319 ConflictsUpdated,
320}
321
322impl EventEmitter<RepositoryEvent> for Repository {}
323impl EventEmitter<JobsUpdated> for Repository {}
324impl EventEmitter<GitStoreEvent> for GitStore {}
325
326pub struct GitJob {
327 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
328 key: Option<GitJobKey>,
329}
330
331#[derive(PartialEq, Eq)]
332enum GitJobKey {
333 WriteIndex(RepoPath),
334 ReloadBufferDiffBases,
335 RefreshStatuses,
336 ReloadGitState,
337}
338
339impl GitStore {
340 pub fn local(
341 worktree_store: &Entity<WorktreeStore>,
342 buffer_store: Entity<BufferStore>,
343 environment: Entity<ProjectEnvironment>,
344 fs: Arc<dyn Fs>,
345 cx: &mut Context<Self>,
346 ) -> Self {
347 Self::new(
348 worktree_store.clone(),
349 buffer_store,
350 GitStoreState::Local {
351 next_repository_id: Arc::new(AtomicU64::new(1)),
352 downstream: None,
353 project_environment: environment,
354 fs,
355 },
356 cx,
357 )
358 }
359
360 pub fn remote(
361 worktree_store: &Entity<WorktreeStore>,
362 buffer_store: Entity<BufferStore>,
363 upstream_client: AnyProtoClient,
364 project_id: u64,
365 cx: &mut Context<Self>,
366 ) -> Self {
367 Self::new(
368 worktree_store.clone(),
369 buffer_store,
370 GitStoreState::Remote {
371 upstream_client,
372 upstream_project_id: project_id,
373 downstream: None,
374 },
375 cx,
376 )
377 }
378
379 fn new(
380 worktree_store: Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 state: GitStoreState,
383 cx: &mut Context<Self>,
384 ) -> Self {
385 let _subscriptions = vec![
386 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
387 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
388 ];
389
390 GitStore {
391 state,
392 buffer_store,
393 worktree_store,
394 repositories: HashMap::default(),
395 active_repo_id: None,
396 _subscriptions,
397 loading_diffs: HashMap::default(),
398 shared_diffs: HashMap::default(),
399 diffs: HashMap::default(),
400 }
401 }
402
403 pub fn init(client: &AnyProtoClient) {
404 client.add_entity_request_handler(Self::handle_get_remotes);
405 client.add_entity_request_handler(Self::handle_get_branches);
406 client.add_entity_request_handler(Self::handle_get_default_branch);
407 client.add_entity_request_handler(Self::handle_change_branch);
408 client.add_entity_request_handler(Self::handle_create_branch);
409 client.add_entity_request_handler(Self::handle_rename_branch);
410 client.add_entity_request_handler(Self::handle_git_init);
411 client.add_entity_request_handler(Self::handle_push);
412 client.add_entity_request_handler(Self::handle_pull);
413 client.add_entity_request_handler(Self::handle_fetch);
414 client.add_entity_request_handler(Self::handle_stage);
415 client.add_entity_request_handler(Self::handle_unstage);
416 client.add_entity_request_handler(Self::handle_stash);
417 client.add_entity_request_handler(Self::handle_stash_pop);
418 client.add_entity_request_handler(Self::handle_stash_apply);
419 client.add_entity_request_handler(Self::handle_stash_drop);
420 client.add_entity_request_handler(Self::handle_commit);
421 client.add_entity_request_handler(Self::handle_reset);
422 client.add_entity_request_handler(Self::handle_show);
423 client.add_entity_request_handler(Self::handle_load_commit_diff);
424 client.add_entity_request_handler(Self::handle_checkout_files);
425 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
426 client.add_entity_request_handler(Self::handle_set_index_text);
427 client.add_entity_request_handler(Self::handle_askpass);
428 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
429 client.add_entity_request_handler(Self::handle_git_diff);
430 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
431 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
432 client.add_entity_message_handler(Self::handle_update_diff_bases);
433 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
434 client.add_entity_request_handler(Self::handle_blame_buffer);
435 client.add_entity_message_handler(Self::handle_update_repository);
436 client.add_entity_message_handler(Self::handle_remove_repository);
437 client.add_entity_request_handler(Self::handle_git_clone);
438 }
439
440 pub fn is_local(&self) -> bool {
441 matches!(self.state, GitStoreState::Local { .. })
442 }
443 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
444 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
445 let id = repo.read(cx).id;
446 if self.active_repo_id != Some(id) {
447 self.active_repo_id = Some(id);
448 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
449 }
450 }
451 }
452
453 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
454 match &mut self.state {
455 GitStoreState::Remote {
456 downstream: downstream_client,
457 ..
458 } => {
459 for repo in self.repositories.values() {
460 let update = repo.read(cx).snapshot.initial_update(project_id);
461 for update in split_repository_update(update) {
462 client.send(update).log_err();
463 }
464 }
465 *downstream_client = Some((client, ProjectId(project_id)));
466 }
467 GitStoreState::Local {
468 downstream: downstream_client,
469 ..
470 } => {
471 let mut snapshots = HashMap::default();
472 let (updates_tx, mut updates_rx) = mpsc::unbounded();
473 for repo in self.repositories.values() {
474 updates_tx
475 .unbounded_send(DownstreamUpdate::UpdateRepository(
476 repo.read(cx).snapshot.clone(),
477 ))
478 .ok();
479 }
480 *downstream_client = Some(LocalDownstreamState {
481 client: client.clone(),
482 project_id: ProjectId(project_id),
483 updates_tx,
484 _task: cx.spawn(async move |this, cx| {
485 cx.background_spawn(async move {
486 while let Some(update) = updates_rx.next().await {
487 match update {
488 DownstreamUpdate::UpdateRepository(snapshot) => {
489 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
490 {
491 let update =
492 snapshot.build_update(old_snapshot, project_id);
493 *old_snapshot = snapshot;
494 for update in split_repository_update(update) {
495 client.send(update)?;
496 }
497 } else {
498 let update = snapshot.initial_update(project_id);
499 for update in split_repository_update(update) {
500 client.send(update)?;
501 }
502 snapshots.insert(snapshot.id, snapshot);
503 }
504 }
505 DownstreamUpdate::RemoveRepository(id) => {
506 client.send(proto::RemoveRepository {
507 project_id,
508 id: id.to_proto(),
509 })?;
510 }
511 }
512 }
513 anyhow::Ok(())
514 })
515 .await
516 .ok();
517 this.update(cx, |this, _| {
518 if let GitStoreState::Local {
519 downstream: downstream_client,
520 ..
521 } = &mut this.state
522 {
523 downstream_client.take();
524 } else {
525 unreachable!("unshared called on remote store");
526 }
527 })
528 }),
529 });
530 }
531 }
532 }
533
534 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
535 match &mut self.state {
536 GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } => {
540 downstream_client.take();
541 }
542 GitStoreState::Remote {
543 downstream: downstream_client,
544 ..
545 } => {
546 downstream_client.take();
547 }
548 }
549 self.shared_diffs.clear();
550 }
551
552 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
553 self.shared_diffs.remove(peer_id);
554 }
555
556 pub fn active_repository(&self) -> Option<Entity<Repository>> {
557 self.active_repo_id
558 .as_ref()
559 .map(|id| self.repositories[id].clone())
560 }
561
562 pub fn open_unstaged_diff(
563 &mut self,
564 buffer: Entity<Buffer>,
565 cx: &mut Context<Self>,
566 ) -> Task<Result<Entity<BufferDiff>>> {
567 let buffer_id = buffer.read(cx).remote_id();
568 if let Some(diff_state) = self.diffs.get(&buffer_id)
569 && let Some(unstaged_diff) = diff_state
570 .read(cx)
571 .unstaged_diff
572 .as_ref()
573 .and_then(|weak| weak.upgrade())
574 {
575 if let Some(task) =
576 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
577 {
578 return cx.background_executor().spawn(async move {
579 task.await;
580 Ok(unstaged_diff)
581 });
582 }
583 return Task::ready(Ok(unstaged_diff));
584 }
585
586 let Some((repo, repo_path)) =
587 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
588 else {
589 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
590 };
591
592 let task = self
593 .loading_diffs
594 .entry((buffer_id, DiffKind::Unstaged))
595 .or_insert_with(|| {
596 let staged_text = repo.update(cx, |repo, cx| {
597 repo.load_staged_text(buffer_id, repo_path, cx)
598 });
599 cx.spawn(async move |this, cx| {
600 Self::open_diff_internal(
601 this,
602 DiffKind::Unstaged,
603 staged_text.await.map(DiffBasesChange::SetIndex),
604 buffer,
605 cx,
606 )
607 .await
608 .map_err(Arc::new)
609 })
610 .shared()
611 })
612 .clone();
613
614 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
615 }
616
617 pub fn open_uncommitted_diff(
618 &mut self,
619 buffer: Entity<Buffer>,
620 cx: &mut Context<Self>,
621 ) -> Task<Result<Entity<BufferDiff>>> {
622 let buffer_id = buffer.read(cx).remote_id();
623
624 if let Some(diff_state) = self.diffs.get(&buffer_id)
625 && let Some(uncommitted_diff) = diff_state
626 .read(cx)
627 .uncommitted_diff
628 .as_ref()
629 .and_then(|weak| weak.upgrade())
630 {
631 if let Some(task) =
632 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
633 {
634 return cx.background_executor().spawn(async move {
635 task.await;
636 Ok(uncommitted_diff)
637 });
638 }
639 return Task::ready(Ok(uncommitted_diff));
640 }
641
642 let Some((repo, repo_path)) =
643 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
644 else {
645 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
646 };
647
648 let task = self
649 .loading_diffs
650 .entry((buffer_id, DiffKind::Uncommitted))
651 .or_insert_with(|| {
652 let changes = repo.update(cx, |repo, cx| {
653 repo.load_committed_text(buffer_id, repo_path, cx)
654 });
655
656 cx.spawn(async move |this, cx| {
657 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
658 .await
659 .map_err(Arc::new)
660 })
661 .shared()
662 })
663 .clone();
664
665 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
666 }
667
668 async fn open_diff_internal(
669 this: WeakEntity<Self>,
670 kind: DiffKind,
671 texts: Result<DiffBasesChange>,
672 buffer_entity: Entity<Buffer>,
673 cx: &mut AsyncApp,
674 ) -> Result<Entity<BufferDiff>> {
675 let diff_bases_change = match texts {
676 Err(e) => {
677 this.update(cx, |this, cx| {
678 let buffer = buffer_entity.read(cx);
679 let buffer_id = buffer.remote_id();
680 this.loading_diffs.remove(&(buffer_id, kind));
681 })?;
682 return Err(e);
683 }
684 Ok(change) => change,
685 };
686
687 this.update(cx, |this, cx| {
688 let buffer = buffer_entity.read(cx);
689 let buffer_id = buffer.remote_id();
690 let language = buffer.language().cloned();
691 let language_registry = buffer.language_registry();
692 let text_snapshot = buffer.text_snapshot();
693 this.loading_diffs.remove(&(buffer_id, kind));
694
695 let git_store = cx.weak_entity();
696 let diff_state = this
697 .diffs
698 .entry(buffer_id)
699 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
700
701 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
702
703 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
704 diff_state.update(cx, |diff_state, cx| {
705 diff_state.language = language;
706 diff_state.language_registry = language_registry;
707
708 match kind {
709 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
710 DiffKind::Uncommitted => {
711 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
712 diff
713 } else {
714 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
715 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
716 unstaged_diff
717 };
718
719 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
720 diff_state.uncommitted_diff = Some(diff.downgrade())
721 }
722 }
723
724 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
725 let rx = diff_state.wait_for_recalculation();
726
727 anyhow::Ok(async move {
728 if let Some(rx) = rx {
729 rx.await;
730 }
731 Ok(diff)
732 })
733 })
734 })??
735 .await
736 }
737
738 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
739 let diff_state = self.diffs.get(&buffer_id)?;
740 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
741 }
742
743 pub fn get_uncommitted_diff(
744 &self,
745 buffer_id: BufferId,
746 cx: &App,
747 ) -> Option<Entity<BufferDiff>> {
748 let diff_state = self.diffs.get(&buffer_id)?;
749 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
750 }
751
752 pub fn open_conflict_set(
753 &mut self,
754 buffer: Entity<Buffer>,
755 cx: &mut Context<Self>,
756 ) -> Entity<ConflictSet> {
757 log::debug!("open conflict set");
758 let buffer_id = buffer.read(cx).remote_id();
759
760 if let Some(git_state) = self.diffs.get(&buffer_id)
761 && let Some(conflict_set) = git_state
762 .read(cx)
763 .conflict_set
764 .as_ref()
765 .and_then(|weak| weak.upgrade())
766 {
767 let conflict_set = conflict_set;
768 let buffer_snapshot = buffer.read(cx).text_snapshot();
769
770 git_state.update(cx, |state, cx| {
771 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
772 });
773
774 return conflict_set;
775 }
776
777 let is_unmerged = self
778 .repository_and_path_for_buffer_id(buffer_id, cx)
779 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
780 let git_store = cx.weak_entity();
781 let buffer_git_state = self
782 .diffs
783 .entry(buffer_id)
784 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
785 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
786
787 self._subscriptions
788 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
789 cx.emit(GitStoreEvent::ConflictsUpdated);
790 }));
791
792 buffer_git_state.update(cx, |state, cx| {
793 state.conflict_set = Some(conflict_set.downgrade());
794 let buffer_snapshot = buffer.read(cx).text_snapshot();
795 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
796 });
797
798 conflict_set
799 }
800
801 pub fn project_path_git_status(
802 &self,
803 project_path: &ProjectPath,
804 cx: &App,
805 ) -> Option<FileStatus> {
806 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
807 Some(repo.read(cx).status_for_path(&repo_path)?.status)
808 }
809
810 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
811 let mut work_directory_abs_paths = Vec::new();
812 let mut checkpoints = Vec::new();
813 for repository in self.repositories.values() {
814 repository.update(cx, |repository, _| {
815 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
816 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
817 });
818 }
819
820 cx.background_executor().spawn(async move {
821 let checkpoints = future::try_join_all(checkpoints).await?;
822 Ok(GitStoreCheckpoint {
823 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
824 .into_iter()
825 .zip(checkpoints)
826 .collect(),
827 })
828 })
829 }
830
831 pub fn restore_checkpoint(
832 &self,
833 checkpoint: GitStoreCheckpoint,
834 cx: &mut App,
835 ) -> Task<Result<()>> {
836 let repositories_by_work_dir_abs_path = self
837 .repositories
838 .values()
839 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
840 .collect::<HashMap<_, _>>();
841
842 let mut tasks = Vec::new();
843 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
844 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
845 let restore = repository.update(cx, |repository, _| {
846 repository.restore_checkpoint(checkpoint)
847 });
848 tasks.push(async move { restore.await? });
849 }
850 }
851 cx.background_spawn(async move {
852 future::try_join_all(tasks).await?;
853 Ok(())
854 })
855 }
856
857 /// Compares two checkpoints, returning true if they are equal.
858 pub fn compare_checkpoints(
859 &self,
860 left: GitStoreCheckpoint,
861 mut right: GitStoreCheckpoint,
862 cx: &mut App,
863 ) -> Task<Result<bool>> {
864 let repositories_by_work_dir_abs_path = self
865 .repositories
866 .values()
867 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
868 .collect::<HashMap<_, _>>();
869
870 let mut tasks = Vec::new();
871 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
872 if let Some(right_checkpoint) = right
873 .checkpoints_by_work_dir_abs_path
874 .remove(&work_dir_abs_path)
875 {
876 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
877 {
878 let compare = repository.update(cx, |repository, _| {
879 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
880 });
881
882 tasks.push(async move { compare.await? });
883 }
884 } else {
885 return Task::ready(Ok(false));
886 }
887 }
888 cx.background_spawn(async move {
889 Ok(future::try_join_all(tasks)
890 .await?
891 .into_iter()
892 .all(|result| result))
893 })
894 }
895
896 /// Blames a buffer.
897 pub fn blame_buffer(
898 &self,
899 buffer: &Entity<Buffer>,
900 version: Option<clock::Global>,
901 cx: &mut App,
902 ) -> Task<Result<Option<Blame>>> {
903 let buffer = buffer.read(cx);
904 let Some((repo, repo_path)) =
905 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
906 else {
907 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
908 };
909 let content = match &version {
910 Some(version) => buffer.rope_for_version(version),
911 None => buffer.as_rope().clone(),
912 };
913 let version = version.unwrap_or(buffer.version());
914 let buffer_id = buffer.remote_id();
915
916 let rx = repo.update(cx, |repo, _| {
917 repo.send_job(None, move |state, _| async move {
918 match state {
919 RepositoryState::Local { backend, .. } => backend
920 .blame(repo_path.clone(), content)
921 .await
922 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
923 .map(Some),
924 RepositoryState::Remote { project_id, client } => {
925 let response = client
926 .request(proto::BlameBuffer {
927 project_id: project_id.to_proto(),
928 buffer_id: buffer_id.into(),
929 version: serialize_version(&version),
930 })
931 .await?;
932 Ok(deserialize_blame_buffer_response(response))
933 }
934 }
935 })
936 });
937
938 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
939 }
940
941 pub fn get_permalink_to_line(
942 &self,
943 buffer: &Entity<Buffer>,
944 selection: Range<u32>,
945 cx: &mut App,
946 ) -> Task<Result<url::Url>> {
947 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
948 return Task::ready(Err(anyhow!("buffer has no file")));
949 };
950
951 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
952 &(file.worktree.read(cx).id(), file.path.clone()).into(),
953 cx,
954 ) else {
955 // If we're not in a Git repo, check whether this is a Rust source
956 // file in the Cargo registry (presumably opened with go-to-definition
957 // from a normal Rust file). If so, we can put together a permalink
958 // using crate metadata.
959 if buffer
960 .read(cx)
961 .language()
962 .is_none_or(|lang| lang.name() != "Rust".into())
963 {
964 return Task::ready(Err(anyhow!("no permalink available")));
965 }
966 let file_path = file.worktree.read(cx).absolutize(&file.path);
967 return cx.spawn(async move |cx| {
968 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
969 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
970 .context("no permalink available")
971 });
972
973 // TODO remote case
974 };
975
976 let buffer_id = buffer.read(cx).remote_id();
977 let branch = repo.read(cx).branch.clone();
978 let remote = branch
979 .as_ref()
980 .and_then(|b| b.upstream.as_ref())
981 .and_then(|b| b.remote_name())
982 .unwrap_or("origin")
983 .to_string();
984
985 let rx = repo.update(cx, |repo, _| {
986 repo.send_job(None, move |state, cx| async move {
987 match state {
988 RepositoryState::Local { backend, .. } => {
989 let origin_url = backend
990 .remote_url(&remote)
991 .with_context(|| format!("remote \"{remote}\" not found"))?;
992
993 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
994
995 let provider_registry =
996 cx.update(GitHostingProviderRegistry::default_global)?;
997
998 let (provider, remote) =
999 parse_git_remote_url(provider_registry, &origin_url)
1000 .context("parsing Git remote URL")?;
1001
1002 let path = repo_path.as_unix_str();
1003
1004 Ok(provider.build_permalink(
1005 remote,
1006 BuildPermalinkParams {
1007 sha: &sha,
1008 path,
1009 selection: Some(selection),
1010 },
1011 ))
1012 }
1013 RepositoryState::Remote { project_id, client } => {
1014 let response = client
1015 .request(proto::GetPermalinkToLine {
1016 project_id: project_id.to_proto(),
1017 buffer_id: buffer_id.into(),
1018 selection: Some(proto::Range {
1019 start: selection.start as u64,
1020 end: selection.end as u64,
1021 }),
1022 })
1023 .await?;
1024
1025 url::Url::parse(&response.permalink).context("failed to parse permalink")
1026 }
1027 }
1028 })
1029 });
1030 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1031 }
1032
1033 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1034 match &self.state {
1035 GitStoreState::Local {
1036 downstream: downstream_client,
1037 ..
1038 } => downstream_client
1039 .as_ref()
1040 .map(|state| (state.client.clone(), state.project_id)),
1041 GitStoreState::Remote {
1042 downstream: downstream_client,
1043 ..
1044 } => downstream_client.clone(),
1045 }
1046 }
1047
1048 fn upstream_client(&self) -> Option<AnyProtoClient> {
1049 match &self.state {
1050 GitStoreState::Local { .. } => None,
1051 GitStoreState::Remote {
1052 upstream_client, ..
1053 } => Some(upstream_client.clone()),
1054 }
1055 }
1056
1057 fn on_worktree_store_event(
1058 &mut self,
1059 worktree_store: Entity<WorktreeStore>,
1060 event: &WorktreeStoreEvent,
1061 cx: &mut Context<Self>,
1062 ) {
1063 let GitStoreState::Local {
1064 project_environment,
1065 downstream,
1066 next_repository_id,
1067 fs,
1068 } = &self.state
1069 else {
1070 return;
1071 };
1072
1073 match event {
1074 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1075 if let Some(worktree) = self
1076 .worktree_store
1077 .read(cx)
1078 .worktree_for_id(*worktree_id, cx)
1079 {
1080 let paths_by_git_repo =
1081 self.process_updated_entries(&worktree, updated_entries, cx);
1082 let downstream = downstream
1083 .as_ref()
1084 .map(|downstream| downstream.updates_tx.clone());
1085 cx.spawn(async move |_, cx| {
1086 let paths_by_git_repo = paths_by_git_repo.await;
1087 for (repo, paths) in paths_by_git_repo {
1088 repo.update(cx, |repo, cx| {
1089 repo.paths_changed(paths, downstream.clone(), cx);
1090 })
1091 .ok();
1092 }
1093 })
1094 .detach();
1095 }
1096 }
1097 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1098 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1099 else {
1100 return;
1101 };
1102 if !worktree.read(cx).is_visible() {
1103 log::debug!(
1104 "not adding repositories for local worktree {:?} because it's not visible",
1105 worktree.read(cx).abs_path()
1106 );
1107 return;
1108 }
1109 self.update_repositories_from_worktree(
1110 project_environment.clone(),
1111 next_repository_id.clone(),
1112 downstream
1113 .as_ref()
1114 .map(|downstream| downstream.updates_tx.clone()),
1115 changed_repos.clone(),
1116 fs.clone(),
1117 cx,
1118 );
1119 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1120 }
1121 _ => {}
1122 }
1123 }
1124 fn on_repository_event(
1125 &mut self,
1126 repo: Entity<Repository>,
1127 event: &RepositoryEvent,
1128 cx: &mut Context<Self>,
1129 ) {
1130 let id = repo.read(cx).id;
1131 let repo_snapshot = repo.read(cx).snapshot.clone();
1132 for (buffer_id, diff) in self.diffs.iter() {
1133 if let Some((buffer_repo, repo_path)) =
1134 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1135 && buffer_repo == repo
1136 {
1137 diff.update(cx, |diff, cx| {
1138 if let Some(conflict_set) = &diff.conflict_set {
1139 let conflict_status_changed =
1140 conflict_set.update(cx, |conflict_set, cx| {
1141 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1142 conflict_set.set_has_conflict(has_conflict, cx)
1143 })?;
1144 if conflict_status_changed {
1145 let buffer_store = self.buffer_store.read(cx);
1146 if let Some(buffer) = buffer_store.get(*buffer_id) {
1147 let _ = diff
1148 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1149 }
1150 }
1151 }
1152 anyhow::Ok(())
1153 })
1154 .ok();
1155 }
1156 }
1157 cx.emit(GitStoreEvent::RepositoryUpdated(
1158 id,
1159 event.clone(),
1160 self.active_repo_id == Some(id),
1161 ))
1162 }
1163
1164 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1165 cx.emit(GitStoreEvent::JobsUpdated)
1166 }
1167
1168 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1169 fn update_repositories_from_worktree(
1170 &mut self,
1171 project_environment: Entity<ProjectEnvironment>,
1172 next_repository_id: Arc<AtomicU64>,
1173 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1174 updated_git_repositories: UpdatedGitRepositoriesSet,
1175 fs: Arc<dyn Fs>,
1176 cx: &mut Context<Self>,
1177 ) {
1178 let mut removed_ids = Vec::new();
1179 for update in updated_git_repositories.iter() {
1180 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1181 let existing_work_directory_abs_path =
1182 repo.read(cx).work_directory_abs_path.clone();
1183 Some(&existing_work_directory_abs_path)
1184 == update.old_work_directory_abs_path.as_ref()
1185 || Some(&existing_work_directory_abs_path)
1186 == update.new_work_directory_abs_path.as_ref()
1187 }) {
1188 if let Some(new_work_directory_abs_path) =
1189 update.new_work_directory_abs_path.clone()
1190 {
1191 existing.update(cx, |existing, cx| {
1192 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1193 existing.schedule_scan(updates_tx.clone(), cx);
1194 });
1195 } else {
1196 removed_ids.push(*id);
1197 }
1198 } else if let UpdatedGitRepository {
1199 new_work_directory_abs_path: Some(work_directory_abs_path),
1200 dot_git_abs_path: Some(dot_git_abs_path),
1201 repository_dir_abs_path: Some(repository_dir_abs_path),
1202 common_dir_abs_path: Some(common_dir_abs_path),
1203 ..
1204 } = update
1205 {
1206 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1207 let git_store = cx.weak_entity();
1208 let repo = cx.new(|cx| {
1209 let mut repo = Repository::local(
1210 id,
1211 work_directory_abs_path.clone(),
1212 dot_git_abs_path.clone(),
1213 repository_dir_abs_path.clone(),
1214 common_dir_abs_path.clone(),
1215 project_environment.downgrade(),
1216 fs.clone(),
1217 git_store,
1218 cx,
1219 );
1220 repo.schedule_scan(updates_tx.clone(), cx);
1221 repo
1222 });
1223 self._subscriptions
1224 .push(cx.subscribe(&repo, Self::on_repository_event));
1225 self._subscriptions
1226 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1227 self.repositories.insert(id, repo);
1228 cx.emit(GitStoreEvent::RepositoryAdded(id));
1229 self.active_repo_id.get_or_insert_with(|| {
1230 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1231 id
1232 });
1233 }
1234 }
1235
1236 for id in removed_ids {
1237 if self.active_repo_id == Some(id) {
1238 self.active_repo_id = None;
1239 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1240 }
1241 self.repositories.remove(&id);
1242 if let Some(updates_tx) = updates_tx.as_ref() {
1243 updates_tx
1244 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1245 .ok();
1246 }
1247 }
1248 }
1249
1250 fn on_buffer_store_event(
1251 &mut self,
1252 _: Entity<BufferStore>,
1253 event: &BufferStoreEvent,
1254 cx: &mut Context<Self>,
1255 ) {
1256 match event {
1257 BufferStoreEvent::BufferAdded(buffer) => {
1258 cx.subscribe(buffer, |this, buffer, event, cx| {
1259 if let BufferEvent::LanguageChanged = event {
1260 let buffer_id = buffer.read(cx).remote_id();
1261 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1262 diff_state.update(cx, |diff_state, cx| {
1263 diff_state.buffer_language_changed(buffer, cx);
1264 });
1265 }
1266 }
1267 })
1268 .detach();
1269 }
1270 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1271 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1272 diffs.remove(buffer_id);
1273 }
1274 }
1275 BufferStoreEvent::BufferDropped(buffer_id) => {
1276 self.diffs.remove(buffer_id);
1277 for diffs in self.shared_diffs.values_mut() {
1278 diffs.remove(buffer_id);
1279 }
1280 }
1281
1282 _ => {}
1283 }
1284 }
1285
1286 pub fn recalculate_buffer_diffs(
1287 &mut self,
1288 buffers: Vec<Entity<Buffer>>,
1289 cx: &mut Context<Self>,
1290 ) -> impl Future<Output = ()> + use<> {
1291 let mut futures = Vec::new();
1292 for buffer in buffers {
1293 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1294 let buffer = buffer.read(cx).text_snapshot();
1295 diff_state.update(cx, |diff_state, cx| {
1296 diff_state.recalculate_diffs(buffer.clone(), cx);
1297 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1298 });
1299 futures.push(diff_state.update(cx, |diff_state, cx| {
1300 diff_state
1301 .reparse_conflict_markers(buffer, cx)
1302 .map(|_| {})
1303 .boxed()
1304 }));
1305 }
1306 }
1307 async move {
1308 futures::future::join_all(futures).await;
1309 }
1310 }
1311
1312 fn on_buffer_diff_event(
1313 &mut self,
1314 diff: Entity<buffer_diff::BufferDiff>,
1315 event: &BufferDiffEvent,
1316 cx: &mut Context<Self>,
1317 ) {
1318 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1319 let buffer_id = diff.read(cx).buffer_id;
1320 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1321 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1322 diff_state.hunk_staging_operation_count += 1;
1323 diff_state.hunk_staging_operation_count
1324 });
1325 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1326 let recv = repo.update(cx, |repo, cx| {
1327 log::debug!("hunks changed for {}", path.as_unix_str());
1328 repo.spawn_set_index_text_job(
1329 path,
1330 new_index_text.as_ref().map(|rope| rope.to_string()),
1331 Some(hunk_staging_operation_count),
1332 cx,
1333 )
1334 });
1335 let diff = diff.downgrade();
1336 cx.spawn(async move |this, cx| {
1337 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1338 diff.update(cx, |diff, cx| {
1339 diff.clear_pending_hunks(cx);
1340 })
1341 .ok();
1342 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1343 .ok();
1344 }
1345 })
1346 .detach();
1347 }
1348 }
1349 }
1350 }
1351
1352 fn local_worktree_git_repos_changed(
1353 &mut self,
1354 worktree: Entity<Worktree>,
1355 changed_repos: &UpdatedGitRepositoriesSet,
1356 cx: &mut Context<Self>,
1357 ) {
1358 log::debug!("local worktree repos changed");
1359 debug_assert!(worktree.read(cx).is_local());
1360
1361 for repository in self.repositories.values() {
1362 repository.update(cx, |repository, cx| {
1363 let repo_abs_path = &repository.work_directory_abs_path;
1364 if changed_repos.iter().any(|update| {
1365 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1366 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1367 }) {
1368 repository.reload_buffer_diff_bases(cx);
1369 }
1370 });
1371 }
1372 }
1373
1374 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1375 &self.repositories
1376 }
1377
1378 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1379 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1380 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1381 Some(status.status)
1382 }
1383
1384 pub fn repository_and_path_for_buffer_id(
1385 &self,
1386 buffer_id: BufferId,
1387 cx: &App,
1388 ) -> Option<(Entity<Repository>, RepoPath)> {
1389 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1390 let project_path = buffer.read(cx).project_path(cx)?;
1391 self.repository_and_path_for_project_path(&project_path, cx)
1392 }
1393
1394 pub fn repository_and_path_for_project_path(
1395 &self,
1396 path: &ProjectPath,
1397 cx: &App,
1398 ) -> Option<(Entity<Repository>, RepoPath)> {
1399 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1400 self.repositories
1401 .values()
1402 .filter_map(|repo| {
1403 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1404 Some((repo.clone(), repo_path))
1405 })
1406 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1407 }
1408
1409 pub fn git_init(
1410 &self,
1411 path: Arc<Path>,
1412 fallback_branch_name: String,
1413 cx: &App,
1414 ) -> Task<Result<()>> {
1415 match &self.state {
1416 GitStoreState::Local { fs, .. } => {
1417 let fs = fs.clone();
1418 cx.background_executor()
1419 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1420 }
1421 GitStoreState::Remote {
1422 upstream_client,
1423 upstream_project_id: project_id,
1424 ..
1425 } => {
1426 let client = upstream_client.clone();
1427 let project_id = *project_id;
1428 cx.background_executor().spawn(async move {
1429 client
1430 .request(proto::GitInit {
1431 project_id: project_id,
1432 abs_path: path.to_string_lossy().into_owned(),
1433 fallback_branch_name,
1434 })
1435 .await?;
1436 Ok(())
1437 })
1438 }
1439 }
1440 }
1441
1442 pub fn git_clone(
1443 &self,
1444 repo: String,
1445 path: impl Into<Arc<std::path::Path>>,
1446 cx: &App,
1447 ) -> Task<Result<()>> {
1448 let path = path.into();
1449 match &self.state {
1450 GitStoreState::Local { fs, .. } => {
1451 let fs = fs.clone();
1452 cx.background_executor()
1453 .spawn(async move { fs.git_clone(&repo, &path).await })
1454 }
1455 GitStoreState::Remote {
1456 upstream_client,
1457 upstream_project_id,
1458 ..
1459 } => {
1460 if upstream_client.is_via_collab() {
1461 return Task::ready(Err(anyhow!(
1462 "Git Clone isn't supported for project guests"
1463 )));
1464 }
1465 let request = upstream_client.request(proto::GitClone {
1466 project_id: *upstream_project_id,
1467 abs_path: path.to_string_lossy().into_owned(),
1468 remote_repo: repo,
1469 });
1470
1471 cx.background_spawn(async move {
1472 let result = request.await?;
1473
1474 match result.success {
1475 true => Ok(()),
1476 false => Err(anyhow!("Git Clone failed")),
1477 }
1478 })
1479 }
1480 }
1481 }
1482
1483 async fn handle_update_repository(
1484 this: Entity<Self>,
1485 envelope: TypedEnvelope<proto::UpdateRepository>,
1486 mut cx: AsyncApp,
1487 ) -> Result<()> {
1488 this.update(&mut cx, |this, cx| {
1489 let path_style = this.worktree_store.read(cx).path_style();
1490 let mut update = envelope.payload;
1491
1492 let id = RepositoryId::from_proto(update.id);
1493 let client = this.upstream_client().context("no upstream client")?;
1494
1495 let mut is_new = false;
1496 let repo = this.repositories.entry(id).or_insert_with(|| {
1497 is_new = true;
1498 let git_store = cx.weak_entity();
1499 cx.new(|cx| {
1500 Repository::remote(
1501 id,
1502 Path::new(&update.abs_path).into(),
1503 path_style,
1504 ProjectId(update.project_id),
1505 client,
1506 git_store,
1507 cx,
1508 )
1509 })
1510 });
1511 if is_new {
1512 this._subscriptions
1513 .push(cx.subscribe(repo, Self::on_repository_event))
1514 }
1515
1516 repo.update(cx, {
1517 let update = update.clone();
1518 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1519 })?;
1520
1521 this.active_repo_id.get_or_insert_with(|| {
1522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1523 id
1524 });
1525
1526 if let Some((client, project_id)) = this.downstream_client() {
1527 update.project_id = project_id.to_proto();
1528 client.send(update).log_err();
1529 }
1530 Ok(())
1531 })?
1532 }
1533
1534 async fn handle_remove_repository(
1535 this: Entity<Self>,
1536 envelope: TypedEnvelope<proto::RemoveRepository>,
1537 mut cx: AsyncApp,
1538 ) -> Result<()> {
1539 this.update(&mut cx, |this, cx| {
1540 let mut update = envelope.payload;
1541 let id = RepositoryId::from_proto(update.id);
1542 this.repositories.remove(&id);
1543 if let Some((client, project_id)) = this.downstream_client() {
1544 update.project_id = project_id.to_proto();
1545 client.send(update).log_err();
1546 }
1547 if this.active_repo_id == Some(id) {
1548 this.active_repo_id = None;
1549 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1550 }
1551 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1552 })
1553 }
1554
1555 async fn handle_git_init(
1556 this: Entity<Self>,
1557 envelope: TypedEnvelope<proto::GitInit>,
1558 cx: AsyncApp,
1559 ) -> Result<proto::Ack> {
1560 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1561 let name = envelope.payload.fallback_branch_name;
1562 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1563 .await?;
1564
1565 Ok(proto::Ack {})
1566 }
1567
1568 async fn handle_git_clone(
1569 this: Entity<Self>,
1570 envelope: TypedEnvelope<proto::GitClone>,
1571 cx: AsyncApp,
1572 ) -> Result<proto::GitCloneResponse> {
1573 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1574 let repo_name = envelope.payload.remote_repo;
1575 let result = cx
1576 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1577 .await;
1578
1579 Ok(proto::GitCloneResponse {
1580 success: result.is_ok(),
1581 })
1582 }
1583
1584 async fn handle_fetch(
1585 this: Entity<Self>,
1586 envelope: TypedEnvelope<proto::Fetch>,
1587 mut cx: AsyncApp,
1588 ) -> Result<proto::RemoteMessageResponse> {
1589 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1590 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1591 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1592 let askpass_id = envelope.payload.askpass_id;
1593
1594 let askpass = make_remote_delegate(
1595 this,
1596 envelope.payload.project_id,
1597 repository_id,
1598 askpass_id,
1599 &mut cx,
1600 );
1601
1602 let remote_output = repository_handle
1603 .update(&mut cx, |repository_handle, cx| {
1604 repository_handle.fetch(fetch_options, askpass, cx)
1605 })?
1606 .await??;
1607
1608 Ok(proto::RemoteMessageResponse {
1609 stdout: remote_output.stdout,
1610 stderr: remote_output.stderr,
1611 })
1612 }
1613
1614 async fn handle_push(
1615 this: Entity<Self>,
1616 envelope: TypedEnvelope<proto::Push>,
1617 mut cx: AsyncApp,
1618 ) -> Result<proto::RemoteMessageResponse> {
1619 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1620 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1621
1622 let askpass_id = envelope.payload.askpass_id;
1623 let askpass = make_remote_delegate(
1624 this,
1625 envelope.payload.project_id,
1626 repository_id,
1627 askpass_id,
1628 &mut cx,
1629 );
1630
1631 let options = envelope
1632 .payload
1633 .options
1634 .as_ref()
1635 .map(|_| match envelope.payload.options() {
1636 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1637 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1638 });
1639
1640 let branch_name = envelope.payload.branch_name.into();
1641 let remote_name = envelope.payload.remote_name.into();
1642
1643 let remote_output = repository_handle
1644 .update(&mut cx, |repository_handle, cx| {
1645 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1646 })?
1647 .await??;
1648 Ok(proto::RemoteMessageResponse {
1649 stdout: remote_output.stdout,
1650 stderr: remote_output.stderr,
1651 })
1652 }
1653
1654 async fn handle_pull(
1655 this: Entity<Self>,
1656 envelope: TypedEnvelope<proto::Pull>,
1657 mut cx: AsyncApp,
1658 ) -> Result<proto::RemoteMessageResponse> {
1659 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1660 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1661 let askpass_id = envelope.payload.askpass_id;
1662 let askpass = make_remote_delegate(
1663 this,
1664 envelope.payload.project_id,
1665 repository_id,
1666 askpass_id,
1667 &mut cx,
1668 );
1669
1670 let branch_name = envelope.payload.branch_name.into();
1671 let remote_name = envelope.payload.remote_name.into();
1672
1673 let remote_message = repository_handle
1674 .update(&mut cx, |repository_handle, cx| {
1675 repository_handle.pull(branch_name, remote_name, askpass, cx)
1676 })?
1677 .await??;
1678
1679 Ok(proto::RemoteMessageResponse {
1680 stdout: remote_message.stdout,
1681 stderr: remote_message.stderr,
1682 })
1683 }
1684
1685 async fn handle_stage(
1686 this: Entity<Self>,
1687 envelope: TypedEnvelope<proto::Stage>,
1688 mut cx: AsyncApp,
1689 ) -> Result<proto::Ack> {
1690 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1691 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1692
1693 let entries = envelope
1694 .payload
1695 .paths
1696 .into_iter()
1697 .map(|path| RepoPath::new(&path))
1698 .collect::<Result<Vec<_>>>()?;
1699
1700 repository_handle
1701 .update(&mut cx, |repository_handle, cx| {
1702 repository_handle.stage_entries(entries, cx)
1703 })?
1704 .await?;
1705 Ok(proto::Ack {})
1706 }
1707
1708 async fn handle_unstage(
1709 this: Entity<Self>,
1710 envelope: TypedEnvelope<proto::Unstage>,
1711 mut cx: AsyncApp,
1712 ) -> Result<proto::Ack> {
1713 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1714 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1715
1716 let entries = envelope
1717 .payload
1718 .paths
1719 .into_iter()
1720 .map(|path| RepoPath::new(&path))
1721 .collect::<Result<Vec<_>>>()?;
1722
1723 repository_handle
1724 .update(&mut cx, |repository_handle, cx| {
1725 repository_handle.unstage_entries(entries, cx)
1726 })?
1727 .await?;
1728
1729 Ok(proto::Ack {})
1730 }
1731
1732 async fn handle_stash(
1733 this: Entity<Self>,
1734 envelope: TypedEnvelope<proto::Stash>,
1735 mut cx: AsyncApp,
1736 ) -> Result<proto::Ack> {
1737 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1738 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1739
1740 let entries = envelope
1741 .payload
1742 .paths
1743 .into_iter()
1744 .map(|path| RepoPath::new(&path))
1745 .collect::<Result<Vec<_>>>()?;
1746
1747 repository_handle
1748 .update(&mut cx, |repository_handle, cx| {
1749 repository_handle.stash_entries(entries, cx)
1750 })?
1751 .await?;
1752
1753 Ok(proto::Ack {})
1754 }
1755
1756 async fn handle_stash_pop(
1757 this: Entity<Self>,
1758 envelope: TypedEnvelope<proto::StashPop>,
1759 mut cx: AsyncApp,
1760 ) -> Result<proto::Ack> {
1761 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1762 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1763 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1764
1765 repository_handle
1766 .update(&mut cx, |repository_handle, cx| {
1767 repository_handle.stash_pop(stash_index, cx)
1768 })?
1769 .await?;
1770
1771 Ok(proto::Ack {})
1772 }
1773
1774 async fn handle_stash_apply(
1775 this: Entity<Self>,
1776 envelope: TypedEnvelope<proto::StashApply>,
1777 mut cx: AsyncApp,
1778 ) -> Result<proto::Ack> {
1779 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1780 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1781 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1782
1783 repository_handle
1784 .update(&mut cx, |repository_handle, cx| {
1785 repository_handle.stash_apply(stash_index, cx)
1786 })?
1787 .await?;
1788
1789 Ok(proto::Ack {})
1790 }
1791
1792 async fn handle_stash_drop(
1793 this: Entity<Self>,
1794 envelope: TypedEnvelope<proto::StashDrop>,
1795 mut cx: AsyncApp,
1796 ) -> Result<proto::Ack> {
1797 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1798 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1799 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1800
1801 repository_handle
1802 .update(&mut cx, |repository_handle, cx| {
1803 repository_handle.stash_drop(stash_index, cx)
1804 })?
1805 .await??;
1806
1807 Ok(proto::Ack {})
1808 }
1809
1810 async fn handle_set_index_text(
1811 this: Entity<Self>,
1812 envelope: TypedEnvelope<proto::SetIndexText>,
1813 mut cx: AsyncApp,
1814 ) -> Result<proto::Ack> {
1815 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1816 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1817 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1818
1819 repository_handle
1820 .update(&mut cx, |repository_handle, cx| {
1821 repository_handle.spawn_set_index_text_job(
1822 repo_path,
1823 envelope.payload.text,
1824 None,
1825 cx,
1826 )
1827 })?
1828 .await??;
1829 Ok(proto::Ack {})
1830 }
1831
1832 async fn handle_commit(
1833 this: Entity<Self>,
1834 envelope: TypedEnvelope<proto::Commit>,
1835 mut cx: AsyncApp,
1836 ) -> Result<proto::Ack> {
1837 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1838 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1839
1840 let message = SharedString::from(envelope.payload.message);
1841 let name = envelope.payload.name.map(SharedString::from);
1842 let email = envelope.payload.email.map(SharedString::from);
1843 let options = envelope.payload.options.unwrap_or_default();
1844
1845 repository_handle
1846 .update(&mut cx, |repository_handle, cx| {
1847 repository_handle.commit(
1848 message,
1849 name.zip(email),
1850 CommitOptions {
1851 amend: options.amend,
1852 signoff: options.signoff,
1853 },
1854 cx,
1855 )
1856 })?
1857 .await??;
1858 Ok(proto::Ack {})
1859 }
1860
1861 async fn handle_get_remotes(
1862 this: Entity<Self>,
1863 envelope: TypedEnvelope<proto::GetRemotes>,
1864 mut cx: AsyncApp,
1865 ) -> Result<proto::GetRemotesResponse> {
1866 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1867 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1868
1869 let branch_name = envelope.payload.branch_name;
1870
1871 let remotes = repository_handle
1872 .update(&mut cx, |repository_handle, _| {
1873 repository_handle.get_remotes(branch_name)
1874 })?
1875 .await??;
1876
1877 Ok(proto::GetRemotesResponse {
1878 remotes: remotes
1879 .into_iter()
1880 .map(|remotes| proto::get_remotes_response::Remote {
1881 name: remotes.name.to_string(),
1882 })
1883 .collect::<Vec<_>>(),
1884 })
1885 }
1886
1887 async fn handle_get_branches(
1888 this: Entity<Self>,
1889 envelope: TypedEnvelope<proto::GitGetBranches>,
1890 mut cx: AsyncApp,
1891 ) -> Result<proto::GitBranchesResponse> {
1892 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1893 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1894
1895 let branches = repository_handle
1896 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1897 .await??;
1898
1899 Ok(proto::GitBranchesResponse {
1900 branches: branches
1901 .into_iter()
1902 .map(|branch| branch_to_proto(&branch))
1903 .collect::<Vec<_>>(),
1904 })
    }

1906 async fn handle_get_default_branch(
1907 this: Entity<Self>,
1908 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1909 mut cx: AsyncApp,
1910 ) -> Result<proto::GetDefaultBranchResponse> {
1911 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1912 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1913
1914 let branch = repository_handle
1915 .update(&mut cx, |repository_handle, _| {
1916 repository_handle.default_branch()
1917 })?
1918 .await??
1919 .map(Into::into);
1920
1921 Ok(proto::GetDefaultBranchResponse { branch })
    }

1923 async fn handle_create_branch(
1924 this: Entity<Self>,
1925 envelope: TypedEnvelope<proto::GitCreateBranch>,
1926 mut cx: AsyncApp,
1927 ) -> Result<proto::Ack> {
1928 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1929 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1930 let branch_name = envelope.payload.branch_name;
1931
1932 repository_handle
1933 .update(&mut cx, |repository_handle, _| {
1934 repository_handle.create_branch(branch_name)
1935 })?
1936 .await??;
1937
1938 Ok(proto::Ack {})
1939 }
1940
1941 async fn handle_change_branch(
1942 this: Entity<Self>,
1943 envelope: TypedEnvelope<proto::GitChangeBranch>,
1944 mut cx: AsyncApp,
1945 ) -> Result<proto::Ack> {
1946 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1947 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1948 let branch_name = envelope.payload.branch_name;
1949
1950 repository_handle
1951 .update(&mut cx, |repository_handle, _| {
1952 repository_handle.change_branch(branch_name)
1953 })?
1954 .await??;
1955
1956 Ok(proto::Ack {})
1957 }
1958
1959 async fn handle_rename_branch(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::GitRenameBranch>,
1962 mut cx: AsyncApp,
1963 ) -> Result<proto::Ack> {
1964 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1965 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1966 let branch = envelope.payload.branch;
1967 let new_name = envelope.payload.new_name;
1968
1969 repository_handle
1970 .update(&mut cx, |repository_handle, _| {
1971 repository_handle.rename_branch(branch, new_name)
1972 })?
1973 .await??;
1974
1975 Ok(proto::Ack {})
1976 }
1977
1978 async fn handle_show(
1979 this: Entity<Self>,
1980 envelope: TypedEnvelope<proto::GitShow>,
1981 mut cx: AsyncApp,
1982 ) -> Result<proto::GitCommitDetails> {
1983 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1984 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1985
1986 let commit = repository_handle
1987 .update(&mut cx, |repository_handle, _| {
1988 repository_handle.show(envelope.payload.commit)
1989 })?
1990 .await??;
1991 Ok(proto::GitCommitDetails {
1992 sha: commit.sha.into(),
1993 message: commit.message.into(),
1994 commit_timestamp: commit.commit_timestamp,
1995 author_email: commit.author_email.into(),
1996 author_name: commit.author_name.into(),
1997 })
1998 }
1999
2000 async fn handle_load_commit_diff(
2001 this: Entity<Self>,
2002 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2003 mut cx: AsyncApp,
2004 ) -> Result<proto::LoadCommitDiffResponse> {
2005 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2006 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2007
2008 let commit_diff = repository_handle
2009 .update(&mut cx, |repository_handle, _| {
2010 repository_handle.load_commit_diff(envelope.payload.commit)
2011 })?
2012 .await??;
2013 Ok(proto::LoadCommitDiffResponse {
2014 files: commit_diff
2015 .files
2016 .into_iter()
2017 .map(|file| proto::CommitFile {
2018 path: file.path.to_proto(),
2019 old_text: file.old_text,
2020 new_text: file.new_text,
2021 })
2022 .collect(),
2023 })
2024 }
2025
2026 async fn handle_reset(
2027 this: Entity<Self>,
2028 envelope: TypedEnvelope<proto::GitReset>,
2029 mut cx: AsyncApp,
2030 ) -> Result<proto::Ack> {
2031 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2032 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2033
2034 let mode = match envelope.payload.mode() {
2035 git_reset::ResetMode::Soft => ResetMode::Soft,
2036 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2037 };
2038
2039 repository_handle
2040 .update(&mut cx, |repository_handle, cx| {
2041 repository_handle.reset(envelope.payload.commit, mode, cx)
2042 })?
2043 .await??;
2044 Ok(proto::Ack {})
2045 }
2046
2047 async fn handle_checkout_files(
2048 this: Entity<Self>,
2049 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2050 mut cx: AsyncApp,
2051 ) -> Result<proto::Ack> {
2052 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2053 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2054 let paths = envelope
2055 .payload
2056 .paths
2057 .iter()
2058 .map(|s| RepoPath::from_proto(s))
2059 .collect::<Result<Vec<_>>>()?;
2060
2061 repository_handle
2062 .update(&mut cx, |repository_handle, cx| {
2063 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2064 })?
2065 .await??;
2066 Ok(proto::Ack {})
2067 }
2068
2069 async fn handle_open_commit_message_buffer(
2070 this: Entity<Self>,
2071 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2072 mut cx: AsyncApp,
2073 ) -> Result<proto::OpenBufferResponse> {
2074 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2075 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2076 let buffer = repository
2077 .update(&mut cx, |repository, cx| {
2078 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2079 })?
2080 .await?;
2081
2082 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2083 this.update(&mut cx, |this, cx| {
2084 this.buffer_store.update(cx, |buffer_store, cx| {
2085 buffer_store
2086 .create_buffer_for_peer(
2087 &buffer,
2088 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2089 cx,
2090 )
2091 .detach_and_log_err(cx);
2092 })
2093 })?;
2094
2095 Ok(proto::OpenBufferResponse {
2096 buffer_id: buffer_id.to_proto(),
2097 })
2098 }
2099
2100 async fn handle_askpass(
2101 this: Entity<Self>,
2102 envelope: TypedEnvelope<proto::AskPassRequest>,
2103 mut cx: AsyncApp,
2104 ) -> Result<proto::AskPassResponse> {
2105 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2106 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2107
2108 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2109 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2110 debug_panic!("no askpass found");
2111 anyhow::bail!("no askpass found");
2112 };
2113
2114 let response = askpass
2115 .ask_password(envelope.payload.prompt)
2116 .await
2117 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2118
2119 delegates
2120 .lock()
2121 .insert(envelope.payload.askpass_id, askpass);
2122
        // Despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker, we don't quite know
        // what we're doing here: the askpass password is sent back unencrypted.
2124 Ok(proto::AskPassResponse {
2125 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2126 })
2127 }
2128
2129 async fn handle_check_for_pushed_commits(
2130 this: Entity<Self>,
2131 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2132 mut cx: AsyncApp,
2133 ) -> Result<proto::CheckForPushedCommitsResponse> {
2134 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2135 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2136
2137 let branches = repository_handle
2138 .update(&mut cx, |repository_handle, _| {
2139 repository_handle.check_for_pushed_commits()
2140 })?
2141 .await??;
2142 Ok(proto::CheckForPushedCommitsResponse {
2143 pushed_to: branches
2144 .into_iter()
                .map(|branch| branch.to_string())
2146 .collect(),
2147 })
2148 }
2149
2150 async fn handle_git_diff(
2151 this: Entity<Self>,
2152 envelope: TypedEnvelope<proto::GitDiff>,
2153 mut cx: AsyncApp,
2154 ) -> Result<proto::GitDiffResponse> {
2155 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2156 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2157 let diff_type = match envelope.payload.diff_type() {
2158 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2159 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2160 };
2161
2162 let mut diff = repository_handle
2163 .update(&mut cx, |repository_handle, cx| {
2164 repository_handle.diff(diff_type, cx)
2165 })?
2166 .await??;
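        // Cap the response at roughly one million characters, presumably to keep the
        // RPC payload size bounded.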
2167 const ONE_MB: usize = 1_000_000;
2168 if diff.len() > ONE_MB {
2169 diff = diff.chars().take(ONE_MB).collect()
2170 }
2171
2172 Ok(proto::GitDiffResponse { diff })
2173 }
2174
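    /// Handles a request from a remote peer to open the unstaged diff for a buffer.
    /// The diff is recorded under the requesting peer in `shared_diffs`, and the
    /// current index (staged) text is returned in the response.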
2175 async fn handle_open_unstaged_diff(
2176 this: Entity<Self>,
2177 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2178 mut cx: AsyncApp,
2179 ) -> Result<proto::OpenUnstagedDiffResponse> {
2180 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2181 let diff = this
2182 .update(&mut cx, |this, cx| {
2183 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2184 Some(this.open_unstaged_diff(buffer, cx))
2185 })?
2186 .context("missing buffer")?
2187 .await?;
2188 this.update(&mut cx, |this, _| {
2189 let shared_diffs = this
2190 .shared_diffs
2191 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2192 .or_default();
2193 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2194 })?;
2195 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2196 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2197 }
2198
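    /// Handles a request from a remote peer to open the uncommitted diff for a buffer.
    /// The response's mode tells the peer whether the index text matched HEAD (in which
    /// case only the committed text is sent) or whether separate staged and committed
    /// texts apply.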
2199 async fn handle_open_uncommitted_diff(
2200 this: Entity<Self>,
2201 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2202 mut cx: AsyncApp,
2203 ) -> Result<proto::OpenUncommittedDiffResponse> {
2204 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2205 let diff = this
2206 .update(&mut cx, |this, cx| {
2207 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2208 Some(this.open_uncommitted_diff(buffer, cx))
2209 })?
2210 .context("missing buffer")?
2211 .await?;
2212 this.update(&mut cx, |this, _| {
2213 let shared_diffs = this
2214 .shared_diffs
2215 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2216 .or_default();
2217 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2218 })?;
2219 diff.read_with(&cx, |diff, cx| {
2220 use proto::open_uncommitted_diff_response::Mode;
2221
2222 let unstaged_diff = diff.secondary_diff();
2223 let index_snapshot = unstaged_diff.and_then(|diff| {
2224 let diff = diff.read(cx);
2225 diff.base_text_exists().then(|| diff.base_text())
2226 });
2227
2228 let mode;
2229 let staged_text;
2230 let committed_text;
2231 if diff.base_text_exists() {
2232 let committed_snapshot = diff.base_text();
2233 committed_text = Some(committed_snapshot.text());
2234 if let Some(index_text) = index_snapshot {
2235 if index_text.remote_id() == committed_snapshot.remote_id() {
2236 mode = Mode::IndexMatchesHead;
2237 staged_text = None;
2238 } else {
2239 mode = Mode::IndexAndHead;
2240 staged_text = Some(index_text.text());
2241 }
2242 } else {
2243 mode = Mode::IndexAndHead;
2244 staged_text = None;
2245 }
2246 } else {
2247 mode = Mode::IndexAndHead;
2248 committed_text = None;
2249 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2250 }
2251
2252 proto::OpenUncommittedDiffResponse {
2253 committed_text,
2254 staged_text,
2255 mode: mode.into(),
2256 }
2257 })
2258 }
2259
2260 async fn handle_update_diff_bases(
2261 this: Entity<Self>,
2262 request: TypedEnvelope<proto::UpdateDiffBases>,
2263 mut cx: AsyncApp,
2264 ) -> Result<()> {
2265 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2266 this.update(&mut cx, |this, cx| {
2267 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2268 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2269 {
2270 let buffer = buffer.read(cx).text_snapshot();
2271 diff_state.update(cx, |diff_state, cx| {
2272 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2273 })
2274 }
2275 })
2276 }
2277
2278 async fn handle_blame_buffer(
2279 this: Entity<Self>,
2280 envelope: TypedEnvelope<proto::BlameBuffer>,
2281 mut cx: AsyncApp,
2282 ) -> Result<proto::BlameBufferResponse> {
2283 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2284 let version = deserialize_version(&envelope.payload.version);
2285 let buffer = this.read_with(&cx, |this, cx| {
2286 this.buffer_store.read(cx).get_existing(buffer_id)
2287 })??;
2288 buffer
2289 .update(&mut cx, |buffer, _| {
2290 buffer.wait_for_version(version.clone())
2291 })?
2292 .await?;
2293 let blame = this
2294 .update(&mut cx, |this, cx| {
2295 this.blame_buffer(&buffer, Some(version), cx)
2296 })?
2297 .await?;
2298 Ok(serialize_blame_buffer_response(blame))
2299 }
2300
2301 async fn handle_get_permalink_to_line(
2302 this: Entity<Self>,
2303 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2304 mut cx: AsyncApp,
2305 ) -> Result<proto::GetPermalinkToLineResponse> {
2306 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2307 // let version = deserialize_version(&envelope.payload.version);
2308 let selection = {
2309 let proto_selection = envelope
2310 .payload
2311 .selection
                .context("no selection defined to get permalink for")?;
2313 proto_selection.start as u32..proto_selection.end as u32
2314 };
2315 let buffer = this.read_with(&cx, |this, cx| {
2316 this.buffer_store.read(cx).get_existing(buffer_id)
2317 })??;
2318 let permalink = this
2319 .update(&mut cx, |this, cx| {
2320 this.get_permalink_to_line(&buffer, selection, cx)
2321 })?
2322 .await?;
2323 Ok(proto::GetPermalinkToLineResponse {
2324 permalink: permalink.to_string(),
2325 })
2326 }
2327
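    /// Resolves the repository entity referenced by an incoming request, failing if the
    /// id is not known to this store.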
2328 fn repository_for_request(
2329 this: &Entity<Self>,
2330 id: RepositoryId,
2331 cx: &mut AsyncApp,
2332 ) -> Result<Entity<Repository>> {
2333 this.read_with(cx, |this, _| {
2334 this.repositories
2335 .get(&id)
2336 .context("missing repository handle")
2337 .cloned()
2338 })?
2339 }
2340
2341 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2342 self.repositories
2343 .iter()
2344 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2345 .collect()
2346 }
2347
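    /// Groups updated worktree entries by the repository whose working directory
    /// contains them, returning a task that resolves to repo-relative paths per
    /// repository. When repositories are nested, each path is attributed to its
    /// innermost repository.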
2348 fn process_updated_entries(
2349 &self,
2350 worktree: &Entity<Worktree>,
2351 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2352 cx: &mut App,
2353 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2354 let path_style = worktree.read(cx).path_style();
2355 let mut repo_paths = self
2356 .repositories
2357 .values()
2358 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2359 .collect::<Vec<_>>();
2360 let mut entries: Vec<_> = updated_entries
2361 .iter()
2362 .map(|(path, _, _)| path.clone())
2363 .collect();
2364 entries.sort();
2365 let worktree = worktree.read(cx);
2366
2367 let entries = entries
2368 .into_iter()
2369 .map(|path| worktree.absolutize(&path))
2370 .collect::<Arc<[_]>>();
2371
2372 let executor = cx.background_executor().clone();
2373 cx.background_executor().spawn(async move {
2374 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2375 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2376 let mut tasks = FuturesOrdered::new();
2377 for (repo_path, repo) in repo_paths.into_iter().rev() {
2378 let entries = entries.clone();
2379 let task = executor.spawn(async move {
2380 // Find all repository paths that belong to this repo
2381 let mut ix = entries.partition_point(|path| path < &*repo_path);
2382 if ix == entries.len() {
2383 return None;
2384 };
2385
2386 let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a contiguous range.
2388 while let Some(path) = entries.get(ix)
2389 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2390 &repo_path, path, path_style,
2391 )
2392 {
2393 paths.push((repo_path, ix));
2394 ix += 1;
2395 }
2396 if paths.is_empty() {
2397 None
2398 } else {
2399 Some((repo, paths))
2400 }
2401 });
2402 tasks.push_back(task);
2403 }
2404
2405 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2406 let mut path_was_used = vec![false; entries.len()];
2407 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were queued in reverse path order, so more deeply nested repositories come first.
            // We always want to assign a path to its innermost repository.
2410 for t in tasks {
2411 let Some((repo, paths)) = t else {
2412 continue;
2413 };
2414 let entry = paths_by_git_repo.entry(repo).or_default();
2415 for (repo_path, ix) in paths {
2416 if path_was_used[ix] {
2417 continue;
2418 }
2419 path_was_used[ix] = true;
2420 entry.push(repo_path);
2421 }
2422 }
2423
2424 paths_by_git_repo
2425 })
2426 }
2427}
2428
2429impl BufferGitState {
2430 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2431 Self {
2432 unstaged_diff: Default::default(),
2433 uncommitted_diff: Default::default(),
2434 recalculate_diff_task: Default::default(),
2435 language: Default::default(),
2436 language_registry: Default::default(),
2437 recalculating_tx: postage::watch::channel_with(false).0,
2438 hunk_staging_operation_count: 0,
2439 hunk_staging_operation_count_as_of_write: 0,
2440 head_text: Default::default(),
2441 index_text: Default::default(),
2442 head_changed: Default::default(),
2443 index_changed: Default::default(),
2444 language_changed: Default::default(),
2445 conflict_updated_futures: Default::default(),
2446 conflict_set: Default::default(),
2447 reparse_conflict_markers_task: Default::default(),
2448 }
2449 }
2450
2451 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2452 self.language = buffer.read(cx).language().cloned();
2453 self.language_changed = true;
2454 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2455 }
2456
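    /// Re-parses conflict markers from the given buffer snapshot on the background
    /// executor and applies the result to the associated `ConflictSet`. The returned
    /// receiver fires once the update has been applied; if the buffer had no conflicts
    /// previously (or the conflict set is gone), no reparse is scheduled.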
2457 fn reparse_conflict_markers(
2458 &mut self,
2459 buffer: text::BufferSnapshot,
2460 cx: &mut Context<Self>,
2461 ) -> oneshot::Receiver<()> {
2462 let (tx, rx) = oneshot::channel();
2463
2464 let Some(conflict_set) = self
2465 .conflict_set
2466 .as_ref()
2467 .and_then(|conflict_set| conflict_set.upgrade())
2468 else {
2469 return rx;
2470 };
2471
2472 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2473 if conflict_set.has_conflict {
2474 Some(conflict_set.snapshot())
2475 } else {
2476 None
2477 }
2478 });
2479
2480 if let Some(old_snapshot) = old_snapshot {
2481 self.conflict_updated_futures.push(tx);
2482 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2483 let (snapshot, changed_range) = cx
2484 .background_spawn(async move {
2485 let new_snapshot = ConflictSet::parse(&buffer);
2486 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2487 (new_snapshot, changed_range)
2488 })
2489 .await;
2490 this.update(cx, |this, cx| {
2491 if let Some(conflict_set) = &this.conflict_set {
2492 conflict_set
2493 .update(cx, |conflict_set, cx| {
2494 conflict_set.set_snapshot(snapshot, changed_range, cx);
2495 })
2496 .ok();
2497 }
2498 let futures = std::mem::take(&mut this.conflict_updated_futures);
2499 for tx in futures {
2500 tx.send(()).ok();
2501 }
2502 })
2503 }))
2504 }
2505
2506 rx
2507 }
2508
2509 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2510 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2511 }
2512
2513 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2514 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2515 }
2516
2517 fn handle_base_texts_updated(
2518 &mut self,
2519 buffer: text::BufferSnapshot,
2520 message: proto::UpdateDiffBases,
2521 cx: &mut Context<Self>,
2522 ) {
2523 use proto::update_diff_bases::Mode;
2524
2525 let Some(mode) = Mode::from_i32(message.mode) else {
2526 return;
2527 };
2528
2529 let diff_bases_change = match mode {
2530 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2531 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2532 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2533 Mode::IndexAndHead => DiffBasesChange::SetEach {
2534 index: message.staged_text,
2535 head: message.committed_text,
2536 },
2537 };
2538
2539 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2540 }
2541
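    /// If a diff recalculation is currently in progress, returns a future that resolves
    /// once recalculation has finished; otherwise returns `None`.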
2542 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2543 if *self.recalculating_tx.borrow() {
2544 let mut rx = self.recalculating_tx.subscribe();
2545 Some(async move {
2546 loop {
2547 let is_recalculating = rx.recv().await;
2548 if is_recalculating != Some(true) {
2549 break;
2550 }
2551 }
2552 })
2553 } else {
2554 None
2555 }
2556 }
2557
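    /// Records new HEAD and/or index base texts, normalizing their line endings, and
    /// then recalculates the diffs against the given buffer snapshot.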
2558 fn diff_bases_changed(
2559 &mut self,
2560 buffer: text::BufferSnapshot,
2561 diff_bases_change: Option<DiffBasesChange>,
2562 cx: &mut Context<Self>,
2563 ) {
2564 match diff_bases_change {
2565 Some(DiffBasesChange::SetIndex(index)) => {
2566 self.index_text = index.map(|mut index| {
2567 text::LineEnding::normalize(&mut index);
2568 Arc::new(index)
2569 });
2570 self.index_changed = true;
2571 }
2572 Some(DiffBasesChange::SetHead(head)) => {
2573 self.head_text = head.map(|mut head| {
2574 text::LineEnding::normalize(&mut head);
2575 Arc::new(head)
2576 });
2577 self.head_changed = true;
2578 }
2579 Some(DiffBasesChange::SetBoth(text)) => {
2580 let text = text.map(|mut text| {
2581 text::LineEnding::normalize(&mut text);
2582 Arc::new(text)
2583 });
2584 self.head_text = text.clone();
2585 self.index_text = text;
2586 self.head_changed = true;
2587 self.index_changed = true;
2588 }
2589 Some(DiffBasesChange::SetEach { index, head }) => {
2590 self.index_text = index.map(|mut index| {
2591 text::LineEnding::normalize(&mut index);
2592 Arc::new(index)
2593 });
2594 self.index_changed = true;
2595 self.head_text = head.map(|mut head| {
2596 text::LineEnding::normalize(&mut head);
2597 Arc::new(head)
2598 });
2599 self.head_changed = true;
2600 }
2601 None => {}
2602 }
2603
2604 self.recalculate_diffs(buffer, cx)
2605 }
2606
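    /// Recomputes the unstaged and uncommitted diffs for `buffer` in a spawned task.
    /// When the index matches HEAD, the unstaged diff snapshot is reused for the
    /// uncommitted diff. The task bails out early if new hunk staging operations have
    /// occurred since the base texts were read, leaving a later recalculation to pick
    /// up the settled state of the index.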
2607 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2608 *self.recalculating_tx.borrow_mut() = true;
2609
2610 let language = self.language.clone();
2611 let language_registry = self.language_registry.clone();
2612 let unstaged_diff = self.unstaged_diff();
2613 let uncommitted_diff = self.uncommitted_diff();
2614 let head = self.head_text.clone();
2615 let index = self.index_text.clone();
2616 let index_changed = self.index_changed;
2617 let head_changed = self.head_changed;
2618 let language_changed = self.language_changed;
2619 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2620 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2621 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2622 (None, None) => true,
2623 _ => false,
2624 };
2625 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2626 log::debug!(
2627 "start recalculating diffs for buffer {}",
2628 buffer.remote_id()
2629 );
2630
2631 let mut new_unstaged_diff = None;
2632 if let Some(unstaged_diff) = &unstaged_diff {
2633 new_unstaged_diff = Some(
2634 BufferDiff::update_diff(
2635 unstaged_diff.clone(),
2636 buffer.clone(),
2637 index,
2638 index_changed,
2639 language_changed,
2640 language.clone(),
2641 language_registry.clone(),
2642 cx,
2643 )
2644 .await?,
2645 );
2646 }
2647
2648 let mut new_uncommitted_diff = None;
2649 if let Some(uncommitted_diff) = &uncommitted_diff {
2650 new_uncommitted_diff = if index_matches_head {
2651 new_unstaged_diff.clone()
2652 } else {
2653 Some(
2654 BufferDiff::update_diff(
2655 uncommitted_diff.clone(),
2656 buffer.clone(),
2657 head,
2658 head_changed,
2659 language_changed,
2660 language.clone(),
2661 language_registry.clone(),
2662 cx,
2663 )
2664 .await?,
2665 )
2666 }
2667 }
2668
2669 let cancel = this.update(cx, |this, _| {
2670 // This checks whether all pending stage/unstage operations
2671 // have quiesced (i.e. both the corresponding write and the
2672 // read of that write have completed). If not, then we cancel
2673 // this recalculation attempt to avoid invalidating pending
2674 // state too quickly; another recalculation will come along
2675 // later and clear the pending state once the state of the index has settled.
2676 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2677 *this.recalculating_tx.borrow_mut() = false;
2678 true
2679 } else {
2680 false
2681 }
2682 })?;
2683 if cancel {
2684 log::debug!(
2685 concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
2688 ),
2689 buffer.remote_id()
2690 );
2691 return Ok(());
2692 }
2693
2694 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2695 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2696 {
2697 unstaged_diff.update(cx, |diff, cx| {
2698 if language_changed {
2699 diff.language_changed(cx);
2700 }
2701 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2702 })?
2703 } else {
2704 None
2705 };
2706
2707 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2708 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2709 {
2710 uncommitted_diff.update(cx, |diff, cx| {
2711 if language_changed {
2712 diff.language_changed(cx);
2713 }
2714 diff.set_snapshot_with_secondary(
2715 new_uncommitted_diff,
2716 &buffer,
2717 unstaged_changed_range,
2718 true,
2719 cx,
2720 );
2721 })?;
2722 }
2723
2724 log::debug!(
2725 "finished recalculating diffs for buffer {}",
2726 buffer.remote_id()
2727 );
2728
2729 if let Some(this) = this.upgrade() {
2730 this.update(cx, |this, _| {
2731 this.index_changed = false;
2732 this.head_changed = false;
2733 this.language_changed = false;
2734 *this.recalculating_tx.borrow_mut() = false;
2735 })?;
2736 }
2737
2738 Ok(())
2739 }));
2740 }
2741}
2742
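/// Creates an `AskPassDelegate` that forwards git credential prompts to the downstream
/// client over RPC and hands the returned password back to the pending git operation,
/// zeroizing the plaintext response afterwards.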
2743fn make_remote_delegate(
2744 this: Entity<GitStore>,
2745 project_id: u64,
2746 repository_id: RepositoryId,
2747 askpass_id: u64,
2748 cx: &mut AsyncApp,
2749) -> AskPassDelegate {
2750 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2751 this.update(cx, |this, cx| {
2752 let Some((client, _)) = this.downstream_client() else {
2753 return;
2754 };
2755 let response = client.request(proto::AskPassRequest {
2756 project_id,
2757 repository_id: repository_id.to_proto(),
2758 askpass_id,
2759 prompt,
2760 });
2761 cx.spawn(async move |_, _| {
2762 let mut response = response.await?.response;
2763 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2764 .ok();
2765 response.zeroize();
2766 anyhow::Ok(())
2767 })
2768 .detach_and_log_err(cx);
2769 })
2770 .log_err();
2771 })
2772}
2773
2774impl RepositoryId {
2775 pub fn to_proto(self) -> u64 {
2776 self.0
2777 }
2778
2779 pub fn from_proto(id: u64) -> Self {
2780 RepositoryId(id)
2781 }
2782}
2783
2784impl RepositorySnapshot {
2785 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2786 Self {
2787 id,
2788 statuses_by_path: Default::default(),
2789 work_directory_abs_path,
2790 branch: None,
2791 head_commit: None,
2792 scan_id: 0,
2793 merge: Default::default(),
2794 remote_origin_url: None,
2795 remote_upstream_url: None,
2796 stash_entries: Default::default(),
2797 path_style,
2798 }
2799 }
2800
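    /// Builds the full `UpdateRepository` message sent when this repository is first
    /// shared with a downstream client, including every status entry, conflicted path,
    /// and stash entry.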
2801 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2802 proto::UpdateRepository {
2803 branch_summary: self.branch.as_ref().map(branch_to_proto),
2804 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2805 updated_statuses: self
2806 .statuses_by_path
2807 .iter()
2808 .map(|entry| entry.to_proto())
2809 .collect(),
2810 removed_statuses: Default::default(),
2811 current_merge_conflicts: self
2812 .merge
2813 .conflicted_paths
2814 .iter()
2815 .map(|repo_path| repo_path.to_proto())
2816 .collect(),
2817 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2818 project_id,
2819 id: self.id.to_proto(),
2820 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2821 entry_ids: vec![self.id.to_proto()],
2822 scan_id: self.scan_id,
2823 is_last_update: true,
2824 stash_entries: self
2825 .stash_entries
2826 .entries
2827 .iter()
2828 .map(stash_to_proto)
2829 .collect(),
2830 }
2831 }
2832
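    /// Produces an incremental `UpdateRepository` message by walking this snapshot's
    /// status entries and `old`'s in lockstep (both are ordered by repo path), emitting
    /// updated entries for additions and changes and removed entries for deletions.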
2833 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2834 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2835 let mut removed_statuses: Vec<String> = Vec::new();
2836
2837 let mut new_statuses = self.statuses_by_path.iter().peekable();
2838 let mut old_statuses = old.statuses_by_path.iter().peekable();
2839
2840 let mut current_new_entry = new_statuses.next();
2841 let mut current_old_entry = old_statuses.next();
2842 loop {
2843 match (current_new_entry, current_old_entry) {
2844 (Some(new_entry), Some(old_entry)) => {
2845 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2846 Ordering::Less => {
2847 updated_statuses.push(new_entry.to_proto());
2848 current_new_entry = new_statuses.next();
2849 }
2850 Ordering::Equal => {
2851 if new_entry.status != old_entry.status {
2852 updated_statuses.push(new_entry.to_proto());
2853 }
2854 current_old_entry = old_statuses.next();
2855 current_new_entry = new_statuses.next();
2856 }
2857 Ordering::Greater => {
2858 removed_statuses.push(old_entry.repo_path.to_proto());
2859 current_old_entry = old_statuses.next();
2860 }
2861 }
2862 }
2863 (None, Some(old_entry)) => {
2864 removed_statuses.push(old_entry.repo_path.to_proto());
2865 current_old_entry = old_statuses.next();
2866 }
2867 (Some(new_entry), None) => {
2868 updated_statuses.push(new_entry.to_proto());
2869 current_new_entry = new_statuses.next();
2870 }
2871 (None, None) => break,
2872 }
2873 }
2874
2875 proto::UpdateRepository {
2876 branch_summary: self.branch.as_ref().map(branch_to_proto),
2877 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2878 updated_statuses,
2879 removed_statuses,
2880 current_merge_conflicts: self
2881 .merge
2882 .conflicted_paths
2883 .iter()
2884 .map(|path| path.to_proto())
2885 .collect(),
2886 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2887 project_id,
2888 id: self.id.to_proto(),
2889 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2890 entry_ids: vec![],
2891 scan_id: self.scan_id,
2892 is_last_update: true,
2893 stash_entries: self
2894 .stash_entries
2895 .entries
2896 .iter()
2897 .map(stash_to_proto)
2898 .collect(),
2899 }
2900 }
2901
2902 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2903 self.statuses_by_path.iter().cloned()
2904 }
2905
2906 pub fn status_summary(&self) -> GitSummary {
2907 self.statuses_by_path.summary().item_summary
2908 }
2909
2910 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2911 self.statuses_by_path
2912 .get(&PathKey(path.0.clone()), ())
2913 .cloned()
2914 }
2915
2916 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2917 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
2918 }
2919
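    /// Converts an absolute path into a path relative to the repository's working
    /// directory, returning `None` if the path lies outside of it.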
2920 #[inline]
2921 fn abs_path_to_repo_path_inner(
2922 work_directory_abs_path: &Path,
2923 abs_path: &Path,
2924 path_style: PathStyle,
2925 ) -> Option<RepoPath> {
2926 abs_path
2927 .strip_prefix(&work_directory_abs_path)
2928 .ok()
2929 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
2930 }
2931
2932 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2933 self.merge.conflicted_paths.contains(repo_path)
2934 }
2935
2936 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2937 let had_conflict_on_last_merge_head_change =
2938 self.merge.conflicted_paths.contains(repo_path);
2939 let has_conflict_currently = self
2940 .status_for_path(repo_path)
2941 .is_some_and(|entry| entry.status.is_conflicted());
2942 had_conflict_on_last_merge_head_change || has_conflict_currently
2943 }
2944
2945 /// This is the name that will be displayed in the repository selector for this repository.
2946 pub fn display_name(&self) -> SharedString {
2947 self.work_directory_abs_path
2948 .file_name()
2949 .unwrap_or_default()
2950 .to_string_lossy()
2951 .to_string()
2952 .into()
2953 }
2954}
2955
2956pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2957 proto::StashEntry {
2958 oid: entry.oid.as_bytes().to_vec(),
2959 message: entry.message.clone(),
2960 branch: entry.branch.clone(),
2961 index: entry.index as u64,
2962 timestamp: entry.timestamp,
2963 }
2964}
2965
2966pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2967 Ok(StashEntry {
2968 oid: Oid::from_bytes(&entry.oid)?,
2969 message: entry.message.clone(),
2970 index: entry.index as usize,
2971 branch: entry.branch.clone(),
2972 timestamp: entry.timestamp,
2973 })
2974}
2975
2976impl MergeDetails {
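    /// Reads the in-progress operation heads (MERGE_HEAD, CHERRY_PICK_HEAD, and so on)
    /// and, when they have changed, recomputes the set of conflicted paths from the
    /// current status. Returns the new merge details along with whether the merge heads
    /// changed.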
2977 async fn load(
2978 backend: &Arc<dyn GitRepository>,
2979 status: &SumTree<StatusEntry>,
2980 prev_snapshot: &RepositorySnapshot,
2981 ) -> Result<(MergeDetails, bool)> {
2982 log::debug!("load merge details");
2983 let message = backend.merge_message().await;
2984 let heads = backend
2985 .revparse_batch(vec![
2986 "MERGE_HEAD".into(),
2987 "CHERRY_PICK_HEAD".into(),
2988 "REBASE_HEAD".into(),
2989 "REVERT_HEAD".into(),
2990 "APPLY_HEAD".into(),
2991 ])
2992 .await
2993 .log_err()
2994 .unwrap_or_default()
2995 .into_iter()
2996 .map(|opt| opt.map(SharedString::from))
2997 .collect::<Vec<_>>();
2998 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2999 let conflicted_paths = if merge_heads_changed {
3000 let current_conflicted_paths = TreeSet::from_ordered_entries(
3001 status
3002 .iter()
3003 .filter(|entry| entry.status.is_conflicted())
3004 .map(|entry| entry.repo_path.clone()),
3005 );
3006
3007 // It can happen that we run a scan while a lengthy merge is in progress
3008 // that will eventually result in conflicts, but before those conflicts
3009 // are reported by `git status`. Since for the moment we only care about
3010 // the merge heads state for the purposes of tracking conflicts, don't update
3011 // this state until we see some conflicts.
3012 if heads.iter().any(Option::is_some)
3013 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3014 && current_conflicted_paths.is_empty()
3015 {
3016 log::debug!("not updating merge heads because no conflicts found");
3017 return Ok((
3018 MergeDetails {
3019 message: message.map(SharedString::from),
3020 ..prev_snapshot.merge.clone()
3021 },
3022 false,
3023 ));
3024 }
3025
3026 current_conflicted_paths
3027 } else {
3028 prev_snapshot.merge.conflicted_paths.clone()
3029 };
3030 let details = MergeDetails {
3031 conflicted_paths,
3032 message: message.map(SharedString::from),
3033 heads,
3034 };
3035 Ok((details, merge_heads_changed))
3036 }
3037}
3038
3039impl Repository {
3040 pub fn snapshot(&self) -> RepositorySnapshot {
3041 self.snapshot.clone()
3042 }
3043
3044 fn local(
3045 id: RepositoryId,
3046 work_directory_abs_path: Arc<Path>,
3047 dot_git_abs_path: Arc<Path>,
3048 repository_dir_abs_path: Arc<Path>,
3049 common_dir_abs_path: Arc<Path>,
3050 project_environment: WeakEntity<ProjectEnvironment>,
3051 fs: Arc<dyn Fs>,
3052 git_store: WeakEntity<GitStore>,
3053 cx: &mut Context<Self>,
3054 ) -> Self {
3055 let snapshot =
3056 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3057 Repository {
3058 this: cx.weak_entity(),
3059 git_store,
3060 snapshot,
3061 commit_message_buffer: None,
3062 askpass_delegates: Default::default(),
3063 paths_needing_status_update: Default::default(),
3064 latest_askpass_id: 0,
3065 job_sender: Repository::spawn_local_git_worker(
3066 work_directory_abs_path,
3067 dot_git_abs_path,
3068 repository_dir_abs_path,
3069 common_dir_abs_path,
3070 project_environment,
3071 fs,
3072 cx,
3073 ),
3074 job_id: 0,
3075 active_jobs: Default::default(),
3076 }
3077 }
3078
3079 fn remote(
3080 id: RepositoryId,
3081 work_directory_abs_path: Arc<Path>,
3082 path_style: PathStyle,
3083 project_id: ProjectId,
3084 client: AnyProtoClient,
3085 git_store: WeakEntity<GitStore>,
3086 cx: &mut Context<Self>,
3087 ) -> Self {
3088 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3089 Self {
3090 this: cx.weak_entity(),
3091 snapshot,
3092 commit_message_buffer: None,
3093 git_store,
3094 paths_needing_status_update: Default::default(),
3095 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3096 askpass_delegates: Default::default(),
3097 latest_askpass_id: 0,
3098 active_jobs: Default::default(),
3099 job_id: 0,
3100 }
3101 }
3102
3103 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3104 self.git_store.upgrade()
3105 }
3106
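    /// Reloads the index and HEAD texts for every open buffer belonging to this
    /// repository, applies the resulting `DiffBasesChange`s, and forwards them to any
    /// downstream client. For remote repositories this only logs an error.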
3107 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3108 let this = cx.weak_entity();
3109 let git_store = self.git_store.clone();
3110 let _ = self.send_keyed_job(
3111 Some(GitJobKey::ReloadBufferDiffBases),
3112 None,
3113 |state, mut cx| async move {
3114 let RepositoryState::Local { backend, .. } = state else {
3115 log::error!("tried to recompute diffs for a non-local repository");
3116 return Ok(());
3117 };
3118
3119 let Some(this) = this.upgrade() else {
3120 return Ok(());
3121 };
3122
3123 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3124 git_store.update(cx, |git_store, cx| {
3125 git_store
3126 .diffs
3127 .iter()
3128 .filter_map(|(buffer_id, diff_state)| {
3129 let buffer_store = git_store.buffer_store.read(cx);
3130 let buffer = buffer_store.get(*buffer_id)?;
3131 let file = File::from_dyn(buffer.read(cx).file())?;
3132 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3133 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3134 log::debug!(
3135 "start reload diff bases for repo path {}",
3136 repo_path.as_unix_str()
3137 );
3138 diff_state.update(cx, |diff_state, _| {
3139 let has_unstaged_diff = diff_state
3140 .unstaged_diff
3141 .as_ref()
3142 .is_some_and(|diff| diff.is_upgradable());
3143 let has_uncommitted_diff = diff_state
3144 .uncommitted_diff
3145 .as_ref()
3146 .is_some_and(|set| set.is_upgradable());
3147
3148 Some((
3149 buffer,
3150 repo_path,
3151 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3152 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3153 ))
3154 })
3155 })
3156 .collect::<Vec<_>>()
3157 })
3158 })??;
3159
3160 let buffer_diff_base_changes = cx
3161 .background_spawn(async move {
3162 let mut changes = Vec::new();
3163 for (buffer, repo_path, current_index_text, current_head_text) in
3164 &repo_diff_state_updates
3165 {
3166 let index_text = if current_index_text.is_some() {
3167 backend.load_index_text(repo_path.clone()).await
3168 } else {
3169 None
3170 };
3171 let head_text = if current_head_text.is_some() {
3172 backend.load_committed_text(repo_path.clone()).await
3173 } else {
3174 None
3175 };
3176
3177 let change =
3178 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3179 (Some(current_index), Some(current_head)) => {
3180 let index_changed =
3181 index_text.as_ref() != current_index.as_deref();
3182 let head_changed =
3183 head_text.as_ref() != current_head.as_deref();
3184 if index_changed && head_changed {
3185 if index_text == head_text {
3186 Some(DiffBasesChange::SetBoth(head_text))
3187 } else {
3188 Some(DiffBasesChange::SetEach {
3189 index: index_text,
3190 head: head_text,
3191 })
3192 }
3193 } else if index_changed {
3194 Some(DiffBasesChange::SetIndex(index_text))
3195 } else if head_changed {
3196 Some(DiffBasesChange::SetHead(head_text))
3197 } else {
3198 None
3199 }
3200 }
3201 (Some(current_index), None) => {
3202 let index_changed =
3203 index_text.as_ref() != current_index.as_deref();
3204 index_changed
3205 .then_some(DiffBasesChange::SetIndex(index_text))
3206 }
3207 (None, Some(current_head)) => {
3208 let head_changed =
3209 head_text.as_ref() != current_head.as_deref();
3210 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3211 }
3212 (None, None) => None,
3213 };
3214
3215 changes.push((buffer.clone(), change))
3216 }
3217 changes
3218 })
3219 .await;
3220
3221 git_store.update(&mut cx, |git_store, cx| {
3222 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3223 let buffer_snapshot = buffer.read(cx).text_snapshot();
3224 let buffer_id = buffer_snapshot.remote_id();
3225 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3226 continue;
3227 };
3228
3229 let downstream_client = git_store.downstream_client();
3230 diff_state.update(cx, |diff_state, cx| {
3231 use proto::update_diff_bases::Mode;
3232
3233 if let Some((diff_bases_change, (client, project_id))) =
3234 diff_bases_change.clone().zip(downstream_client)
3235 {
3236 let (staged_text, committed_text, mode) = match diff_bases_change {
3237 DiffBasesChange::SetIndex(index) => {
3238 (index, None, Mode::IndexOnly)
3239 }
3240 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3241 DiffBasesChange::SetEach { index, head } => {
3242 (index, head, Mode::IndexAndHead)
3243 }
3244 DiffBasesChange::SetBoth(text) => {
3245 (None, text, Mode::IndexMatchesHead)
3246 }
3247 };
3248 client
3249 .send(proto::UpdateDiffBases {
3250 project_id: project_id.to_proto(),
3251 buffer_id: buffer_id.to_proto(),
3252 staged_text,
3253 committed_text,
3254 mode: mode as i32,
3255 })
3256 .log_err();
3257 }
3258
3259 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3260 });
3261 }
3262 })
3263 },
3264 );
3265 }
3266
3267 pub fn send_job<F, Fut, R>(
3268 &mut self,
3269 status: Option<SharedString>,
3270 job: F,
3271 ) -> oneshot::Receiver<R>
3272 where
3273 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3274 Fut: Future<Output = R> + 'static,
3275 R: Send + 'static,
3276 {
3277 self.send_keyed_job(None, status, job)
3278 }
3279
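    /// Enqueues a job on this repository's worker. The optional status message is
    /// surfaced via `active_jobs` while the job runs, and the result is delivered
    /// through the returned receiver. The optional `GitJobKey` is presumably used by
    /// the worker to coalesce equivalent queued jobs.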
3280 fn send_keyed_job<F, Fut, R>(
3281 &mut self,
3282 key: Option<GitJobKey>,
3283 status: Option<SharedString>,
3284 job: F,
3285 ) -> oneshot::Receiver<R>
3286 where
3287 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3288 Fut: Future<Output = R> + 'static,
3289 R: Send + 'static,
3290 {
3291 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3292 let job_id = post_inc(&mut self.job_id);
3293 let this = self.this.clone();
3294 self.job_sender
3295 .unbounded_send(GitJob {
3296 key,
3297 job: Box::new(move |state, cx: &mut AsyncApp| {
3298 let job = job(state, cx.clone());
3299 cx.spawn(async move |cx| {
3300 if let Some(s) = status.clone() {
3301 this.update(cx, |this, cx| {
3302 this.active_jobs.insert(
3303 job_id,
3304 JobInfo {
3305 start: Instant::now(),
3306 message: s.clone(),
3307 },
3308 );
3309
3310 cx.notify();
3311 })
3312 .ok();
3313 }
3314 let result = job.await;
3315
3316 this.update(cx, |this, cx| {
3317 this.active_jobs.remove(&job_id);
3318 cx.notify();
3319 })
3320 .ok();
3321
3322 result_tx.send(result).ok();
3323 })
3324 }),
3325 })
3326 .ok();
3327 result_rx
3328 }
3329
3330 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3331 let Some(git_store) = self.git_store.upgrade() else {
3332 return;
3333 };
3334 let entity = cx.entity();
3335 git_store.update(cx, |git_store, cx| {
3336 let Some((&id, _)) = git_store
3337 .repositories
3338 .iter()
3339 .find(|(_, handle)| *handle == &entity)
3340 else {
3341 return;
3342 };
3343 git_store.active_repo_id = Some(id);
3344 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3345 });
3346 }
3347
3348 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3349 self.snapshot.status()
3350 }
3351
3352 pub fn cached_stash(&self) -> GitStash {
3353 self.snapshot.stash_entries.clone()
3354 }
3355
3356 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3357 let git_store = self.git_store.upgrade()?;
3358 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3359 let abs_path = self
3360 .snapshot
3361 .work_directory_abs_path
3362 .join(path.as_std_path());
3363 let abs_path = SanitizedPath::new(&abs_path);
3364 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3365 Some(ProjectPath {
3366 worktree_id: worktree.read(cx).id(),
3367 path: relative_path,
3368 })
3369 }
3370
3371 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3372 let git_store = self.git_store.upgrade()?;
3373 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3374 let abs_path = worktree_store.absolutize(path, cx)?;
3375 self.snapshot.abs_path_to_repo_path(&abs_path)
3376 }
3377
3378 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3379 other
3380 .read(cx)
3381 .snapshot
3382 .work_directory_abs_path
3383 .starts_with(&self.snapshot.work_directory_abs_path)
3384 }
3385
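    /// Returns the shared commit message buffer for this repository, creating it on
    /// first use. For remote repositories the buffer is requested from the host; in
    /// either case the "Git Commit" language is applied when a language registry is
    /// available.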
3386 pub fn open_commit_buffer(
3387 &mut self,
3388 languages: Option<Arc<LanguageRegistry>>,
3389 buffer_store: Entity<BufferStore>,
3390 cx: &mut Context<Self>,
3391 ) -> Task<Result<Entity<Buffer>>> {
3392 let id = self.id;
3393 if let Some(buffer) = self.commit_message_buffer.clone() {
3394 return Task::ready(Ok(buffer));
3395 }
3396 let this = cx.weak_entity();
3397
3398 let rx = self.send_job(None, move |state, mut cx| async move {
3399 let Some(this) = this.upgrade() else {
3400 bail!("git store was dropped");
3401 };
3402 match state {
3403 RepositoryState::Local { .. } => {
3404 this.update(&mut cx, |_, cx| {
3405 Self::open_local_commit_buffer(languages, buffer_store, cx)
3406 })?
3407 .await
3408 }
3409 RepositoryState::Remote { project_id, client } => {
3410 let request = client.request(proto::OpenCommitMessageBuffer {
3411 project_id: project_id.0,
3412 repository_id: id.to_proto(),
3413 });
3414 let response = request.await.context("requesting to open commit buffer")?;
3415 let buffer_id = BufferId::new(response.buffer_id)?;
3416 let buffer = buffer_store
3417 .update(&mut cx, |buffer_store, cx| {
3418 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3419 })?
3420 .await?;
3421 if let Some(language_registry) = languages {
3422 let git_commit_language =
3423 language_registry.language_for_name("Git Commit").await?;
3424 buffer.update(&mut cx, |buffer, cx| {
3425 buffer.set_language(Some(git_commit_language), cx);
3426 })?;
3427 }
3428 this.update(&mut cx, |this, _| {
3429 this.commit_message_buffer = Some(buffer.clone());
3430 })?;
3431 Ok(buffer)
3432 }
3433 }
3434 });
3435
3436 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3437 }
3438
3439 fn open_local_commit_buffer(
3440 language_registry: Option<Arc<LanguageRegistry>>,
3441 buffer_store: Entity<BufferStore>,
3442 cx: &mut Context<Self>,
3443 ) -> Task<Result<Entity<Buffer>>> {
3444 cx.spawn(async move |repository, cx| {
3445 let buffer = buffer_store
3446 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3447 .await?;
3448
3449 if let Some(language_registry) = language_registry {
3450 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3451 buffer.update(cx, |buffer, cx| {
3452 buffer.set_language(Some(git_commit_language), cx);
3453 })?;
3454 }
3455
3456 repository.update(cx, |repository, _| {
3457 repository.commit_message_buffer = Some(buffer.clone());
3458 })?;
3459 Ok(buffer)
3460 })
3461 }
3462
3463 pub fn checkout_files(
3464 &mut self,
3465 commit: &str,
3466 paths: Vec<RepoPath>,
3467 _cx: &mut App,
3468 ) -> oneshot::Receiver<Result<()>> {
3469 let commit = commit.to_string();
3470 let id = self.id;
3471
3472 self.send_job(
3473 Some(format!("git checkout {}", commit).into()),
3474 move |git_repo, _| async move {
3475 match git_repo {
3476 RepositoryState::Local {
3477 backend,
3478 environment,
3479 ..
3480 } => {
3481 backend
3482 .checkout_files(commit, paths, environment.clone())
3483 .await
3484 }
3485 RepositoryState::Remote { project_id, client } => {
3486 client
3487 .request(proto::GitCheckoutFiles {
3488 project_id: project_id.0,
3489 repository_id: id.to_proto(),
3490 commit,
3491 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3492 })
3493 .await?;
3494
3495 Ok(())
3496 }
3497 }
3498 },
3499 )
3500 }
3501
3502 pub fn reset(
3503 &mut self,
3504 commit: String,
3505 reset_mode: ResetMode,
3506 _cx: &mut App,
3507 ) -> oneshot::Receiver<Result<()>> {
3508 let id = self.id;
3509
3510 self.send_job(None, move |git_repo, _| async move {
3511 match git_repo {
3512 RepositoryState::Local {
3513 backend,
3514 environment,
3515 ..
3516 } => backend.reset(commit, reset_mode, environment).await,
3517 RepositoryState::Remote { project_id, client } => {
3518 client
3519 .request(proto::GitReset {
3520 project_id: project_id.0,
3521 repository_id: id.to_proto(),
3522 commit,
3523 mode: match reset_mode {
3524 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3525 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3526 },
3527 })
3528 .await?;
3529
3530 Ok(())
3531 }
3532 }
3533 })
3534 }
3535
3536 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3537 let id = self.id;
3538 self.send_job(None, move |git_repo, _cx| async move {
3539 match git_repo {
3540 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3541 RepositoryState::Remote { project_id, client } => {
3542 let resp = client
3543 .request(proto::GitShow {
3544 project_id: project_id.0,
3545 repository_id: id.to_proto(),
3546 commit,
3547 })
3548 .await?;
3549
3550 Ok(CommitDetails {
3551 sha: resp.sha.into(),
3552 message: resp.message.into(),
3553 commit_timestamp: resp.commit_timestamp,
3554 author_email: resp.author_email.into(),
3555 author_name: resp.author_name.into(),
3556 })
3557 }
3558 }
3559 })
3560 }
3561
3562 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3563 let id = self.id;
3564 self.send_job(None, move |git_repo, cx| async move {
3565 match git_repo {
3566 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3567 RepositoryState::Remote {
3568 client, project_id, ..
3569 } => {
3570 let response = client
3571 .request(proto::LoadCommitDiff {
3572 project_id: project_id.0,
3573 repository_id: id.to_proto(),
3574 commit,
3575 })
3576 .await?;
3577 Ok(CommitDiff {
3578 files: response
3579 .files
3580 .into_iter()
3581 .map(|file| {
3582 Ok(CommitFile {
3583 path: RepoPath::from_proto(&file.path)?,
3584 old_text: file.old_text,
3585 new_text: file.new_text,
3586 })
3587 })
3588 .collect::<Result<Vec<_>>>()?,
3589 })
3590 }
3591 }
3592 })
3593 }
3594
3595 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3596 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3597 }
3598
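    /// Stages the given paths, first saving any open buffers for them whose files still
    /// exist on disk, then performing the stage either locally or via an RPC request.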
3599 pub fn stage_entries(
3600 &self,
3601 entries: Vec<RepoPath>,
3602 cx: &mut Context<Self>,
3603 ) -> Task<anyhow::Result<()>> {
3604 if entries.is_empty() {
3605 return Task::ready(Ok(()));
3606 }
3607 let id = self.id;
3608
3609 let mut save_futures = Vec::new();
3610 if let Some(buffer_store) = self.buffer_store(cx) {
3611 buffer_store.update(cx, |buffer_store, cx| {
3612 for path in &entries {
3613 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3614 continue;
3615 };
3616 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3617 && buffer
3618 .read(cx)
3619 .file()
3620 .is_some_and(|file| file.disk_state().exists())
3621 {
3622 save_futures.push(buffer_store.save_buffer(buffer, cx));
3623 }
3624 }
3625 })
3626 }
3627
3628 cx.spawn(async move |this, cx| {
3629 for save_future in save_futures {
3630 save_future.await?;
3631 }
3632
3633 this.update(cx, |this, _| {
3634 this.send_job(None, move |git_repo, _cx| async move {
3635 match git_repo {
3636 RepositoryState::Local {
3637 backend,
3638 environment,
3639 ..
3640 } => backend.stage_paths(entries, environment.clone()).await,
3641 RepositoryState::Remote { project_id, client } => {
3642 client
3643 .request(proto::Stage {
3644 project_id: project_id.0,
3645 repository_id: id.to_proto(),
3646 paths: entries
3647 .into_iter()
3648 .map(|repo_path| repo_path.to_proto())
3649 .collect(),
3650 })
3651 .await
3652 .context("sending stage request")?;
3653
3654 Ok(())
3655 }
3656 }
3657 })
3658 })?
3659 .await??;
3660
3661 Ok(())
3662 })
3663 }
3664
3665 pub fn unstage_entries(
3666 &self,
3667 entries: Vec<RepoPath>,
3668 cx: &mut Context<Self>,
3669 ) -> Task<anyhow::Result<()>> {
3670 if entries.is_empty() {
3671 return Task::ready(Ok(()));
3672 }
3673 let id = self.id;
3674
3675 let mut save_futures = Vec::new();
3676 if let Some(buffer_store) = self.buffer_store(cx) {
3677 buffer_store.update(cx, |buffer_store, cx| {
3678 for path in &entries {
3679 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3680 continue;
3681 };
3682 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3683 && buffer
3684 .read(cx)
3685 .file()
3686 .is_some_and(|file| file.disk_state().exists())
3687 {
3688 save_futures.push(buffer_store.save_buffer(buffer, cx));
3689 }
3690 }
3691 })
3692 }
3693
3694 cx.spawn(async move |this, cx| {
3695 for save_future in save_futures {
3696 save_future.await?;
3697 }
3698
3699 this.update(cx, |this, _| {
3700 this.send_job(None, move |git_repo, _cx| async move {
3701 match git_repo {
3702 RepositoryState::Local {
3703 backend,
3704 environment,
3705 ..
3706 } => backend.unstage_paths(entries, environment).await,
3707 RepositoryState::Remote { project_id, client } => {
3708 client
3709 .request(proto::Unstage {
3710 project_id: project_id.0,
3711 repository_id: id.to_proto(),
3712 paths: entries
3713 .into_iter()
3714 .map(|repo_path| repo_path.to_proto())
3715 .collect(),
3716 })
3717 .await
3718 .context("sending unstage request")?;
3719
3720 Ok(())
3721 }
3722 }
3723 })
3724 })?
3725 .await??;
3726
3727 Ok(())
3728 })
3729 }
3730
3731 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3732 let to_stage = self
3733 .cached_status()
3734 .filter(|entry| !entry.status.staging().is_fully_staged())
3735 .map(|entry| entry.repo_path)
3736 .collect();
3737 self.stage_entries(to_stage, cx)
3738 }
3739
3740 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3741 let to_unstage = self
3742 .cached_status()
3743 .filter(|entry| entry.status.staging().has_staged())
3744 .map(|entry| entry.repo_path)
3745 .collect();
3746 self.unstage_entries(to_unstage, cx)
3747 }
3748
3749 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3750 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3751
3752 self.stash_entries(to_stash, cx)
3753 }
3754
3755 pub fn stash_entries(
3756 &mut self,
3757 entries: Vec<RepoPath>,
3758 cx: &mut Context<Self>,
3759 ) -> Task<anyhow::Result<()>> {
3760 let id = self.id;
3761
3762 cx.spawn(async move |this, cx| {
3763 this.update(cx, |this, _| {
3764 this.send_job(None, move |git_repo, _cx| async move {
3765 match git_repo {
3766 RepositoryState::Local {
3767 backend,
3768 environment,
3769 ..
3770 } => backend.stash_paths(entries, environment).await,
3771 RepositoryState::Remote { project_id, client } => {
3772 client
3773 .request(proto::Stash {
3774 project_id: project_id.0,
3775 repository_id: id.to_proto(),
3776 paths: entries
3777 .into_iter()
3778 .map(|repo_path| repo_path.to_proto())
3779 .collect(),
3780 })
3781 .await
3782 .context("sending stash request")?;
3783 Ok(())
3784 }
3785 }
3786 })
3787 })?
3788 .await??;
3789 Ok(())
3790 })
3791 }
3792
3793 pub fn stash_pop(
3794 &mut self,
3795 index: Option<usize>,
3796 cx: &mut Context<Self>,
3797 ) -> Task<anyhow::Result<()>> {
3798 let id = self.id;
3799 cx.spawn(async move |this, cx| {
3800 this.update(cx, |this, _| {
3801 this.send_job(None, move |git_repo, _cx| async move {
3802 match git_repo {
3803 RepositoryState::Local {
3804 backend,
3805 environment,
3806 ..
3807 } => backend.stash_pop(index, environment).await,
3808 RepositoryState::Remote { project_id, client } => {
3809 client
3810 .request(proto::StashPop {
3811 project_id: project_id.0,
3812 repository_id: id.to_proto(),
3813 stash_index: index.map(|i| i as u64),
3814 })
3815 .await
3816 .context("sending stash pop request")?;
3817 Ok(())
3818 }
3819 }
3820 })
3821 })?
3822 .await??;
3823 Ok(())
3824 })
3825 }
3826
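/// Applies a stash entry without dropping it. When `index` is `None`, the
/// backend's default entry (normally the most recent stash) is used.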
3827 pub fn stash_apply(
3828 &mut self,
3829 index: Option<usize>,
3830 cx: &mut Context<Self>,
3831 ) -> Task<anyhow::Result<()>> {
3832 let id = self.id;
3833 cx.spawn(async move |this, cx| {
3834 this.update(cx, |this, _| {
3835 this.send_job(None, move |git_repo, _cx| async move {
3836 match git_repo {
3837 RepositoryState::Local {
3838 backend,
3839 environment,
3840 ..
3841 } => backend.stash_apply(index, environment).await,
3842 RepositoryState::Remote { project_id, client } => {
3843 client
3844 .request(proto::StashApply {
3845 project_id: project_id.0,
3846 repository_id: id.to_proto(),
3847 stash_index: index.map(|i| i as u64),
3848 })
3849 .await
3850 .context("sending stash apply request")?;
3851 Ok(())
3852 }
3853 }
3854 })
3855 })?
3856 .await??;
3857 Ok(())
3858 })
3859 }
3860
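/// Drops a stash entry. On success for local repositories, the cached stash
/// list is reloaded, a `RepositoryEvent::Updated` is emitted, and the updated
/// snapshot is forwarded to any downstream client.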
3861 pub fn stash_drop(
3862 &mut self,
3863 index: Option<usize>,
3864 cx: &mut Context<Self>,
3865 ) -> oneshot::Receiver<anyhow::Result<()>> {
3866 let id = self.id;
3867 let updates_tx = self
3868 .git_store()
3869 .and_then(|git_store| match &git_store.read(cx).state {
3870 GitStoreState::Local { downstream, .. } => downstream
3871 .as_ref()
3872 .map(|downstream| downstream.updates_tx.clone()),
3873 _ => None,
3874 });
3875 let this = cx.weak_entity();
3876 self.send_job(None, move |git_repo, mut cx| async move {
3877 match git_repo {
3878 RepositoryState::Local {
3879 backend,
3880 environment,
3881 ..
3882 } => {
3883 let result = backend.stash_drop(index, environment).await;
3884 if result.is_ok()
3885 && let Ok(stash_entries) = backend.stash_entries().await
3886 {
3887 let snapshot = this.update(&mut cx, |this, cx| {
3888 this.snapshot.stash_entries = stash_entries;
3889 let snapshot = this.snapshot.clone();
3890 cx.emit(RepositoryEvent::Updated {
3891 full_scan: false,
3892 new_instance: false,
3893 });
3894 snapshot
3895 })?;
3896 if let Some(updates_tx) = updates_tx {
3897 updates_tx
3898 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3899 .ok();
3900 }
3901 }
3902
3903 result
3904 }
3905 RepositoryState::Remote { project_id, client } => {
3906 client
3907 .request(proto::StashDrop {
3908 project_id: project_id.0,
3909 repository_id: id.to_proto(),
3910 stash_index: index.map(|i| i as u64),
3911 })
3912 .await
3913 .context("sending stash pop request")?;
3914 Ok(())
3915 }
3916 }
3917 })
3918 }
3919
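/// Commits the currently staged changes with `message`, optionally overriding
/// the author name/email and honoring `CommitOptions` such as `amend` and `signoff`.
///
/// A minimal illustrative sketch (not a doctest; assumes a `repo: Entity<Repository>`
/// and `message`/`options` values already in scope):
/// ```ignore
/// let rx = repo.update(cx, |repo, cx| repo.commit(message, None, options, cx));
/// // `rx` resolves once the commit has been created or an error is reported.
/// ```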
3920 pub fn commit(
3921 &mut self,
3922 message: SharedString,
3923 name_and_email: Option<(SharedString, SharedString)>,
3924 options: CommitOptions,
3925 _cx: &mut App,
3926 ) -> oneshot::Receiver<Result<()>> {
3927 let id = self.id;
3928
3929 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3930 match git_repo {
3931 RepositoryState::Local {
3932 backend,
3933 environment,
3934 ..
3935 } => {
3936 backend
3937 .commit(message, name_and_email, options, environment)
3938 .await
3939 }
3940 RepositoryState::Remote { project_id, client } => {
3941 let (name, email) = name_and_email.unzip();
3942 client
3943 .request(proto::Commit {
3944 project_id: project_id.0,
3945 repository_id: id.to_proto(),
3946 message: String::from(message),
3947 name: name.map(String::from),
3948 email: email.map(String::from),
3949 options: Some(proto::commit::CommitOptions {
3950 amend: options.amend,
3951 signoff: options.signoff,
3952 }),
3953 })
3954 .await
3955 .context("sending commit request")?;
3956
3957 Ok(())
3958 }
3959 }
3960 })
3961 }
3962
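/// Runs `git fetch` with the given options. Authentication prompts are routed
/// through `askpass`; for remote projects the delegate is registered under a
/// fresh askpass id for the duration of the request.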
3963 pub fn fetch(
3964 &mut self,
3965 fetch_options: FetchOptions,
3966 askpass: AskPassDelegate,
3967 _cx: &mut App,
3968 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3969 let askpass_delegates = self.askpass_delegates.clone();
3970 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3971 let id = self.id;
3972
3973 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3974 match git_repo {
3975 RepositoryState::Local {
3976 backend,
3977 environment,
3978 ..
3979 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3980 RepositoryState::Remote { project_id, client } => {
3981 askpass_delegates.lock().insert(askpass_id, askpass);
3982 let _defer = util::defer(|| {
3983 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3984 debug_assert!(askpass_delegate.is_some());
3985 });
3986
3987 let response = client
3988 .request(proto::Fetch {
3989 project_id: project_id.0,
3990 repository_id: id.to_proto(),
3991 askpass_id,
3992 remote: fetch_options.to_proto(),
3993 })
3994 .await
3995 .context("sending fetch request")?;
3996
3997 Ok(RemoteCommandOutput {
3998 stdout: response.stdout,
3999 stderr: response.stderr,
4000 })
4001 }
4002 }
4003 })
4004 }
4005
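/// Pushes `branch` to `remote`. After a successful local push, the cached
/// branch summary is refreshed, a `RepositoryEvent::Updated` is emitted, and
/// the new snapshot is forwarded to any downstream client.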
4006 pub fn push(
4007 &mut self,
4008 branch: SharedString,
4009 remote: SharedString,
4010 options: Option<PushOptions>,
4011 askpass: AskPassDelegate,
4012 cx: &mut Context<Self>,
4013 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4014 let askpass_delegates = self.askpass_delegates.clone();
4015 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4016 let id = self.id;
4017
4018 let args = options
4019 .map(|option| match option {
4020 PushOptions::SetUpstream => " --set-upstream",
4021 PushOptions::Force => " --force-with-lease",
4022 })
4023 .unwrap_or("");
4024
4025 let updates_tx = self
4026 .git_store()
4027 .and_then(|git_store| match &git_store.read(cx).state {
4028 GitStoreState::Local { downstream, .. } => downstream
4029 .as_ref()
4030 .map(|downstream| downstream.updates_tx.clone()),
4031 _ => None,
4032 });
4033
4034 let this = cx.weak_entity();
4035 self.send_job(
Some(format!("git push{} {} {}", args, remote, branch).into()),
4037 move |git_repo, mut cx| async move {
4038 match git_repo {
4039 RepositoryState::Local {
4040 backend,
4041 environment,
4042 ..
4043 } => {
4044 let result = backend
4045 .push(
4046 branch.to_string(),
4047 remote.to_string(),
4048 options,
4049 askpass,
4050 environment.clone(),
4051 cx.clone(),
4052 )
4053 .await;
4054 if result.is_ok() {
4055 let branches = backend.branches().await?;
4056 let branch = branches.into_iter().find(|branch| branch.is_head);
4057 log::info!("head branch after scan is {branch:?}");
4058 let snapshot = this.update(&mut cx, |this, cx| {
4059 this.snapshot.branch = branch;
4060 let snapshot = this.snapshot.clone();
4061 cx.emit(RepositoryEvent::Updated {
4062 full_scan: false,
4063 new_instance: false,
4064 });
4065 snapshot
4066 })?;
4067 if let Some(updates_tx) = updates_tx {
4068 updates_tx
4069 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4070 .ok();
4071 }
4072 }
4073 result
4074 }
4075 RepositoryState::Remote { project_id, client } => {
4076 askpass_delegates.lock().insert(askpass_id, askpass);
4077 let _defer = util::defer(|| {
4078 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4079 debug_assert!(askpass_delegate.is_some());
4080 });
4081 let response = client
4082 .request(proto::Push {
4083 project_id: project_id.0,
4084 repository_id: id.to_proto(),
4085 askpass_id,
4086 branch_name: branch.to_string(),
4087 remote_name: remote.to_string(),
4088 options: options.map(|options| match options {
4089 PushOptions::Force => proto::push::PushOptions::Force,
4090 PushOptions::SetUpstream => {
4091 proto::push::PushOptions::SetUpstream
4092 }
4093 }
4094 as i32),
4095 })
4096 .await
4097 .context("sending push request")?;
4098
4099 Ok(RemoteCommandOutput {
4100 stdout: response.stdout,
4101 stderr: response.stderr,
4102 })
4103 }
4104 }
4105 },
4106 )
4107 }
4108
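/// Pulls `branch` from `remote`, routing any authentication prompts through
/// `askpass`.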
4109 pub fn pull(
4110 &mut self,
4111 branch: SharedString,
4112 remote: SharedString,
4113 askpass: AskPassDelegate,
4114 _cx: &mut App,
4115 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4116 let askpass_delegates = self.askpass_delegates.clone();
4117 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4118 let id = self.id;
4119
4120 self.send_job(
4121 Some(format!("git pull {} {}", remote, branch).into()),
4122 move |git_repo, cx| async move {
4123 match git_repo {
4124 RepositoryState::Local {
4125 backend,
4126 environment,
4127 ..
4128 } => {
4129 backend
4130 .pull(
4131 branch.to_string(),
4132 remote.to_string(),
4133 askpass,
4134 environment.clone(),
4135 cx,
4136 )
4137 .await
4138 }
4139 RepositoryState::Remote { project_id, client } => {
4140 askpass_delegates.lock().insert(askpass_id, askpass);
4141 let _defer = util::defer(|| {
4142 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4143 debug_assert!(askpass_delegate.is_some());
4144 });
4145 let response = client
4146 .request(proto::Pull {
4147 project_id: project_id.0,
4148 repository_id: id.to_proto(),
4149 askpass_id,
4150 branch_name: branch.to_string(),
4151 remote_name: remote.to_string(),
4152 })
4153 .await
4154 .context("sending pull request")?;
4155
4156 Ok(RemoteCommandOutput {
4157 stdout: response.stdout,
4158 stderr: response.stderr,
4159 })
4160 }
4161 }
4162 },
4163 )
4164 }
4165
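/// Queues a keyed job that writes `content` as the index text for `path` and,
/// once the write completes, records the hunk-staging operation count it
/// corresponds to on the buffer's diff state. The `GitJobKey::WriteIndex(path)`
/// key lets the worker skip writes that have been superseded by a newer write
/// to the same path.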
4166 fn spawn_set_index_text_job(
4167 &mut self,
4168 path: RepoPath,
4169 content: Option<String>,
4170 hunk_staging_operation_count: Option<usize>,
4171 cx: &mut Context<Self>,
4172 ) -> oneshot::Receiver<anyhow::Result<()>> {
4173 let id = self.id;
4174 let this = cx.weak_entity();
4175 let git_store = self.git_store.clone();
4176 self.send_keyed_job(
4177 Some(GitJobKey::WriteIndex(path.clone())),
4178 None,
4179 move |git_repo, mut cx| async move {
4180 log::debug!(
4181 "start updating index text for buffer {}",
4182 path.as_unix_str()
4183 );
4184 match git_repo {
4185 RepositoryState::Local {
4186 backend,
4187 environment,
4188 ..
4189 } => {
4190 backend
4191 .set_index_text(path.clone(), content, environment.clone())
4192 .await?;
4193 }
4194 RepositoryState::Remote { project_id, client } => {
4195 client
4196 .request(proto::SetIndexText {
4197 project_id: project_id.0,
4198 repository_id: id.to_proto(),
4199 path: path.to_proto(),
4200 text: content,
4201 })
4202 .await?;
4203 }
4204 }
4205 log::debug!(
4206 "finish updating index text for buffer {}",
4207 path.as_unix_str()
4208 );
4209
4210 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4211 let project_path = this
4212 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4213 .ok()
4214 .flatten();
4215 git_store.update(&mut cx, |git_store, cx| {
4216 let buffer_id = git_store
4217 .buffer_store
4218 .read(cx)
4219 .get_by_path(&project_path?)?
4220 .read(cx)
4221 .remote_id();
4222 let diff_state = git_store.diffs.get(&buffer_id)?;
4223 diff_state.update(cx, |diff_state, _| {
4224 diff_state.hunk_staging_operation_count_as_of_write =
4225 hunk_staging_operation_count;
4226 });
4227 Some(())
4228 })?;
4229 }
4230 Ok(())
4231 },
4232 )
4233 }
4234
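/// Lists the repository's remotes; `branch_name`, when provided, is forwarded
/// to the backend (or the host) to scope the result.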
4235 pub fn get_remotes(
4236 &mut self,
4237 branch_name: Option<String>,
4238 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4239 let id = self.id;
4240 self.send_job(None, move |repo, _cx| async move {
4241 match repo {
4242 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4243 RepositoryState::Remote { project_id, client } => {
4244 let response = client
4245 .request(proto::GetRemotes {
4246 project_id: project_id.0,
4247 repository_id: id.to_proto(),
4248 branch_name,
4249 })
4250 .await?;
4251
4252 let remotes = response
4253 .remotes
4254 .into_iter()
.map(|remote| git::repository::Remote {
name: remote.name.into(),
})
4258 .collect();
4259
4260 Ok(remotes)
4261 }
4262 }
4263 })
4264 }
4265
4266 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4267 let id = self.id;
4268 self.send_job(None, move |repo, _| async move {
4269 match repo {
4270 RepositoryState::Local { backend, .. } => backend.branches().await,
4271 RepositoryState::Remote { project_id, client } => {
4272 let response = client
4273 .request(proto::GitGetBranches {
4274 project_id: project_id.0,
4275 repository_id: id.to_proto(),
4276 })
4277 .await?;
4278
4279 let branches = response
4280 .branches
4281 .into_iter()
4282 .map(|branch| proto_to_branch(&branch))
4283 .collect();
4284
4285 Ok(branches)
4286 }
4287 }
4288 })
4289 }
4290
4291 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4292 let id = self.id;
4293 self.send_job(None, move |repo, _| async move {
4294 match repo {
4295 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4296 RepositoryState::Remote { project_id, client } => {
4297 let response = client
4298 .request(proto::GetDefaultBranch {
4299 project_id: project_id.0,
4300 repository_id: id.to_proto(),
4301 })
4302 .await?;
4303
4304 anyhow::Ok(response.branch.map(SharedString::from))
4305 }
4306 }
4307 })
4308 }
4309
4310 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4311 let id = self.id;
4312 self.send_job(None, move |repo, _cx| async move {
4313 match repo {
4314 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4315 RepositoryState::Remote { project_id, client } => {
4316 let response = client
4317 .request(proto::GitDiff {
4318 project_id: project_id.0,
4319 repository_id: id.to_proto(),
4320 diff_type: match diff_type {
4321 DiffType::HeadToIndex => {
4322 proto::git_diff::DiffType::HeadToIndex.into()
4323 }
4324 DiffType::HeadToWorktree => {
4325 proto::git_diff::DiffType::HeadToWorktree.into()
4326 }
4327 },
4328 })
4329 .await?;
4330
4331 Ok(response.diff)
4332 }
4333 }
4334 })
4335 }
4336
4337 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4338 let id = self.id;
4339 self.send_job(
4340 Some(format!("git switch -c {branch_name}").into()),
4341 move |repo, _cx| async move {
4342 match repo {
4343 RepositoryState::Local { backend, .. } => {
4344 backend.create_branch(branch_name).await
4345 }
4346 RepositoryState::Remote { project_id, client } => {
4347 client
4348 .request(proto::GitCreateBranch {
4349 project_id: project_id.0,
4350 repository_id: id.to_proto(),
4351 branch_name,
4352 })
4353 .await?;
4354
4355 Ok(())
4356 }
4357 }
4358 },
4359 )
4360 }
4361
4362 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4363 let id = self.id;
4364 self.send_job(
4365 Some(format!("git switch {branch_name}").into()),
4366 move |repo, _cx| async move {
4367 match repo {
4368 RepositoryState::Local { backend, .. } => {
4369 backend.change_branch(branch_name).await
4370 }
4371 RepositoryState::Remote { project_id, client } => {
4372 client
4373 .request(proto::GitChangeBranch {
4374 project_id: project_id.0,
4375 repository_id: id.to_proto(),
4376 branch_name,
4377 })
4378 .await?;
4379
4380 Ok(())
4381 }
4382 }
4383 },
4384 )
4385 }
4386
4387 pub fn rename_branch(
4388 &mut self,
4389 branch: String,
4390 new_name: String,
4391 ) -> oneshot::Receiver<Result<()>> {
4392 let id = self.id;
4393 self.send_job(
4394 Some(format!("git branch -m {branch} {new_name}").into()),
4395 move |repo, _cx| async move {
4396 match repo {
4397 RepositoryState::Local { backend, .. } => {
4398 backend.rename_branch(branch, new_name).await
4399 }
4400 RepositoryState::Remote { project_id, client } => {
4401 client
4402 .request(proto::GitRenameBranch {
4403 project_id: project_id.0,
4404 repository_id: id.to_proto(),
4405 branch,
4406 new_name,
4407 })
4408 .await?;
4409
4410 Ok(())
4411 }
4412 }
4413 },
4414 )
4415 }
4416
4417 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4418 let id = self.id;
4419 self.send_job(None, move |repo, _cx| async move {
4420 match repo {
4421 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4422 RepositoryState::Remote { project_id, client } => {
4423 let response = client
4424 .request(proto::CheckForPushedCommits {
4425 project_id: project_id.0,
4426 repository_id: id.to_proto(),
4427 })
4428 .await?;
4429
4430 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4431
4432 Ok(branches)
4433 }
4434 }
4435 })
4436 }
4437
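/// Captures a checkpoint of the repository's current state. Not yet
/// implemented for remote repositories.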
4438 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4439 self.send_job(None, |repo, _cx| async move {
4440 match repo {
4441 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4442 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4443 }
4444 })
4445 }
4446
4447 pub fn restore_checkpoint(
4448 &mut self,
4449 checkpoint: GitRepositoryCheckpoint,
4450 ) -> oneshot::Receiver<Result<()>> {
4451 self.send_job(None, move |repo, _cx| async move {
4452 match repo {
4453 RepositoryState::Local { backend, .. } => {
4454 backend.restore_checkpoint(checkpoint).await
4455 }
4456 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4457 }
4458 })
4459 }
4460
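/// Applies a `proto::UpdateRepository` message received from the host to this
/// replica's snapshot: branch, head commit, merge state, stash entries, and
/// per-path statuses are updated before a `RepositoryEvent::Updated` is emitted.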
4461 pub(crate) fn apply_remote_update(
4462 &mut self,
4463 update: proto::UpdateRepository,
4464 is_new: bool,
4465 cx: &mut Context<Self>,
4466 ) -> Result<()> {
4467 let conflicted_paths = TreeSet::from_ordered_entries(
4468 update
4469 .current_merge_conflicts
4470 .into_iter()
4471 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4472 );
4473 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4474 self.snapshot.head_commit = update
4475 .head_commit_details
4476 .as_ref()
4477 .map(proto_to_commit_details);
4478
4479 self.snapshot.merge.conflicted_paths = conflicted_paths;
4480 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4481 self.snapshot.stash_entries = GitStash {
4482 entries: update
4483 .stash_entries
4484 .iter()
4485 .filter_map(|entry| proto_to_stash(entry).ok())
4486 .collect(),
4487 };
4488
4489 let edits = update
4490 .removed_statuses
4491 .into_iter()
4492 .filter_map(|path| {
4493 Some(sum_tree::Edit::Remove(PathKey(
4494 RelPath::from_proto(&path).log_err()?,
4495 )))
4496 })
4497 .chain(
4498 update
4499 .updated_statuses
4500 .into_iter()
4501 .filter_map(|updated_status| {
4502 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4503 }),
4504 )
4505 .collect::<Vec<_>>();
4506 self.snapshot.statuses_by_path.edit(edits, ());
4507 if update.is_last_update {
4508 self.snapshot.scan_id = update.scan_id;
4509 }
4510 cx.emit(RepositoryEvent::Updated {
4511 full_scan: true,
4512 new_instance: is_new,
4513 });
4514 Ok(())
4515 }
4516
4517 pub fn compare_checkpoints(
4518 &mut self,
4519 left: GitRepositoryCheckpoint,
4520 right: GitRepositoryCheckpoint,
4521 ) -> oneshot::Receiver<Result<bool>> {
4522 self.send_job(None, move |repo, _cx| async move {
4523 match repo {
4524 RepositoryState::Local { backend, .. } => {
4525 backend.compare_checkpoints(left, right).await
4526 }
4527 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4528 }
4529 })
4530 }
4531
4532 pub fn diff_checkpoints(
4533 &mut self,
4534 base_checkpoint: GitRepositoryCheckpoint,
4535 target_checkpoint: GitRepositoryCheckpoint,
4536 ) -> oneshot::Receiver<Result<String>> {
4537 self.send_job(None, move |repo, _cx| async move {
4538 match repo {
4539 RepositoryState::Local { backend, .. } => {
4540 backend
4541 .diff_checkpoints(base_checkpoint, target_checkpoint)
4542 .await
4543 }
4544 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4545 }
4546 })
4547 }
4548
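/// Schedules a full git status rescan as a keyed job. When the scan finishes,
/// the cached snapshot is replaced, the resulting events are emitted, and the
/// snapshot is forwarded downstream if a downstream client is connected. The
/// `GitJobKey::ReloadGitState` key collapses redundant queued scans.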
4549 fn schedule_scan(
4550 &mut self,
4551 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4552 cx: &mut Context<Self>,
4553 ) {
4554 let this = cx.weak_entity();
4555 let _ = self.send_keyed_job(
4556 Some(GitJobKey::ReloadGitState),
4557 None,
4558 |state, mut cx| async move {
4559 log::debug!("run scheduled git status scan");
4560
4561 let Some(this) = this.upgrade() else {
4562 return Ok(());
4563 };
4564 let RepositoryState::Local { backend, .. } = state else {
4565 bail!("not a local repository")
4566 };
4567 let (snapshot, events) = this
4568 .update(&mut cx, |this, _| {
4569 this.paths_needing_status_update.clear();
4570 compute_snapshot(
4571 this.id,
4572 this.work_directory_abs_path.clone(),
4573 this.snapshot.clone(),
4574 backend.clone(),
4575 )
4576 })?
4577 .await?;
4578 this.update(&mut cx, |this, cx| {
4579 this.snapshot = snapshot.clone();
4580 for event in events {
4581 cx.emit(event);
4582 }
4583 })?;
4584 if let Some(updates_tx) = updates_tx {
4585 updates_tx
4586 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4587 .ok();
4588 }
4589 Ok(())
4590 },
4591 );
4592 }
4593
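/// Spawns the background worker that executes queued `GitJob`s against a local
/// repository. It resolves the project's directory environment, opens the
/// repository through the filesystem backend (preferring a `git` binary found
/// on that environment's `PATH`, falling back to the system `git`), and then
/// drains jobs sequentially, skipping any job whose key is superseded by a
/// later queued job.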
4594 fn spawn_local_git_worker(
4595 work_directory_abs_path: Arc<Path>,
4596 dot_git_abs_path: Arc<Path>,
4597 _repository_dir_abs_path: Arc<Path>,
4598 _common_dir_abs_path: Arc<Path>,
4599 project_environment: WeakEntity<ProjectEnvironment>,
4600 fs: Arc<dyn Fs>,
4601 cx: &mut Context<Self>,
4602 ) -> mpsc::UnboundedSender<GitJob> {
4603 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4604
4605 cx.spawn(async move |_, cx| {
4606 let environment = project_environment
4607 .upgrade()
4608 .context("missing project environment")?
4609 .update(cx, |project_environment, cx| {
4610 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4611 })?
4612 .await
4613 .unwrap_or_else(|| {
4614 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4615 HashMap::default()
4616 });
4617 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4618 let backend = cx
4619 .background_spawn(async move {
4620 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4621 .or_else(|| which::which("git").ok());
4622 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4623 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4624 })
4625 .await?;
4626
4627 if let Some(git_hosting_provider_registry) =
4628 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4629 {
4630 git_hosting_providers::register_additional_providers(
4631 git_hosting_provider_registry,
4632 backend.clone(),
4633 );
4634 }
4635
4636 let state = RepositoryState::Local {
4637 backend,
4638 environment: Arc::new(environment),
4639 };
4640 let mut jobs = VecDeque::new();
4641 loop {
4642 while let Ok(Some(next_job)) = job_rx.try_next() {
4643 jobs.push_back(next_job);
4644 }
4645
4646 if let Some(job) = jobs.pop_front() {
4647 if let Some(current_key) = &job.key
4648 && jobs
4649 .iter()
4650 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4651 {
4652 continue;
4653 }
4654 (job.job)(state.clone(), cx).await;
4655 } else if let Some(job) = job_rx.next().await {
4656 jobs.push_back(job);
4657 } else {
4658 break;
4659 }
4660 }
4661 anyhow::Ok(())
4662 })
4663 .detach_and_log_err(cx);
4664
4665 job_tx
4666 }
4667
4668 fn spawn_remote_git_worker(
4669 project_id: ProjectId,
4670 client: AnyProtoClient,
4671 cx: &mut Context<Self>,
4672 ) -> mpsc::UnboundedSender<GitJob> {
4673 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4674
4675 cx.spawn(async move |_, cx| {
4676 let state = RepositoryState::Remote { project_id, client };
4677 let mut jobs = VecDeque::new();
4678 loop {
4679 while let Ok(Some(next_job)) = job_rx.try_next() {
4680 jobs.push_back(next_job);
4681 }
4682
4683 if let Some(job) = jobs.pop_front() {
4684 if let Some(current_key) = &job.key
4685 && jobs
4686 .iter()
4687 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4688 {
4689 continue;
4690 }
4691 (job.job)(state.clone(), cx).await;
4692 } else if let Some(job) = job_rx.next().await {
4693 jobs.push_back(job);
4694 } else {
4695 break;
4696 }
4697 }
4698 anyhow::Ok(())
4699 })
4700 .detach_and_log_err(cx);
4701
4702 job_tx
4703 }
4704
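/// Loads the index (staged) text for `repo_path`, either from the local
/// backend or via a `proto::OpenUnstagedDiff` request.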
4705 fn load_staged_text(
4706 &mut self,
4707 buffer_id: BufferId,
4708 repo_path: RepoPath,
4709 cx: &App,
4710 ) -> Task<Result<Option<String>>> {
4711 let rx = self.send_job(None, move |state, _| async move {
4712 match state {
4713 RepositoryState::Local { backend, .. } => {
4714 anyhow::Ok(backend.load_index_text(repo_path).await)
4715 }
4716 RepositoryState::Remote { project_id, client } => {
4717 let response = client
4718 .request(proto::OpenUnstagedDiff {
4719 project_id: project_id.to_proto(),
4720 buffer_id: buffer_id.to_proto(),
4721 })
4722 .await?;
4723 Ok(response.staged_text)
4724 }
4725 }
4726 });
4727 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4728 }
4729
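/// Loads the committed (HEAD) and index texts for `repo_path`, collapsing them
/// into `DiffBasesChange::SetBoth` when they are identical so callers need not
/// store two copies of the same base text.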
4730 fn load_committed_text(
4731 &mut self,
4732 buffer_id: BufferId,
4733 repo_path: RepoPath,
4734 cx: &App,
4735 ) -> Task<Result<DiffBasesChange>> {
4736 let rx = self.send_job(None, move |state, _| async move {
4737 match state {
4738 RepositoryState::Local { backend, .. } => {
4739 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4740 let staged_text = backend.load_index_text(repo_path).await;
4741 let diff_bases_change = if committed_text == staged_text {
4742 DiffBasesChange::SetBoth(committed_text)
4743 } else {
4744 DiffBasesChange::SetEach {
4745 index: staged_text,
4746 head: committed_text,
4747 }
4748 };
4749 anyhow::Ok(diff_bases_change)
4750 }
4751 RepositoryState::Remote { project_id, client } => {
4752 use proto::open_uncommitted_diff_response::Mode;
4753
4754 let response = client
4755 .request(proto::OpenUncommittedDiff {
4756 project_id: project_id.to_proto(),
4757 buffer_id: buffer_id.to_proto(),
4758 })
4759 .await?;
4760 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4761 let bases = match mode {
4762 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4763 Mode::IndexAndHead => DiffBasesChange::SetEach {
4764 head: response.committed_text,
4765 index: response.staged_text,
4766 },
4767 };
4768 Ok(bases)
4769 }
4770 }
4771 });
4772
4773 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4774 }
4775
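/// Records paths whose git status may have changed and queues a keyed
/// `RefreshStatuses` job that re-queries just those paths, editing the cached
/// status tree and emitting `RepositoryEvent::Updated` only when the statuses
/// or stash entries actually changed.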
4776 fn paths_changed(
4777 &mut self,
4778 paths: Vec<RepoPath>,
4779 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4780 cx: &mut Context<Self>,
4781 ) {
4782 self.paths_needing_status_update.extend(paths);
4783
4784 let this = cx.weak_entity();
4785 let _ = self.send_keyed_job(
4786 Some(GitJobKey::RefreshStatuses),
4787 None,
4788 |state, mut cx| async move {
4789 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4790 (
4791 this.snapshot.clone(),
4792 mem::take(&mut this.paths_needing_status_update),
4793 )
4794 })?;
4795 let RepositoryState::Local { backend, .. } = state else {
4796 bail!("not a local repository")
4797 };
4798
4799 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4800 if paths.is_empty() {
4801 return Ok(());
4802 }
4803 let statuses = backend.status(&paths).await?;
4804 let stash_entries = backend.stash_entries().await?;
4805
4806 let changed_path_statuses = cx
4807 .background_spawn(async move {
4808 let mut changed_path_statuses = Vec::new();
4809 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4810 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4811
4812 for (repo_path, status) in &*statuses.entries {
4813 changed_paths.remove(repo_path);
4814 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
4815 && cursor.item().is_some_and(|entry| entry.status == *status)
4816 {
4817 continue;
4818 }
4819
4820 changed_path_statuses.push(Edit::Insert(StatusEntry {
4821 repo_path: repo_path.clone(),
4822 status: *status,
4823 }));
4824 }
4825 let mut cursor = prev_statuses.cursor::<PathProgress>(());
4826 for path in changed_paths.into_iter() {
4827 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4828 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4829 }
4830 }
4831 changed_path_statuses
4832 })
4833 .await;
4834
4835 this.update(&mut cx, |this, cx| {
4836 let needs_update = !changed_path_statuses.is_empty()
4837 || this.snapshot.stash_entries != stash_entries;
4838 this.snapshot.stash_entries = stash_entries;
4839 if !changed_path_statuses.is_empty() {
4840 this.snapshot
4841 .statuses_by_path
4842 .edit(changed_path_statuses, ());
4843 this.snapshot.scan_id += 1;
4844 }
4845
4846 if needs_update {
4847 cx.emit(RepositoryEvent::Updated {
4848 full_scan: false,
4849 new_instance: false,
4850 });
4851 }
4852
4853 if let Some(updates_tx) = updates_tx {
4854 updates_tx
4855 .unbounded_send(DownstreamUpdate::UpdateRepository(
4856 this.snapshot.clone(),
4857 ))
4858 .ok();
4859 }
4860 cx.emit(RepositoryEvent::PathsChanged);
4861 })
4862 },
4863 );
4864 }
4865
/// Returns the currently running git command and when it started, if any.
4867 pub fn current_job(&self) -> Option<JobInfo> {
4868 self.active_jobs.values().next().cloned()
4869 }
4870
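/// Enqueues a no-op job; the returned receiver resolves once every job queued
/// before it has completed.
///
/// Illustrative sketch (not a doctest; assumes a `repo: Entity<Repository>` and
/// an async context):
/// ```ignore
/// let barrier = repo.update(cx, |repo, _| repo.barrier());
/// barrier.await.ok(); // all git jobs queued before the barrier have now run
/// ```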
4871 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4872 self.send_job(None, |_, _| async {})
4873 }
4874}
4875
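/// Builds a permalink for a file inside a crate checked out in the Cargo
/// registry source cache, using the `.cargo_vcs_info.json` and `Cargo.toml`
/// published alongside the sources to recover the upstream repository and
/// commit SHA.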
4876fn get_permalink_in_rust_registry_src(
4877 provider_registry: Arc<GitHostingProviderRegistry>,
4878 path: PathBuf,
4879 selection: Range<u32>,
4880) -> Result<url::Url> {
4881 #[derive(Deserialize)]
4882 struct CargoVcsGit {
4883 sha1: String,
4884 }
4885
4886 #[derive(Deserialize)]
4887 struct CargoVcsInfo {
4888 git: CargoVcsGit,
4889 path_in_vcs: String,
4890 }
4891
4892 #[derive(Deserialize)]
4893 struct CargoPackage {
4894 repository: String,
4895 }
4896
4897 #[derive(Deserialize)]
4898 struct CargoToml {
4899 package: CargoPackage,
4900 }
4901
4902 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4903 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4904 Some((dir, json))
4905 }) else {
4906 bail!("No .cargo_vcs_info.json found in parent directories")
4907 };
4908 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4909 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4910 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4911 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4912 .context("parsing package.repository field of manifest")?;
4913 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4914 let permalink = provider.build_permalink(
4915 remote,
4916 BuildPermalinkParams {
4917 sha: &cargo_vcs_info.git.sha1,
4918 path: &path.to_string_lossy(),
4919 selection: Some(selection),
4920 },
4921 );
4922 Ok(permalink)
4923}
4924
4925fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4926 let Some(blame) = blame else {
4927 return proto::BlameBufferResponse {
4928 blame_response: None,
4929 };
4930 };
4931
4932 let entries = blame
4933 .entries
4934 .into_iter()
4935 .map(|entry| proto::BlameEntry {
4936 sha: entry.sha.as_bytes().into(),
4937 start_line: entry.range.start,
4938 end_line: entry.range.end,
4939 original_line_number: entry.original_line_number,
4940 author: entry.author,
4941 author_mail: entry.author_mail,
4942 author_time: entry.author_time,
4943 author_tz: entry.author_tz,
4944 committer: entry.committer_name,
4945 committer_mail: entry.committer_email,
4946 committer_time: entry.committer_time,
4947 committer_tz: entry.committer_tz,
4948 summary: entry.summary,
4949 previous: entry.previous,
4950 filename: entry.filename,
4951 })
4952 .collect::<Vec<_>>();
4953
4954 let messages = blame
4955 .messages
4956 .into_iter()
4957 .map(|(oid, message)| proto::CommitMessage {
4958 oid: oid.as_bytes().into(),
4959 message,
4960 })
4961 .collect::<Vec<_>>();
4962
4963 proto::BlameBufferResponse {
4964 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4965 entries,
4966 messages,
4967 remote_url: blame.remote_url,
4968 }),
4969 }
4970}
4971
4972fn deserialize_blame_buffer_response(
4973 response: proto::BlameBufferResponse,
4974) -> Option<git::blame::Blame> {
4975 let response = response.blame_response?;
4976 let entries = response
4977 .entries
4978 .into_iter()
4979 .filter_map(|entry| {
4980 Some(git::blame::BlameEntry {
4981 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4982 range: entry.start_line..entry.end_line,
4983 original_line_number: entry.original_line_number,
4984 committer_name: entry.committer,
4985 committer_time: entry.committer_time,
4986 committer_tz: entry.committer_tz,
4987 committer_email: entry.committer_mail,
4988 author: entry.author,
4989 author_mail: entry.author_mail,
4990 author_time: entry.author_time,
4991 author_tz: entry.author_tz,
4992 summary: entry.summary,
4993 previous: entry.previous,
4994 filename: entry.filename,
4995 })
4996 })
4997 .collect::<Vec<_>>();
4998
4999 let messages = response
5000 .messages
5001 .into_iter()
5002 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5003 .collect::<HashMap<_, _>>();
5004
5005 Some(Blame {
5006 entries,
5007 messages,
5008 remote_url: response.remote_url,
5009 })
5010}
5011
5012fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5013 proto::Branch {
5014 is_head: branch.is_head,
5015 ref_name: branch.ref_name.to_string(),
5016 unix_timestamp: branch
5017 .most_recent_commit
5018 .as_ref()
5019 .map(|commit| commit.commit_timestamp as u64),
5020 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5021 ref_name: upstream.ref_name.to_string(),
5022 tracking: upstream
5023 .tracking
5024 .status()
5025 .map(|upstream| proto::UpstreamTracking {
5026 ahead: upstream.ahead as u64,
5027 behind: upstream.behind as u64,
5028 }),
5029 }),
5030 most_recent_commit: branch
5031 .most_recent_commit
5032 .as_ref()
5033 .map(|commit| proto::CommitSummary {
5034 sha: commit.sha.to_string(),
5035 subject: commit.subject.to_string(),
5036 commit_timestamp: commit.commit_timestamp,
5037 author_name: commit.author_name.to_string(),
5038 }),
5039 }
5040}
5041
5042fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5043 git::repository::Branch {
5044 is_head: proto.is_head,
5045 ref_name: proto.ref_name.clone().into(),
5046 upstream: proto
5047 .upstream
5048 .as_ref()
5049 .map(|upstream| git::repository::Upstream {
5050 ref_name: upstream.ref_name.to_string().into(),
5051 tracking: upstream
5052 .tracking
5053 .as_ref()
5054 .map(|tracking| {
5055 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5056 ahead: tracking.ahead as u32,
5057 behind: tracking.behind as u32,
5058 })
5059 })
5060 .unwrap_or(git::repository::UpstreamTracking::Gone),
5061 }),
5062 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5063 git::repository::CommitSummary {
5064 sha: commit.sha.to_string().into(),
5065 subject: commit.subject.to_string().into(),
5066 commit_timestamp: commit.commit_timestamp,
5067 author_name: commit.author_name.to_string().into(),
5068 has_parent: true,
5069 }
5070 }),
5071 }
5072}
5073
5074fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5075 proto::GitCommitDetails {
5076 sha: commit.sha.to_string(),
5077 message: commit.message.to_string(),
5078 commit_timestamp: commit.commit_timestamp,
5079 author_email: commit.author_email.to_string(),
5080 author_name: commit.author_name.to_string(),
5081 }
5082}
5083
5084fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5085 CommitDetails {
5086 sha: proto.sha.clone().into(),
5087 message: proto.message.clone().into(),
5088 commit_timestamp: proto.commit_timestamp,
5089 author_email: proto.author_email.clone().into(),
5090 author_name: proto.author_name.clone().into(),
5091 }
5092}
5093
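/// Computes a fresh `RepositorySnapshot` from the backend (branch, statuses,
/// stash entries, merge state, head commit, and remote URLs) and returns the
/// `RepositoryEvent`s that should be emitted relative to `prev_snapshot`.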
5094async fn compute_snapshot(
5095 id: RepositoryId,
5096 work_directory_abs_path: Arc<Path>,
5097 prev_snapshot: RepositorySnapshot,
5098 backend: Arc<dyn GitRepository>,
5099) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5100 let mut events = Vec::new();
5101 let branches = backend.branches().await?;
5102 let branch = branches.into_iter().find(|branch| branch.is_head);
5103 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5104 let stash_entries = backend.stash_entries().await?;
5105 let statuses_by_path = SumTree::from_iter(
5106 statuses
5107 .entries
5108 .iter()
5109 .map(|(repo_path, status)| StatusEntry {
5110 repo_path: repo_path.clone(),
5111 status: *status,
5112 }),
5113 (),
5114 );
5115 let (merge_details, merge_heads_changed) =
5116 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5117 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5118
5119 if merge_heads_changed
5120 || branch != prev_snapshot.branch
5121 || statuses_by_path != prev_snapshot.statuses_by_path
5122 {
5123 events.push(RepositoryEvent::Updated {
5124 full_scan: true,
5125 new_instance: false,
5126 });
5127 }
5128
5129 // Cache merge conflict paths so they don't change from staging/unstaging,
5130 // until the merge heads change (at commit time, etc.).
5131 if merge_heads_changed {
5132 events.push(RepositoryEvent::MergeHeadsChanged);
5133 }
5134
// Useful when `branch` is `None`, e.g. in a detached HEAD state
5136 let head_commit = match backend.head_sha().await {
5137 Some(head_sha) => backend.show(head_sha).await.log_err(),
5138 None => None,
5139 };
5140
5141 // Used by edit prediction data collection
5142 let remote_origin_url = backend.remote_url("origin");
5143 let remote_upstream_url = backend.remote_url("upstream");
5144
5145 let snapshot = RepositorySnapshot {
5146 id,
5147 statuses_by_path,
5148 work_directory_abs_path,
5149 path_style: prev_snapshot.path_style,
5150 scan_id: prev_snapshot.scan_id + 1,
5151 branch,
5152 head_commit,
5153 merge: merge_details,
5154 remote_origin_url,
5155 remote_upstream_url,
5156 stash_entries,
5157 };
5158
5159 Ok((snapshot, events))
5160}
5161
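/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no detailed variant is present.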
5162fn status_from_proto(
5163 simple_status: i32,
5164 status: Option<proto::GitFileStatus>,
5165) -> anyhow::Result<FileStatus> {
5166 use proto::git_file_status::Variant;
5167
5168 let Some(variant) = status.and_then(|status| status.variant) else {
5169 let code = proto::GitStatus::from_i32(simple_status)
5170 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5171 let result = match code {
5172 proto::GitStatus::Added => TrackedStatus {
5173 worktree_status: StatusCode::Added,
5174 index_status: StatusCode::Unmodified,
5175 }
5176 .into(),
5177 proto::GitStatus::Modified => TrackedStatus {
5178 worktree_status: StatusCode::Modified,
5179 index_status: StatusCode::Unmodified,
5180 }
5181 .into(),
5182 proto::GitStatus::Conflict => UnmergedStatus {
5183 first_head: UnmergedStatusCode::Updated,
5184 second_head: UnmergedStatusCode::Updated,
5185 }
5186 .into(),
5187 proto::GitStatus::Deleted => TrackedStatus {
5188 worktree_status: StatusCode::Deleted,
5189 index_status: StatusCode::Unmodified,
5190 }
5191 .into(),
5192 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5193 };
5194 return Ok(result);
5195 };
5196
5197 let result = match variant {
5198 Variant::Untracked(_) => FileStatus::Untracked,
5199 Variant::Ignored(_) => FileStatus::Ignored,
5200 Variant::Unmerged(unmerged) => {
5201 let [first_head, second_head] =
5202 [unmerged.first_head, unmerged.second_head].map(|head| {
5203 let code = proto::GitStatus::from_i32(head)
5204 .with_context(|| format!("Invalid git status code: {head}"))?;
5205 let result = match code {
5206 proto::GitStatus::Added => UnmergedStatusCode::Added,
5207 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5208 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5209 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5210 };
5211 Ok(result)
5212 });
5213 let [first_head, second_head] = [first_head?, second_head?];
5214 UnmergedStatus {
5215 first_head,
5216 second_head,
5217 }
5218 .into()
5219 }
5220 Variant::Tracked(tracked) => {
5221 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5222 .map(|status| {
5223 let code = proto::GitStatus::from_i32(status)
5224 .with_context(|| format!("Invalid git status code: {status}"))?;
5225 let result = match code {
5226 proto::GitStatus::Modified => StatusCode::Modified,
5227 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5228 proto::GitStatus::Added => StatusCode::Added,
5229 proto::GitStatus::Deleted => StatusCode::Deleted,
5230 proto::GitStatus::Renamed => StatusCode::Renamed,
5231 proto::GitStatus::Copied => StatusCode::Copied,
5232 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5233 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5234 };
5235 Ok(result)
5236 });
5237 let [index_status, worktree_status] = [index_status?, worktree_status?];
5238 TrackedStatus {
5239 index_status,
5240 worktree_status,
5241 }
5242 .into()
5243 }
5244 };
5245 Ok(result)
5246}
5247
5248fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5249 use proto::git_file_status::{Tracked, Unmerged, Variant};
5250
5251 let variant = match status {
5252 FileStatus::Untracked => Variant::Untracked(Default::default()),
5253 FileStatus::Ignored => Variant::Ignored(Default::default()),
5254 FileStatus::Unmerged(UnmergedStatus {
5255 first_head,
5256 second_head,
5257 }) => Variant::Unmerged(Unmerged {
5258 first_head: unmerged_status_to_proto(first_head),
5259 second_head: unmerged_status_to_proto(second_head),
5260 }),
5261 FileStatus::Tracked(TrackedStatus {
5262 index_status,
5263 worktree_status,
5264 }) => Variant::Tracked(Tracked {
5265 index_status: tracked_status_to_proto(index_status),
5266 worktree_status: tracked_status_to_proto(worktree_status),
5267 }),
5268 };
5269 proto::GitFileStatus {
5270 variant: Some(variant),
5271 }
5272}
5273
5274fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5275 match code {
5276 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5277 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5278 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5279 }
5280}
5281
5282fn tracked_status_to_proto(code: StatusCode) -> i32 {
5283 match code {
5284 StatusCode::Added => proto::GitStatus::Added as _,
5285 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5286 StatusCode::Modified => proto::GitStatus::Modified as _,
5287 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5288 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5289 StatusCode::Copied => proto::GitStatus::Copied as _,
5290 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5291 }
5292}