1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, Oid, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 stash::{GitStash, StashEntry},
32 status::{
33 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
34 },
35};
36use gpui::{
37 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
38 WeakEntity,
39};
40use language::{
41 Buffer, BufferEvent, Language, LanguageRegistry,
42 proto::{deserialize_version, serialize_version},
43};
44use parking_lot::Mutex;
45use postage::stream::Stream as _;
46use rpc::{
47 AnyProtoClient, TypedEnvelope,
48 proto::{self, FromProto, ToProto, git_reset, split_repository_update},
49};
50use serde::Deserialize;
51use std::{
52 cmp::Ordering,
53 collections::{BTreeSet, VecDeque},
54 future::Future,
55 mem,
56 ops::Range,
57 path::{Path, PathBuf},
58 sync::{
59 Arc,
60 atomic::{self, AtomicU64},
61 },
62 time::Instant,
63};
64use sum_tree::{Edit, SumTree, TreeSet};
65use text::{Bias, BufferId};
66use util::{ResultExt, debug_panic, paths::SanitizedPath, post_inc};
67use worktree::{
68 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
69 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
70};
71
/// Central store for all git state in a project: every repository discovered
/// in the project's worktrees, the per-buffer diffs derived from git
/// index/HEAD contents, and replication of that state to collaborators.
pub struct GitStore {
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // The repository currently focused in the UI, if any.
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff loads, shared so concurrent callers await the same task.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    // Diffs that have been sent to each connected peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
85
/// The diffs for one buffer that have been shared with a particular peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
91
/// Per-buffer git state: weak handles to the buffer's diffs and conflict
/// set, plus the cached head/index texts those diffs are computed from.
struct BufferGitState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Senders notified when the in-flight conflict-marker reparse completes.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    // Watch channel flipped to true while a diff recalculation is in flight.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
120
/// Describes which diff base texts (index and/or HEAD) changed, and their new
/// contents (`None` meaning the file is absent in that base).
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Both changed, to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Both changed, to the same value.
    SetBoth(Option<String>),
}
131
/// Which kind of buffer diff is being loaded or stored.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Working copy vs. the index (staged text).
    Unstaged,
    /// Working copy vs. HEAD (committed text).
    Uncommitted,
}
137
/// Whether this store owns the git state (local) or mirrors a host's state
/// over RPC (remote). Either flavor may replicate downstream when shared.
enum GitStoreState {
    Local {
        // Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}
151
/// A unit of repository state queued for forwarding to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
156
/// Live connection to a downstream collaborator for a local store: updates
/// pushed into `updates_tx` are drained by `_task` and sent over `client`.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    // Drains the queue, diffs snapshots, and sends serialized updates.
    _task: Task<Result<()>>,
}
163
/// A snapshot of every repository's checkpoint, keyed by work-directory
/// path; used to later restore or compare project-wide git state.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
168
/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}
174
175impl StatusEntry {
176 fn to_proto(&self) -> proto::StatusEntry {
177 let simple_status = match self.status {
178 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
179 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
180 FileStatus::Tracked(TrackedStatus {
181 index_status,
182 worktree_status,
183 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
184 worktree_status
185 } else {
186 index_status
187 }),
188 };
189
190 proto::StatusEntry {
191 repo_path: self.repo_path.as_ref().to_proto(),
192 simple_status,
193 status: Some(status_to_proto(self.status)),
194 }
195 }
196}
197
198impl TryFrom<proto::StatusEntry> for StatusEntry {
199 type Error = anyhow::Error;
200
201 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
202 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
203 let status = status_from_proto(value.simple_status, value.status)?;
204 Ok(Self { repo_path, status })
205 }
206}
207
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status sum-tree: the path is the
    /// ordering key and the status contributes to the aggregate summary.
    fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}
218
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed (and deduplicated) by repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}
226
/// Identifier for a repository, unique within a project session.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
229
/// State of an in-progress merge: the set of conflicted paths plus merge
/// message and heads.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    pub conflicted_paths: TreeSet<RepoPath>,
    // NOTE(review): `message`/`heads` presumably mirror .git merge metadata
    // (MERGE_MSG / MERGE_HEAD) — confirm against the code that populates them.
    pub message: Option<SharedString>,
    pub heads: Vec<Option<SharedString>>,
}
236
/// An immutable view of a repository's scanned state, cheap to clone and
/// suitable for diffing/serializing when replicating to collaborators.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path statuses, ordered by path for efficient range queries.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}
250
/// Identifier for a queued git job, unique per repository.
type JobId = u64;

/// Metadata describing a currently running git job.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
258
/// A single git repository tracked by the [`GitStore`], combining the latest
/// scanned snapshot with the machinery for running queued git jobs.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    // Queue of git operations for this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    // Next job id to hand out.
    job_id: JobId,
    // Askpass delegates for in-flight remote operations, keyed by askpass id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}
273
/// Exposes the snapshot's fields directly on [`Repository`]
/// (e.g. `repo.branch`, `repo.statuses_by_path`).
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
281
/// Backend handle passed to git jobs: either a local repository plus its
/// environment variables, or a client for proxying the operation to the host.
#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}
293
/// Events emitted by a [`Repository`] when its scanned state changes.
#[derive(Clone, Debug)]
pub enum RepositoryEvent {
    /// The snapshot changed; `new_instance` marks a newly created repository
    /// entity, `full_scan` a complete (rather than incremental) update.
    Updated { full_scan: bool, new_instance: bool },
    MergeHeadsChanged,
}
299
/// Emitted when a repository's set of running jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
302
/// Store-level events describing repository lifecycle and git activity.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    // NOTE(review): the trailing bool's meaning isn't visible here — check
    // the emit sites before relying on it.
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded(RepositoryId),
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}
313
// Wire up event emission: repositories emit repository/job events, the store
// emits store-level events.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
317
/// A queued git operation: the closure receives the repository backend and an
/// async context; `key` lets redundant queued jobs be coalesced.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    key: Option<GitJobKey>,
}
322
/// Keys identifying classes of git jobs so duplicates can be coalesced while
/// still queued.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
330
331impl GitStore {
332 pub fn local(
333 worktree_store: &Entity<WorktreeStore>,
334 buffer_store: Entity<BufferStore>,
335 environment: Entity<ProjectEnvironment>,
336 fs: Arc<dyn Fs>,
337 cx: &mut Context<Self>,
338 ) -> Self {
339 Self::new(
340 worktree_store.clone(),
341 buffer_store,
342 GitStoreState::Local {
343 next_repository_id: Arc::new(AtomicU64::new(1)),
344 downstream: None,
345 project_environment: environment,
346 fs,
347 },
348 cx,
349 )
350 }
351
352 pub fn remote(
353 worktree_store: &Entity<WorktreeStore>,
354 buffer_store: Entity<BufferStore>,
355 upstream_client: AnyProtoClient,
356 project_id: u64,
357 cx: &mut Context<Self>,
358 ) -> Self {
359 Self::new(
360 worktree_store.clone(),
361 buffer_store,
362 GitStoreState::Remote {
363 upstream_client,
364 upstream_project_id: project_id,
365 downstream: None,
366 },
367 cx,
368 )
369 }
370
371 fn new(
372 worktree_store: Entity<WorktreeStore>,
373 buffer_store: Entity<BufferStore>,
374 state: GitStoreState,
375 cx: &mut Context<Self>,
376 ) -> Self {
377 let _subscriptions = vec![
378 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
379 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
380 ];
381
382 GitStore {
383 state,
384 buffer_store,
385 worktree_store,
386 repositories: HashMap::default(),
387 active_repo_id: None,
388 _subscriptions,
389 loading_diffs: HashMap::default(),
390 shared_diffs: HashMap::default(),
391 diffs: HashMap::default(),
392 }
393 }
394
    /// Registers all git-related RPC message handlers on the given client.
    /// Called once during setup so remote peers can drive this store.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
    }
430
431 pub fn is_local(&self) -> bool {
432 matches!(self.state, GitStoreState::Local { .. })
433 }
434
    /// Begins replicating this store's repositories to a downstream client.
    ///
    /// For a remote store, sends each repository's initial state immediately
    /// and records the client. For a local store, queues the current
    /// snapshots and spawns a background task that drains the update queue,
    /// diffing each new snapshot against the last one sent.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial update for every repository, split into
                // size-limited messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` tracks, per repository, the last state sent
                // downstream so later updates can be sent as diffs.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the queue with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a diff
                                            // against the last sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send the full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop ended (queue closed or send failed):
                        // drop the downstream connection state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
515
516 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
517 match &mut self.state {
518 GitStoreState::Local {
519 downstream: downstream_client,
520 ..
521 } => {
522 downstream_client.take();
523 }
524 GitStoreState::Remote {
525 downstream: downstream_client,
526 ..
527 } => {
528 downstream_client.take();
529 }
530 }
531 self.shared_diffs.clear();
532 }
533
    /// Forgets all diffs shared with the given peer, e.g. on disconnect.
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
537
538 pub fn active_repository(&self) -> Option<Entity<Repository>> {
539 self.active_repo_id
540 .as_ref()
541 .map(|id| self.repositories[id].clone())
542 }
543
544 pub fn open_unstaged_diff(
545 &mut self,
546 buffer: Entity<Buffer>,
547 cx: &mut Context<Self>,
548 ) -> Task<Result<Entity<BufferDiff>>> {
549 let buffer_id = buffer.read(cx).remote_id();
550 if let Some(diff_state) = self.diffs.get(&buffer_id)
551 && let Some(unstaged_diff) = diff_state
552 .read(cx)
553 .unstaged_diff
554 .as_ref()
555 .and_then(|weak| weak.upgrade())
556 {
557 if let Some(task) =
558 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
559 {
560 return cx.background_executor().spawn(async move {
561 task.await;
562 Ok(unstaged_diff)
563 });
564 }
565 return Task::ready(Ok(unstaged_diff));
566 }
567
568 let Some((repo, repo_path)) =
569 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
570 else {
571 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
572 };
573
574 let task = self
575 .loading_diffs
576 .entry((buffer_id, DiffKind::Unstaged))
577 .or_insert_with(|| {
578 let staged_text = repo.update(cx, |repo, cx| {
579 repo.load_staged_text(buffer_id, repo_path, cx)
580 });
581 cx.spawn(async move |this, cx| {
582 Self::open_diff_internal(
583 this,
584 DiffKind::Unstaged,
585 staged_text.await.map(DiffBasesChange::SetIndex),
586 buffer,
587 cx,
588 )
589 .await
590 .map_err(Arc::new)
591 })
592 .shared()
593 })
594 .clone();
595
596 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
597 }
598
599 pub fn open_uncommitted_diff(
600 &mut self,
601 buffer: Entity<Buffer>,
602 cx: &mut Context<Self>,
603 ) -> Task<Result<Entity<BufferDiff>>> {
604 let buffer_id = buffer.read(cx).remote_id();
605
606 if let Some(diff_state) = self.diffs.get(&buffer_id)
607 && let Some(uncommitted_diff) = diff_state
608 .read(cx)
609 .uncommitted_diff
610 .as_ref()
611 .and_then(|weak| weak.upgrade())
612 {
613 if let Some(task) =
614 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
615 {
616 return cx.background_executor().spawn(async move {
617 task.await;
618 Ok(uncommitted_diff)
619 });
620 }
621 return Task::ready(Ok(uncommitted_diff));
622 }
623
624 let Some((repo, repo_path)) =
625 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
626 else {
627 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
628 };
629
630 let task = self
631 .loading_diffs
632 .entry((buffer_id, DiffKind::Uncommitted))
633 .or_insert_with(|| {
634 let changes = repo.update(cx, |repo, cx| {
635 repo.load_committed_text(buffer_id, repo_path, cx)
636 });
637
638 cx.spawn(async move |this, cx| {
639 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
640 .await
641 .map_err(Arc::new)
642 })
643 .shared()
644 })
645 .clone();
646
647 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
648 }
649
    /// Completes a diff load started by `open_unstaged_diff` or
    /// `open_uncommitted_diff`: installs the loaded base texts into the
    /// buffer's git state, constructs the `BufferDiff` entity, and waits for
    /// the initial recalculation before resolving.
    ///
    /// On error, the pending entry in `loading_diffs` is removed so a later
    /// request can retry.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight marker so the load can be retried.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                // Install the new base texts and kick off a recalculation;
                // resolve only once it completes (if one was started).
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
719
720 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
721 let diff_state = self.diffs.get(&buffer_id)?;
722 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
723 }
724
725 pub fn get_uncommitted_diff(
726 &self,
727 buffer_id: BufferId,
728 cx: &App,
729 ) -> Option<Entity<BufferDiff>> {
730 let diff_state = self.diffs.get(&buffer_id)?;
731 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
732 }
733
734 pub fn open_conflict_set(
735 &mut self,
736 buffer: Entity<Buffer>,
737 cx: &mut Context<Self>,
738 ) -> Entity<ConflictSet> {
739 log::debug!("open conflict set");
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(git_state) = self.diffs.get(&buffer_id)
743 && let Some(conflict_set) = git_state
744 .read(cx)
745 .conflict_set
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 let conflict_set = conflict_set;
750 let buffer_snapshot = buffer.read(cx).text_snapshot();
751
752 git_state.update(cx, |state, cx| {
753 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
754 });
755
756 return conflict_set;
757 }
758
759 let is_unmerged = self
760 .repository_and_path_for_buffer_id(buffer_id, cx)
761 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
762 let git_store = cx.weak_entity();
763 let buffer_git_state = self
764 .diffs
765 .entry(buffer_id)
766 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
767 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
768
769 self._subscriptions
770 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
771 cx.emit(GitStoreEvent::ConflictsUpdated);
772 }));
773
774 buffer_git_state.update(cx, |state, cx| {
775 state.conflict_set = Some(conflict_set.downgrade());
776 let buffer_snapshot = buffer.read(cx).text_snapshot();
777 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
778 });
779
780 conflict_set
781 }
782
783 pub fn project_path_git_status(
784 &self,
785 project_path: &ProjectPath,
786 cx: &App,
787 ) -> Option<FileStatus> {
788 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
789 Some(repo.read(cx).status_for_path(&repo_path)?.status)
790 }
791
    /// Captures a checkpoint of every repository in the store, keyed by
    /// work-directory path; the per-repository checkpoints run concurrently
    /// and are joined on a background thread.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `?` in the map closure flattens the nested Result coming
                // back from the repository's job channel.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in the same order, so the
                // zip pairs each repository with its own checkpoint.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
812
813 pub fn restore_checkpoint(
814 &self,
815 checkpoint: GitStoreCheckpoint,
816 cx: &mut App,
817 ) -> Task<Result<()>> {
818 let repositories_by_work_dir_abs_path = self
819 .repositories
820 .values()
821 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
822 .collect::<HashMap<_, _>>();
823
824 let mut tasks = Vec::new();
825 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
826 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
827 let restore = repository.update(cx, |repository, _| {
828 repository.restore_checkpoint(checkpoint)
829 });
830 tasks.push(async move { restore.await? });
831 }
832 }
833 cx.background_spawn(async move {
834 future::try_join_all(tasks).await?;
835 Ok(())
836 })
837 }
838
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Returns `Ok(false)` immediately if the checkpoints don't cover the
    /// same set of work directories; otherwise each matching pair is
    /// compared by its repository, all comparisons running concurrently.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                // Repositories that have since disappeared are skipped; only
                // directories present in both checkpoints are compared.
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    tasks.push(async move { compare.await? });
                }
            } else {
                // A work directory present on the left but missing on the
                // right means the checkpoints can't be equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
877
    /// Blames a buffer.
    ///
    /// Uses the buffer's contents at `version` (or its current contents when
    /// `None`). Local repositories run the blame via the git backend; remote
    /// stores proxy the request to the host over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text to blame before handing off to the job queue.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Run as a repository job so it is serialized with other git work.
        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
922
    /// Builds a web permalink to the given line range of a buffer, using the
    /// buffer's git remote and current HEAD SHA.
    ///
    /// Falls back to crate-registry permalinks for Rust sources under the
    /// Cargo registry when the buffer isn't inside a git repository. Remote
    /// stores proxy the request to the host.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
                return Task::ready(Err(anyhow!("no permalink available")));
            };
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });

            // TODO remote case
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Use the branch's upstream remote when known, otherwise "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        // Run as a repository job so it is serialized with other git work.
        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        // Resolve the hosting provider (GitHub, GitLab, ...)
                        // from the remote URL, then let it format the link.
                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        let path = repo_path.to_str().with_context(|| {
                            format!("converting repo path {repo_path:?} to string")
                        })?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams {
                                sha: &sha,
                                path,
                                selection: Some(selection),
                            },
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1018
1019 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1020 match &self.state {
1021 GitStoreState::Local {
1022 downstream: downstream_client,
1023 ..
1024 } => downstream_client
1025 .as_ref()
1026 .map(|state| (state.client.clone(), state.project_id)),
1027 GitStoreState::Remote {
1028 downstream: downstream_client,
1029 ..
1030 } => downstream_client.clone(),
1031 }
1032 }
1033
1034 fn upstream_client(&self) -> Option<AnyProtoClient> {
1035 match &self.state {
1036 GitStoreState::Local { .. } => None,
1037 GitStoreState::Remote {
1038 upstream_client, ..
1039 } => Some(upstream_client.clone()),
1040 }
1041 }
1042
    /// Reacts to worktree changes for a local store: forwards updated paths
    /// to the repositories containing them, and (re)builds repositories when
    /// a worktree's set of git repositories changes. No-op for remote stores.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) -> () {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the changed paths by owning repository, then
                    // notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            })
                            .ok();
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible (e.g. single-file) worktrees don't contribute
                // repositories to the store.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }
1110
    /// Responds to a repository's state change by refreshing conflict
    /// tracking for open buffers that belong to that repository, then
    /// re-emitting the event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            // Only consider buffers whose file lives in the repository that
            // just changed.
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Re-scan the buffer for conflict markers only when
                        // its conflicted status actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1150
    /// Forwards a repository's `JobsUpdated` notification as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1154
    /// Update our list of repositories and schedule git scans in response to
    /// a notification from a worktree.
    ///
    /// Repositories whose work directory moved are updated in place and
    /// rescanned; repositories whose work directory disappeared are removed;
    /// previously unknown work directories get a fresh `Repository` entity.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // Match an existing repository by either its old or its new work
            // directory, so a moved repository is treated as an update rather
            // than a remove-plus-add.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly at a new path):
                    // record the location and rescan.
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Work directory is gone: drop the repository below.
                    removed_ids.push(*id);
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: allocate an id (fetch_add is atomic,
                // so ids stay unique), build the local entity, and kick off
                // its initial scan.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        repository_dir_abs_path.clone(),
                        common_dir_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded(id));
                // The first repository we learn about becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            // Tell downstream collaborators the repository is gone.
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1236
    /// Keeps per-buffer diff state in sync with buffer-store lifecycle events.
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Watch each new buffer so a language change can be forwarded
                // to its diff state (if it has one).
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // One collaborator closed the buffer: drop only their shared
                // diff entry.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely: drop both local diff state and
                // every peer's shared entry.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }

            _ => {}
        }
    }
1272
    /// Recomputes diffs (and re-parses conflict markers) for the given
    /// buffers, returning a future that resolves once all recalculations have
    /// finished. Buffers without diff state are skipped.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    // Kick off the recalculation first, then collect its
                    // completion signal (present only while one is running).
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1298
    /// When hunks are staged or unstaged through a diff, writes the new index
    /// contents back to git, rolling back the optimistic UI state and
    /// emitting `IndexWriteError` if the write fails.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump and capture the operation count so this write can be
                // correlated with (and not clobbered by) later status scans.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.display());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // Only a completed-but-failed job (Ok(Err)) triggers
                        // the rollback; a dropped channel is ignored.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1338
1339 fn local_worktree_git_repos_changed(
1340 &mut self,
1341 worktree: Entity<Worktree>,
1342 changed_repos: &UpdatedGitRepositoriesSet,
1343 cx: &mut Context<Self>,
1344 ) {
1345 log::debug!("local worktree repos changed");
1346 debug_assert!(worktree.read(cx).is_local());
1347
1348 for repository in self.repositories.values() {
1349 repository.update(cx, |repository, cx| {
1350 let repo_abs_path = &repository.work_directory_abs_path;
1351 if changed_repos.iter().any(|update| {
1352 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1353 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1354 }) {
1355 repository.reload_buffer_diff_bases(cx);
1356 }
1357 });
1358 }
1359 }
1360
    /// All repositories known to this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1364
1365 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1366 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1367 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1368 Some(status.status)
1369 }
1370
1371 pub fn repository_and_path_for_buffer_id(
1372 &self,
1373 buffer_id: BufferId,
1374 cx: &App,
1375 ) -> Option<(Entity<Repository>, RepoPath)> {
1376 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1377 let project_path = buffer.read(cx).project_path(cx)?;
1378 self.repository_and_path_for_project_path(&project_path, cx)
1379 }
1380
1381 pub fn repository_and_path_for_project_path(
1382 &self,
1383 path: &ProjectPath,
1384 cx: &App,
1385 ) -> Option<(Entity<Repository>, RepoPath)> {
1386 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1387 self.repositories
1388 .values()
1389 .filter_map(|repo| {
1390 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1391 Some((repo.clone(), repo_path))
1392 })
1393 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1394 }
1395
1396 pub fn git_init(
1397 &self,
1398 path: Arc<Path>,
1399 fallback_branch_name: String,
1400 cx: &App,
1401 ) -> Task<Result<()>> {
1402 match &self.state {
1403 GitStoreState::Local { fs, .. } => {
1404 let fs = fs.clone();
1405 cx.background_executor()
1406 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1407 }
1408 GitStoreState::Remote {
1409 upstream_client,
1410 upstream_project_id: project_id,
1411 ..
1412 } => {
1413 let client = upstream_client.clone();
1414 let project_id = *project_id;
1415 cx.background_executor().spawn(async move {
1416 client
1417 .request(proto::GitInit {
1418 project_id: project_id,
1419 abs_path: path.to_string_lossy().to_string(),
1420 fallback_branch_name,
1421 })
1422 .await?;
1423 Ok(())
1424 })
1425 }
1426 }
1427 }
1428
1429 pub fn git_clone(
1430 &self,
1431 repo: String,
1432 path: impl Into<Arc<std::path::Path>>,
1433 cx: &App,
1434 ) -> Task<Result<()>> {
1435 let path = path.into();
1436 match &self.state {
1437 GitStoreState::Local { fs, .. } => {
1438 let fs = fs.clone();
1439 cx.background_executor()
1440 .spawn(async move { fs.git_clone(&repo, &path).await })
1441 }
1442 GitStoreState::Remote {
1443 upstream_client,
1444 upstream_project_id,
1445 ..
1446 } => {
1447 if upstream_client.is_via_collab() {
1448 return Task::ready(Err(anyhow!(
1449 "Git Clone isn't supported for project guests"
1450 )));
1451 }
1452 let request = upstream_client.request(proto::GitClone {
1453 project_id: *upstream_project_id,
1454 abs_path: path.to_string_lossy().to_string(),
1455 remote_repo: repo,
1456 });
1457
1458 cx.background_spawn(async move {
1459 let result = request.await?;
1460
1461 match result.success {
1462 true => Ok(()),
1463 false => Err(anyhow!("Git Clone failed")),
1464 }
1465 })
1466 }
1467 }
1468 }
1469
    /// RPC handler: applies a repository snapshot update sent by the upstream
    /// host, creating the remote repository entity on first sight, and
    /// re-forwards the update to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            // Create the entity the first time this repository id appears;
            // `is_new` records whether the closure actually ran.
            let mut is_new = false;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                is_new = true;
                let git_store = cx.weak_entity();
                cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                })
            });
            if is_new {
                this._subscriptions
                    .push(cx.subscribe(repo, Self::on_repository_event))
            }

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, is_new, cx)
            })?;

            // The first repository we learn about becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Chain the update further down if we are ourselves being
            // mirrored, rewriting the project id for that audience.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })?
    }
1518
1519 async fn handle_remove_repository(
1520 this: Entity<Self>,
1521 envelope: TypedEnvelope<proto::RemoveRepository>,
1522 mut cx: AsyncApp,
1523 ) -> Result<()> {
1524 this.update(&mut cx, |this, cx| {
1525 let mut update = envelope.payload;
1526 let id = RepositoryId::from_proto(update.id);
1527 this.repositories.remove(&id);
1528 if let Some((client, project_id)) = this.downstream_client() {
1529 update.project_id = project_id.to_proto();
1530 client.send(update).log_err();
1531 }
1532 if this.active_repo_id == Some(id) {
1533 this.active_repo_id = None;
1534 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1535 }
1536 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1537 })
1538 }
1539
1540 async fn handle_git_init(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::GitInit>,
1543 cx: AsyncApp,
1544 ) -> Result<proto::Ack> {
1545 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1546 let name = envelope.payload.fallback_branch_name;
1547 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1548 .await?;
1549
1550 Ok(proto::Ack {})
1551 }
1552
1553 async fn handle_git_clone(
1554 this: Entity<Self>,
1555 envelope: TypedEnvelope<proto::GitClone>,
1556 cx: AsyncApp,
1557 ) -> Result<proto::GitCloneResponse> {
1558 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1559 let repo_name = envelope.payload.remote_repo;
1560 let result = cx
1561 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1562 .await;
1563
1564 Ok(proto::GitCloneResponse {
1565 success: result.is_ok(),
1566 })
1567 }
1568
1569 async fn handle_fetch(
1570 this: Entity<Self>,
1571 envelope: TypedEnvelope<proto::Fetch>,
1572 mut cx: AsyncApp,
1573 ) -> Result<proto::RemoteMessageResponse> {
1574 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1575 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1576 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1577 let askpass_id = envelope.payload.askpass_id;
1578
1579 let askpass = make_remote_delegate(
1580 this,
1581 envelope.payload.project_id,
1582 repository_id,
1583 askpass_id,
1584 &mut cx,
1585 );
1586
1587 let remote_output = repository_handle
1588 .update(&mut cx, |repository_handle, cx| {
1589 repository_handle.fetch(fetch_options, askpass, cx)
1590 })?
1591 .await??;
1592
1593 Ok(proto::RemoteMessageResponse {
1594 stdout: remote_output.stdout,
1595 stderr: remote_output.stderr,
1596 })
1597 }
1598
1599 async fn handle_push(
1600 this: Entity<Self>,
1601 envelope: TypedEnvelope<proto::Push>,
1602 mut cx: AsyncApp,
1603 ) -> Result<proto::RemoteMessageResponse> {
1604 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1605 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1606
1607 let askpass_id = envelope.payload.askpass_id;
1608 let askpass = make_remote_delegate(
1609 this,
1610 envelope.payload.project_id,
1611 repository_id,
1612 askpass_id,
1613 &mut cx,
1614 );
1615
1616 let options = envelope
1617 .payload
1618 .options
1619 .as_ref()
1620 .map(|_| match envelope.payload.options() {
1621 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1622 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1623 });
1624
1625 let branch_name = envelope.payload.branch_name.into();
1626 let remote_name = envelope.payload.remote_name.into();
1627
1628 let remote_output = repository_handle
1629 .update(&mut cx, |repository_handle, cx| {
1630 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1631 })?
1632 .await??;
1633 Ok(proto::RemoteMessageResponse {
1634 stdout: remote_output.stdout,
1635 stderr: remote_output.stderr,
1636 })
1637 }
1638
1639 async fn handle_pull(
1640 this: Entity<Self>,
1641 envelope: TypedEnvelope<proto::Pull>,
1642 mut cx: AsyncApp,
1643 ) -> Result<proto::RemoteMessageResponse> {
1644 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1645 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1646 let askpass_id = envelope.payload.askpass_id;
1647 let askpass = make_remote_delegate(
1648 this,
1649 envelope.payload.project_id,
1650 repository_id,
1651 askpass_id,
1652 &mut cx,
1653 );
1654
1655 let branch_name = envelope.payload.branch_name.into();
1656 let remote_name = envelope.payload.remote_name.into();
1657
1658 let remote_message = repository_handle
1659 .update(&mut cx, |repository_handle, cx| {
1660 repository_handle.pull(branch_name, remote_name, askpass, cx)
1661 })?
1662 .await??;
1663
1664 Ok(proto::RemoteMessageResponse {
1665 stdout: remote_message.stdout,
1666 stderr: remote_message.stderr,
1667 })
1668 }
1669
1670 async fn handle_stage(
1671 this: Entity<Self>,
1672 envelope: TypedEnvelope<proto::Stage>,
1673 mut cx: AsyncApp,
1674 ) -> Result<proto::Ack> {
1675 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1676 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1677
1678 let entries = envelope
1679 .payload
1680 .paths
1681 .into_iter()
1682 .map(PathBuf::from)
1683 .map(RepoPath::new)
1684 .collect();
1685
1686 repository_handle
1687 .update(&mut cx, |repository_handle, cx| {
1688 repository_handle.stage_entries(entries, cx)
1689 })?
1690 .await?;
1691 Ok(proto::Ack {})
1692 }
1693
1694 async fn handle_unstage(
1695 this: Entity<Self>,
1696 envelope: TypedEnvelope<proto::Unstage>,
1697 mut cx: AsyncApp,
1698 ) -> Result<proto::Ack> {
1699 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1700 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1701
1702 let entries = envelope
1703 .payload
1704 .paths
1705 .into_iter()
1706 .map(PathBuf::from)
1707 .map(RepoPath::new)
1708 .collect();
1709
1710 repository_handle
1711 .update(&mut cx, |repository_handle, cx| {
1712 repository_handle.unstage_entries(entries, cx)
1713 })?
1714 .await?;
1715
1716 Ok(proto::Ack {})
1717 }
1718
1719 async fn handle_stash(
1720 this: Entity<Self>,
1721 envelope: TypedEnvelope<proto::Stash>,
1722 mut cx: AsyncApp,
1723 ) -> Result<proto::Ack> {
1724 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1725 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1726
1727 let entries = envelope
1728 .payload
1729 .paths
1730 .into_iter()
1731 .map(PathBuf::from)
1732 .map(RepoPath::new)
1733 .collect();
1734
1735 repository_handle
1736 .update(&mut cx, |repository_handle, cx| {
1737 repository_handle.stash_entries(entries, cx)
1738 })?
1739 .await?;
1740
1741 Ok(proto::Ack {})
1742 }
1743
1744 async fn handle_stash_pop(
1745 this: Entity<Self>,
1746 envelope: TypedEnvelope<proto::StashPop>,
1747 mut cx: AsyncApp,
1748 ) -> Result<proto::Ack> {
1749 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1750 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1751 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1752
1753 repository_handle
1754 .update(&mut cx, |repository_handle, cx| {
1755 repository_handle.stash_pop(stash_index, cx)
1756 })?
1757 .await?;
1758
1759 Ok(proto::Ack {})
1760 }
1761
1762 async fn handle_stash_apply(
1763 this: Entity<Self>,
1764 envelope: TypedEnvelope<proto::StashApply>,
1765 mut cx: AsyncApp,
1766 ) -> Result<proto::Ack> {
1767 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1768 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1769 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1770
1771 repository_handle
1772 .update(&mut cx, |repository_handle, cx| {
1773 repository_handle.stash_apply(stash_index, cx)
1774 })?
1775 .await?;
1776
1777 Ok(proto::Ack {})
1778 }
1779
1780 async fn handle_stash_drop(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::StashDrop>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::Ack> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1788
1789 repository_handle
1790 .update(&mut cx, |repository_handle, cx| {
1791 repository_handle.stash_drop(stash_index, cx)
1792 })?
1793 .await??;
1794
1795 Ok(proto::Ack {})
1796 }
1797
1798 async fn handle_set_index_text(
1799 this: Entity<Self>,
1800 envelope: TypedEnvelope<proto::SetIndexText>,
1801 mut cx: AsyncApp,
1802 ) -> Result<proto::Ack> {
1803 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1804 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1805 let repo_path = RepoPath::from_str(&envelope.payload.path);
1806
1807 repository_handle
1808 .update(&mut cx, |repository_handle, cx| {
1809 repository_handle.spawn_set_index_text_job(
1810 repo_path,
1811 envelope.payload.text,
1812 None,
1813 cx,
1814 )
1815 })?
1816 .await??;
1817 Ok(proto::Ack {})
1818 }
1819
1820 async fn handle_commit(
1821 this: Entity<Self>,
1822 envelope: TypedEnvelope<proto::Commit>,
1823 mut cx: AsyncApp,
1824 ) -> Result<proto::Ack> {
1825 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1826 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1827
1828 let message = SharedString::from(envelope.payload.message);
1829 let name = envelope.payload.name.map(SharedString::from);
1830 let email = envelope.payload.email.map(SharedString::from);
1831 let options = envelope.payload.options.unwrap_or_default();
1832
1833 repository_handle
1834 .update(&mut cx, |repository_handle, cx| {
1835 repository_handle.commit(
1836 message,
1837 name.zip(email),
1838 CommitOptions {
1839 amend: options.amend,
1840 signoff: options.signoff,
1841 },
1842 cx,
1843 )
1844 })?
1845 .await??;
1846 Ok(proto::Ack {})
1847 }
1848
1849 async fn handle_get_remotes(
1850 this: Entity<Self>,
1851 envelope: TypedEnvelope<proto::GetRemotes>,
1852 mut cx: AsyncApp,
1853 ) -> Result<proto::GetRemotesResponse> {
1854 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1855 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1856
1857 let branch_name = envelope.payload.branch_name;
1858
1859 let remotes = repository_handle
1860 .update(&mut cx, |repository_handle, _| {
1861 repository_handle.get_remotes(branch_name)
1862 })?
1863 .await??;
1864
1865 Ok(proto::GetRemotesResponse {
1866 remotes: remotes
1867 .into_iter()
1868 .map(|remotes| proto::get_remotes_response::Remote {
1869 name: remotes.name.to_string(),
1870 })
1871 .collect::<Vec<_>>(),
1872 })
1873 }
1874
1875 async fn handle_get_branches(
1876 this: Entity<Self>,
1877 envelope: TypedEnvelope<proto::GitGetBranches>,
1878 mut cx: AsyncApp,
1879 ) -> Result<proto::GitBranchesResponse> {
1880 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1881 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1882
1883 let branches = repository_handle
1884 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1885 .await??;
1886
1887 Ok(proto::GitBranchesResponse {
1888 branches: branches
1889 .into_iter()
1890 .map(|branch| branch_to_proto(&branch))
1891 .collect::<Vec<_>>(),
1892 })
1893 }
1894 async fn handle_get_default_branch(
1895 this: Entity<Self>,
1896 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1897 mut cx: AsyncApp,
1898 ) -> Result<proto::GetDefaultBranchResponse> {
1899 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1900 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1901
1902 let branch = repository_handle
1903 .update(&mut cx, |repository_handle, _| {
1904 repository_handle.default_branch()
1905 })?
1906 .await??
1907 .map(Into::into);
1908
1909 Ok(proto::GetDefaultBranchResponse { branch })
1910 }
1911 async fn handle_create_branch(
1912 this: Entity<Self>,
1913 envelope: TypedEnvelope<proto::GitCreateBranch>,
1914 mut cx: AsyncApp,
1915 ) -> Result<proto::Ack> {
1916 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1917 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1918 let branch_name = envelope.payload.branch_name;
1919
1920 repository_handle
1921 .update(&mut cx, |repository_handle, _| {
1922 repository_handle.create_branch(branch_name)
1923 })?
1924 .await??;
1925
1926 Ok(proto::Ack {})
1927 }
1928
1929 async fn handle_change_branch(
1930 this: Entity<Self>,
1931 envelope: TypedEnvelope<proto::GitChangeBranch>,
1932 mut cx: AsyncApp,
1933 ) -> Result<proto::Ack> {
1934 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1935 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1936 let branch_name = envelope.payload.branch_name;
1937
1938 repository_handle
1939 .update(&mut cx, |repository_handle, _| {
1940 repository_handle.change_branch(branch_name)
1941 })?
1942 .await??;
1943
1944 Ok(proto::Ack {})
1945 }
1946
1947 async fn handle_show(
1948 this: Entity<Self>,
1949 envelope: TypedEnvelope<proto::GitShow>,
1950 mut cx: AsyncApp,
1951 ) -> Result<proto::GitCommitDetails> {
1952 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1953 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1954
1955 let commit = repository_handle
1956 .update(&mut cx, |repository_handle, _| {
1957 repository_handle.show(envelope.payload.commit)
1958 })?
1959 .await??;
1960 Ok(proto::GitCommitDetails {
1961 sha: commit.sha.into(),
1962 message: commit.message.into(),
1963 commit_timestamp: commit.commit_timestamp,
1964 author_email: commit.author_email.into(),
1965 author_name: commit.author_name.into(),
1966 })
1967 }
1968
1969 async fn handle_load_commit_diff(
1970 this: Entity<Self>,
1971 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1972 mut cx: AsyncApp,
1973 ) -> Result<proto::LoadCommitDiffResponse> {
1974 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1975 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1976
1977 let commit_diff = repository_handle
1978 .update(&mut cx, |repository_handle, _| {
1979 repository_handle.load_commit_diff(envelope.payload.commit)
1980 })?
1981 .await??;
1982 Ok(proto::LoadCommitDiffResponse {
1983 files: commit_diff
1984 .files
1985 .into_iter()
1986 .map(|file| proto::CommitFile {
1987 path: file.path.to_string(),
1988 old_text: file.old_text,
1989 new_text: file.new_text,
1990 })
1991 .collect(),
1992 })
1993 }
1994
1995 async fn handle_reset(
1996 this: Entity<Self>,
1997 envelope: TypedEnvelope<proto::GitReset>,
1998 mut cx: AsyncApp,
1999 ) -> Result<proto::Ack> {
2000 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2001 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2002
2003 let mode = match envelope.payload.mode() {
2004 git_reset::ResetMode::Soft => ResetMode::Soft,
2005 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2006 };
2007
2008 repository_handle
2009 .update(&mut cx, |repository_handle, cx| {
2010 repository_handle.reset(envelope.payload.commit, mode, cx)
2011 })?
2012 .await??;
2013 Ok(proto::Ack {})
2014 }
2015
2016 async fn handle_checkout_files(
2017 this: Entity<Self>,
2018 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2019 mut cx: AsyncApp,
2020 ) -> Result<proto::Ack> {
2021 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2022 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2023 let paths = envelope
2024 .payload
2025 .paths
2026 .iter()
2027 .map(|s| RepoPath::from_str(s))
2028 .collect();
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, cx| {
2032 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2033 })?
2034 .await??;
2035 Ok(proto::Ack {})
2036 }
2037
2038 async fn handle_open_commit_message_buffer(
2039 this: Entity<Self>,
2040 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2041 mut cx: AsyncApp,
2042 ) -> Result<proto::OpenBufferResponse> {
2043 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2044 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2045 let buffer = repository
2046 .update(&mut cx, |repository, cx| {
2047 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2048 })?
2049 .await?;
2050
2051 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2052 this.update(&mut cx, |this, cx| {
2053 this.buffer_store.update(cx, |buffer_store, cx| {
2054 buffer_store
2055 .create_buffer_for_peer(
2056 &buffer,
2057 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2058 cx,
2059 )
2060 .detach_and_log_err(cx);
2061 })
2062 })?;
2063
2064 Ok(proto::OpenBufferResponse {
2065 buffer_id: buffer_id.to_proto(),
2066 })
2067 }
2068
    /// RPC handler: forwards a credential prompt to the askpass delegate
    /// registered under this request's id and returns the user's answer.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
        // Remove the delegate before awaiting so the mutex guard isn't held
        // across the await; it's re-inserted below for follow-up prompts.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        let response = askpass.ask_password(envelope.payload.prompt).await?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        Ok(proto::AskPassResponse { response })
    }
2091
2092 async fn handle_check_for_pushed_commits(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::CheckForPushedCommitsResponse> {
2097 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2098 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2099
2100 let branches = repository_handle
2101 .update(&mut cx, |repository_handle, _| {
2102 repository_handle.check_for_pushed_commits()
2103 })?
2104 .await??;
2105 Ok(proto::CheckForPushedCommitsResponse {
2106 pushed_to: branches
2107 .into_iter()
2108 .map(|commit| commit.to_string())
2109 .collect(),
2110 })
2111 }
2112
2113 async fn handle_git_diff(
2114 this: Entity<Self>,
2115 envelope: TypedEnvelope<proto::GitDiff>,
2116 mut cx: AsyncApp,
2117 ) -> Result<proto::GitDiffResponse> {
2118 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2119 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2120 let diff_type = match envelope.payload.diff_type() {
2121 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2122 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2123 };
2124
2125 let mut diff = repository_handle
2126 .update(&mut cx, |repository_handle, cx| {
2127 repository_handle.diff(diff_type, cx)
2128 })?
2129 .await??;
2130 const ONE_MB: usize = 1_000_000;
2131 if diff.len() > ONE_MB {
2132 diff = diff.chars().take(ONE_MB).collect()
2133 }
2134
2135 Ok(proto::GitDiffResponse { diff })
2136 }
2137
2138 async fn handle_open_unstaged_diff(
2139 this: Entity<Self>,
2140 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2141 mut cx: AsyncApp,
2142 ) -> Result<proto::OpenUnstagedDiffResponse> {
2143 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2144 let diff = this
2145 .update(&mut cx, |this, cx| {
2146 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2147 Some(this.open_unstaged_diff(buffer, cx))
2148 })?
2149 .context("missing buffer")?
2150 .await?;
2151 this.update(&mut cx, |this, _| {
2152 let shared_diffs = this
2153 .shared_diffs
2154 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2155 .or_default();
2156 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2157 })?;
2158 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2159 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2160 }
2161
2162 async fn handle_open_uncommitted_diff(
2163 this: Entity<Self>,
2164 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2165 mut cx: AsyncApp,
2166 ) -> Result<proto::OpenUncommittedDiffResponse> {
2167 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2168 let diff = this
2169 .update(&mut cx, |this, cx| {
2170 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2171 Some(this.open_uncommitted_diff(buffer, cx))
2172 })?
2173 .context("missing buffer")?
2174 .await?;
2175 this.update(&mut cx, |this, _| {
2176 let shared_diffs = this
2177 .shared_diffs
2178 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2179 .or_default();
2180 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2181 })?;
2182 diff.read_with(&cx, |diff, cx| {
2183 use proto::open_uncommitted_diff_response::Mode;
2184
2185 let unstaged_diff = diff.secondary_diff();
2186 let index_snapshot = unstaged_diff.and_then(|diff| {
2187 let diff = diff.read(cx);
2188 diff.base_text_exists().then(|| diff.base_text())
2189 });
2190
2191 let mode;
2192 let staged_text;
2193 let committed_text;
2194 if diff.base_text_exists() {
2195 let committed_snapshot = diff.base_text();
2196 committed_text = Some(committed_snapshot.text());
2197 if let Some(index_text) = index_snapshot {
2198 if index_text.remote_id() == committed_snapshot.remote_id() {
2199 mode = Mode::IndexMatchesHead;
2200 staged_text = None;
2201 } else {
2202 mode = Mode::IndexAndHead;
2203 staged_text = Some(index_text.text());
2204 }
2205 } else {
2206 mode = Mode::IndexAndHead;
2207 staged_text = None;
2208 }
2209 } else {
2210 mode = Mode::IndexAndHead;
2211 committed_text = None;
2212 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2213 }
2214
2215 proto::OpenUncommittedDiffResponse {
2216 committed_text,
2217 staged_text,
2218 mode: mode.into(),
2219 }
2220 })
2221 }
2222
2223 async fn handle_update_diff_bases(
2224 this: Entity<Self>,
2225 request: TypedEnvelope<proto::UpdateDiffBases>,
2226 mut cx: AsyncApp,
2227 ) -> Result<()> {
2228 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2229 this.update(&mut cx, |this, cx| {
2230 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2231 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2232 {
2233 let buffer = buffer.read(cx).text_snapshot();
2234 diff_state.update(cx, |diff_state, cx| {
2235 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2236 })
2237 }
2238 })
2239 }
2240
2241 async fn handle_blame_buffer(
2242 this: Entity<Self>,
2243 envelope: TypedEnvelope<proto::BlameBuffer>,
2244 mut cx: AsyncApp,
2245 ) -> Result<proto::BlameBufferResponse> {
2246 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2247 let version = deserialize_version(&envelope.payload.version);
2248 let buffer = this.read_with(&cx, |this, cx| {
2249 this.buffer_store.read(cx).get_existing(buffer_id)
2250 })??;
2251 buffer
2252 .update(&mut cx, |buffer, _| {
2253 buffer.wait_for_version(version.clone())
2254 })?
2255 .await?;
2256 let blame = this
2257 .update(&mut cx, |this, cx| {
2258 this.blame_buffer(&buffer, Some(version), cx)
2259 })?
2260 .await?;
2261 Ok(serialize_blame_buffer_response(blame))
2262 }
2263
2264 async fn handle_get_permalink_to_line(
2265 this: Entity<Self>,
2266 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2267 mut cx: AsyncApp,
2268 ) -> Result<proto::GetPermalinkToLineResponse> {
2269 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2270 // let version = deserialize_version(&envelope.payload.version);
2271 let selection = {
2272 let proto_selection = envelope
2273 .payload
2274 .selection
2275 .context("no selection to get permalink for defined")?;
2276 proto_selection.start as u32..proto_selection.end as u32
2277 };
2278 let buffer = this.read_with(&cx, |this, cx| {
2279 this.buffer_store.read(cx).get_existing(buffer_id)
2280 })??;
2281 let permalink = this
2282 .update(&mut cx, |this, cx| {
2283 this.get_permalink_to_line(&buffer, selection, cx)
2284 })?
2285 .await?;
2286 Ok(proto::GetPermalinkToLineResponse {
2287 permalink: permalink.to_string(),
2288 })
2289 }
2290
2291 fn repository_for_request(
2292 this: &Entity<Self>,
2293 id: RepositoryId,
2294 cx: &mut AsyncApp,
2295 ) -> Result<Entity<Repository>> {
2296 this.read_with(cx, |this, _| {
2297 this.repositories
2298 .get(&id)
2299 .context("missing repository handle")
2300 .cloned()
2301 })?
2302 }
2303
2304 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2305 self.repositories
2306 .iter()
2307 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2308 .collect()
2309 }
2310
    /// Groups updated worktree entries by the repository that contains them.
    ///
    /// Returns a background task resolving to a map from repository to the
    /// repo-relative paths of the updated entries inside it. A path contained
    /// in nested repositories is attributed only to the innermost one.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        // (work directory abs path, repository) pairs for every tracked repo.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorted order lets each repo locate its entries with a binary search
        // (`partition_point`) below, and makes each repo's entries contiguous.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .filter_map(|path| worktree.absolutize(&path).ok())
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repos by work directory so nested repos sort after their
            // parents; iterating in reverse below visits innermost repos first.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) =
                            RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, path)
                    {
                        // Remember the original index so duplicates across
                        // nested repos can be filtered out afterwards.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to its innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
2388}
2389
impl BufferGitState {
    /// Creates an empty per-buffer git state; diffs, base texts, and conflict
    /// data are populated later as they are loaded.
    fn new(_git_store: WeakEntity<GitStore>) -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // `false` = no diff recalculation currently in flight.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
            conflict_updated_futures: Default::default(),
            conflict_set: Default::default(),
            reparse_conflict_markers_task: Default::default(),
        }
    }

    /// Records the buffer's (possibly changed) language and kicks off a diff
    /// recalculation so the diff base texts are re-processed for it.
    fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
        self.language = buffer.read(cx).language().cloned();
        self.language_changed = true;
        let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
    }

    /// Re-scans the buffer snapshot for merge-conflict markers in the
    /// background and updates the associated `ConflictSet`.
    ///
    /// The returned receiver fires once the conflict set has been updated; it
    /// is dropped unfired when there is no live conflict set or the buffer was
    /// not previously known to contain a conflict.
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when the buffer previously had a conflict; otherwise
        // there is no old snapshot to compare the new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff on a background thread; only the final state
                // swap runs on the entity.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Wake every waiter registered since the last reparse.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }

    /// The unstaged (index vs. buffer) diff, if the entity is still alive.
    fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
        self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// The uncommitted (HEAD vs. buffer) diff, if the entity is still alive.
    fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
        self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
    }

    /// Applies base-text updates received over RPC, translating the wire
    /// `mode` into a `DiffBasesChange` and recalculating diffs.
    fn handle_base_texts_updated(
        &mut self,
        buffer: text::BufferSnapshot,
        message: proto::UpdateDiffBases,
        cx: &mut Context<Self>,
    ) {
        use proto::update_diff_bases::Mode;

        // Unrecognized enum values (e.g. from a newer peer) are ignored.
        let Some(mode) = Mode::from_i32(message.mode) else {
            return;
        };

        let diff_bases_change = match mode {
            Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
            Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
            Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
            Mode::IndexAndHead => DiffBasesChange::SetEach {
                index: message.staged_text,
                head: message.committed_text,
            },
        };

        self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
    }

    /// If a diff recalculation is currently in flight, returns a future that
    /// resolves once it settles; otherwise returns `None`.
    pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
        if *self.recalculating_tx.borrow() {
            let mut rx = self.recalculating_tx.subscribe();
            Some(async move {
                loop {
                    let is_recalculating = rx.recv().await;
                    // Stop when recalculation finished or the channel closed.
                    if is_recalculating != Some(true) {
                        break;
                    }
                }
            })
        } else {
            None
        }
    }

    /// Stores new index/HEAD base texts (normalizing their line endings),
    /// marks which of them changed, and kicks off a diff recalculation.
    ///
    /// `SetBoth` shares one `Arc` between index and head, which is how
    /// `recalculate_diffs` later detects "index matches head" by pointer
    /// equality.
    fn diff_bases_changed(
        &mut self,
        buffer: text::BufferSnapshot,
        diff_bases_change: Option<DiffBasesChange>,
        cx: &mut Context<Self>,
    ) {
        match diff_bases_change {
            Some(DiffBasesChange::SetIndex(index)) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetHead(head)) => {
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            Some(DiffBasesChange::SetBoth(text)) => {
                let text = text.map(|mut text| {
                    text::LineEnding::normalize(&mut text);
                    Arc::new(text)
                });
                self.head_text = text.clone();
                self.index_text = text;
                self.head_changed = true;
                self.index_changed = true;
            }
            Some(DiffBasesChange::SetEach { index, head }) => {
                self.index_text = index.map(|mut index| {
                    text::LineEnding::normalize(&mut index);
                    Arc::new(index)
                });
                self.index_changed = true;
                self.head_text = head.map(|mut head| {
                    text::LineEnding::normalize(&mut head);
                    Arc::new(head)
                });
                self.head_changed = true;
            }
            None => {}
        }

        self.recalculate_diffs(buffer, cx)
    }

    /// Recomputes the unstaged and uncommitted diffs for `buffer` in the
    /// background, publishing the results into the live `BufferDiff` entities.
    ///
    /// When the index and HEAD texts are the same `Arc`, the unstaged diff is
    /// reused for the uncommitted diff instead of diffing twice. The run is
    /// abandoned if new hunk staging operations arrive while it is in flight,
    /// so pending state isn't invalidated prematurely.
    fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
        *self.recalculating_tx.borrow_mut() = true;

        // Capture the current inputs; the spawned task must not observe later
        // mutations of `self`.
        let language = self.language.clone();
        let language_registry = self.language_registry.clone();
        let unstaged_diff = self.unstaged_diff();
        let uncommitted_diff = self.uncommitted_diff();
        let head = self.head_text.clone();
        let index = self.index_text.clone();
        let index_changed = self.index_changed;
        let head_changed = self.head_changed;
        let language_changed = self.language_changed;
        let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
        // Pointer equality: `SetBoth` stores one Arc in both fields.
        let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
            (Some(index), Some(head)) => Arc::ptr_eq(index, head),
            (None, None) => true,
            _ => false,
        };
        self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
            log::debug!(
                "start recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            let mut new_unstaged_diff = None;
            if let Some(unstaged_diff) = &unstaged_diff {
                new_unstaged_diff = Some(
                    BufferDiff::update_diff(
                        unstaged_diff.clone(),
                        buffer.clone(),
                        index,
                        index_changed,
                        language_changed,
                        language.clone(),
                        language_registry.clone(),
                        cx,
                    )
                    .await?,
                );
            }

            let mut new_uncommitted_diff = None;
            if let Some(uncommitted_diff) = &uncommitted_diff {
                new_uncommitted_diff = if index_matches_head {
                    // Index == HEAD, so the two diffs are identical; reuse.
                    new_unstaged_diff.clone()
                } else {
                    Some(
                        BufferDiff::update_diff(
                            uncommitted_diff.clone(),
                            buffer.clone(),
                            head,
                            head_changed,
                            language_changed,
                            language.clone(),
                            language_registry.clone(),
                            cx,
                        )
                        .await?,
                    )
                }
            }

            let cancel = this.update(cx, |this, _| {
                // This checks whether all pending stage/unstage operations
                // have quiesced (i.e. both the corresponding write and the
                // read of that write have completed). If not, then we cancel
                // this recalculation attempt to avoid invalidating pending
                // state too quickly; another recalculation will come along
                // later and clear the pending state once the state of the index has settled.
                if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
                    *this.recalculating_tx.borrow_mut() = false;
                    true
                } else {
                    false
                }
            })?;
            if cancel {
                log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {}",
                        "due to subsequent hunk operations",
                    ),
                    buffer.remote_id()
                );
                return Ok(());
            }

            // Publish the new unstaged snapshot first, remembering which range
            // changed so the uncommitted diff can propagate it.
            let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
                unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
            {
                unstaged_diff.update(cx, |diff, cx| {
                    if language_changed {
                        diff.language_changed(cx);
                    }
                    diff.set_snapshot(new_unstaged_diff, &buffer, cx)
                })?
            } else {
                None
            };

            if let Some((uncommitted_diff, new_uncommitted_diff)) =
                uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
            {
                uncommitted_diff.update(cx, |diff, cx| {
                    if language_changed {
                        diff.language_changed(cx);
                    }
                    diff.set_snapshot_with_secondary(
                        new_uncommitted_diff,
                        &buffer,
                        unstaged_changed_range,
                        true,
                        cx,
                    );
                })?;
            }

            log::debug!(
                "finished recalculating diffs for buffer {}",
                buffer.remote_id()
            );

            // Clear the dirty flags and signal waiters that we're done.
            if let Some(this) = this.upgrade() {
                this.update(cx, |this, _| {
                    this.index_changed = false;
                    this.head_changed = false;
                    this.language_changed = false;
                    *this.recalculating_tx.borrow_mut() = false;
                })?;
            }

            Ok(())
        }));
    }
}
2703
2704fn make_remote_delegate(
2705 this: Entity<GitStore>,
2706 project_id: u64,
2707 repository_id: RepositoryId,
2708 askpass_id: u64,
2709 cx: &mut AsyncApp,
2710) -> AskPassDelegate {
2711 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2712 this.update(cx, |this, cx| {
2713 let Some((client, _)) = this.downstream_client() else {
2714 return;
2715 };
2716 let response = client.request(proto::AskPassRequest {
2717 project_id,
2718 repository_id: repository_id.to_proto(),
2719 askpass_id,
2720 prompt,
2721 });
2722 cx.spawn(async move |_, _| {
2723 tx.send(response.await?.response).ok();
2724 anyhow::Ok(())
2725 })
2726 .detach_and_log_err(cx);
2727 })
2728 .log_err();
2729 })
2730}
2731
2732impl RepositoryId {
2733 pub fn to_proto(self) -> u64 {
2734 self.0
2735 }
2736
2737 pub fn from_proto(id: u64) -> Self {
2738 RepositoryId(id)
2739 }
2740}
2741
impl RepositorySnapshot {
    /// Creates a snapshot for a repository that has not been scanned yet.
    fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
        Self {
            id,
            statuses_by_path: Default::default(),
            work_directory_abs_path,
            branch: None,
            head_commit: None,
            scan_id: 0,
            merge: Default::default(),
            remote_origin_url: None,
            remote_upstream_url: None,
            stash_entries: Default::default(),
        }
    }

    /// Builds the first `UpdateRepository` message for a downstream client:
    /// every status entry is included and nothing is marked removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge
                .conflicted_paths
                .iter()
                .map(|repo_path| repo_path.to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_proto(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
        }
    }

    /// Builds an incremental `UpdateRepository` message describing how this
    /// snapshot differs from `old`.
    ///
    /// Statuses are diffed with a two-pointer merge walk over both snapshots'
    /// path-ordered status iterators: paths only in `self` (or whose status
    /// changed) go to `updated_statuses`; paths only in `old` go to
    /// `removed_statuses`.
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        // Path exists only in the new snapshot: added/updated.
                        Ordering::Less => {
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        // Path in both: emit only if the status changed.
                        Ordering::Equal => {
                            if new_entry.status != old_entry.status {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        // Path exists only in the old snapshot: removed.
                        Ordering::Greater => {
                            removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                // New side exhausted: everything left in old was removed.
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                    current_old_entry = old_statuses.next();
                }
                // Old side exhausted: everything left in new is an update.
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge
                .conflicted_paths
                .iter()
                .map(|path| path.as_ref().to_proto())
                .collect(),
            merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_proto(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
            stash_entries: self
                .stash_entries
                .entries
                .iter()
                .map(stash_to_proto)
                .collect(),
        }
    }

    /// Iterates over all status entries, in path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// The aggregate git status summary across all paths.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the status entry for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.0.clone()), &())
            .cloned()
    }

    /// Converts an absolute path to a path relative to this repository's work
    /// directory; `None` if the path lies outside the work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
    }

    // Free-standing form of `abs_path_to_repo_path`, usable without a full
    // snapshot (e.g. from background tasks holding only the work dir path).
    #[inline]
    fn abs_path_to_repo_path_inner(
        work_directory_abs_path: &Path,
        abs_path: &Path,
    ) -> Option<RepoPath> {
        abs_path
            .strip_prefix(&work_directory_abs_path)
            .map(RepoPath::from)
            .ok()
    }

    /// Whether the path was conflicted when the merge heads last changed,
    /// regardless of its current status.
    pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
        self.merge.conflicted_paths.contains(repo_path)
    }

    /// Whether the path is conflicted now, or was when the merge heads last
    /// changed.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        let had_conflict_on_last_merge_head_change =
            self.merge.conflicted_paths.contains(repo_path);
        let has_conflict_currently = self
            .status_for_path(repo_path)
            .is_some_and(|entry| entry.status.is_conflicted());
        had_conflict_on_last_merge_head_change || has_conflict_currently
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
2911
2912pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
2913 proto::StashEntry {
2914 oid: entry.oid.as_bytes().to_vec(),
2915 message: entry.message.clone(),
2916 branch: entry.branch.clone(),
2917 index: entry.index as u64,
2918 timestamp: entry.timestamp,
2919 }
2920}
2921
2922pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
2923 Ok(StashEntry {
2924 oid: Oid::from_bytes(&entry.oid)?,
2925 message: entry.message.clone(),
2926 index: entry.index as usize,
2927 branch: entry.branch.clone(),
2928 timestamp: entry.timestamp,
2929 })
2930}
2931
impl MergeDetails {
    /// Computes the repository's merge state — in-progress operation heads
    /// (merge/cherry-pick/rebase/revert/apply), merge message, and conflicted
    /// paths — from the git backend.
    ///
    /// Returns the new details plus a flag indicating whether the set of
    /// merge heads changed relative to `prev_snapshot`.
    async fn load(
        backend: &Arc<dyn GitRepository>,
        status: &SumTree<StatusEntry>,
        prev_snapshot: &RepositorySnapshot,
    ) -> Result<(MergeDetails, bool)> {
        log::debug!("load merge details");
        let message = backend.merge_message().await;
        // Resolve all "operation in progress" refs in one batch; a `None`
        // element means that ref does not currently exist.
        let heads = backend
            .revparse_batch(vec![
                "MERGE_HEAD".into(),
                "CHERRY_PICK_HEAD".into(),
                "REBASE_HEAD".into(),
                "REVERT_HEAD".into(),
                "APPLY_HEAD".into(),
            ])
            .await
            .log_err()
            .unwrap_or_default()
            .into_iter()
            .map(|opt| opt.map(SharedString::from))
            .collect::<Vec<_>>();
        let merge_heads_changed = heads != prev_snapshot.merge.heads;
        let conflicted_paths = if merge_heads_changed {
            let current_conflicted_paths = TreeSet::from_ordered_entries(
                status
                    .iter()
                    .filter(|entry| entry.status.is_conflicted())
                    .map(|entry| entry.repo_path.clone()),
            );

            // It can happen that we run a scan while a lengthy merge is in progress
            // that will eventually result in conflicts, but before those conflicts
            // are reported by `git status`. Since for the moment we only care about
            // the merge heads state for the purposes of tracking conflicts, don't update
            // this state until we see some conflicts.
            if heads.iter().any(Option::is_some)
                && !prev_snapshot.merge.heads.iter().any(Option::is_some)
                && current_conflicted_paths.is_empty()
            {
                log::debug!("not updating merge heads because no conflicts found");
                return Ok((
                    MergeDetails {
                        message: message.map(SharedString::from),
                        ..prev_snapshot.merge.clone()
                    },
                    false,
                ));
            }

            current_conflicted_paths
        } else {
            // Merge heads unchanged: keep the previously recorded conflicts.
            prev_snapshot.merge.conflicted_paths.clone()
        };
        let details = MergeDetails {
            conflicted_paths,
            message: message.map(SharedString::from),
            heads,
        };
        Ok((details, merge_heads_changed))
    }
}
2994
2995impl Repository {
2996 pub fn snapshot(&self) -> RepositorySnapshot {
2997 self.snapshot.clone()
2998 }
2999
3000 fn local(
3001 id: RepositoryId,
3002 work_directory_abs_path: Arc<Path>,
3003 dot_git_abs_path: Arc<Path>,
3004 repository_dir_abs_path: Arc<Path>,
3005 common_dir_abs_path: Arc<Path>,
3006 project_environment: WeakEntity<ProjectEnvironment>,
3007 fs: Arc<dyn Fs>,
3008 git_store: WeakEntity<GitStore>,
3009 cx: &mut Context<Self>,
3010 ) -> Self {
3011 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
3012 Repository {
3013 this: cx.weak_entity(),
3014 git_store,
3015 snapshot,
3016 commit_message_buffer: None,
3017 askpass_delegates: Default::default(),
3018 paths_needing_status_update: Default::default(),
3019 latest_askpass_id: 0,
3020 job_sender: Repository::spawn_local_git_worker(
3021 work_directory_abs_path,
3022 dot_git_abs_path,
3023 repository_dir_abs_path,
3024 common_dir_abs_path,
3025 project_environment,
3026 fs,
3027 cx,
3028 ),
3029 job_id: 0,
3030 active_jobs: Default::default(),
3031 }
3032 }
3033
3034 fn remote(
3035 id: RepositoryId,
3036 work_directory_abs_path: Arc<Path>,
3037 project_id: ProjectId,
3038 client: AnyProtoClient,
3039 git_store: WeakEntity<GitStore>,
3040 cx: &mut Context<Self>,
3041 ) -> Self {
3042 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
3043 Self {
3044 this: cx.weak_entity(),
3045 snapshot,
3046 commit_message_buffer: None,
3047 git_store,
3048 paths_needing_status_update: Default::default(),
3049 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3050 askpass_delegates: Default::default(),
3051 latest_askpass_id: 0,
3052 active_jobs: Default::default(),
3053 job_id: 0,
3054 }
3055 }
3056
3057 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3058 self.git_store.upgrade()
3059 }
3060
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, then pushes any changes into the buffers' diff
    /// states (and to the downstream client, if one is connected).
    ///
    /// Only meaningful for local repositories; logs an error and bails for
    /// remote ones. Keyed on `GitJobKey::ReloadBufferDiffBases` so redundant
    /// reloads coalesce.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local { backend, .. } = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (foreground): collect, for each open buffer in this
                // repo, its repo path and the currently-held base texts. The
                // `then(...)` options record whether each diff is still alive.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path =
                                    file.worktree.read(cx).absolutize(&file.path).ok()?;
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.0.display()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })??;

                // Phase 2 (background): load the fresh base texts from git and
                // compute the minimal `DiffBasesChange` per buffer — only the
                // texts that actually differ from what is currently held.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            // Identical new texts collapse to
                                            // a single `SetBoth` payload.
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (foreground): apply the changes to each buffer's
                // diff state, mirroring them to the downstream client first.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
3221
    /// Enqueues `job` on this repository's serialized job queue and returns a
    /// oneshot receiver for its result.
    ///
    /// When `status` is provided, it is shown as the in-flight operation while
    /// the job runs (see `send_keyed_job` for the bookkeeping details).
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        // No key: this job is never coalesced with other queued jobs.
        self.send_keyed_job(None, status, job)
    }
3234
    /// Enqueues a job on the repository's job channel, optionally keyed (see
    /// `GitJobKey`; presumably used by the queue processor to coalesce
    /// redundant writes — TODO confirm at the channel's consumer) and
    /// optionally tagged with a human-readable `status` message.
    ///
    /// While the job runs, an entry with its start time and `status` message
    /// is kept in `active_jobs` and observers are notified, so the UI can show
    /// which git operation is in flight. The job's output is delivered through
    /// the returned oneshot receiver; if the job is never run, the receiver
    /// resolves as canceled.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        // Unique id correlating this job's insert/remove in `active_jobs`.
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Record the job as active only when it carries a
                        // status message worth surfacing.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Clear the status entry whether the job succeeded or not.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; ignore send failure.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
3284
3285 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3286 let Some(git_store) = self.git_store.upgrade() else {
3287 return;
3288 };
3289 let entity = cx.entity();
3290 git_store.update(cx, |git_store, cx| {
3291 let Some((&id, _)) = git_store
3292 .repositories
3293 .iter()
3294 .find(|(_, handle)| *handle == &entity)
3295 else {
3296 return;
3297 };
3298 git_store.active_repo_id = Some(id);
3299 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3300 });
3301 }
3302
    /// Iterates over the file statuses in the cached snapshot, without
    /// querying the underlying git repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
3306
    /// Returns a clone of the stash entries from the cached snapshot.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
3310
3311 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3312 let git_store = self.git_store.upgrade()?;
3313 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3314 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3315 let abs_path = SanitizedPath::new(&abs_path);
3316 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3317 Some(ProjectPath {
3318 worktree_id: worktree.read(cx).id(),
3319 path: relative_path.into(),
3320 })
3321 }
3322
3323 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3324 let git_store = self.git_store.upgrade()?;
3325 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3326 let abs_path = worktree_store.absolutize(path, cx)?;
3327 self.snapshot.abs_path_to_repo_path(&abs_path)
3328 }
3329
3330 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3331 other
3332 .read(cx)
3333 .snapshot
3334 .work_directory_abs_path
3335 .starts_with(&self.snapshot.work_directory_abs_path)
3336 }
3337
    /// Returns a buffer for editing the commit message, creating it on first
    /// use and caching it in `commit_message_buffer` thereafter.
    ///
    /// For local repositories the buffer is created locally; for remote ones
    /// it is opened on the host via RPC and awaited in the buffer store.
    /// When `languages` is provided, the buffer is assigned the "Git Commit"
    /// language.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the cached buffer if one was already created.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local { .. } => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })?
                    .await
                }
                RepositoryState::Remote { project_id, client } => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })?
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        })?;
                    }
                    // Cache the buffer for subsequent calls.
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    })?;
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
3390
3391 fn open_local_commit_buffer(
3392 language_registry: Option<Arc<LanguageRegistry>>,
3393 buffer_store: Entity<BufferStore>,
3394 cx: &mut Context<Self>,
3395 ) -> Task<Result<Entity<Buffer>>> {
3396 cx.spawn(async move |repository, cx| {
3397 let buffer = buffer_store
3398 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3399 .await?;
3400
3401 if let Some(language_registry) = language_registry {
3402 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3403 buffer.update(cx, |buffer, cx| {
3404 buffer.set_language(Some(git_commit_language), cx);
3405 })?;
3406 }
3407
3408 repository.update(cx, |repository, _| {
3409 repository.commit_message_buffer = Some(buffer.clone());
3410 })?;
3411 Ok(buffer)
3412 })
3413 }
3414
    /// Restores the given `paths` in the working tree to their state at
    /// `commit` (the equivalent of `git checkout <commit> -- <paths>`).
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.send_job(
            Some(format!("git checkout {}", commit).into()),
            move |git_repo, _| async move {
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .checkout_files(commit, paths, environment.clone())
                            .await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::GitCheckoutFiles {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                commit,
                                // The protocol carries paths as plain strings.
                                paths: paths
                                    .into_iter()
                                    .map(|p| p.to_string_lossy().to_string())
                                    .collect(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
3456
    /// Resets HEAD to `commit` with the given `reset_mode` (`Soft` keeps the
    /// index, `Mixed` resets it to match the commit).
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local {
                    backend,
                    environment,
                    ..
                } => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote { project_id, client } => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            // Translate the mode into its protobuf encoding.
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
3490
3491 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3492 let id = self.id;
3493 self.send_job(None, move |git_repo, _cx| async move {
3494 match git_repo {
3495 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3496 RepositoryState::Remote { project_id, client } => {
3497 let resp = client
3498 .request(proto::GitShow {
3499 project_id: project_id.0,
3500 repository_id: id.to_proto(),
3501 commit,
3502 })
3503 .await?;
3504
3505 Ok(CommitDetails {
3506 sha: resp.sha.into(),
3507 message: resp.message.into(),
3508 commit_timestamp: resp.commit_timestamp,
3509 author_email: resp.author_email.into(),
3510 author_name: resp.author_name.into(),
3511 })
3512 }
3513 }
3514 })
3515 }
3516
    /// Loads the diff introduced by `commit`: the old and new text of each
    /// file the commit touched.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
                RepositoryState::Remote {
                    client, project_id, ..
                } => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    // Rebuild the commit diff from the protobuf payload.
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| CommitFile {
                                path: Path::new(&file.path).into(),
                                old_text: file.old_text,
                                new_text: file.new_text,
                            })
                            .collect(),
                    })
                }
            }
        })
    }
3547
    /// Returns the project's buffer store, if the owning `GitStore` is alive.
    fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
        Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
    }
3551
    /// Stages the given `entries` in the git index.
    ///
    /// Open buffers for these paths whose backing file still exists on disk
    /// are saved first, so that what gets staged matches the editor contents.
    pub fn stage_entries(
        &self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let id = self.id;

        // Collect save futures for open, dirty-on-disk buffers first.
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in &entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    // Skip buffers whose file was deleted; saving them would
                    // recreate the file.
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }

        cx.spawn(async move |this, cx| {
            // Ensure all buffers are written to disk before staging.
            for save_future in save_futures {
                save_future.await?;
            }

            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local {
                            backend,
                            environment,
                            ..
                        } => backend.stage_paths(entries, environment.clone()).await,
                        RepositoryState::Remote { project_id, client } => {
                            client
                                .request(proto::Stage {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.as_ref().to_proto())
                                        .collect(),
                                })
                                .await
                                .context("sending stage request")?;

                            Ok(())
                        }
                    }
                })
            })?
            .await??;

            Ok(())
        })
    }
3617
    /// Removes the given `entries` from the git index (unstages them).
    ///
    /// Mirrors `stage_entries`: open buffers for these paths whose backing
    /// file still exists are saved first so the index operation sees current
    /// on-disk contents.
    pub fn unstage_entries(
        &self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let id = self.id;

        // Collect save futures for open, dirty-on-disk buffers first.
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in &entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }

        cx.spawn(async move |this, cx| {
            // Ensure all buffers are written to disk before unstaging.
            for save_future in save_futures {
                save_future.await?;
            }

            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local {
                            backend,
                            environment,
                            ..
                        } => backend.unstage_paths(entries, environment).await,
                        RepositoryState::Remote { project_id, client } => {
                            client
                                .request(proto::Unstage {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.as_ref().to_proto())
                                        .collect(),
                                })
                                .await
                                .context("sending unstage request")?;

                            Ok(())
                        }
                    }
                })
            })?
            .await??;

            Ok(())
        })
    }
3683
3684 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3685 let to_stage = self
3686 .cached_status()
3687 .filter(|entry| !entry.status.staging().is_fully_staged())
3688 .map(|entry| entry.repo_path)
3689 .collect();
3690 self.stage_entries(to_stage, cx)
3691 }
3692
3693 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3694 let to_unstage = self
3695 .cached_status()
3696 .filter(|entry| entry.status.staging().has_staged())
3697 .map(|entry| entry.repo_path)
3698 .collect();
3699 self.unstage_entries(to_unstage, cx)
3700 }
3701
    /// Stashes every path present in the cached status (i.e. all changes).
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
3707
    /// Stashes the working-tree changes of the given `entries`.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local {
                            backend,
                            environment,
                            ..
                        } => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote { project_id, client } => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.as_ref().to_proto())
                                        .collect(),
                                })
                                .await
                                .context("sending stash request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
3745
    /// Applies and removes the stash entry at `index` (the most recent one
    /// when `None`).
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local {
                            backend,
                            environment,
                            ..
                        } => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote { project_id, client } => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
3779
    /// Applies the stash entry at `index` (the most recent one when `None`)
    /// without removing it from the stash.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local {
                            backend,
                            environment,
                            ..
                        } => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote { project_id, client } => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
3813
3814 pub fn stash_drop(
3815 &mut self,
3816 index: Option<usize>,
3817 cx: &mut Context<Self>,
3818 ) -> oneshot::Receiver<anyhow::Result<()>> {
3819 let id = self.id;
3820 let updates_tx = self
3821 .git_store()
3822 .and_then(|git_store| match &git_store.read(cx).state {
3823 GitStoreState::Local { downstream, .. } => downstream
3824 .as_ref()
3825 .map(|downstream| downstream.updates_tx.clone()),
3826 _ => None,
3827 });
3828 let this = cx.weak_entity();
3829 self.send_job(None, move |git_repo, mut cx| async move {
3830 match git_repo {
3831 RepositoryState::Local {
3832 backend,
3833 environment,
3834 ..
3835 } => {
3836 let result = backend.stash_drop(index, environment).await;
3837 if result.is_ok()
3838 && let Ok(stash_entries) = backend.stash_entries().await
3839 {
3840 let snapshot = this.update(&mut cx, |this, cx| {
3841 this.snapshot.stash_entries = stash_entries;
3842 let snapshot = this.snapshot.clone();
3843 cx.emit(RepositoryEvent::Updated {
3844 full_scan: false,
3845 new_instance: false,
3846 });
3847 snapshot
3848 })?;
3849 if let Some(updates_tx) = updates_tx {
3850 updates_tx
3851 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3852 .ok();
3853 }
3854 }
3855
3856 result
3857 }
3858 RepositoryState::Remote { project_id, client } => {
3859 client
3860 .request(proto::StashDrop {
3861 project_id: project_id.0,
3862 repository_id: id.to_proto(),
3863 stash_index: index.map(|i| i as u64),
3864 })
3865 .await
3866 .context("sending stash pop request")?;
3867 Ok(())
3868 }
3869 }
3870 })
3871 }
3872
    /// Creates a commit from the staged changes with the given `message`.
    ///
    /// `name_and_email` overrides the author identity when provided, and
    /// `options` carries the amend/signoff flags.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local {
                    backend,
                    environment,
                    ..
                } => {
                    backend
                        .commit(message, name_and_email, options, environment)
                        .await
                }
                RepositoryState::Remote { project_id, client } => {
                    // Split the author tuple into the protocol's separate fields.
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                        })
                        .await
                        .context("sending commit request")?;

                    Ok(())
                }
            }
        })
    }
3915
    /// Runs `git fetch` with the given options, using `askpass` to answer
    /// credential prompts, and returns the command's stdout/stderr.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        // Fresh id so the host can route credential prompts back to this client.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local {
                    backend,
                    environment,
                    ..
                } => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote { project_id, client } => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Always deregister the delegate when the request finishes.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await
                        .context("sending fetch request")?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
3958
3959 pub fn push(
3960 &mut self,
3961 branch: SharedString,
3962 remote: SharedString,
3963 options: Option<PushOptions>,
3964 askpass: AskPassDelegate,
3965 cx: &mut Context<Self>,
3966 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3967 let askpass_delegates = self.askpass_delegates.clone();
3968 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3969 let id = self.id;
3970
3971 let args = options
3972 .map(|option| match option {
3973 PushOptions::SetUpstream => " --set-upstream",
3974 PushOptions::Force => " --force-with-lease",
3975 })
3976 .unwrap_or("");
3977
3978 let updates_tx = self
3979 .git_store()
3980 .and_then(|git_store| match &git_store.read(cx).state {
3981 GitStoreState::Local { downstream, .. } => downstream
3982 .as_ref()
3983 .map(|downstream| downstream.updates_tx.clone()),
3984 _ => None,
3985 });
3986
3987 let this = cx.weak_entity();
3988 self.send_job(
3989 Some(format!("git push {} {} {}", args, branch, remote).into()),
3990 move |git_repo, mut cx| async move {
3991 match git_repo {
3992 RepositoryState::Local {
3993 backend,
3994 environment,
3995 ..
3996 } => {
3997 let result = backend
3998 .push(
3999 branch.to_string(),
4000 remote.to_string(),
4001 options,
4002 askpass,
4003 environment.clone(),
4004 cx.clone(),
4005 )
4006 .await;
4007 if result.is_ok() {
4008 let branches = backend.branches().await?;
4009 let branch = branches.into_iter().find(|branch| branch.is_head);
4010 log::info!("head branch after scan is {branch:?}");
4011 let snapshot = this.update(&mut cx, |this, cx| {
4012 this.snapshot.branch = branch;
4013 let snapshot = this.snapshot.clone();
4014 cx.emit(RepositoryEvent::Updated {
4015 full_scan: false,
4016 new_instance: false,
4017 });
4018 snapshot
4019 })?;
4020 if let Some(updates_tx) = updates_tx {
4021 updates_tx
4022 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4023 .ok();
4024 }
4025 }
4026 result
4027 }
4028 RepositoryState::Remote { project_id, client } => {
4029 askpass_delegates.lock().insert(askpass_id, askpass);
4030 let _defer = util::defer(|| {
4031 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4032 debug_assert!(askpass_delegate.is_some());
4033 });
4034 let response = client
4035 .request(proto::Push {
4036 project_id: project_id.0,
4037 repository_id: id.to_proto(),
4038 askpass_id,
4039 branch_name: branch.to_string(),
4040 remote_name: remote.to_string(),
4041 options: options.map(|options| match options {
4042 PushOptions::Force => proto::push::PushOptions::Force,
4043 PushOptions::SetUpstream => {
4044 proto::push::PushOptions::SetUpstream
4045 }
4046 }
4047 as i32),
4048 })
4049 .await
4050 .context("sending push request")?;
4051
4052 Ok(RemoteCommandOutput {
4053 stdout: response.stdout,
4054 stderr: response.stderr,
4055 })
4056 }
4057 }
4058 },
4059 )
4060 }
4061
    /// Pulls `branch` from `remote`, using `askpass` to answer credential
    /// prompts, and returns the command's stdout/stderr.
    pub fn pull(
        &mut self,
        branch: SharedString,
        remote: SharedString,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        // Fresh id so the host can route credential prompts back to this client.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(
            Some(format!("git pull {} {}", remote, branch).into()),
            move |git_repo, cx| async move {
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .pull(
                                branch.to_string(),
                                remote.to_string(),
                                askpass,
                                environment.clone(),
                                cx,
                            )
                            .await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        // Always deregister the delegate when the request finishes.
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Pull {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_name: remote.to_string(),
                            })
                            .await
                            .context("sending pull request")?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
4118
    /// Enqueues a write of `content` to the index entry for `path`. Jobs are
    /// keyed by path (`GitJobKey::WriteIndex`) so queued writes for the same
    /// file are serialized.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write completes — presumably so later
    /// diff recalculations can tell whether the index already reflects this
    /// staging operation; TODO confirm against `BufferDiffState` usage.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(path.clone())),
            None,
            move |git_repo, mut cx| async move {
                log::debug!("start updating index text for buffer {}", path.display());
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .set_index_text(path.clone(), content, environment.clone())
                            .await?;
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.as_ref().to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!("finish updating index text for buffer {}", path.display());

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Map the repo path back to a project path to find the
                    // open buffer (if any) whose diff state should be updated.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store.update(&mut cx, |git_store, cx| {
                        let buffer_id = git_store
                            .buffer_store
                            .read(cx)
                            .get_by_path(&project_path?)?
                            .read(cx)
                            .remote_id();
                        let diff_state = git_store.diffs.get(&buffer_id)?;
                        diff_state.update(cx, |diff_state, _| {
                            diff_state.hunk_staging_operation_count_as_of_write =
                                hunk_staging_operation_count;
                        });
                        Some(())
                    })?;
                }
                Ok(())
            },
        )
    }
4181
4182 pub fn get_remotes(
4183 &mut self,
4184 branch_name: Option<String>,
4185 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4186 let id = self.id;
4187 self.send_job(None, move |repo, _cx| async move {
4188 match repo {
4189 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4190 RepositoryState::Remote { project_id, client } => {
4191 let response = client
4192 .request(proto::GetRemotes {
4193 project_id: project_id.0,
4194 repository_id: id.to_proto(),
4195 branch_name,
4196 })
4197 .await?;
4198
4199 let remotes = response
4200 .remotes
4201 .into_iter()
4202 .map(|remotes| git::repository::Remote {
4203 name: remotes.name.into(),
4204 })
4205 .collect();
4206
4207 Ok(remotes)
4208 }
4209 }
4210 })
4211 }
4212
4213 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4214 let id = self.id;
4215 self.send_job(None, move |repo, _| async move {
4216 match repo {
4217 RepositoryState::Local { backend, .. } => backend.branches().await,
4218 RepositoryState::Remote { project_id, client } => {
4219 let response = client
4220 .request(proto::GitGetBranches {
4221 project_id: project_id.0,
4222 repository_id: id.to_proto(),
4223 })
4224 .await?;
4225
4226 let branches = response
4227 .branches
4228 .into_iter()
4229 .map(|branch| proto_to_branch(&branch))
4230 .collect();
4231
4232 Ok(branches)
4233 }
4234 }
4235 })
4236 }
4237
4238 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4239 let id = self.id;
4240 self.send_job(None, move |repo, _| async move {
4241 match repo {
4242 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4243 RepositoryState::Remote { project_id, client } => {
4244 let response = client
4245 .request(proto::GetDefaultBranch {
4246 project_id: project_id.0,
4247 repository_id: id.to_proto(),
4248 })
4249 .await?;
4250
4251 anyhow::Ok(response.branch.map(SharedString::from))
4252 }
4253 }
4254 })
4255 }
4256
    /// Produces a textual diff: HEAD→index (staged changes) or HEAD→worktree,
    /// depending on `diff_type`.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            // Translate the diff kind into its protobuf encoding.
                            diff_type: match diff_type {
                                DiffType::HeadToIndex => {
                                    proto::git_diff::DiffType::HeadToIndex.into()
                                }
                                DiffType::HeadToWorktree => {
                                    proto::git_diff::DiffType::HeadToWorktree.into()
                                }
                            },
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
4283
4284 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4285 let id = self.id;
4286 self.send_job(
4287 Some(format!("git switch -c {branch_name}").into()),
4288 move |repo, _cx| async move {
4289 match repo {
4290 RepositoryState::Local { backend, .. } => {
4291 backend.create_branch(branch_name).await
4292 }
4293 RepositoryState::Remote { project_id, client } => {
4294 client
4295 .request(proto::GitCreateBranch {
4296 project_id: project_id.0,
4297 repository_id: id.to_proto(),
4298 branch_name,
4299 })
4300 .await?;
4301
4302 Ok(())
4303 }
4304 }
4305 },
4306 )
4307 }
4308
4309 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4310 let id = self.id;
4311 self.send_job(
4312 Some(format!("git switch {branch_name}").into()),
4313 move |repo, _cx| async move {
4314 match repo {
4315 RepositoryState::Local { backend, .. } => {
4316 backend.change_branch(branch_name).await
4317 }
4318 RepositoryState::Remote { project_id, client } => {
4319 client
4320 .request(proto::GitChangeBranch {
4321 project_id: project_id.0,
4322 repository_id: id.to_proto(),
4323 branch_name,
4324 })
4325 .await?;
4326
4327 Ok(())
4328 }
4329 }
4330 },
4331 )
4332 }
4333
    /// Returns the names of the branches the current commit has been pushed
    /// to, per the backend's `check_for_pushed_commit`.
    pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::CheckForPushedCommits {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response.pushed_to.into_iter().map(Into::into).collect();

                    Ok(branches)
                }
            }
        })
    }
4354
    /// Captures a checkpoint of the repository's current state, which can be
    /// restored later via `restore_checkpoint`. Local repositories only.
    pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
        self.send_job(None, |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.checkpoint().await,
                // Checkpoints are not supported over the collaboration protocol.
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }
4363
    /// Restores the repository to a previously captured `checkpoint`.
    /// Local repositories only; remote repositories return an error.
    pub fn restore_checkpoint(
        &mut self,
        checkpoint: GitRepositoryCheckpoint,
    ) -> oneshot::Receiver<Result<()>> {
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => {
                    backend.restore_checkpoint(checkpoint).await
                }
                // Checkpoints are not supported over the collaboration protocol.
                RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
            }
        })
    }
4377
    /// Applies an `UpdateRepository` message received from the remote host to
    /// this repository's local snapshot.
    ///
    /// Branch, head-commit, merge, and stash state are replaced wholesale;
    /// file statuses are patched incrementally from the update's
    /// removed/updated lists. Emits [`RepositoryEvent::Updated`] when done.
    /// `is_new` indicates this entity was just created for the repository.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        is_new: bool,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        // The conflict set arrives pre-sorted from the host, so it can be
        // ingested directly as ordered entries.
        let conflicted_paths = TreeSet::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .map(|path| RepoPath(Path::new(&path).into())),
        );
        self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
        self.snapshot.head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);

        self.snapshot.merge.conflicted_paths = conflicted_paths;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        self.snapshot.stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                // Malformed stash entries from the wire are dropped rather
                // than failing the whole update.
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };

        // Removed paths become Remove edits, updated statuses become Inserts;
        // both are applied to the status tree in a single batch.
        let edits = update
            .removed_statuses
            .into_iter()
            .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        self.snapshot.statuses_by_path.edit(edits, &());
        // Updates may be split into multiple messages; only adopt the new
        // scan id once the final chunk has been applied.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        cx.emit(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: is_new,
        });
        Ok(())
    }
4429
4430 pub fn compare_checkpoints(
4431 &mut self,
4432 left: GitRepositoryCheckpoint,
4433 right: GitRepositoryCheckpoint,
4434 ) -> oneshot::Receiver<Result<bool>> {
4435 self.send_job(None, move |repo, _cx| async move {
4436 match repo {
4437 RepositoryState::Local { backend, .. } => {
4438 backend.compare_checkpoints(left, right).await
4439 }
4440 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4441 }
4442 })
4443 }
4444
4445 pub fn diff_checkpoints(
4446 &mut self,
4447 base_checkpoint: GitRepositoryCheckpoint,
4448 target_checkpoint: GitRepositoryCheckpoint,
4449 ) -> oneshot::Receiver<Result<String>> {
4450 self.send_job(None, move |repo, _cx| async move {
4451 match repo {
4452 RepositoryState::Local { backend, .. } => {
4453 backend
4454 .diff_checkpoints(base_checkpoint, target_checkpoint)
4455 .await
4456 }
4457 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4458 }
4459 })
4460 }
4461
    /// Schedules a full rescan of the repository's git state (branch, head
    /// commit, file statuses, merge info, stash).
    ///
    /// Runs under `GitJobKey::ReloadGitState`, so redundant queued scans are
    /// collapsed into one. On completion the fresh snapshot replaces
    /// `self.snapshot`, the computed events are emitted, and — when
    /// `updates_tx` is provided — the snapshot is forwarded downstream.
    /// Bails if the repository is not local.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                // The repository entity may have been dropped while this job
                // was queued; in that case there is nothing to do.
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };
                let (snapshot, events) = this
                    .update(&mut cx, |this, _| {
                        // A full scan supersedes any pending per-path refresh.
                        this.paths_needing_status_update.clear();
                        compute_snapshot(
                            this.id,
                            this.work_directory_abs_path.clone(),
                            this.snapshot.clone(),
                            backend.clone(),
                        )
                    })?
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    for event in events {
                        cx.emit(event);
                    }
                })?;
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
4506
    /// Spawns the background task that services git jobs for a local
    /// repository, returning the channel on which jobs are submitted.
    ///
    /// The task resolves the working directory's environment, opens the
    /// repository via `fs`, registers extra git hosting providers, and then
    /// loops over incoming [`GitJob`]s. Keyed jobs are deduplicated: a job is
    /// skipped when a newer job with the same key is already queued behind it.
    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        _repository_dir_abs_path: Arc<Path>,
        _common_dir_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Failing to resolve the environment is tolerated: fall back to
            // an empty environment and log the failure.
            let environment = project_environment
                .upgrade()
                .context("missing project environment")?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            // Opening the repository can touch the filesystem; do it off the
            // main thread.
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                })
                .await?;

            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed deduplication
                // below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later job with the same key will
                    // supersede it anyway.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: the repository is being torn down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
4577
    /// Spawns the background task that services git jobs for a remote
    /// (collaboration) repository, returning the job submission channel.
    ///
    /// Mirrors the job loop in `spawn_local_git_worker`, but with a
    /// [`RepositoryState::Remote`] that forwards operations over RPC instead
    /// of a local git backend.
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so keyed deduplication
                // below can see the whole backlog.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later job with the same key will
                    // supersede it anyway.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: the repository is being torn down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
4614
    /// Loads the index ("staged") text for `repo_path`, or `None` when the
    /// path has no staged content.
    ///
    /// Locally this reads from the git index; remotely the staged text is
    /// fetched over RPC via `OpenUnstagedDiff` for the given buffer.
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job receiver into a Task: an error either from the job
        // itself or from the channel being dropped is surfaced to the caller.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
4639
    /// Loads the HEAD ("committed") and index ("staged") texts for
    /// `repo_path`, collapsed into a [`DiffBasesChange`].
    ///
    /// When the index matches HEAD, both bases are delivered as a single
    /// value (`SetBoth`) so downstream consumers can share one base buffer;
    /// otherwise each base is sent separately (`SetEach`).
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    // The host performs the same index-vs-HEAD comparison and
                    // tells us which shape it chose via `mode`.
                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the job receiver into a Task for the caller.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
4685
    /// Requests a git-status refresh for `paths`, batching them with any
    /// paths already awaiting a refresh.
    ///
    /// The refresh runs under `GitJobKey::RefreshStatuses`, so bursts of file
    /// events collapse into a single backend status query. Only the entries
    /// whose status actually changed are edited into the snapshot; downstream
    /// listeners are notified via `updates_tx` when something changed.
    /// Bails if the repository is not local.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        // Accumulate paths; the queued job takes the whole set at once.
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Grab a consistent snapshot along with the pending path set,
                // clearing the latter so later events start a fresh batch.
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                if paths.is_empty() {
                    return Ok(());
                }
                let statuses = backend.status(&paths).await?;
                let stash_entries = backend.stash_entries().await?;

                // Diff the fresh statuses against the previous snapshot on a
                // background thread, producing a minimal set of tree edits.
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            // Mark the path as handled; whatever remains in
                            // `changed_paths` afterwards no longer has a status.
                            changed_paths.remove(repo_path);
                            // Unchanged entries need no edit.
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        // Paths that were requested but came back without a
                        // status are removed from the tree (if present).
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    let needs_update = !changed_path_statuses.is_empty()
                        || this.snapshot.stash_entries != stash_entries;
                    this.snapshot.stash_entries = stash_entries;
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                    }

                    // Forward the updated snapshot downstream only when
                    // something observable actually changed.
                    if needs_update && let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                    cx.emit(RepositoryEvent::Updated {
                        full_scan: false,
                        new_instance: false,
                    });
                })
            },
        );
    }
4771
4772 /// currently running git command and when it started
4773 pub fn current_job(&self) -> Option<JobInfo> {
4774 self.active_jobs.values().next().cloned()
4775 }
4776
4777 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4778 self.send_job(None, |_, _| async {})
4779 }
4780}
4781
/// Builds a permalink for a file that lives inside a crate vendored into the
/// Cargo registry source directory.
///
/// Walks up from `path` looking for the `.cargo_vcs_info.json` that Cargo
/// embeds when packaging a crate, then combines the recorded commit SHA and
/// `path_in_vcs` with the `package.repository` URL from the adjacent
/// `Cargo.toml` to build a provider-specific permalink covering `selection`
/// (a zero-based line range).
///
/// NOTE(review): performs blocking `std::fs` reads; presumably callers invoke
/// this off the main thread — confirm.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal mirror of the `.cargo_vcs_info.json` schema.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    // Minimal mirror of the parts of `Cargo.toml` we need.
    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` skips `path` itself so only parent directories are searched.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    // `dir` is an ancestor of `path` by construction, so strip_prefix cannot fail.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}
4830
4831fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4832 let Some(blame) = blame else {
4833 return proto::BlameBufferResponse {
4834 blame_response: None,
4835 };
4836 };
4837
4838 let entries = blame
4839 .entries
4840 .into_iter()
4841 .map(|entry| proto::BlameEntry {
4842 sha: entry.sha.as_bytes().into(),
4843 start_line: entry.range.start,
4844 end_line: entry.range.end,
4845 original_line_number: entry.original_line_number,
4846 author: entry.author,
4847 author_mail: entry.author_mail,
4848 author_time: entry.author_time,
4849 author_tz: entry.author_tz,
4850 committer: entry.committer_name,
4851 committer_mail: entry.committer_email,
4852 committer_time: entry.committer_time,
4853 committer_tz: entry.committer_tz,
4854 summary: entry.summary,
4855 previous: entry.previous,
4856 filename: entry.filename,
4857 })
4858 .collect::<Vec<_>>();
4859
4860 let messages = blame
4861 .messages
4862 .into_iter()
4863 .map(|(oid, message)| proto::CommitMessage {
4864 oid: oid.as_bytes().into(),
4865 message,
4866 })
4867 .collect::<Vec<_>>();
4868
4869 proto::BlameBufferResponse {
4870 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4871 entries,
4872 messages,
4873 remote_url: blame.remote_url,
4874 }),
4875 }
4876}
4877
4878fn deserialize_blame_buffer_response(
4879 response: proto::BlameBufferResponse,
4880) -> Option<git::blame::Blame> {
4881 let response = response.blame_response?;
4882 let entries = response
4883 .entries
4884 .into_iter()
4885 .filter_map(|entry| {
4886 Some(git::blame::BlameEntry {
4887 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4888 range: entry.start_line..entry.end_line,
4889 original_line_number: entry.original_line_number,
4890 committer_name: entry.committer,
4891 committer_time: entry.committer_time,
4892 committer_tz: entry.committer_tz,
4893 committer_email: entry.committer_mail,
4894 author: entry.author,
4895 author_mail: entry.author_mail,
4896 author_time: entry.author_time,
4897 author_tz: entry.author_tz,
4898 summary: entry.summary,
4899 previous: entry.previous,
4900 filename: entry.filename,
4901 })
4902 })
4903 .collect::<Vec<_>>();
4904
4905 let messages = response
4906 .messages
4907 .into_iter()
4908 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4909 .collect::<HashMap<_, _>>();
4910
4911 Some(Blame {
4912 entries,
4913 messages,
4914 remote_url: response.remote_url,
4915 })
4916}
4917
4918fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4919 proto::Branch {
4920 is_head: branch.is_head,
4921 ref_name: branch.ref_name.to_string(),
4922 unix_timestamp: branch
4923 .most_recent_commit
4924 .as_ref()
4925 .map(|commit| commit.commit_timestamp as u64),
4926 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4927 ref_name: upstream.ref_name.to_string(),
4928 tracking: upstream
4929 .tracking
4930 .status()
4931 .map(|upstream| proto::UpstreamTracking {
4932 ahead: upstream.ahead as u64,
4933 behind: upstream.behind as u64,
4934 }),
4935 }),
4936 most_recent_commit: branch
4937 .most_recent_commit
4938 .as_ref()
4939 .map(|commit| proto::CommitSummary {
4940 sha: commit.sha.to_string(),
4941 subject: commit.subject.to_string(),
4942 commit_timestamp: commit.commit_timestamp,
4943 author_name: commit.author_name.to_string(),
4944 }),
4945 }
4946}
4947
4948fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4949 git::repository::Branch {
4950 is_head: proto.is_head,
4951 ref_name: proto.ref_name.clone().into(),
4952 upstream: proto
4953 .upstream
4954 .as_ref()
4955 .map(|upstream| git::repository::Upstream {
4956 ref_name: upstream.ref_name.to_string().into(),
4957 tracking: upstream
4958 .tracking
4959 .as_ref()
4960 .map(|tracking| {
4961 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4962 ahead: tracking.ahead as u32,
4963 behind: tracking.behind as u32,
4964 })
4965 })
4966 .unwrap_or(git::repository::UpstreamTracking::Gone),
4967 }),
4968 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4969 git::repository::CommitSummary {
4970 sha: commit.sha.to_string().into(),
4971 subject: commit.subject.to_string().into(),
4972 commit_timestamp: commit.commit_timestamp,
4973 author_name: commit.author_name.to_string().into(),
4974 has_parent: true,
4975 }
4976 }),
4977 }
4978}
4979
4980fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4981 proto::GitCommitDetails {
4982 sha: commit.sha.to_string(),
4983 message: commit.message.to_string(),
4984 commit_timestamp: commit.commit_timestamp,
4985 author_email: commit.author_email.to_string(),
4986 author_name: commit.author_name.to_string(),
4987 }
4988}
4989
4990fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4991 CommitDetails {
4992 sha: proto.sha.clone().into(),
4993 message: proto.message.clone().into(),
4994 commit_timestamp: proto.commit_timestamp,
4995 author_email: proto.author_email.clone().into(),
4996 author_name: proto.author_name.clone().into(),
4997 }
4998}
4999
/// Recomputes a full [`RepositorySnapshot`] from the git backend, returning
/// it together with the events that should be emitted to reflect what
/// changed relative to `prev_snapshot`.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // Querying statuses for the work-directory root yields the status of
    // every entry in the repository.
    let statuses = backend
        .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    // Emit a full-scan update only when something observable changed.
    if merge_heads_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated {
            full_scan: true,
            new_instance: false,
        });
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    // Used by edit prediction data collection
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}
5068
/// Decodes a file status from the wire format.
///
/// Newer peers send the structured `status` variant; older peers send only
/// the flat `simple_status` code, which is decoded via the fallback branch.
/// Returns an error for status codes that are invalid in their context.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Fallback path: no structured variant, interpret the simple code.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both conflict heads with the same fallible mapping, then
            // surface the first error (if any) afterwards.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same two-step decoding for the index and worktree status codes.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
5154
5155fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5156 use proto::git_file_status::{Tracked, Unmerged, Variant};
5157
5158 let variant = match status {
5159 FileStatus::Untracked => Variant::Untracked(Default::default()),
5160 FileStatus::Ignored => Variant::Ignored(Default::default()),
5161 FileStatus::Unmerged(UnmergedStatus {
5162 first_head,
5163 second_head,
5164 }) => Variant::Unmerged(Unmerged {
5165 first_head: unmerged_status_to_proto(first_head),
5166 second_head: unmerged_status_to_proto(second_head),
5167 }),
5168 FileStatus::Tracked(TrackedStatus {
5169 index_status,
5170 worktree_status,
5171 }) => Variant::Tracked(Tracked {
5172 index_status: tracked_status_to_proto(index_status),
5173 worktree_status: tracked_status_to_proto(worktree_status),
5174 }),
5175 };
5176 proto::GitFileStatus {
5177 variant: Some(variant),
5178 }
5179}
5180
5181fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5182 match code {
5183 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5184 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5185 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5186 }
5187}
5188
5189fn tracked_status_to_proto(code: StatusCode) -> i32 {
5190 match code {
5191 StatusCode::Added => proto::GitStatus::Added as _,
5192 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5193 StatusCode::Modified => proto::GitStatus::Modified as _,
5194 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5195 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5196 StatusCode::Copied => proto::GitStatus::Copied as _,
5197 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5198 }
5199}