1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20 stream::FuturesOrdered,
21};
22use git::{
23 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
24 blame::Blame,
25 parse_git_remote_url,
26 repository::{
27 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
28 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
29 ResetMode, UpstreamTrackingStatus,
30 },
31 status::{
32 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
33 },
34};
35use gpui::{
36 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
37 WeakEntity,
38};
39use language::{
40 Buffer, BufferEvent, Language, LanguageRegistry,
41 proto::{deserialize_version, serialize_version},
42};
43use parking_lot::Mutex;
44use postage::stream::Stream as _;
45use rpc::{
46 AnyProtoClient, TypedEnvelope,
47 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
48};
49use serde::Deserialize;
50use std::{
51 cmp::Ordering,
52 collections::{BTreeSet, VecDeque},
53 future::Future,
54 mem,
55 ops::Range,
56 path::{Path, PathBuf},
57 sync::{
58 Arc,
59 atomic::{self, AtomicU64},
60 },
61 time::Instant,
62};
63use sum_tree::{Edit, SumTree, TreeSet};
64use text::{Bias, BufferId};
65use util::{ResultExt, debug_panic, post_inc};
66use worktree::{
67 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
68 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
69};
70
71pub struct GitStore {
72 state: GitStoreState,
73 buffer_store: Entity<BufferStore>,
74 worktree_store: Entity<WorktreeStore>,
75 repositories: HashMap<RepositoryId, Entity<Repository>>,
76 active_repo_id: Option<RepositoryId>,
77 #[allow(clippy::type_complexity)]
78 loading_diffs:
79 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
80 diffs: HashMap<BufferId, Entity<BufferGitState>>,
81 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
82 _subscriptions: Vec<Subscription>,
83}
84
85#[derive(Default)]
86struct SharedDiffs {
87 unstaged: Option<Entity<BufferDiff>>,
88 uncommitted: Option<Entity<BufferDiff>>,
89}
90
91struct BufferGitState {
92 unstaged_diff: Option<WeakEntity<BufferDiff>>,
93 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
94 conflict_set: Option<WeakEntity<ConflictSet>>,
95 recalculate_diff_task: Option<Task<Result<()>>>,
96 reparse_conflict_markers_task: Option<Task<Result<()>>>,
97 language: Option<Arc<Language>>,
98 language_registry: Option<Arc<LanguageRegistry>>,
99 conflict_updated_futures: Vec<oneshot::Sender<()>>,
100 recalculating_tx: postage::watch::Sender<bool>,
101
102 /// These operation counts are used to ensure that head and index text
103 /// values read from the git repository are up-to-date with any hunk staging
104 /// operations that have been performed on the BufferDiff.
105 ///
106 /// The operation count is incremented immediately when the user initiates a
107 /// hunk stage/unstage operation. Then, upon finishing writing the new index
108 /// text do disk, the `operation count as of write` is updated to reflect
109 /// the operation count that prompted the write.
110 hunk_staging_operation_count: usize,
111 hunk_staging_operation_count_as_of_write: usize,
112
113 head_text: Option<Arc<String>>,
114 index_text: Option<Arc<String>>,
115 head_changed: bool,
116 index_changed: bool,
117 language_changed: bool,
118}
119
120#[derive(Clone, Debug)]
121enum DiffBasesChange {
122 SetIndex(Option<String>),
123 SetHead(Option<String>),
124 SetEach {
125 index: Option<String>,
126 head: Option<String>,
127 },
128 SetBoth(Option<String>),
129}
130
131#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
132enum DiffKind {
133 Unstaged,
134 Uncommitted,
135}
136
137enum GitStoreState {
138 Local {
139 next_repository_id: Arc<AtomicU64>,
140 downstream: Option<LocalDownstreamState>,
141 project_environment: Entity<ProjectEnvironment>,
142 fs: Arc<dyn Fs>,
143 },
144 Ssh {
145 upstream_client: AnyProtoClient,
146 upstream_project_id: ProjectId,
147 downstream: Option<(AnyProtoClient, ProjectId)>,
148 },
149 Remote {
150 upstream_client: AnyProtoClient,
151 upstream_project_id: ProjectId,
152 },
153}
154
155enum DownstreamUpdate {
156 UpdateRepository(RepositorySnapshot),
157 RemoveRepository(RepositoryId),
158}
159
160struct LocalDownstreamState {
161 client: AnyProtoClient,
162 project_id: ProjectId,
163 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
164 _task: Task<Result<()>>,
165}
166
167#[derive(Clone, Debug)]
168pub struct GitStoreCheckpoint {
169 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
170}
171
172#[derive(Clone, Debug, PartialEq, Eq)]
173pub struct StatusEntry {
174 pub repo_path: RepoPath,
175 pub status: FileStatus,
176}
177
178impl StatusEntry {
179 fn to_proto(&self) -> proto::StatusEntry {
180 let simple_status = match self.status {
181 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
182 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
183 FileStatus::Tracked(TrackedStatus {
184 index_status,
185 worktree_status,
186 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
187 worktree_status
188 } else {
189 index_status
190 }),
191 };
192
193 proto::StatusEntry {
194 repo_path: self.repo_path.as_ref().to_proto(),
195 simple_status,
196 status: Some(status_to_proto(self.status)),
197 }
198 }
199}
200
201impl TryFrom<proto::StatusEntry> for StatusEntry {
202 type Error = anyhow::Error;
203
204 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
205 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
206 let status = status_from_proto(value.simple_status, value.status)?;
207 Ok(Self { repo_path, status })
208 }
209}
210
211impl sum_tree::Item for StatusEntry {
212 type Summary = PathSummary<GitSummary>;
213
214 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
215 PathSummary {
216 max_path: self.repo_path.0.clone(),
217 item_summary: self.status.summary(),
218 }
219 }
220}
221
222impl sum_tree::KeyedItem for StatusEntry {
223 type Key = PathKey;
224
225 fn key(&self) -> Self::Key {
226 PathKey(self.repo_path.0.clone())
227 }
228}
229
230#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
231pub struct RepositoryId(pub u64);
232
233#[derive(Clone, Debug, Default, PartialEq, Eq)]
234pub struct MergeDetails {
235 pub conflicted_paths: TreeSet<RepoPath>,
236 pub message: Option<SharedString>,
237 pub heads: Vec<Option<SharedString>>,
238}
239
240#[derive(Clone, Debug, PartialEq, Eq)]
241pub struct RepositorySnapshot {
242 pub id: RepositoryId,
243 pub statuses_by_path: SumTree<StatusEntry>,
244 pub work_directory_abs_path: Arc<Path>,
245 pub branch: Option<Branch>,
246 pub head_commit: Option<CommitDetails>,
247 pub scan_id: u64,
248 pub merge: MergeDetails,
249}
250
251type JobId = u64;
252
253#[derive(Clone, Debug, PartialEq, Eq)]
254pub struct JobInfo {
255 pub start: Instant,
256 pub message: SharedString,
257}
258
259pub struct Repository {
260 this: WeakEntity<Self>,
261 snapshot: RepositorySnapshot,
262 commit_message_buffer: Option<Entity<Buffer>>,
263 git_store: WeakEntity<GitStore>,
264 // For a local repository, holds paths that have had worktree events since the last status scan completed,
265 // and that should be examined during the next status scan.
266 paths_needing_status_update: BTreeSet<RepoPath>,
267 job_sender: mpsc::UnboundedSender<GitJob>,
268 active_jobs: HashMap<JobId, JobInfo>,
269 job_id: JobId,
270 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
271 latest_askpass_id: u64,
272}
273
274impl std::ops::Deref for Repository {
275 type Target = RepositorySnapshot;
276
277 fn deref(&self) -> &Self::Target {
278 &self.snapshot
279 }
280}
281
282#[derive(Clone)]
283pub enum RepositoryState {
284 Local {
285 backend: Arc<dyn GitRepository>,
286 environment: Arc<HashMap<String, String>>,
287 },
288 Remote {
289 project_id: ProjectId,
290 client: AnyProtoClient,
291 },
292}
293
294#[derive(Clone, Debug)]
295pub enum RepositoryEvent {
296 Updated { full_scan: bool, new_instance: bool },
297 MergeHeadsChanged,
298}
299
300#[derive(Clone, Debug)]
301pub struct JobsUpdated;
302
303#[derive(Debug)]
304pub enum GitStoreEvent {
305 ActiveRepositoryChanged(Option<RepositoryId>),
306 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
307 RepositoryAdded(RepositoryId),
308 RepositoryRemoved(RepositoryId),
309 IndexWriteError(anyhow::Error),
310 JobsUpdated,
311 ConflictsUpdated,
312}
313
314impl EventEmitter<RepositoryEvent> for Repository {}
315impl EventEmitter<JobsUpdated> for Repository {}
316impl EventEmitter<GitStoreEvent> for GitStore {}
317
318pub struct GitJob {
319 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
320 key: Option<GitJobKey>,
321}
322
323#[derive(PartialEq, Eq)]
324enum GitJobKey {
325 WriteIndex(RepoPath),
326 ReloadBufferDiffBases,
327 RefreshStatuses,
328 ReloadGitState,
329}
330
331impl GitStore {
332 pub fn local(
333 worktree_store: &Entity<WorktreeStore>,
334 buffer_store: Entity<BufferStore>,
335 environment: Entity<ProjectEnvironment>,
336 fs: Arc<dyn Fs>,
337 cx: &mut Context<Self>,
338 ) -> Self {
339 Self::new(
340 worktree_store.clone(),
341 buffer_store,
342 GitStoreState::Local {
343 next_repository_id: Arc::new(AtomicU64::new(1)),
344 downstream: None,
345 project_environment: environment,
346 fs,
347 },
348 cx,
349 )
350 }
351
352 pub fn remote(
353 worktree_store: &Entity<WorktreeStore>,
354 buffer_store: Entity<BufferStore>,
355 upstream_client: AnyProtoClient,
356 project_id: ProjectId,
357 cx: &mut Context<Self>,
358 ) -> Self {
359 Self::new(
360 worktree_store.clone(),
361 buffer_store,
362 GitStoreState::Remote {
363 upstream_client,
364 upstream_project_id: project_id,
365 },
366 cx,
367 )
368 }
369
370 pub fn ssh(
371 worktree_store: &Entity<WorktreeStore>,
372 buffer_store: Entity<BufferStore>,
373 upstream_client: AnyProtoClient,
374 cx: &mut Context<Self>,
375 ) -> Self {
376 Self::new(
377 worktree_store.clone(),
378 buffer_store,
379 GitStoreState::Ssh {
380 upstream_client,
381 upstream_project_id: ProjectId(SSH_PROJECT_ID),
382 downstream: None,
383 },
384 cx,
385 )
386 }
387
388 fn new(
389 worktree_store: Entity<WorktreeStore>,
390 buffer_store: Entity<BufferStore>,
391 state: GitStoreState,
392 cx: &mut Context<Self>,
393 ) -> Self {
394 let _subscriptions = vec![
395 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
396 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
397 ];
398
399 GitStore {
400 state,
401 buffer_store,
402 worktree_store,
403 repositories: HashMap::default(),
404 active_repo_id: None,
405 _subscriptions,
406 loading_diffs: HashMap::default(),
407 shared_diffs: HashMap::default(),
408 diffs: HashMap::default(),
409 }
410 }
411
412 pub fn init(client: &AnyProtoClient) {
413 client.add_entity_request_handler(Self::handle_get_remotes);
414 client.add_entity_request_handler(Self::handle_get_branches);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_git_init);
418 client.add_entity_request_handler(Self::handle_push);
419 client.add_entity_request_handler(Self::handle_pull);
420 client.add_entity_request_handler(Self::handle_fetch);
421 client.add_entity_request_handler(Self::handle_stage);
422 client.add_entity_request_handler(Self::handle_unstage);
423 client.add_entity_request_handler(Self::handle_commit);
424 client.add_entity_request_handler(Self::handle_reset);
425 client.add_entity_request_handler(Self::handle_show);
426 client.add_entity_request_handler(Self::handle_load_commit_diff);
427 client.add_entity_request_handler(Self::handle_checkout_files);
428 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
429 client.add_entity_request_handler(Self::handle_set_index_text);
430 client.add_entity_request_handler(Self::handle_askpass);
431 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
432 client.add_entity_request_handler(Self::handle_git_diff);
433 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
434 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
435 client.add_entity_message_handler(Self::handle_update_diff_bases);
436 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
437 client.add_entity_request_handler(Self::handle_blame_buffer);
438 client.add_entity_message_handler(Self::handle_update_repository);
439 client.add_entity_message_handler(Self::handle_remove_repository);
440 }
441
442 pub fn is_local(&self) -> bool {
443 matches!(self.state, GitStoreState::Local { .. })
444 }
445
446 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
447 match &mut self.state {
448 GitStoreState::Ssh {
449 downstream: downstream_client,
450 ..
451 } => {
452 for repo in self.repositories.values() {
453 let update = repo.read(cx).snapshot.initial_update(project_id);
454 for update in split_repository_update(update) {
455 client.send(update).log_err();
456 }
457 }
458 *downstream_client = Some((client, ProjectId(project_id)));
459 }
460 GitStoreState::Local {
461 downstream: downstream_client,
462 ..
463 } => {
464 let mut snapshots = HashMap::default();
465 let (updates_tx, mut updates_rx) = mpsc::unbounded();
466 for repo in self.repositories.values() {
467 updates_tx
468 .unbounded_send(DownstreamUpdate::UpdateRepository(
469 repo.read(cx).snapshot.clone(),
470 ))
471 .ok();
472 }
473 *downstream_client = Some(LocalDownstreamState {
474 client: client.clone(),
475 project_id: ProjectId(project_id),
476 updates_tx,
477 _task: cx.spawn(async move |this, cx| {
478 cx.background_spawn(async move {
479 while let Some(update) = updates_rx.next().await {
480 match update {
481 DownstreamUpdate::UpdateRepository(snapshot) => {
482 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
483 {
484 let update =
485 snapshot.build_update(old_snapshot, project_id);
486 *old_snapshot = snapshot;
487 for update in split_repository_update(update) {
488 client.send(update)?;
489 }
490 } else {
491 let update = snapshot.initial_update(project_id);
492 for update in split_repository_update(update) {
493 client.send(update)?;
494 }
495 snapshots.insert(snapshot.id, snapshot);
496 }
497 }
498 DownstreamUpdate::RemoveRepository(id) => {
499 client.send(proto::RemoveRepository {
500 project_id,
501 id: id.to_proto(),
502 })?;
503 }
504 }
505 }
506 anyhow::Ok(())
507 })
508 .await
509 .ok();
510 this.update(cx, |this, _| {
511 if let GitStoreState::Local {
512 downstream: downstream_client,
513 ..
514 } = &mut this.state
515 {
516 downstream_client.take();
517 } else {
518 unreachable!("unshared called on remote store");
519 }
520 })
521 }),
522 });
523 }
524 GitStoreState::Remote { .. } => {
525 debug_panic!("shared called on remote store");
526 }
527 }
528 }
529
530 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
531 match &mut self.state {
532 GitStoreState::Local {
533 downstream: downstream_client,
534 ..
535 } => {
536 downstream_client.take();
537 }
538 GitStoreState::Ssh {
539 downstream: downstream_client,
540 ..
541 } => {
542 downstream_client.take();
543 }
544 GitStoreState::Remote { .. } => {
545 debug_panic!("unshared called on remote store");
546 }
547 }
548 self.shared_diffs.clear();
549 }
550
551 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
552 self.shared_diffs.remove(peer_id);
553 }
554
555 pub fn active_repository(&self) -> Option<Entity<Repository>> {
556 self.active_repo_id
557 .as_ref()
558 .map(|id| self.repositories[&id].clone())
559 }
560
561 pub fn open_unstaged_diff(
562 &mut self,
563 buffer: Entity<Buffer>,
564 cx: &mut Context<Self>,
565 ) -> Task<Result<Entity<BufferDiff>>> {
566 let buffer_id = buffer.read(cx).remote_id();
567 if let Some(diff_state) = self.diffs.get(&buffer_id) {
568 if let Some(unstaged_diff) = diff_state
569 .read(cx)
570 .unstaged_diff
571 .as_ref()
572 .and_then(|weak| weak.upgrade())
573 {
574 if let Some(task) =
575 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
576 {
577 return cx.background_executor().spawn(async move {
578 task.await;
579 Ok(unstaged_diff)
580 });
581 }
582 return Task::ready(Ok(unstaged_diff));
583 }
584 }
585
586 let Some((repo, repo_path)) =
587 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
588 else {
589 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
590 };
591
592 let task = self
593 .loading_diffs
594 .entry((buffer_id, DiffKind::Unstaged))
595 .or_insert_with(|| {
596 let staged_text = repo.update(cx, |repo, cx| {
597 repo.load_staged_text(buffer_id, repo_path, cx)
598 });
599 cx.spawn(async move |this, cx| {
600 Self::open_diff_internal(
601 this,
602 DiffKind::Unstaged,
603 staged_text.await.map(DiffBasesChange::SetIndex),
604 buffer,
605 cx,
606 )
607 .await
608 .map_err(Arc::new)
609 })
610 .shared()
611 })
612 .clone();
613
614 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
615 }
616
617 pub fn open_uncommitted_diff(
618 &mut self,
619 buffer: Entity<Buffer>,
620 cx: &mut Context<Self>,
621 ) -> Task<Result<Entity<BufferDiff>>> {
622 let buffer_id = buffer.read(cx).remote_id();
623
624 if let Some(diff_state) = self.diffs.get(&buffer_id) {
625 if let Some(uncommitted_diff) = diff_state
626 .read(cx)
627 .uncommitted_diff
628 .as_ref()
629 .and_then(|weak| weak.upgrade())
630 {
631 if let Some(task) =
632 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
633 {
634 return cx.background_executor().spawn(async move {
635 task.await;
636 Ok(uncommitted_diff)
637 });
638 }
639 return Task::ready(Ok(uncommitted_diff));
640 }
641 }
642
643 let Some((repo, repo_path)) =
644 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
645 else {
646 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
647 };
648
649 let task = self
650 .loading_diffs
651 .entry((buffer_id, DiffKind::Uncommitted))
652 .or_insert_with(|| {
653 let changes = repo.update(cx, |repo, cx| {
654 repo.load_committed_text(buffer_id, repo_path, cx)
655 });
656
657 cx.spawn(async move |this, cx| {
658 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
659 .await
660 .map_err(Arc::new)
661 })
662 .shared()
663 })
664 .clone();
665
666 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
667 }
668
669 async fn open_diff_internal(
670 this: WeakEntity<Self>,
671 kind: DiffKind,
672 texts: Result<DiffBasesChange>,
673 buffer_entity: Entity<Buffer>,
674 cx: &mut AsyncApp,
675 ) -> Result<Entity<BufferDiff>> {
676 let diff_bases_change = match texts {
677 Err(e) => {
678 this.update(cx, |this, cx| {
679 let buffer = buffer_entity.read(cx);
680 let buffer_id = buffer.remote_id();
681 this.loading_diffs.remove(&(buffer_id, kind));
682 })?;
683 return Err(e);
684 }
685 Ok(change) => change,
686 };
687
688 this.update(cx, |this, cx| {
689 let buffer = buffer_entity.read(cx);
690 let buffer_id = buffer.remote_id();
691 let language = buffer.language().cloned();
692 let language_registry = buffer.language_registry();
693 let text_snapshot = buffer.text_snapshot();
694 this.loading_diffs.remove(&(buffer_id, kind));
695
696 let git_store = cx.weak_entity();
697 let diff_state = this
698 .diffs
699 .entry(buffer_id)
700 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
701
702 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
703
704 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
705 diff_state.update(cx, |diff_state, cx| {
706 diff_state.language = language;
707 diff_state.language_registry = language_registry;
708
709 match kind {
710 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
711 DiffKind::Uncommitted => {
712 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
713 diff
714 } else {
715 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
716 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
717 unstaged_diff
718 };
719
720 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
721 diff_state.uncommitted_diff = Some(diff.downgrade())
722 }
723 }
724
725 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
726 let rx = diff_state.wait_for_recalculation();
727
728 anyhow::Ok(async move {
729 if let Some(rx) = rx {
730 rx.await;
731 }
732 Ok(diff)
733 })
734 })
735 })??
736 .await
737 }
738
739 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
740 let diff_state = self.diffs.get(&buffer_id)?;
741 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
742 }
743
744 pub fn get_uncommitted_diff(
745 &self,
746 buffer_id: BufferId,
747 cx: &App,
748 ) -> Option<Entity<BufferDiff>> {
749 let diff_state = self.diffs.get(&buffer_id)?;
750 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
751 }
752
753 pub fn open_conflict_set(
754 &mut self,
755 buffer: Entity<Buffer>,
756 cx: &mut Context<Self>,
757 ) -> Entity<ConflictSet> {
758 log::debug!("open conflict set");
759 let buffer_id = buffer.read(cx).remote_id();
760
761 if let Some(git_state) = self.diffs.get(&buffer_id) {
762 if let Some(conflict_set) = git_state
763 .read(cx)
764 .conflict_set
765 .as_ref()
766 .and_then(|weak| weak.upgrade())
767 {
768 let conflict_set = conflict_set.clone();
769 let buffer_snapshot = buffer.read(cx).text_snapshot();
770
771 git_state.update(cx, |state, cx| {
772 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
773 });
774
775 return conflict_set;
776 }
777 }
778
779 let is_unmerged = self
780 .repository_and_path_for_buffer_id(buffer_id, cx)
781 .map_or(false, |(repo, path)| {
782 repo.read(cx).snapshot.has_conflict(&path)
783 });
784 let git_store = cx.weak_entity();
785 let buffer_git_state = self
786 .diffs
787 .entry(buffer_id)
788 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
789 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
790
791 self._subscriptions
792 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
793 cx.emit(GitStoreEvent::ConflictsUpdated);
794 }));
795
796 buffer_git_state.update(cx, |state, cx| {
797 state.conflict_set = Some(conflict_set.downgrade());
798 let buffer_snapshot = buffer.read(cx).text_snapshot();
799 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
800 });
801
802 conflict_set
803 }
804
805 pub fn project_path_git_status(
806 &self,
807 project_path: &ProjectPath,
808 cx: &App,
809 ) -> Option<FileStatus> {
810 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
811 Some(repo.read(cx).status_for_path(&repo_path)?.status)
812 }
813
814 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
815 let mut work_directory_abs_paths = Vec::new();
816 let mut checkpoints = Vec::new();
817 for repository in self.repositories.values() {
818 repository.update(cx, |repository, _| {
819 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
820 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
821 });
822 }
823
824 cx.background_executor().spawn(async move {
825 let checkpoints = future::try_join_all(checkpoints).await?;
826 Ok(GitStoreCheckpoint {
827 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
828 .into_iter()
829 .zip(checkpoints)
830 .collect(),
831 })
832 })
833 }
834
835 pub fn restore_checkpoint(
836 &self,
837 checkpoint: GitStoreCheckpoint,
838 cx: &mut App,
839 ) -> Task<Result<()>> {
840 let repositories_by_work_dir_abs_path = self
841 .repositories
842 .values()
843 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
844 .collect::<HashMap<_, _>>();
845
846 let mut tasks = Vec::new();
847 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
848 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
849 let restore = repository.update(cx, |repository, _| {
850 repository.restore_checkpoint(checkpoint)
851 });
852 tasks.push(async move { restore.await? });
853 }
854 }
855 cx.background_spawn(async move {
856 future::try_join_all(tasks).await?;
857 Ok(())
858 })
859 }
860
861 /// Compares two checkpoints, returning true if they are equal.
862 pub fn compare_checkpoints(
863 &self,
864 left: GitStoreCheckpoint,
865 mut right: GitStoreCheckpoint,
866 cx: &mut App,
867 ) -> Task<Result<bool>> {
868 let repositories_by_work_dir_abs_path = self
869 .repositories
870 .values()
871 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
872 .collect::<HashMap<_, _>>();
873
874 let mut tasks = Vec::new();
875 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
876 if let Some(right_checkpoint) = right
877 .checkpoints_by_work_dir_abs_path
878 .remove(&work_dir_abs_path)
879 {
880 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
881 {
882 let compare = repository.update(cx, |repository, _| {
883 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
884 });
885
886 tasks.push(async move { compare.await? });
887 }
888 } else {
889 return Task::ready(Ok(false));
890 }
891 }
892 cx.background_spawn(async move {
893 Ok(future::try_join_all(tasks)
894 .await?
895 .into_iter()
896 .all(|result| result))
897 })
898 }
899
900 /// Blames a buffer.
901 pub fn blame_buffer(
902 &self,
903 buffer: &Entity<Buffer>,
904 version: Option<clock::Global>,
905 cx: &mut App,
906 ) -> Task<Result<Option<Blame>>> {
907 let buffer = buffer.read(cx);
908 let Some((repo, repo_path)) =
909 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
910 else {
911 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
912 };
913 let content = match &version {
914 Some(version) => buffer.rope_for_version(version).clone(),
915 None => buffer.as_rope().clone(),
916 };
917 let version = version.unwrap_or(buffer.version());
918 let buffer_id = buffer.remote_id();
919
920 let rx = repo.update(cx, |repo, _| {
921 repo.send_job(None, move |state, _| async move {
922 match state {
923 RepositoryState::Local { backend, .. } => backend
924 .blame(repo_path.clone(), content)
925 .await
926 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
927 .map(Some),
928 RepositoryState::Remote { project_id, client } => {
929 let response = client
930 .request(proto::BlameBuffer {
931 project_id: project_id.to_proto(),
932 buffer_id: buffer_id.into(),
933 version: serialize_version(&version),
934 })
935 .await?;
936 Ok(deserialize_blame_buffer_response(response))
937 }
938 }
939 })
940 });
941
942 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
943 }
944
945 pub fn get_permalink_to_line(
946 &self,
947 buffer: &Entity<Buffer>,
948 selection: Range<u32>,
949 cx: &mut App,
950 ) -> Task<Result<url::Url>> {
951 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
952 return Task::ready(Err(anyhow!("buffer has no file")));
953 };
954
955 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
956 &(file.worktree.read(cx).id(), file.path.clone()).into(),
957 cx,
958 ) else {
959 // If we're not in a Git repo, check whether this is a Rust source
960 // file in the Cargo registry (presumably opened with go-to-definition
961 // from a normal Rust file). If so, we can put together a permalink
962 // using crate metadata.
963 if buffer
964 .read(cx)
965 .language()
966 .is_none_or(|lang| lang.name() != "Rust".into())
967 {
968 return Task::ready(Err(anyhow!("no permalink available")));
969 }
970 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
971 return Task::ready(Err(anyhow!("no permalink available")));
972 };
973 return cx.spawn(async move |cx| {
974 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
975 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
976 .context("no permalink available")
977 });
978
979 // TODO remote case
980 };
981
982 let buffer_id = buffer.read(cx).remote_id();
983 let branch = repo.read(cx).branch.clone();
984 let remote = branch
985 .as_ref()
986 .and_then(|b| b.upstream.as_ref())
987 .and_then(|b| b.remote_name())
988 .unwrap_or("origin")
989 .to_string();
990
991 let rx = repo.update(cx, |repo, _| {
992 repo.send_job(None, move |state, cx| async move {
993 match state {
994 RepositoryState::Local { backend, .. } => {
995 let origin_url = backend
996 .remote_url(&remote)
997 .with_context(|| format!("remote \"{remote}\" not found"))?;
998
999 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1000
1001 let provider_registry =
1002 cx.update(GitHostingProviderRegistry::default_global)?;
1003
1004 let (provider, remote) =
1005 parse_git_remote_url(provider_registry, &origin_url)
1006 .context("parsing Git remote URL")?;
1007
1008 let path = repo_path.to_str().with_context(|| {
1009 format!("converting repo path {repo_path:?} to string")
1010 })?;
1011
1012 Ok(provider.build_permalink(
1013 remote,
1014 BuildPermalinkParams {
1015 sha: &sha,
1016 path,
1017 selection: Some(selection),
1018 },
1019 ))
1020 }
1021 RepositoryState::Remote { project_id, client } => {
1022 let response = client
1023 .request(proto::GetPermalinkToLine {
1024 project_id: project_id.to_proto(),
1025 buffer_id: buffer_id.into(),
1026 selection: Some(proto::Range {
1027 start: selection.start as u64,
1028 end: selection.end as u64,
1029 }),
1030 })
1031 .await?;
1032
1033 url::Url::parse(&response.permalink).context("failed to parse permalink")
1034 }
1035 }
1036 })
1037 });
1038 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1039 }
1040
1041 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1042 match &self.state {
1043 GitStoreState::Local {
1044 downstream: downstream_client,
1045 ..
1046 } => downstream_client
1047 .as_ref()
1048 .map(|state| (state.client.clone(), state.project_id)),
1049 GitStoreState::Ssh {
1050 downstream: downstream_client,
1051 ..
1052 } => downstream_client.clone(),
1053 GitStoreState::Remote { .. } => None,
1054 }
1055 }
1056
1057 fn upstream_client(&self) -> Option<AnyProtoClient> {
1058 match &self.state {
1059 GitStoreState::Local { .. } => None,
1060 GitStoreState::Ssh {
1061 upstream_client, ..
1062 }
1063 | GitStoreState::Remote {
1064 upstream_client, ..
1065 } => Some(upstream_client.clone()),
1066 }
1067 }
1068
1069 fn on_worktree_store_event(
1070 &mut self,
1071 worktree_store: Entity<WorktreeStore>,
1072 event: &WorktreeStoreEvent,
1073 cx: &mut Context<Self>,
1074 ) {
1075 let GitStoreState::Local {
1076 project_environment,
1077 downstream,
1078 next_repository_id,
1079 fs,
1080 } = &self.state
1081 else {
1082 return;
1083 };
1084
1085 match event {
1086 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1087 if let Some(worktree) = self
1088 .worktree_store
1089 .read(cx)
1090 .worktree_for_id(*worktree_id, cx)
1091 {
1092 let paths_by_git_repo =
1093 self.process_updated_entries(&worktree, updated_entries, cx);
1094 let downstream = downstream
1095 .as_ref()
1096 .map(|downstream| downstream.updates_tx.clone());
1097 cx.spawn(async move |_, cx| {
1098 let paths_by_git_repo = paths_by_git_repo.await;
1099 for (repo, paths) in paths_by_git_repo {
1100 repo.update(cx, |repo, cx| {
1101 repo.paths_changed(paths, downstream.clone(), cx);
1102 })
1103 .ok();
1104 }
1105 })
1106 .detach();
1107 }
1108 }
1109 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1110 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1111 else {
1112 return;
1113 };
1114 if !worktree.read(cx).is_visible() {
1115 log::debug!(
1116 "not adding repositories for local worktree {:?} because it's not visible",
1117 worktree.read(cx).abs_path()
1118 );
1119 return;
1120 }
1121 self.update_repositories_from_worktree(
1122 project_environment.clone(),
1123 next_repository_id.clone(),
1124 downstream
1125 .as_ref()
1126 .map(|downstream| downstream.updates_tx.clone()),
1127 changed_repos.clone(),
1128 fs.clone(),
1129 cx,
1130 );
1131 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1132 }
1133 _ => {}
1134 }
1135 }
1136
1137 fn on_repository_event(
1138 &mut self,
1139 repo: Entity<Repository>,
1140 event: &RepositoryEvent,
1141 cx: &mut Context<Self>,
1142 ) {
1143 let id = repo.read(cx).id;
1144 let repo_snapshot = repo.read(cx).snapshot.clone();
1145 for (buffer_id, diff) in self.diffs.iter() {
1146 if let Some((buffer_repo, repo_path)) =
1147 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1148 {
1149 if buffer_repo == repo {
1150 diff.update(cx, |diff, cx| {
1151 if let Some(conflict_set) = &diff.conflict_set {
1152 let conflict_status_changed =
1153 conflict_set.update(cx, |conflict_set, cx| {
1154 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1155 conflict_set.set_has_conflict(has_conflict, cx)
1156 })?;
1157 if conflict_status_changed {
1158 let buffer_store = self.buffer_store.read(cx);
1159 if let Some(buffer) = buffer_store.get(*buffer_id) {
1160 let _ = diff.reparse_conflict_markers(
1161 buffer.read(cx).text_snapshot(),
1162 cx,
1163 );
1164 }
1165 }
1166 }
1167 anyhow::Ok(())
1168 })
1169 .ok();
1170 }
1171 }
1172 }
1173 cx.emit(GitStoreEvent::RepositoryUpdated(
1174 id,
1175 event.clone(),
1176 self.active_repo_id == Some(id),
1177 ))
1178 }
1179
1180 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1181 cx.emit(GitStoreEvent::JobsUpdated)
1182 }
1183
1184 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1185 fn update_repositories_from_worktree(
1186 &mut self,
1187 project_environment: Entity<ProjectEnvironment>,
1188 next_repository_id: Arc<AtomicU64>,
1189 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1190 updated_git_repositories: UpdatedGitRepositoriesSet,
1191 fs: Arc<dyn Fs>,
1192 cx: &mut Context<Self>,
1193 ) {
1194 let mut removed_ids = Vec::new();
1195 for update in updated_git_repositories.iter() {
1196 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1197 let existing_work_directory_abs_path =
1198 repo.read(cx).work_directory_abs_path.clone();
1199 Some(&existing_work_directory_abs_path)
1200 == update.old_work_directory_abs_path.as_ref()
1201 || Some(&existing_work_directory_abs_path)
1202 == update.new_work_directory_abs_path.as_ref()
1203 }) {
1204 if let Some(new_work_directory_abs_path) =
1205 update.new_work_directory_abs_path.clone()
1206 {
1207 existing.update(cx, |existing, cx| {
1208 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1209 existing.schedule_scan(updates_tx.clone(), cx);
1210 });
1211 } else {
1212 removed_ids.push(*id);
1213 }
1214 } else if let UpdatedGitRepository {
1215 new_work_directory_abs_path: Some(work_directory_abs_path),
1216 dot_git_abs_path: Some(dot_git_abs_path),
1217 repository_dir_abs_path: Some(repository_dir_abs_path),
1218 common_dir_abs_path: Some(common_dir_abs_path),
1219 ..
1220 } = update
1221 {
1222 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1223 let git_store = cx.weak_entity();
1224 let repo = cx.new(|cx| {
1225 let mut repo = Repository::local(
1226 id,
1227 work_directory_abs_path.clone(),
1228 dot_git_abs_path.clone(),
1229 repository_dir_abs_path.clone(),
1230 common_dir_abs_path.clone(),
1231 project_environment.downgrade(),
1232 fs.clone(),
1233 git_store,
1234 cx,
1235 );
1236 repo.schedule_scan(updates_tx.clone(), cx);
1237 repo
1238 });
1239 self._subscriptions
1240 .push(cx.subscribe(&repo, Self::on_repository_event));
1241 self._subscriptions
1242 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1243 self.repositories.insert(id, repo);
1244 cx.emit(GitStoreEvent::RepositoryAdded(id));
1245 self.active_repo_id.get_or_insert_with(|| {
1246 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1247 id
1248 });
1249 }
1250 }
1251
1252 for id in removed_ids {
1253 if self.active_repo_id == Some(id) {
1254 self.active_repo_id = None;
1255 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1256 }
1257 self.repositories.remove(&id);
1258 if let Some(updates_tx) = updates_tx.as_ref() {
1259 updates_tx
1260 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1261 .ok();
1262 }
1263 }
1264 }
1265
1266 fn on_buffer_store_event(
1267 &mut self,
1268 _: Entity<BufferStore>,
1269 event: &BufferStoreEvent,
1270 cx: &mut Context<Self>,
1271 ) {
1272 match event {
1273 BufferStoreEvent::BufferAdded(buffer) => {
1274 cx.subscribe(&buffer, |this, buffer, event, cx| {
1275 if let BufferEvent::LanguageChanged = event {
1276 let buffer_id = buffer.read(cx).remote_id();
1277 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1278 diff_state.update(cx, |diff_state, cx| {
1279 diff_state.buffer_language_changed(buffer, cx);
1280 });
1281 }
1282 }
1283 })
1284 .detach();
1285 }
1286 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1287 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1288 diffs.remove(buffer_id);
1289 }
1290 }
1291 BufferStoreEvent::BufferDropped(buffer_id) => {
1292 self.diffs.remove(&buffer_id);
1293 for diffs in self.shared_diffs.values_mut() {
1294 diffs.remove(buffer_id);
1295 }
1296 }
1297
1298 _ => {}
1299 }
1300 }
1301
1302 pub fn recalculate_buffer_diffs(
1303 &mut self,
1304 buffers: Vec<Entity<Buffer>>,
1305 cx: &mut Context<Self>,
1306 ) -> impl Future<Output = ()> + use<> {
1307 let mut futures = Vec::new();
1308 for buffer in buffers {
1309 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1310 let buffer = buffer.read(cx).text_snapshot();
1311 diff_state.update(cx, |diff_state, cx| {
1312 diff_state.recalculate_diffs(buffer.clone(), cx);
1313 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1314 });
1315 futures.push(diff_state.update(cx, |diff_state, cx| {
1316 diff_state
1317 .reparse_conflict_markers(buffer, cx)
1318 .map(|_| {})
1319 .boxed()
1320 }));
1321 }
1322 }
1323 async move {
1324 futures::future::join_all(futures).await;
1325 }
1326 }
1327
1328 fn on_buffer_diff_event(
1329 &mut self,
1330 diff: Entity<buffer_diff::BufferDiff>,
1331 event: &BufferDiffEvent,
1332 cx: &mut Context<Self>,
1333 ) {
1334 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1335 let buffer_id = diff.read(cx).buffer_id;
1336 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1337 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1338 diff_state.hunk_staging_operation_count += 1;
1339 diff_state.hunk_staging_operation_count
1340 });
1341 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1342 let recv = repo.update(cx, |repo, cx| {
1343 log::debug!("hunks changed for {}", path.display());
1344 repo.spawn_set_index_text_job(
1345 path,
1346 new_index_text.as_ref().map(|rope| rope.to_string()),
1347 Some(hunk_staging_operation_count),
1348 cx,
1349 )
1350 });
1351 let diff = diff.downgrade();
1352 cx.spawn(async move |this, cx| {
1353 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1354 diff.update(cx, |diff, cx| {
1355 diff.clear_pending_hunks(cx);
1356 })
1357 .ok();
1358 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1359 .ok();
1360 }
1361 })
1362 .detach();
1363 }
1364 }
1365 }
1366 }
1367
1368 fn local_worktree_git_repos_changed(
1369 &mut self,
1370 worktree: Entity<Worktree>,
1371 changed_repos: &UpdatedGitRepositoriesSet,
1372 cx: &mut Context<Self>,
1373 ) {
1374 log::debug!("local worktree repos changed");
1375 debug_assert!(worktree.read(cx).is_local());
1376
1377 for repository in self.repositories.values() {
1378 repository.update(cx, |repository, cx| {
1379 let repo_abs_path = &repository.work_directory_abs_path;
1380 if changed_repos.iter().any(|update| {
1381 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1382 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1383 }) {
1384 repository.reload_buffer_diff_bases(cx);
1385 }
1386 });
1387 }
1388 }
1389
1390 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1391 &self.repositories
1392 }
1393
1394 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1395 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1396 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1397 Some(status.status)
1398 }
1399
1400 pub fn repository_and_path_for_buffer_id(
1401 &self,
1402 buffer_id: BufferId,
1403 cx: &App,
1404 ) -> Option<(Entity<Repository>, RepoPath)> {
1405 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1406 let project_path = buffer.read(cx).project_path(cx)?;
1407 self.repository_and_path_for_project_path(&project_path, cx)
1408 }
1409
1410 pub fn repository_and_path_for_project_path(
1411 &self,
1412 path: &ProjectPath,
1413 cx: &App,
1414 ) -> Option<(Entity<Repository>, RepoPath)> {
1415 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1416 self.repositories
1417 .values()
1418 .filter_map(|repo| {
1419 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1420 Some((repo.clone(), repo_path))
1421 })
1422 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1423 }
1424
1425 pub fn git_init(
1426 &self,
1427 path: Arc<Path>,
1428 fallback_branch_name: String,
1429 cx: &App,
1430 ) -> Task<Result<()>> {
1431 match &self.state {
1432 GitStoreState::Local { fs, .. } => {
1433 let fs = fs.clone();
1434 cx.background_executor()
1435 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1436 }
1437 GitStoreState::Ssh {
1438 upstream_client,
1439 upstream_project_id: project_id,
1440 ..
1441 }
1442 | GitStoreState::Remote {
1443 upstream_client,
1444 upstream_project_id: project_id,
1445 ..
1446 } => {
1447 let client = upstream_client.clone();
1448 let project_id = *project_id;
1449 cx.background_executor().spawn(async move {
1450 client
1451 .request(proto::GitInit {
1452 project_id: project_id.0,
1453 abs_path: path.to_string_lossy().to_string(),
1454 fallback_branch_name,
1455 })
1456 .await?;
1457 Ok(())
1458 })
1459 }
1460 }
1461 }
1462
1463 async fn handle_update_repository(
1464 this: Entity<Self>,
1465 envelope: TypedEnvelope<proto::UpdateRepository>,
1466 mut cx: AsyncApp,
1467 ) -> Result<()> {
1468 this.update(&mut cx, |this, cx| {
1469 let mut update = envelope.payload;
1470
1471 let id = RepositoryId::from_proto(update.id);
1472 let client = this
1473 .upstream_client()
1474 .context("no upstream client")?
1475 .clone();
1476
1477 let mut is_new = false;
1478 let repo = this.repositories.entry(id).or_insert_with(|| {
1479 is_new = true;
1480 let git_store = cx.weak_entity();
1481 cx.new(|cx| {
1482 Repository::remote(
1483 id,
1484 Path::new(&update.abs_path).into(),
1485 ProjectId(update.project_id),
1486 client,
1487 git_store,
1488 cx,
1489 )
1490 })
1491 });
1492 if is_new {
1493 this._subscriptions
1494 .push(cx.subscribe(&repo, Self::on_repository_event))
1495 }
1496
1497 repo.update(cx, {
1498 let update = update.clone();
1499 |repo, cx| repo.apply_remote_update(update, is_new, cx)
1500 })?;
1501
1502 this.active_repo_id.get_or_insert_with(|| {
1503 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1504 id
1505 });
1506
1507 if let Some((client, project_id)) = this.downstream_client() {
1508 update.project_id = project_id.to_proto();
1509 client.send(update).log_err();
1510 }
1511 Ok(())
1512 })?
1513 }
1514
1515 async fn handle_remove_repository(
1516 this: Entity<Self>,
1517 envelope: TypedEnvelope<proto::RemoveRepository>,
1518 mut cx: AsyncApp,
1519 ) -> Result<()> {
1520 this.update(&mut cx, |this, cx| {
1521 let mut update = envelope.payload;
1522 let id = RepositoryId::from_proto(update.id);
1523 this.repositories.remove(&id);
1524 if let Some((client, project_id)) = this.downstream_client() {
1525 update.project_id = project_id.to_proto();
1526 client.send(update).log_err();
1527 }
1528 if this.active_repo_id == Some(id) {
1529 this.active_repo_id = None;
1530 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1531 }
1532 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1533 })
1534 }
1535
1536 async fn handle_git_init(
1537 this: Entity<Self>,
1538 envelope: TypedEnvelope<proto::GitInit>,
1539 cx: AsyncApp,
1540 ) -> Result<proto::Ack> {
1541 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1542 let name = envelope.payload.fallback_branch_name;
1543 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1544 .await?;
1545
1546 Ok(proto::Ack {})
1547 }
1548
1549 async fn handle_fetch(
1550 this: Entity<Self>,
1551 envelope: TypedEnvelope<proto::Fetch>,
1552 mut cx: AsyncApp,
1553 ) -> Result<proto::RemoteMessageResponse> {
1554 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1555 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1556 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1557 let askpass_id = envelope.payload.askpass_id;
1558
1559 let askpass = make_remote_delegate(
1560 this,
1561 envelope.payload.project_id,
1562 repository_id,
1563 askpass_id,
1564 &mut cx,
1565 );
1566
1567 let remote_output = repository_handle
1568 .update(&mut cx, |repository_handle, cx| {
1569 repository_handle.fetch(fetch_options, askpass, cx)
1570 })?
1571 .await??;
1572
1573 Ok(proto::RemoteMessageResponse {
1574 stdout: remote_output.stdout,
1575 stderr: remote_output.stderr,
1576 })
1577 }
1578
1579 async fn handle_push(
1580 this: Entity<Self>,
1581 envelope: TypedEnvelope<proto::Push>,
1582 mut cx: AsyncApp,
1583 ) -> Result<proto::RemoteMessageResponse> {
1584 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1585 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1586
1587 let askpass_id = envelope.payload.askpass_id;
1588 let askpass = make_remote_delegate(
1589 this,
1590 envelope.payload.project_id,
1591 repository_id,
1592 askpass_id,
1593 &mut cx,
1594 );
1595
1596 let options = envelope
1597 .payload
1598 .options
1599 .as_ref()
1600 .map(|_| match envelope.payload.options() {
1601 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1602 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1603 });
1604
1605 let branch_name = envelope.payload.branch_name.into();
1606 let remote_name = envelope.payload.remote_name.into();
1607
1608 let remote_output = repository_handle
1609 .update(&mut cx, |repository_handle, cx| {
1610 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1611 })?
1612 .await??;
1613 Ok(proto::RemoteMessageResponse {
1614 stdout: remote_output.stdout,
1615 stderr: remote_output.stderr,
1616 })
1617 }
1618
1619 async fn handle_pull(
1620 this: Entity<Self>,
1621 envelope: TypedEnvelope<proto::Pull>,
1622 mut cx: AsyncApp,
1623 ) -> Result<proto::RemoteMessageResponse> {
1624 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1625 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1626 let askpass_id = envelope.payload.askpass_id;
1627 let askpass = make_remote_delegate(
1628 this,
1629 envelope.payload.project_id,
1630 repository_id,
1631 askpass_id,
1632 &mut cx,
1633 );
1634
1635 let branch_name = envelope.payload.branch_name.into();
1636 let remote_name = envelope.payload.remote_name.into();
1637
1638 let remote_message = repository_handle
1639 .update(&mut cx, |repository_handle, cx| {
1640 repository_handle.pull(branch_name, remote_name, askpass, cx)
1641 })?
1642 .await??;
1643
1644 Ok(proto::RemoteMessageResponse {
1645 stdout: remote_message.stdout,
1646 stderr: remote_message.stderr,
1647 })
1648 }
1649
1650 async fn handle_stage(
1651 this: Entity<Self>,
1652 envelope: TypedEnvelope<proto::Stage>,
1653 mut cx: AsyncApp,
1654 ) -> Result<proto::Ack> {
1655 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1656 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1657
1658 let entries = envelope
1659 .payload
1660 .paths
1661 .into_iter()
1662 .map(PathBuf::from)
1663 .map(RepoPath::new)
1664 .collect();
1665
1666 repository_handle
1667 .update(&mut cx, |repository_handle, cx| {
1668 repository_handle.stage_entries(entries, cx)
1669 })?
1670 .await?;
1671 Ok(proto::Ack {})
1672 }
1673
1674 async fn handle_unstage(
1675 this: Entity<Self>,
1676 envelope: TypedEnvelope<proto::Unstage>,
1677 mut cx: AsyncApp,
1678 ) -> Result<proto::Ack> {
1679 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1680 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1681
1682 let entries = envelope
1683 .payload
1684 .paths
1685 .into_iter()
1686 .map(PathBuf::from)
1687 .map(RepoPath::new)
1688 .collect();
1689
1690 repository_handle
1691 .update(&mut cx, |repository_handle, cx| {
1692 repository_handle.unstage_entries(entries, cx)
1693 })?
1694 .await?;
1695
1696 Ok(proto::Ack {})
1697 }
1698
1699 async fn handle_set_index_text(
1700 this: Entity<Self>,
1701 envelope: TypedEnvelope<proto::SetIndexText>,
1702 mut cx: AsyncApp,
1703 ) -> Result<proto::Ack> {
1704 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1705 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1706 let repo_path = RepoPath::from_str(&envelope.payload.path);
1707
1708 repository_handle
1709 .update(&mut cx, |repository_handle, cx| {
1710 repository_handle.spawn_set_index_text_job(
1711 repo_path,
1712 envelope.payload.text,
1713 None,
1714 cx,
1715 )
1716 })?
1717 .await??;
1718 Ok(proto::Ack {})
1719 }
1720
1721 async fn handle_commit(
1722 this: Entity<Self>,
1723 envelope: TypedEnvelope<proto::Commit>,
1724 mut cx: AsyncApp,
1725 ) -> Result<proto::Ack> {
1726 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1727 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1728
1729 let message = SharedString::from(envelope.payload.message);
1730 let name = envelope.payload.name.map(SharedString::from);
1731 let email = envelope.payload.email.map(SharedString::from);
1732 let options = envelope.payload.options.unwrap_or_default();
1733
1734 repository_handle
1735 .update(&mut cx, |repository_handle, cx| {
1736 repository_handle.commit(
1737 message,
1738 name.zip(email),
1739 CommitOptions {
1740 amend: options.amend,
1741 signoff: options.signoff,
1742 },
1743 cx,
1744 )
1745 })?
1746 .await??;
1747 Ok(proto::Ack {})
1748 }
1749
1750 async fn handle_get_remotes(
1751 this: Entity<Self>,
1752 envelope: TypedEnvelope<proto::GetRemotes>,
1753 mut cx: AsyncApp,
1754 ) -> Result<proto::GetRemotesResponse> {
1755 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1756 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1757
1758 let branch_name = envelope.payload.branch_name;
1759
1760 let remotes = repository_handle
1761 .update(&mut cx, |repository_handle, _| {
1762 repository_handle.get_remotes(branch_name)
1763 })?
1764 .await??;
1765
1766 Ok(proto::GetRemotesResponse {
1767 remotes: remotes
1768 .into_iter()
1769 .map(|remotes| proto::get_remotes_response::Remote {
1770 name: remotes.name.to_string(),
1771 })
1772 .collect::<Vec<_>>(),
1773 })
1774 }
1775
1776 async fn handle_get_branches(
1777 this: Entity<Self>,
1778 envelope: TypedEnvelope<proto::GitGetBranches>,
1779 mut cx: AsyncApp,
1780 ) -> Result<proto::GitBranchesResponse> {
1781 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1782 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1783
1784 let branches = repository_handle
1785 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1786 .await??;
1787
1788 Ok(proto::GitBranchesResponse {
1789 branches: branches
1790 .into_iter()
1791 .map(|branch| branch_to_proto(&branch))
1792 .collect::<Vec<_>>(),
1793 })
1794 }
1795 async fn handle_create_branch(
1796 this: Entity<Self>,
1797 envelope: TypedEnvelope<proto::GitCreateBranch>,
1798 mut cx: AsyncApp,
1799 ) -> Result<proto::Ack> {
1800 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1801 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1802 let branch_name = envelope.payload.branch_name;
1803
1804 repository_handle
1805 .update(&mut cx, |repository_handle, _| {
1806 repository_handle.create_branch(branch_name)
1807 })?
1808 .await??;
1809
1810 Ok(proto::Ack {})
1811 }
1812
1813 async fn handle_change_branch(
1814 this: Entity<Self>,
1815 envelope: TypedEnvelope<proto::GitChangeBranch>,
1816 mut cx: AsyncApp,
1817 ) -> Result<proto::Ack> {
1818 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1819 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1820 let branch_name = envelope.payload.branch_name;
1821
1822 repository_handle
1823 .update(&mut cx, |repository_handle, _| {
1824 repository_handle.change_branch(branch_name)
1825 })?
1826 .await??;
1827
1828 Ok(proto::Ack {})
1829 }
1830
1831 async fn handle_show(
1832 this: Entity<Self>,
1833 envelope: TypedEnvelope<proto::GitShow>,
1834 mut cx: AsyncApp,
1835 ) -> Result<proto::GitCommitDetails> {
1836 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1837 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1838
1839 let commit = repository_handle
1840 .update(&mut cx, |repository_handle, _| {
1841 repository_handle.show(envelope.payload.commit)
1842 })?
1843 .await??;
1844 Ok(proto::GitCommitDetails {
1845 sha: commit.sha.into(),
1846 message: commit.message.into(),
1847 commit_timestamp: commit.commit_timestamp,
1848 author_email: commit.author_email.into(),
1849 author_name: commit.author_name.into(),
1850 })
1851 }
1852
1853 async fn handle_load_commit_diff(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1856 mut cx: AsyncApp,
1857 ) -> Result<proto::LoadCommitDiffResponse> {
1858 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1859 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1860
1861 let commit_diff = repository_handle
1862 .update(&mut cx, |repository_handle, _| {
1863 repository_handle.load_commit_diff(envelope.payload.commit)
1864 })?
1865 .await??;
1866 Ok(proto::LoadCommitDiffResponse {
1867 files: commit_diff
1868 .files
1869 .into_iter()
1870 .map(|file| proto::CommitFile {
1871 path: file.path.to_string(),
1872 old_text: file.old_text,
1873 new_text: file.new_text,
1874 })
1875 .collect(),
1876 })
1877 }
1878
1879 async fn handle_reset(
1880 this: Entity<Self>,
1881 envelope: TypedEnvelope<proto::GitReset>,
1882 mut cx: AsyncApp,
1883 ) -> Result<proto::Ack> {
1884 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1885 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1886
1887 let mode = match envelope.payload.mode() {
1888 git_reset::ResetMode::Soft => ResetMode::Soft,
1889 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1890 };
1891
1892 repository_handle
1893 .update(&mut cx, |repository_handle, cx| {
1894 repository_handle.reset(envelope.payload.commit, mode, cx)
1895 })?
1896 .await??;
1897 Ok(proto::Ack {})
1898 }
1899
1900 async fn handle_checkout_files(
1901 this: Entity<Self>,
1902 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1903 mut cx: AsyncApp,
1904 ) -> Result<proto::Ack> {
1905 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1906 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1907 let paths = envelope
1908 .payload
1909 .paths
1910 .iter()
1911 .map(|s| RepoPath::from_str(s))
1912 .collect();
1913
1914 repository_handle
1915 .update(&mut cx, |repository_handle, cx| {
1916 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1917 })?
1918 .await??;
1919 Ok(proto::Ack {})
1920 }
1921
1922 async fn handle_open_commit_message_buffer(
1923 this: Entity<Self>,
1924 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1925 mut cx: AsyncApp,
1926 ) -> Result<proto::OpenBufferResponse> {
1927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1928 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1929 let buffer = repository
1930 .update(&mut cx, |repository, cx| {
1931 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1932 })?
1933 .await?;
1934
1935 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1936 this.update(&mut cx, |this, cx| {
1937 this.buffer_store.update(cx, |buffer_store, cx| {
1938 buffer_store
1939 .create_buffer_for_peer(
1940 &buffer,
1941 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1942 cx,
1943 )
1944 .detach_and_log_err(cx);
1945 })
1946 })?;
1947
1948 Ok(proto::OpenBufferResponse {
1949 buffer_id: buffer_id.to_proto(),
1950 })
1951 }
1952
1953 async fn handle_askpass(
1954 this: Entity<Self>,
1955 envelope: TypedEnvelope<proto::AskPassRequest>,
1956 mut cx: AsyncApp,
1957 ) -> Result<proto::AskPassResponse> {
1958 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1959 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1960
1961 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1962 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1963 debug_panic!("no askpass found");
1964 anyhow::bail!("no askpass found");
1965 };
1966
1967 let response = askpass.ask_password(envelope.payload.prompt).await?;
1968
1969 delegates
1970 .lock()
1971 .insert(envelope.payload.askpass_id, askpass);
1972
1973 Ok(proto::AskPassResponse { response })
1974 }
1975
1976 async fn handle_check_for_pushed_commits(
1977 this: Entity<Self>,
1978 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1979 mut cx: AsyncApp,
1980 ) -> Result<proto::CheckForPushedCommitsResponse> {
1981 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1982 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1983
1984 let branches = repository_handle
1985 .update(&mut cx, |repository_handle, _| {
1986 repository_handle.check_for_pushed_commits()
1987 })?
1988 .await??;
1989 Ok(proto::CheckForPushedCommitsResponse {
1990 pushed_to: branches
1991 .into_iter()
1992 .map(|commit| commit.to_string())
1993 .collect(),
1994 })
1995 }
1996
1997 async fn handle_git_diff(
1998 this: Entity<Self>,
1999 envelope: TypedEnvelope<proto::GitDiff>,
2000 mut cx: AsyncApp,
2001 ) -> Result<proto::GitDiffResponse> {
2002 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2003 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2004 let diff_type = match envelope.payload.diff_type() {
2005 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2006 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2007 };
2008
2009 let mut diff = repository_handle
2010 .update(&mut cx, |repository_handle, cx| {
2011 repository_handle.diff(diff_type, cx)
2012 })?
2013 .await??;
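// Cap the diff sent over RPC at roughly one megabyte; truncating by characters
// rather than bytes keeps the resulting string valid UTF-8.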
2014 const ONE_MB: usize = 1_000_000;
2015 if diff.len() > ONE_MB {
2016 diff = diff.chars().take(ONE_MB).collect()
2017 }
2018
2019 Ok(proto::GitDiffResponse { diff })
2020 }
2021
2022 async fn handle_open_unstaged_diff(
2023 this: Entity<Self>,
2024 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::OpenUnstagedDiffResponse> {
2027 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2028 let diff = this
2029 .update(&mut cx, |this, cx| {
2030 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2031 Some(this.open_unstaged_diff(buffer, cx))
2032 })?
2033 .context("missing buffer")?
2034 .await?;
2035 this.update(&mut cx, |this, _| {
2036 let shared_diffs = this
2037 .shared_diffs
2038 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2039 .or_default();
2040 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2041 })?;
2042 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2043 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2044 }
2045
2046 async fn handle_open_uncommitted_diff(
2047 this: Entity<Self>,
2048 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2049 mut cx: AsyncApp,
2050 ) -> Result<proto::OpenUncommittedDiffResponse> {
2051 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2052 let diff = this
2053 .update(&mut cx, |this, cx| {
2054 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2055 Some(this.open_uncommitted_diff(buffer, cx))
2056 })?
2057 .context("missing buffer")?
2058 .await?;
2059 this.update(&mut cx, |this, _| {
2060 let shared_diffs = this
2061 .shared_diffs
2062 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2063 .or_default();
2064 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2065 })?;
2066 diff.read_with(&cx, |diff, cx| {
2067 use proto::open_uncommitted_diff_response::Mode;
2068
2069 let unstaged_diff = diff.secondary_diff();
2070 let index_snapshot = unstaged_diff.and_then(|diff| {
2071 let diff = diff.read(cx);
2072 diff.base_text_exists().then(|| diff.base_text())
2073 });
2074
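// Decide which base texts to send back: when the index snapshot is the same
// underlying buffer as HEAD's, report `IndexMatchesHead` and omit the staged
// text; otherwise send whichever base texts exist.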
2075 let mode;
2076 let staged_text;
2077 let committed_text;
2078 if diff.base_text_exists() {
2079 let committed_snapshot = diff.base_text();
2080 committed_text = Some(committed_snapshot.text());
2081 if let Some(index_text) = index_snapshot {
2082 if index_text.remote_id() == committed_snapshot.remote_id() {
2083 mode = Mode::IndexMatchesHead;
2084 staged_text = None;
2085 } else {
2086 mode = Mode::IndexAndHead;
2087 staged_text = Some(index_text.text());
2088 }
2089 } else {
2090 mode = Mode::IndexAndHead;
2091 staged_text = None;
2092 }
2093 } else {
2094 mode = Mode::IndexAndHead;
2095 committed_text = None;
2096 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2097 }
2098
2099 proto::OpenUncommittedDiffResponse {
2100 committed_text,
2101 staged_text,
2102 mode: mode.into(),
2103 }
2104 })
2105 }
2106
2107 async fn handle_update_diff_bases(
2108 this: Entity<Self>,
2109 request: TypedEnvelope<proto::UpdateDiffBases>,
2110 mut cx: AsyncApp,
2111 ) -> Result<()> {
2112 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2113 this.update(&mut cx, |this, cx| {
2114 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2115 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2116 let buffer = buffer.read(cx).text_snapshot();
2117 diff_state.update(cx, |diff_state, cx| {
2118 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2119 })
2120 }
2121 }
2122 })
2123 }
2124
2125 async fn handle_blame_buffer(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::BlameBuffer>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::BlameBufferResponse> {
2130 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2131 let version = deserialize_version(&envelope.payload.version);
2132 let buffer = this.read_with(&cx, |this, cx| {
2133 this.buffer_store.read(cx).get_existing(buffer_id)
2134 })??;
2135 buffer
2136 .update(&mut cx, |buffer, _| {
2137 buffer.wait_for_version(version.clone())
2138 })?
2139 .await?;
2140 let blame = this
2141 .update(&mut cx, |this, cx| {
2142 this.blame_buffer(&buffer, Some(version), cx)
2143 })?
2144 .await?;
2145 Ok(serialize_blame_buffer_response(blame))
2146 }
2147
2148 async fn handle_get_permalink_to_line(
2149 this: Entity<Self>,
2150 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::GetPermalinkToLineResponse> {
2153 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2155 let selection = {
2156 let proto_selection = envelope
2157 .payload
2158 .selection
2159 .context("no selection to get permalink for defined")?;
2160 proto_selection.start as u32..proto_selection.end as u32
2161 };
2162 let buffer = this.read_with(&cx, |this, cx| {
2163 this.buffer_store.read(cx).get_existing(buffer_id)
2164 })??;
2165 let permalink = this
2166 .update(&mut cx, |this, cx| {
2167 this.get_permalink_to_line(&buffer, selection, cx)
2168 })?
2169 .await?;
2170 Ok(proto::GetPermalinkToLineResponse {
2171 permalink: permalink.to_string(),
2172 })
2173 }
2174
2175 fn repository_for_request(
2176 this: &Entity<Self>,
2177 id: RepositoryId,
2178 cx: &mut AsyncApp,
2179 ) -> Result<Entity<Repository>> {
2180 this.read_with(cx, |this, _| {
2181 this.repositories
2182 .get(&id)
2183 .context("missing repository handle")
2184 .cloned()
2185 })?
2186 }
2187
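/// Returns a clone of every tracked repository's current snapshot, keyed by repository id.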
2188 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2189 self.repositories
2190 .iter()
2191 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2192 .collect()
2193 }
2194
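/// Resolves a batch of updated worktree entries to the repositories that contain them,
/// returning, for each affected repository, the changed paths relative to its work
/// directory. Paths inside nested repositories are attributed to the innermost one.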
2195 fn process_updated_entries(
2196 &self,
2197 worktree: &Entity<Worktree>,
2198 updated_entries: &[(Arc<Path>, ProjectEntryId, PathChange)],
2199 cx: &mut App,
2200 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2201 let mut repo_paths = self
2202 .repositories
2203 .values()
2204 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2205 .collect::<Vec<_>>();
2206 let mut entries: Vec<_> = updated_entries
2207 .iter()
2208 .map(|(path, _, _)| path.clone())
2209 .collect();
2210 entries.sort();
2211 let worktree = worktree.read(cx);
2212
2213 let entries = entries
2214 .into_iter()
2215 .filter_map(|path| worktree.absolutize(&path).ok())
2216 .collect::<Arc<[_]>>();
2217
2218 let executor = cx.background_executor().clone();
2219 cx.background_executor().spawn(async move {
2220 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2221 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2222 let mut tasks = FuturesOrdered::new();
2223 for (repo_path, repo) in repo_paths.into_iter().rev() {
2224 let entries = entries.clone();
2225 let task = executor.spawn(async move {
2226 // Find all repository paths that belong to this repo
2227 let mut ix = entries.partition_point(|path| path < &*repo_path);
2228 if ix == entries.len() {
2229 return None;
2230 };
2231
2232 let mut paths = vec![];
2233 // All paths prefixed by a given repo will constitute a continuous range.
2234 while let Some(path) = entries.get(ix)
2235 && let Some(repo_path) =
2236 RepositorySnapshot::abs_path_to_repo_path_inner(&repo_path, &path)
2237 {
2238 paths.push((repo_path, ix));
2239 ix += 1;
2240 }
2241 Some((repo, paths))
2242 });
2243 tasks.push_back(task);
2244 }
2245
2246 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2247 let mut path_was_used = vec![false; entries.len()];
2248 let tasks = tasks.collect::<Vec<_>>().await;
2249 // Tasks were queued for repositories in reverse-sorted order, so more deeply nested repositories are processed first.
2250 // We always want to assign a path to its innermost repository.
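// For example, with repositories at `/repo` and `/repo/sub`, a change to
// `/repo/sub/main.rs` is claimed by the `/repo/sub` task and then skipped when
// the `/repo` task's results are processed.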
2251 for t in tasks {
2252 let Some((repo, paths)) = t else {
2253 continue;
2254 };
2255 let entry = paths_by_git_repo.entry(repo).or_default();
2256 for (repo_path, ix) in paths {
2257 if path_was_used[ix] {
2258 continue;
2259 }
2260 path_was_used[ix] = true;
2261 entry.push(repo_path);
2262 }
2263 }
2264
2265 paths_by_git_repo
2266 })
2267 }
2268}
2269
2270impl BufferGitState {
2271 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2272 Self {
2273 unstaged_diff: Default::default(),
2274 uncommitted_diff: Default::default(),
2275 recalculate_diff_task: Default::default(),
2276 language: Default::default(),
2277 language_registry: Default::default(),
2278 recalculating_tx: postage::watch::channel_with(false).0,
2279 hunk_staging_operation_count: 0,
2280 hunk_staging_operation_count_as_of_write: 0,
2281 head_text: Default::default(),
2282 index_text: Default::default(),
2283 head_changed: Default::default(),
2284 index_changed: Default::default(),
2285 language_changed: Default::default(),
2286 conflict_updated_futures: Default::default(),
2287 conflict_set: Default::default(),
2288 reparse_conflict_markers_task: Default::default(),
2289 }
2290 }
2291
2292 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2293 self.language = buffer.read(cx).language().cloned();
2294 self.language_changed = true;
2295 self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2296 }
2297
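/// Re-parses conflict markers in the given buffer snapshot and applies the result to the
/// associated `ConflictSet`. The returned receiver fires once the update has been applied;
/// if there is no upgradable conflict set, or no conflict was previously recorded, the
/// receiver is dropped without firing.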
2298 fn reparse_conflict_markers(
2299 &mut self,
2300 buffer: text::BufferSnapshot,
2301 cx: &mut Context<Self>,
2302 ) -> oneshot::Receiver<()> {
2303 let (tx, rx) = oneshot::channel();
2304
2305 let Some(conflict_set) = self
2306 .conflict_set
2307 .as_ref()
2308 .and_then(|conflict_set| conflict_set.upgrade())
2309 else {
2310 return rx;
2311 };
2312
2313 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2314 if conflict_set.has_conflict {
2315 Some(conflict_set.snapshot())
2316 } else {
2317 None
2318 }
2319 });
2320
2321 if let Some(old_snapshot) = old_snapshot {
2322 self.conflict_updated_futures.push(tx);
2323 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2324 let (snapshot, changed_range) = cx
2325 .background_spawn(async move {
2326 let new_snapshot = ConflictSet::parse(&buffer);
2327 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2328 (new_snapshot, changed_range)
2329 })
2330 .await;
2331 this.update(cx, |this, cx| {
2332 if let Some(conflict_set) = &this.conflict_set {
2333 conflict_set
2334 .update(cx, |conflict_set, cx| {
2335 conflict_set.set_snapshot(snapshot, changed_range, cx);
2336 })
2337 .ok();
2338 }
2339 let futures = std::mem::take(&mut this.conflict_updated_futures);
2340 for tx in futures {
2341 tx.send(()).ok();
2342 }
2343 })
2344 }))
2345 }
2346
2347 rx
2348 }
2349
2350 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2351 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2352 }
2353
2354 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2355 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2356 }
2357
2358 fn handle_base_texts_updated(
2359 &mut self,
2360 buffer: text::BufferSnapshot,
2361 message: proto::UpdateDiffBases,
2362 cx: &mut Context<Self>,
2363 ) {
2364 use proto::update_diff_bases::Mode;
2365
2366 let Some(mode) = Mode::from_i32(message.mode) else {
2367 return;
2368 };
2369
2370 let diff_bases_change = match mode {
2371 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2372 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2373 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2374 Mode::IndexAndHead => DiffBasesChange::SetEach {
2375 index: message.staged_text,
2376 head: message.committed_text,
2377 },
2378 };
2379
2380 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2381 }
2382
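/// If a diff recalculation is currently in progress, returns a future that resolves once it
/// completes; otherwise returns `None`.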
2383 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2384 if *self.recalculating_tx.borrow() {
2385 let mut rx = self.recalculating_tx.subscribe();
2386 return Some(async move {
2387 loop {
2388 let is_recalculating = rx.recv().await;
2389 if is_recalculating != Some(true) {
2390 break;
2391 }
2392 }
2393 });
2394 } else {
2395 None
2396 }
2397 }
2398
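/// Records new index and/or HEAD base texts (normalizing their line endings) and kicks off a
/// diff recalculation.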
2399 fn diff_bases_changed(
2400 &mut self,
2401 buffer: text::BufferSnapshot,
2402 diff_bases_change: Option<DiffBasesChange>,
2403 cx: &mut Context<Self>,
2404 ) {
2405 match diff_bases_change {
2406 Some(DiffBasesChange::SetIndex(index)) => {
2407 self.index_text = index.map(|mut index| {
2408 text::LineEnding::normalize(&mut index);
2409 Arc::new(index)
2410 });
2411 self.index_changed = true;
2412 }
2413 Some(DiffBasesChange::SetHead(head)) => {
2414 self.head_text = head.map(|mut head| {
2415 text::LineEnding::normalize(&mut head);
2416 Arc::new(head)
2417 });
2418 self.head_changed = true;
2419 }
2420 Some(DiffBasesChange::SetBoth(text)) => {
2421 let text = text.map(|mut text| {
2422 text::LineEnding::normalize(&mut text);
2423 Arc::new(text)
2424 });
2425 self.head_text = text.clone();
2426 self.index_text = text;
2427 self.head_changed = true;
2428 self.index_changed = true;
2429 }
2430 Some(DiffBasesChange::SetEach { index, head }) => {
2431 self.index_text = index.map(|mut index| {
2432 text::LineEnding::normalize(&mut index);
2433 Arc::new(index)
2434 });
2435 self.index_changed = true;
2436 self.head_text = head.map(|mut head| {
2437 text::LineEnding::normalize(&mut head);
2438 Arc::new(head)
2439 });
2440 self.head_changed = true;
2441 }
2442 None => {}
2443 }
2444
2445 self.recalculate_diffs(buffer, cx)
2446 }
2447
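/// Spawns a task that recomputes the unstaged and uncommitted diffs from the stored index and
/// HEAD texts. The task bails out if further hunk staging operations were initiated in the
/// meantime, leaving the work to a later recalculation.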
2448 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2449 *self.recalculating_tx.borrow_mut() = true;
2450
2451 let language = self.language.clone();
2452 let language_registry = self.language_registry.clone();
2453 let unstaged_diff = self.unstaged_diff();
2454 let uncommitted_diff = self.uncommitted_diff();
2455 let head = self.head_text.clone();
2456 let index = self.index_text.clone();
2457 let index_changed = self.index_changed;
2458 let head_changed = self.head_changed;
2459 let language_changed = self.language_changed;
2460 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2461 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2462 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2463 (None, None) => true,
2464 _ => false,
2465 };
2466 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2467 log::debug!(
2468 "start recalculating diffs for buffer {}",
2469 buffer.remote_id()
2470 );
2471
2472 let mut new_unstaged_diff = None;
2473 if let Some(unstaged_diff) = &unstaged_diff {
2474 new_unstaged_diff = Some(
2475 BufferDiff::update_diff(
2476 unstaged_diff.clone(),
2477 buffer.clone(),
2478 index,
2479 index_changed,
2480 language_changed,
2481 language.clone(),
2482 language_registry.clone(),
2483 cx,
2484 )
2485 .await?,
2486 );
2487 }
2488
2489 let mut new_uncommitted_diff = None;
2490 if let Some(uncommitted_diff) = &uncommitted_diff {
2491 new_uncommitted_diff = if index_matches_head {
2492 new_unstaged_diff.clone()
2493 } else {
2494 Some(
2495 BufferDiff::update_diff(
2496 uncommitted_diff.clone(),
2497 buffer.clone(),
2498 head,
2499 head_changed,
2500 language_changed,
2501 language.clone(),
2502 language_registry.clone(),
2503 cx,
2504 )
2505 .await?,
2506 )
2507 }
2508 }
2509
2510 let cancel = this.update(cx, |this, _| {
2511 // This checks whether all pending stage/unstage operations
2512 // have quiesced (i.e. both the corresponding write and the
2513 // read of that write have completed). If not, then we cancel
2514 // this recalculation attempt to avoid invalidating pending
2515 // state too quickly; another recalculation will come along
2516 // later and clear the pending state once the state of the index has settled.
2517 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2518 *this.recalculating_tx.borrow_mut() = false;
2519 true
2520 } else {
2521 false
2522 }
2523 })?;
2524 if cancel {
2525 log::debug!(
2526 concat!(
2527 "aborting recalculating diffs for buffer {}",
2528 "due to subsequent hunk operations",
2529 ),
2530 buffer.remote_id()
2531 );
2532 return Ok(());
2533 }
2534
2535 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2536 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2537 {
2538 unstaged_diff.update(cx, |diff, cx| {
2539 if language_changed {
2540 diff.language_changed(cx);
2541 }
2542 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2543 })?
2544 } else {
2545 None
2546 };
2547
2548 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2549 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2550 {
2551 uncommitted_diff.update(cx, |diff, cx| {
2552 if language_changed {
2553 diff.language_changed(cx);
2554 }
2555 diff.set_snapshot_with_secondary(
2556 new_uncommitted_diff,
2557 &buffer,
2558 unstaged_changed_range,
2559 true,
2560 cx,
2561 );
2562 })?;
2563 }
2564
2565 log::debug!(
2566 "finished recalculating diffs for buffer {}",
2567 buffer.remote_id()
2568 );
2569
2570 if let Some(this) = this.upgrade() {
2571 this.update(cx, |this, _| {
2572 this.index_changed = false;
2573 this.head_changed = false;
2574 this.language_changed = false;
2575 *this.recalculating_tx.borrow_mut() = false;
2576 })?;
2577 }
2578
2579 Ok(())
2580 }));
2581 }
2582}
2583
2584fn make_remote_delegate(
2585 this: Entity<GitStore>,
2586 project_id: u64,
2587 repository_id: RepositoryId,
2588 askpass_id: u64,
2589 cx: &mut AsyncApp,
2590) -> AskPassDelegate {
2591 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2592 this.update(cx, |this, cx| {
2593 let Some((client, _)) = this.downstream_client() else {
2594 return;
2595 };
2596 let response = client.request(proto::AskPassRequest {
2597 project_id,
2598 repository_id: repository_id.to_proto(),
2599 askpass_id,
2600 prompt,
2601 });
2602 cx.spawn(async move |_, _| {
2603 tx.send(response.await?.response).ok();
2604 anyhow::Ok(())
2605 })
2606 .detach_and_log_err(cx);
2607 })
2608 .log_err();
2609 })
2610}
2611
2612impl RepositoryId {
2613 pub fn to_proto(self) -> u64 {
2614 self.0
2615 }
2616
2617 pub fn from_proto(id: u64) -> Self {
2618 RepositoryId(id)
2619 }
2620}
2621
2622impl RepositorySnapshot {
2623 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2624 Self {
2625 id,
2626 statuses_by_path: Default::default(),
2627 work_directory_abs_path,
2628 branch: None,
2629 head_commit: None,
2630 scan_id: 0,
2631 merge: Default::default(),
2632 }
2633 }
2634
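/// Builds the initial `UpdateRepository` message sent downstream for this repository,
/// containing its full set of status entries and conflicted paths.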
2635 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2636 proto::UpdateRepository {
2637 branch_summary: self.branch.as_ref().map(branch_to_proto),
2638 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2639 updated_statuses: self
2640 .statuses_by_path
2641 .iter()
2642 .map(|entry| entry.to_proto())
2643 .collect(),
2644 removed_statuses: Default::default(),
2645 current_merge_conflicts: self
2646 .merge
2647 .conflicted_paths
2648 .iter()
2649 .map(|repo_path| repo_path.to_proto())
2650 .collect(),
2651 project_id,
2652 id: self.id.to_proto(),
2653 abs_path: self.work_directory_abs_path.to_proto(),
2654 entry_ids: vec![self.id.to_proto()],
2655 scan_id: self.scan_id,
2656 is_last_update: true,
2657 }
2658 }
2659
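/// Compares this snapshot with `old` and builds an incremental `UpdateRepository` message
/// containing only the status entries that were added, changed, or removed. Both status lists
/// are walked in path order, merge-style.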
2660 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2661 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2662 let mut removed_statuses: Vec<String> = Vec::new();
2663
2664 let mut new_statuses = self.statuses_by_path.iter().peekable();
2665 let mut old_statuses = old.statuses_by_path.iter().peekable();
2666
2667 let mut current_new_entry = new_statuses.next();
2668 let mut current_old_entry = old_statuses.next();
2669 loop {
2670 match (current_new_entry, current_old_entry) {
2671 (Some(new_entry), Some(old_entry)) => {
2672 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2673 Ordering::Less => {
2674 updated_statuses.push(new_entry.to_proto());
2675 current_new_entry = new_statuses.next();
2676 }
2677 Ordering::Equal => {
2678 if new_entry.status != old_entry.status {
2679 updated_statuses.push(new_entry.to_proto());
2680 }
2681 current_old_entry = old_statuses.next();
2682 current_new_entry = new_statuses.next();
2683 }
2684 Ordering::Greater => {
2685 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2686 current_old_entry = old_statuses.next();
2687 }
2688 }
2689 }
2690 (None, Some(old_entry)) => {
2691 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2692 current_old_entry = old_statuses.next();
2693 }
2694 (Some(new_entry), None) => {
2695 updated_statuses.push(new_entry.to_proto());
2696 current_new_entry = new_statuses.next();
2697 }
2698 (None, None) => break,
2699 }
2700 }
2701
2702 proto::UpdateRepository {
2703 branch_summary: self.branch.as_ref().map(branch_to_proto),
2704 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2705 updated_statuses,
2706 removed_statuses,
2707 current_merge_conflicts: self
2708 .merge
2709 .conflicted_paths
2710 .iter()
2711 .map(|path| path.as_ref().to_proto())
2712 .collect(),
2713 project_id,
2714 id: self.id.to_proto(),
2715 abs_path: self.work_directory_abs_path.to_proto(),
2716 entry_ids: vec![],
2717 scan_id: self.scan_id,
2718 is_last_update: true,
2719 }
2720 }
2721
2722 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2723 self.statuses_by_path.iter().cloned()
2724 }
2725
2726 pub fn status_summary(&self) -> GitSummary {
2727 self.statuses_by_path.summary().item_summary
2728 }
2729
2730 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2731 self.statuses_by_path
2732 .get(&PathKey(path.0.clone()), &())
2733 .cloned()
2734 }
2735
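/// Converts an absolute path into a path relative to this repository's work directory,
/// returning `None` if it lies outside the repository.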
2736 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2737 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path)
2738 }
2739
2740 #[inline]
2741 fn abs_path_to_repo_path_inner(
2742 work_directory_abs_path: &Path,
2743 abs_path: &Path,
2744 ) -> Option<RepoPath> {
2745 abs_path
2746 .strip_prefix(&work_directory_abs_path)
2747 .map(RepoPath::from)
2748 .ok()
2749 }
2750
2751 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
2752 self.merge.conflicted_paths.contains(&repo_path)
2753 }
2754
2755 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2756 let had_conflict_on_last_merge_head_change =
2757 self.merge.conflicted_paths.contains(&repo_path);
2758 let has_conflict_currently = self
2759 .status_for_path(&repo_path)
2760 .map_or(false, |entry| entry.status.is_conflicted());
2761 had_conflict_on_last_merge_head_change || has_conflict_currently
2762 }
2763
2764 /// This is the name that will be displayed in the repository selector for this repository.
2765 pub fn display_name(&self) -> SharedString {
2766 self.work_directory_abs_path
2767 .file_name()
2768 .unwrap_or_default()
2769 .to_string_lossy()
2770 .to_string()
2771 .into()
2772 }
2773}
2774
2775impl MergeDetails {
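/// Loads the current merge-related state (merge/cherry-pick/rebase/revert/apply heads, the
/// merge message, and conflicted paths) from the repository backend, returning the new
/// details together with a flag indicating whether the recorded merge heads changed relative
/// to `prev_snapshot`.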
2776 async fn load(
2777 backend: &Arc<dyn GitRepository>,
2778 status: &SumTree<StatusEntry>,
2779 prev_snapshot: &RepositorySnapshot,
2780 ) -> Result<(MergeDetails, bool)> {
2781 log::debug!("load merge details");
2782 let message = backend.merge_message().await;
2783 let heads = backend
2784 .revparse_batch(vec![
2785 "MERGE_HEAD".into(),
2786 "CHERRY_PICK_HEAD".into(),
2787 "REBASE_HEAD".into(),
2788 "REVERT_HEAD".into(),
2789 "APPLY_HEAD".into(),
2790 ])
2791 .await
2792 .log_err()
2793 .unwrap_or_default()
2794 .into_iter()
2795 .map(|opt| opt.map(SharedString::from))
2796 .collect::<Vec<_>>();
2797 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2798 let conflicted_paths = if merge_heads_changed {
2799 let current_conflicted_paths = TreeSet::from_ordered_entries(
2800 status
2801 .iter()
2802 .filter(|entry| entry.status.is_conflicted())
2803 .map(|entry| entry.repo_path.clone()),
2804 );
2805
2806 // It can happen that we run a scan while a lengthy merge is in progress
2807 // that will eventually result in conflicts, but before those conflicts
2808 // are reported by `git status`. Since for the moment we only care about
2809 // the merge heads state for the purposes of tracking conflicts, don't update
2810 // this state until we see some conflicts.
2811 if heads.iter().any(Option::is_some)
2812 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2813 && current_conflicted_paths.is_empty()
2814 {
2815 log::debug!("not updating merge heads because no conflicts found");
2816 return Ok((
2817 MergeDetails {
2818 message: message.map(SharedString::from),
2819 ..prev_snapshot.merge.clone()
2820 },
2821 false,
2822 ));
2823 }
2824
2825 current_conflicted_paths
2826 } else {
2827 prev_snapshot.merge.conflicted_paths.clone()
2828 };
2829 let details = MergeDetails {
2830 conflicted_paths,
2831 message: message.map(SharedString::from),
2832 heads,
2833 };
2834 Ok((details, merge_heads_changed))
2835 }
2836}
2837
2838impl Repository {
2839 pub fn snapshot(&self) -> RepositorySnapshot {
2840 self.snapshot.clone()
2841 }
2842
2843 fn local(
2844 id: RepositoryId,
2845 work_directory_abs_path: Arc<Path>,
2846 dot_git_abs_path: Arc<Path>,
2847 repository_dir_abs_path: Arc<Path>,
2848 common_dir_abs_path: Arc<Path>,
2849 project_environment: WeakEntity<ProjectEnvironment>,
2850 fs: Arc<dyn Fs>,
2851 git_store: WeakEntity<GitStore>,
2852 cx: &mut Context<Self>,
2853 ) -> Self {
2854 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2855 Repository {
2856 this: cx.weak_entity(),
2857 git_store,
2858 snapshot,
2859 commit_message_buffer: None,
2860 askpass_delegates: Default::default(),
2861 paths_needing_status_update: Default::default(),
2862 latest_askpass_id: 0,
2863 job_sender: Repository::spawn_local_git_worker(
2864 work_directory_abs_path,
2865 dot_git_abs_path,
2866 repository_dir_abs_path,
2867 common_dir_abs_path,
2868 project_environment,
2869 fs,
2870 cx,
2871 ),
2872 job_id: 0,
2873 active_jobs: Default::default(),
2874 }
2875 }
2876
2877 fn remote(
2878 id: RepositoryId,
2879 work_directory_abs_path: Arc<Path>,
2880 project_id: ProjectId,
2881 client: AnyProtoClient,
2882 git_store: WeakEntity<GitStore>,
2883 cx: &mut Context<Self>,
2884 ) -> Self {
2885 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2886 Self {
2887 this: cx.weak_entity(),
2888 snapshot,
2889 commit_message_buffer: None,
2890 git_store,
2891 paths_needing_status_update: Default::default(),
2892 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2893 askpass_delegates: Default::default(),
2894 latest_askpass_id: 0,
2895 active_jobs: Default::default(),
2896 job_id: 0,
2897 }
2898 }
2899
2900 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2901 self.git_store.upgrade()
2902 }
2903
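/// Reloads the index and HEAD texts for every open buffer belonging to this repository,
/// forwards any changed base texts downstream, and feeds them into the corresponding diff
/// states.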
2904 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2905 let this = cx.weak_entity();
2906 let git_store = self.git_store.clone();
2907 let _ = self.send_keyed_job(
2908 Some(GitJobKey::ReloadBufferDiffBases),
2909 None,
2910 |state, mut cx| async move {
2911 let RepositoryState::Local { backend, .. } = state else {
2912 log::error!("tried to recompute diffs for a non-local repository");
2913 return Ok(());
2914 };
2915
2916 let Some(this) = this.upgrade() else {
2917 return Ok(());
2918 };
2919
2920 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2921 git_store.update(cx, |git_store, cx| {
2922 git_store
2923 .diffs
2924 .iter()
2925 .filter_map(|(buffer_id, diff_state)| {
2926 let buffer_store = git_store.buffer_store.read(cx);
2927 let buffer = buffer_store.get(*buffer_id)?;
2928 let file = File::from_dyn(buffer.read(cx).file())?;
2929 let abs_path =
2930 file.worktree.read(cx).absolutize(&file.path).ok()?;
2931 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2932 log::debug!(
2933 "start reload diff bases for repo path {}",
2934 repo_path.0.display()
2935 );
2936 diff_state.update(cx, |diff_state, _| {
2937 let has_unstaged_diff = diff_state
2938 .unstaged_diff
2939 .as_ref()
2940 .is_some_and(|diff| diff.is_upgradable());
2941 let has_uncommitted_diff = diff_state
2942 .uncommitted_diff
2943 .as_ref()
2944 .is_some_and(|set| set.is_upgradable());
2945
2946 Some((
2947 buffer,
2948 repo_path,
2949 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2950 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2951 ))
2952 })
2953 })
2954 .collect::<Vec<_>>()
2955 })
2956 })??;
2957
2958 let buffer_diff_base_changes = cx
2959 .background_spawn(async move {
2960 let mut changes = Vec::new();
2961 for (buffer, repo_path, current_index_text, current_head_text) in
2962 &repo_diff_state_updates
2963 {
2964 let index_text = if current_index_text.is_some() {
2965 backend.load_index_text(repo_path.clone()).await
2966 } else {
2967 None
2968 };
2969 let head_text = if current_head_text.is_some() {
2970 backend.load_committed_text(repo_path.clone()).await
2971 } else {
2972 None
2973 };
2974
2975 let change =
2976 match (current_index_text.as_ref(), current_head_text.as_ref()) {
2977 (Some(current_index), Some(current_head)) => {
2978 let index_changed =
2979 index_text.as_ref() != current_index.as_deref();
2980 let head_changed =
2981 head_text.as_ref() != current_head.as_deref();
2982 if index_changed && head_changed {
2983 if index_text == head_text {
2984 Some(DiffBasesChange::SetBoth(head_text))
2985 } else {
2986 Some(DiffBasesChange::SetEach {
2987 index: index_text,
2988 head: head_text,
2989 })
2990 }
2991 } else if index_changed {
2992 Some(DiffBasesChange::SetIndex(index_text))
2993 } else if head_changed {
2994 Some(DiffBasesChange::SetHead(head_text))
2995 } else {
2996 None
2997 }
2998 }
2999 (Some(current_index), None) => {
3000 let index_changed =
3001 index_text.as_ref() != current_index.as_deref();
3002 index_changed
3003 .then_some(DiffBasesChange::SetIndex(index_text))
3004 }
3005 (None, Some(current_head)) => {
3006 let head_changed =
3007 head_text.as_ref() != current_head.as_deref();
3008 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3009 }
3010 (None, None) => None,
3011 };
3012
3013 changes.push((buffer.clone(), change))
3014 }
3015 changes
3016 })
3017 .await;
3018
3019 git_store.update(&mut cx, |git_store, cx| {
3020 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3021 let buffer_snapshot = buffer.read(cx).text_snapshot();
3022 let buffer_id = buffer_snapshot.remote_id();
3023 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3024 continue;
3025 };
3026
3027 let downstream_client = git_store.downstream_client();
3028 diff_state.update(cx, |diff_state, cx| {
3029 use proto::update_diff_bases::Mode;
3030
3031 if let Some((diff_bases_change, (client, project_id))) =
3032 diff_bases_change.clone().zip(downstream_client)
3033 {
3034 let (staged_text, committed_text, mode) = match diff_bases_change {
3035 DiffBasesChange::SetIndex(index) => {
3036 (index, None, Mode::IndexOnly)
3037 }
3038 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3039 DiffBasesChange::SetEach { index, head } => {
3040 (index, head, Mode::IndexAndHead)
3041 }
3042 DiffBasesChange::SetBoth(text) => {
3043 (None, text, Mode::IndexMatchesHead)
3044 }
3045 };
3046 client
3047 .send(proto::UpdateDiffBases {
3048 project_id: project_id.to_proto(),
3049 buffer_id: buffer_id.to_proto(),
3050 staged_text,
3051 committed_text,
3052 mode: mode as i32,
3053 })
3054 .log_err();
3055 }
3056
3057 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3058 });
3059 }
3060 })
3061 },
3062 );
3063 }
3064
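/// Enqueues a job on this repository's worker. If a status message is provided it is shown
/// while the job runs; the returned receiver resolves with the job's result.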
3065 pub fn send_job<F, Fut, R>(
3066 &mut self,
3067 status: Option<SharedString>,
3068 job: F,
3069 ) -> oneshot::Receiver<R>
3070 where
3071 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3072 Fut: Future<Output = R> + 'static,
3073 R: Send + 'static,
3074 {
3075 self.send_keyed_job(None, status, job)
3076 }
3077
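/// Like `send_job`, but additionally tags the job with a `GitJobKey` so the worker can
/// recognize jobs that target the same logical resource (for example, successive writes to
/// the same index path).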
3078 fn send_keyed_job<F, Fut, R>(
3079 &mut self,
3080 key: Option<GitJobKey>,
3081 status: Option<SharedString>,
3082 job: F,
3083 ) -> oneshot::Receiver<R>
3084 where
3085 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3086 Fut: Future<Output = R> + 'static,
3087 R: Send + 'static,
3088 {
3089 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3090 let job_id = post_inc(&mut self.job_id);
3091 let this = self.this.clone();
3092 self.job_sender
3093 .unbounded_send(GitJob {
3094 key,
3095 job: Box::new(move |state, cx: &mut AsyncApp| {
3096 let job = job(state, cx.clone());
3097 cx.spawn(async move |cx| {
3098 if let Some(s) = status.clone() {
3099 this.update(cx, |this, cx| {
3100 this.active_jobs.insert(
3101 job_id,
3102 JobInfo {
3103 start: Instant::now(),
3104 message: s.clone(),
3105 },
3106 );
3107
3108 cx.notify();
3109 })
3110 .ok();
3111 }
3112 let result = job.await;
3113
3114 this.update(cx, |this, cx| {
3115 this.active_jobs.remove(&job_id);
3116 cx.notify();
3117 })
3118 .ok();
3119
3120 result_tx.send(result).ok();
3121 })
3122 }),
3123 })
3124 .ok();
3125 result_rx
3126 }
3127
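/// Makes this repository the active one in its `GitStore`, emitting
/// `GitStoreEvent::ActiveRepositoryChanged`.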
3128 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3129 let Some(git_store) = self.git_store.upgrade() else {
3130 return;
3131 };
3132 let entity = cx.entity();
3133 git_store.update(cx, |git_store, cx| {
3134 let Some((&id, _)) = git_store
3135 .repositories
3136 .iter()
3137 .find(|(_, handle)| *handle == &entity)
3138 else {
3139 return;
3140 };
3141 git_store.active_repo_id = Some(id);
3142 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3143 });
3144 }
3145
3146 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3147 self.snapshot.status()
3148 }
3149
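/// Maps a repository-relative path to a project path by absolutizing it against the work
/// directory and locating the worktree that contains it.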
3150 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3151 let git_store = self.git_store.upgrade()?;
3152 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3153 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3154 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3155 Some(ProjectPath {
3156 worktree_id: worktree.read(cx).id(),
3157 path: relative_path.into(),
3158 })
3159 }
3160
3161 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3162 let git_store = self.git_store.upgrade()?;
3163 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3164 let abs_path = worktree_store.absolutize(path, cx)?;
3165 self.snapshot.abs_path_to_repo_path(&abs_path)
3166 }
3167
3168 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3169 other
3170 .read(cx)
3171 .snapshot
3172 .work_directory_abs_path
3173 .starts_with(&self.snapshot.work_directory_abs_path)
3174 }
3175
3176 pub fn open_commit_buffer(
3177 &mut self,
3178 languages: Option<Arc<LanguageRegistry>>,
3179 buffer_store: Entity<BufferStore>,
3180 cx: &mut Context<Self>,
3181 ) -> Task<Result<Entity<Buffer>>> {
3182 let id = self.id;
3183 if let Some(buffer) = self.commit_message_buffer.clone() {
3184 return Task::ready(Ok(buffer));
3185 }
3186 let this = cx.weak_entity();
3187
3188 let rx = self.send_job(None, move |state, mut cx| async move {
3189 let Some(this) = this.upgrade() else {
3190 bail!("git store was dropped");
3191 };
3192 match state {
3193 RepositoryState::Local { .. } => {
3194 this.update(&mut cx, |_, cx| {
3195 Self::open_local_commit_buffer(languages, buffer_store, cx)
3196 })?
3197 .await
3198 }
3199 RepositoryState::Remote { project_id, client } => {
3200 let request = client.request(proto::OpenCommitMessageBuffer {
3201 project_id: project_id.0,
3202 repository_id: id.to_proto(),
3203 });
3204 let response = request.await.context("requesting to open commit buffer")?;
3205 let buffer_id = BufferId::new(response.buffer_id)?;
3206 let buffer = buffer_store
3207 .update(&mut cx, |buffer_store, cx| {
3208 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3209 })?
3210 .await?;
3211 if let Some(language_registry) = languages {
3212 let git_commit_language =
3213 language_registry.language_for_name("Git Commit").await?;
3214 buffer.update(&mut cx, |buffer, cx| {
3215 buffer.set_language(Some(git_commit_language), cx);
3216 })?;
3217 }
3218 this.update(&mut cx, |this, _| {
3219 this.commit_message_buffer = Some(buffer.clone());
3220 })?;
3221 Ok(buffer)
3222 }
3223 }
3224 });
3225
3226 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3227 }
3228
3229 fn open_local_commit_buffer(
3230 language_registry: Option<Arc<LanguageRegistry>>,
3231 buffer_store: Entity<BufferStore>,
3232 cx: &mut Context<Self>,
3233 ) -> Task<Result<Entity<Buffer>>> {
3234 cx.spawn(async move |repository, cx| {
3235 let buffer = buffer_store
3236 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3237 .await?;
3238
3239 if let Some(language_registry) = language_registry {
3240 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3241 buffer.update(cx, |buffer, cx| {
3242 buffer.set_language(Some(git_commit_language), cx);
3243 })?;
3244 }
3245
3246 repository.update(cx, |repository, _| {
3247 repository.commit_message_buffer = Some(buffer.clone());
3248 })?;
3249 Ok(buffer)
3250 })
3251 }
3252
3253 pub fn checkout_files(
3254 &mut self,
3255 commit: &str,
3256 paths: Vec<RepoPath>,
3257 _cx: &mut App,
3258 ) -> oneshot::Receiver<Result<()>> {
3259 let commit = commit.to_string();
3260 let id = self.id;
3261
3262 self.send_job(
3263 Some(format!("git checkout {}", commit).into()),
3264 move |git_repo, _| async move {
3265 match git_repo {
3266 RepositoryState::Local {
3267 backend,
3268 environment,
3269 ..
3270 } => {
3271 backend
3272 .checkout_files(commit, paths, environment.clone())
3273 .await
3274 }
3275 RepositoryState::Remote { project_id, client } => {
3276 client
3277 .request(proto::GitCheckoutFiles {
3278 project_id: project_id.0,
3279 repository_id: id.to_proto(),
3280 commit,
3281 paths: paths
3282 .into_iter()
3283 .map(|p| p.to_string_lossy().to_string())
3284 .collect(),
3285 })
3286 .await?;
3287
3288 Ok(())
3289 }
3290 }
3291 },
3292 )
3293 }
3294
3295 pub fn reset(
3296 &mut self,
3297 commit: String,
3298 reset_mode: ResetMode,
3299 _cx: &mut App,
3300 ) -> oneshot::Receiver<Result<()>> {
3302 let id = self.id;
3303
3304 self.send_job(None, move |git_repo, _| async move {
3305 match git_repo {
3306 RepositoryState::Local {
3307 backend,
3308 environment,
3309 ..
3310 } => backend.reset(commit, reset_mode, environment).await,
3311 RepositoryState::Remote { project_id, client } => {
3312 client
3313 .request(proto::GitReset {
3314 project_id: project_id.0,
3315 repository_id: id.to_proto(),
3316 commit,
3317 mode: match reset_mode {
3318 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3319 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3320 },
3321 })
3322 .await?;
3323
3324 Ok(())
3325 }
3326 }
3327 })
3328 }
3329
3330 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3331 let id = self.id;
3332 self.send_job(None, move |git_repo, _cx| async move {
3333 match git_repo {
3334 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3335 RepositoryState::Remote { project_id, client } => {
3336 let resp = client
3337 .request(proto::GitShow {
3338 project_id: project_id.0,
3339 repository_id: id.to_proto(),
3340 commit,
3341 })
3342 .await?;
3343
3344 Ok(CommitDetails {
3345 sha: resp.sha.into(),
3346 message: resp.message.into(),
3347 commit_timestamp: resp.commit_timestamp,
3348 author_email: resp.author_email.into(),
3349 author_name: resp.author_name.into(),
3350 })
3351 }
3352 }
3353 })
3354 }
3355
3356 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3357 let id = self.id;
3358 self.send_job(None, move |git_repo, cx| async move {
3359 match git_repo {
3360 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3361 RepositoryState::Remote {
3362 client, project_id, ..
3363 } => {
3364 let response = client
3365 .request(proto::LoadCommitDiff {
3366 project_id: project_id.0,
3367 repository_id: id.to_proto(),
3368 commit,
3369 })
3370 .await?;
3371 Ok(CommitDiff {
3372 files: response
3373 .files
3374 .into_iter()
3375 .map(|file| CommitFile {
3376 path: Path::new(&file.path).into(),
3377 old_text: file.old_text,
3378 new_text: file.new_text,
3379 })
3380 .collect(),
3381 })
3382 }
3383 }
3384 })
3385 }
3386
3387 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3388 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3389 }
3390
3391 pub fn stage_entries(
3392 &self,
3393 entries: Vec<RepoPath>,
3394 cx: &mut Context<Self>,
3395 ) -> Task<anyhow::Result<()>> {
3396 if entries.is_empty() {
3397 return Task::ready(Ok(()));
3398 }
3399 let id = self.id;
3400
3401 let mut save_futures = Vec::new();
3402 if let Some(buffer_store) = self.buffer_store(cx) {
3403 buffer_store.update(cx, |buffer_store, cx| {
3404 for path in &entries {
3405 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3406 continue;
3407 };
3408 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3409 if buffer
3410 .read(cx)
3411 .file()
3412 .is_some_and(|file| file.disk_state().exists())
3413 {
3414 save_futures.push(buffer_store.save_buffer(buffer, cx));
3415 }
3416 }
3417 }
3418 })
3419 }
3420
3421 cx.spawn(async move |this, cx| {
3422 for save_future in save_futures {
3423 save_future.await?;
3424 }
3425
3426 this.update(cx, |this, _| {
3427 this.send_job(None, move |git_repo, _cx| async move {
3428 match git_repo {
3429 RepositoryState::Local {
3430 backend,
3431 environment,
3432 ..
3433 } => backend.stage_paths(entries, environment.clone()).await,
3434 RepositoryState::Remote { project_id, client } => {
3435 client
3436 .request(proto::Stage {
3437 project_id: project_id.0,
3438 repository_id: id.to_proto(),
3439 paths: entries
3440 .into_iter()
3441 .map(|repo_path| repo_path.as_ref().to_proto())
3442 .collect(),
3443 })
3444 .await
3445 .context("sending stage request")?;
3446
3447 Ok(())
3448 }
3449 }
3450 })
3451 })?
3452 .await??;
3453
3454 Ok(())
3455 })
3456 }
3457
3458 pub fn unstage_entries(
3459 &self,
3460 entries: Vec<RepoPath>,
3461 cx: &mut Context<Self>,
3462 ) -> Task<anyhow::Result<()>> {
3463 if entries.is_empty() {
3464 return Task::ready(Ok(()));
3465 }
3466 let id = self.id;
3467
3468 let mut save_futures = Vec::new();
3469 if let Some(buffer_store) = self.buffer_store(cx) {
3470 buffer_store.update(cx, |buffer_store, cx| {
3471 for path in &entries {
3472 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3473 continue;
3474 };
3475 if let Some(buffer) = buffer_store.get_by_path(&project_path) {
3476 if buffer
3477 .read(cx)
3478 .file()
3479 .is_some_and(|file| file.disk_state().exists())
3480 {
3481 save_futures.push(buffer_store.save_buffer(buffer, cx));
3482 }
3483 }
3484 }
3485 })
3486 }
3487
3488 cx.spawn(async move |this, cx| {
3489 for save_future in save_futures {
3490 save_future.await?;
3491 }
3492
3493 this.update(cx, |this, _| {
3494 this.send_job(None, move |git_repo, _cx| async move {
3495 match git_repo {
3496 RepositoryState::Local {
3497 backend,
3498 environment,
3499 ..
3500 } => backend.unstage_paths(entries, environment).await,
3501 RepositoryState::Remote { project_id, client } => {
3502 client
3503 .request(proto::Unstage {
3504 project_id: project_id.0,
3505 repository_id: id.to_proto(),
3506 paths: entries
3507 .into_iter()
3508 .map(|repo_path| repo_path.as_ref().to_proto())
3509 .collect(),
3510 })
3511 .await
3512 .context("sending unstage request")?;
3513
3514 Ok(())
3515 }
3516 }
3517 })
3518 })?
3519 .await??;
3520
3521 Ok(())
3522 })
3523 }
3524
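/// Stages every entry whose status is not already fully staged.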
3525 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3526 let to_stage = self
3527 .cached_status()
3528 .filter(|entry| !entry.status.staging().is_fully_staged())
3529 .map(|entry| entry.repo_path.clone())
3530 .collect();
3531 self.stage_entries(to_stage, cx)
3532 }
3533
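/// Unstages every entry that currently has staged changes.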
3534 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3535 let to_unstage = self
3536 .cached_status()
3537 .filter(|entry| entry.status.staging().has_staged())
3538 .map(|entry| entry.repo_path.clone())
3539 .collect();
3540 self.unstage_entries(to_unstage, cx)
3541 }
3542
3543 pub fn commit(
3544 &mut self,
3545 message: SharedString,
3546 name_and_email: Option<(SharedString, SharedString)>,
3547 options: CommitOptions,
3548 _cx: &mut App,
3549 ) -> oneshot::Receiver<Result<()>> {
3550 let id = self.id;
3551
3552 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3553 match git_repo {
3554 RepositoryState::Local {
3555 backend,
3556 environment,
3557 ..
3558 } => {
3559 backend
3560 .commit(message, name_and_email, options, environment)
3561 .await
3562 }
3563 RepositoryState::Remote { project_id, client } => {
3564 let (name, email) = name_and_email.unzip();
3565 client
3566 .request(proto::Commit {
3567 project_id: project_id.0,
3568 repository_id: id.to_proto(),
3569 message: String::from(message),
3570 name: name.map(String::from),
3571 email: email.map(String::from),
3572 options: Some(proto::commit::CommitOptions {
3573 amend: options.amend,
3574 signoff: options.signoff,
3575 }),
3576 })
3577 .await
3578 .context("sending commit request")?;
3579
3580 Ok(())
3581 }
3582 }
3583 })
3584 }
3585
3586 pub fn fetch(
3587 &mut self,
3588 fetch_options: FetchOptions,
3589 askpass: AskPassDelegate,
3590 _cx: &mut App,
3591 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3592 let askpass_delegates = self.askpass_delegates.clone();
3593 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3594 let id = self.id;
3595
3596 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3597 match git_repo {
3598 RepositoryState::Local {
3599 backend,
3600 environment,
3601 ..
3602 } => backend.fetch(fetch_options, askpass, environment, cx).await,
3603 RepositoryState::Remote { project_id, client } => {
3604 askpass_delegates.lock().insert(askpass_id, askpass);
3605 let _defer = util::defer(|| {
3606 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3607 debug_assert!(askpass_delegate.is_some());
3608 });
3609
3610 let response = client
3611 .request(proto::Fetch {
3612 project_id: project_id.0,
3613 repository_id: id.to_proto(),
3614 askpass_id,
3615 remote: fetch_options.to_proto(),
3616 })
3617 .await
3618 .context("sending fetch request")?;
3619
3620 Ok(RemoteCommandOutput {
3621 stdout: response.stdout,
3622 stderr: response.stderr,
3623 })
3624 }
3625 }
3626 })
3627 }
3628
3629 pub fn push(
3630 &mut self,
3631 branch: SharedString,
3632 remote: SharedString,
3633 options: Option<PushOptions>,
3634 askpass: AskPassDelegate,
3635 cx: &mut Context<Self>,
3636 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3637 let askpass_delegates = self.askpass_delegates.clone();
3638 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3639 let id = self.id;
3640
3641 let args = options
3642 .map(|option| match option {
3643 PushOptions::SetUpstream => " --set-upstream",
3644 PushOptions::Force => " --force-with-lease",
3645 })
3646 .unwrap_or("");
3647
3648 let updates_tx = self
3649 .git_store()
3650 .and_then(|git_store| match &git_store.read(cx).state {
3651 GitStoreState::Local { downstream, .. } => downstream
3652 .as_ref()
3653 .map(|downstream| downstream.updates_tx.clone()),
3654 _ => None,
3655 });
3656
3657 let this = cx.weak_entity();
3658 self.send_job(
3659 Some(format!("git push {} {} {}", args, branch, remote).into()),
3660 move |git_repo, mut cx| async move {
3661 match git_repo {
3662 RepositoryState::Local {
3663 backend,
3664 environment,
3665 ..
3666 } => {
3667 let result = backend
3668 .push(
3669 branch.to_string(),
3670 remote.to_string(),
3671 options,
3672 askpass,
3673 environment.clone(),
3674 cx.clone(),
3675 )
3676 .await;
3677 if result.is_ok() {
3678 let branches = backend.branches().await?;
3679 let branch = branches.into_iter().find(|branch| branch.is_head);
3680 log::info!("head branch after scan is {branch:?}");
3681 let snapshot = this.update(&mut cx, |this, cx| {
3682 this.snapshot.branch = branch;
3683 let snapshot = this.snapshot.clone();
3684 cx.emit(RepositoryEvent::Updated {
3685 full_scan: false,
3686 new_instance: false,
3687 });
3688 snapshot
3689 })?;
3690 if let Some(updates_tx) = updates_tx {
3691 updates_tx
3692 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3693 .ok();
3694 }
3695 }
3696 result
3697 }
3698 RepositoryState::Remote { project_id, client } => {
3699 askpass_delegates.lock().insert(askpass_id, askpass);
3700 let _defer = util::defer(|| {
3701 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3702 debug_assert!(askpass_delegate.is_some());
3703 });
3704 let response = client
3705 .request(proto::Push {
3706 project_id: project_id.0,
3707 repository_id: id.to_proto(),
3708 askpass_id,
3709 branch_name: branch.to_string(),
3710 remote_name: remote.to_string(),
3711 options: options.map(|options| match options {
3712 PushOptions::Force => proto::push::PushOptions::Force,
3713 PushOptions::SetUpstream => {
3714 proto::push::PushOptions::SetUpstream
3715 }
3716 }
3717 as i32),
3718 })
3719 .await
3720 .context("sending push request")?;
3721
3722 Ok(RemoteCommandOutput {
3723 stdout: response.stdout,
3724 stderr: response.stderr,
3725 })
3726 }
3727 }
3728 },
3729 )
3730 }
3731
3732 pub fn pull(
3733 &mut self,
3734 branch: SharedString,
3735 remote: SharedString,
3736 askpass: AskPassDelegate,
3737 _cx: &mut App,
3738 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3739 let askpass_delegates = self.askpass_delegates.clone();
3740 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3741 let id = self.id;
3742
3743 self.send_job(
3744 Some(format!("git pull {} {}", remote, branch).into()),
3745 move |git_repo, cx| async move {
3746 match git_repo {
3747 RepositoryState::Local {
3748 backend,
3749 environment,
3750 ..
3751 } => {
3752 backend
3753 .pull(
3754 branch.to_string(),
3755 remote.to_string(),
3756 askpass,
3757 environment.clone(),
3758 cx,
3759 )
3760 .await
3761 }
3762 RepositoryState::Remote { project_id, client } => {
3763 askpass_delegates.lock().insert(askpass_id, askpass);
3764 let _defer = util::defer(|| {
3765 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3766 debug_assert!(askpass_delegate.is_some());
3767 });
3768 let response = client
3769 .request(proto::Pull {
3770 project_id: project_id.0,
3771 repository_id: id.to_proto(),
3772 askpass_id,
3773 branch_name: branch.to_string(),
3774 remote_name: remote.to_string(),
3775 })
3776 .await
3777 .context("sending pull request")?;
3778
3779 Ok(RemoteCommandOutput {
3780 stdout: response.stdout,
3781 stderr: response.stderr,
3782 })
3783 }
3784 }
3785 },
3786 )
3787 }
3788
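/// Enqueues a keyed job (one per repo path) that writes `content` as the new index text for
/// `path`. When a hunk staging operation count is supplied, it is recorded on the buffer's
/// diff state after the write so that diff recalculation knows the write has landed.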
3789 fn spawn_set_index_text_job(
3790 &mut self,
3791 path: RepoPath,
3792 content: Option<String>,
3793 hunk_staging_operation_count: Option<usize>,
3794 cx: &mut Context<Self>,
3795 ) -> oneshot::Receiver<anyhow::Result<()>> {
3796 let id = self.id;
3797 let this = cx.weak_entity();
3798 let git_store = self.git_store.clone();
3799 self.send_keyed_job(
3800 Some(GitJobKey::WriteIndex(path.clone())),
3801 None,
3802 move |git_repo, mut cx| async move {
3803 log::debug!("start updating index text for buffer {}", path.display());
3804 match git_repo {
3805 RepositoryState::Local {
3806 backend,
3807 environment,
3808 ..
3809 } => {
3810 backend
3811 .set_index_text(path.clone(), content, environment.clone())
3812 .await?;
3813 }
3814 RepositoryState::Remote { project_id, client } => {
3815 client
3816 .request(proto::SetIndexText {
3817 project_id: project_id.0,
3818 repository_id: id.to_proto(),
3819 path: path.as_ref().to_proto(),
3820 text: content,
3821 })
3822 .await?;
3823 }
3824 }
3825 log::debug!("finish updating index text for buffer {}", path.display());
3826
3827 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3828 let project_path = this
3829 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3830 .ok()
3831 .flatten();
3832 git_store.update(&mut cx, |git_store, cx| {
3833 let buffer_id = git_store
3834 .buffer_store
3835 .read(cx)
3836 .get_by_path(&project_path?)?
3837 .read(cx)
3838 .remote_id();
3839 let diff_state = git_store.diffs.get(&buffer_id)?;
3840 diff_state.update(cx, |diff_state, _| {
3841 diff_state.hunk_staging_operation_count_as_of_write =
3842 hunk_staging_operation_count;
3843 });
3844 Some(())
3845 })?;
3846 }
3847 Ok(())
3848 },
3849 )
3850 }
3851
3852 pub fn get_remotes(
3853 &mut self,
3854 branch_name: Option<String>,
3855 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3856 let id = self.id;
3857 self.send_job(None, move |repo, _cx| async move {
3858 match repo {
3859 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3860 RepositoryState::Remote { project_id, client } => {
3861 let response = client
3862 .request(proto::GetRemotes {
3863 project_id: project_id.0,
3864 repository_id: id.to_proto(),
3865 branch_name,
3866 })
3867 .await?;
3868
3869 let remotes = response
3870 .remotes
3871 .into_iter()
3872                        .map(|remote| git::repository::Remote {
3873                            name: remote.name.into(),
3874                        })
3875 .collect();
3876
3877 Ok(remotes)
3878 }
3879 }
3880 })
3881 }
3882
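    /// Lists the repository's branches, querying the local backend or the remote host.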
3883 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3884 let id = self.id;
3885 self.send_job(None, move |repo, _| async move {
3886 match repo {
3887 RepositoryState::Local { backend, .. } => backend.branches().await,
3888 RepositoryState::Remote { project_id, client } => {
3889 let response = client
3890 .request(proto::GitGetBranches {
3891 project_id: project_id.0,
3892 repository_id: id.to_proto(),
3893 })
3894 .await?;
3895
3896 let branches = response
3897 .branches
3898 .into_iter()
3899 .map(|branch| proto_to_branch(&branch))
3900 .collect();
3901
3902 Ok(branches)
3903 }
3904 }
3905 })
3906 }
3907
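    /// Produces a textual diff of either HEAD-to-index or HEAD-to-worktree,
    /// depending on `diff_type`.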
3908 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3909 let id = self.id;
3910 self.send_job(None, move |repo, _cx| async move {
3911 match repo {
3912 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3913 RepositoryState::Remote { project_id, client } => {
3914 let response = client
3915 .request(proto::GitDiff {
3916 project_id: project_id.0,
3917 repository_id: id.to_proto(),
3918 diff_type: match diff_type {
3919 DiffType::HeadToIndex => {
3920 proto::git_diff::DiffType::HeadToIndex.into()
3921 }
3922 DiffType::HeadToWorktree => {
3923 proto::git_diff::DiffType::HeadToWorktree.into()
3924 }
3925 },
3926 })
3927 .await?;
3928
3929 Ok(response.diff)
3930 }
3931 }
3932 })
3933 }
3934
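    /// Creates a new branch named `branch_name` (surfaced to the user as
    /// `git switch -c <branch_name>`).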
3935 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3936 let id = self.id;
3937 self.send_job(
3938 Some(format!("git switch -c {branch_name}").into()),
3939 move |repo, _cx| async move {
3940 match repo {
3941 RepositoryState::Local { backend, .. } => {
3942 backend.create_branch(branch_name).await
3943 }
3944 RepositoryState::Remote { project_id, client } => {
3945 client
3946 .request(proto::GitCreateBranch {
3947 project_id: project_id.0,
3948 repository_id: id.to_proto(),
3949 branch_name,
3950 })
3951 .await?;
3952
3953 Ok(())
3954 }
3955 }
3956 },
3957 )
3958 }
3959
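    /// Switches to the branch named `branch_name` (surfaced to the user as
    /// `git switch <branch_name>`).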
3960 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3961 let id = self.id;
3962 self.send_job(
3963 Some(format!("git switch {branch_name}").into()),
3964 move |repo, _cx| async move {
3965 match repo {
3966 RepositoryState::Local { backend, .. } => {
3967 backend.change_branch(branch_name).await
3968 }
3969 RepositoryState::Remote { project_id, client } => {
3970 client
3971 .request(proto::GitChangeBranch {
3972 project_id: project_id.0,
3973 repository_id: id.to_proto(),
3974 branch_name,
3975 })
3976 .await?;
3977
3978 Ok(())
3979 }
3980 }
3981 },
3982 )
3983 }
3984
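    /// Asks the backend (or the host, for remote projects) which refs the current
    /// commits have already been pushed to.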
3985 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3986 let id = self.id;
3987 self.send_job(None, move |repo, _cx| async move {
3988 match repo {
3989 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3990 RepositoryState::Remote { project_id, client } => {
3991 let response = client
3992 .request(proto::CheckForPushedCommits {
3993 project_id: project_id.0,
3994 repository_id: id.to_proto(),
3995 })
3996 .await?;
3997
3998 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3999
4000 Ok(branches)
4001 }
4002 }
4003 })
4004 }
4005
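    /// Captures a checkpoint of the repository's current state. Only supported for
    /// local repositories.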
4006 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4007 self.send_job(None, |repo, _cx| async move {
4008 match repo {
4009 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4010 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4011 }
4012 })
4013 }
4014
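    /// Restores the repository to a previously captured checkpoint. Only supported
    /// for local repositories.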
4015 pub fn restore_checkpoint(
4016 &mut self,
4017 checkpoint: GitRepositoryCheckpoint,
4018 ) -> oneshot::Receiver<Result<()>> {
4019 self.send_job(None, move |repo, _cx| async move {
4020 match repo {
4021 RepositoryState::Local { backend, .. } => {
4022 backend.restore_checkpoint(checkpoint).await
4023 }
4024 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4025 }
4026 })
4027 }
4028
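    /// Applies an `UpdateRepository` message received from the host, replacing the
    /// branch, head commit, merge conflict paths, and per-path statuses in the local
    /// snapshot, then emits a `RepositoryEvent::Updated`.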
4029 pub(crate) fn apply_remote_update(
4030 &mut self,
4031 update: proto::UpdateRepository,
4032 is_new: bool,
4033 cx: &mut Context<Self>,
4034 ) -> Result<()> {
4035 let conflicted_paths = TreeSet::from_ordered_entries(
4036 update
4037 .current_merge_conflicts
4038 .into_iter()
4039 .map(|path| RepoPath(Path::new(&path).into())),
4040 );
4041 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
4042 self.snapshot.head_commit = update
4043 .head_commit_details
4044 .as_ref()
4045 .map(proto_to_commit_details);
4046
4047 self.snapshot.merge.conflicted_paths = conflicted_paths;
4048
4049 let edits = update
4050 .removed_statuses
4051 .into_iter()
4052 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
4053 .chain(
4054 update
4055 .updated_statuses
4056 .into_iter()
4057 .filter_map(|updated_status| {
4058 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4059 }),
4060 )
4061 .collect::<Vec<_>>();
4062 self.snapshot.statuses_by_path.edit(edits, &());
4063 if update.is_last_update {
4064 self.snapshot.scan_id = update.scan_id;
4065 }
4066 cx.emit(RepositoryEvent::Updated {
4067 full_scan: true,
4068 new_instance: is_new,
4069 });
4070 Ok(())
4071 }
4072
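    /// Compares two previously captured checkpoints. Only supported for local
    /// repositories.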
4073 pub fn compare_checkpoints(
4074 &mut self,
4075 left: GitRepositoryCheckpoint,
4076 right: GitRepositoryCheckpoint,
4077 ) -> oneshot::Receiver<Result<bool>> {
4078 self.send_job(None, move |repo, _cx| async move {
4079 match repo {
4080 RepositoryState::Local { backend, .. } => {
4081 backend.compare_checkpoints(left, right).await
4082 }
4083 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4084 }
4085 })
4086 }
4087
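    /// Produces a textual diff between two previously captured checkpoints. Only
    /// supported for local repositories.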
4088 pub fn diff_checkpoints(
4089 &mut self,
4090 base_checkpoint: GitRepositoryCheckpoint,
4091 target_checkpoint: GitRepositoryCheckpoint,
4092 ) -> oneshot::Receiver<Result<String>> {
4093 self.send_job(None, move |repo, _cx| async move {
4094 match repo {
4095 RepositoryState::Local { backend, .. } => {
4096 backend
4097 .diff_checkpoints(base_checkpoint, target_checkpoint)
4098 .await
4099 }
4100 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4101 }
4102 })
4103 }
4104
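    /// Schedules a full git status scan. The job recomputes the snapshot in the
    /// background, applies it to this repository, and forwards it downstream when
    /// `updates_tx` is provided; the job key ensures overlapping scans collapse
    /// into a single run.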
4105 fn schedule_scan(
4106 &mut self,
4107 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4108 cx: &mut Context<Self>,
4109 ) {
4110 let this = cx.weak_entity();
4111 let _ = self.send_keyed_job(
4112 Some(GitJobKey::ReloadGitState),
4113 None,
4114 |state, mut cx| async move {
4115 log::debug!("run scheduled git status scan");
4116
4117 let Some(this) = this.upgrade() else {
4118 return Ok(());
4119 };
4120 let RepositoryState::Local { backend, .. } = state else {
4121 bail!("not a local repository")
4122 };
4123 let (snapshot, events) = this
4124 .read_with(&mut cx, |this, _| {
4125 compute_snapshot(
4126 this.id,
4127 this.work_directory_abs_path.clone(),
4128 this.snapshot.clone(),
4129 backend.clone(),
4130 )
4131 })?
4132 .await?;
4133 this.update(&mut cx, |this, cx| {
4134 this.snapshot = snapshot.clone();
4135 for event in events {
4136 cx.emit(event);
4137 }
4138 })?;
4139 if let Some(updates_tx) = updates_tx {
4140 updates_tx
4141 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4142 .ok();
4143 }
4144 Ok(())
4145 },
4146 );
4147 }
4148
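    /// Spawns the worker task that runs git jobs for a local repository. It loads
    /// the project's directory environment, opens the repository through the
    /// filesystem backend, and then drains queued jobs one at a time.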
4149 fn spawn_local_git_worker(
4150 work_directory_abs_path: Arc<Path>,
4151 dot_git_abs_path: Arc<Path>,
4152 _repository_dir_abs_path: Arc<Path>,
4153 _common_dir_abs_path: Arc<Path>,
4154 project_environment: WeakEntity<ProjectEnvironment>,
4155 fs: Arc<dyn Fs>,
4156 cx: &mut Context<Self>,
4157 ) -> mpsc::UnboundedSender<GitJob> {
4158 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4159
4160 cx.spawn(async move |_, cx| {
4161 let environment = project_environment
4162 .upgrade()
4163 .context("missing project environment")?
4164 .update(cx, |project_environment, cx| {
4165 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4166 })?
4167 .await
4168 .unwrap_or_else(|| {
4169 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4170 HashMap::default()
4171 });
4172 let backend = cx
4173 .background_spawn(async move {
4174 fs.open_repo(&dot_git_abs_path)
4175 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4176 })
4177 .await?;
4178
4179 if let Some(git_hosting_provider_registry) =
4180 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4181 {
4182 git_hosting_providers::register_additional_providers(
4183 git_hosting_provider_registry,
4184 backend.clone(),
4185 );
4186 }
4187
4188 let state = RepositoryState::Local {
4189 backend,
4190 environment: Arc::new(environment),
4191 };
4192 let mut jobs = VecDeque::new();
4193 loop {
4194 while let Ok(Some(next_job)) = job_rx.try_next() {
4195 jobs.push_back(next_job);
4196 }
4197
4198 if let Some(job) = jobs.pop_front() {
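                    // Skip this job if a newer job with the same key is already queued;
                    // only the most recently enqueued job for a given key needs to run.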
4199 if let Some(current_key) = &job.key {
4200 if jobs
4201 .iter()
4202 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4203 {
4204 continue;
4205 }
4206 }
4207 (job.job)(state.clone(), cx).await;
4208 } else if let Some(job) = job_rx.next().await {
4209 jobs.push_back(job);
4210 } else {
4211 break;
4212 }
4213 }
4214 anyhow::Ok(())
4215 })
4216 .detach_and_log_err(cx);
4217
4218 job_tx
4219 }
4220
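    /// Spawns the worker task for a repository hosted by a collaborator; each job
    /// receives a `RepositoryState::Remote` and issues its own RPC requests
    /// through `client`.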
4221 fn spawn_remote_git_worker(
4222 project_id: ProjectId,
4223 client: AnyProtoClient,
4224 cx: &mut Context<Self>,
4225 ) -> mpsc::UnboundedSender<GitJob> {
4226 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4227
4228 cx.spawn(async move |_, cx| {
4229 let state = RepositoryState::Remote { project_id, client };
4230 let mut jobs = VecDeque::new();
4231 loop {
4232 while let Ok(Some(next_job)) = job_rx.try_next() {
4233 jobs.push_back(next_job);
4234 }
4235
4236 if let Some(job) = jobs.pop_front() {
4237 if let Some(current_key) = &job.key {
4238 if jobs
4239 .iter()
4240 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4241 {
4242 continue;
4243 }
4244 }
4245 (job.job)(state.clone(), cx).await;
4246 } else if let Some(job) = job_rx.next().await {
4247 jobs.push_back(job);
4248 } else {
4249 break;
4250 }
4251 }
4252 anyhow::Ok(())
4253 })
4254 .detach_and_log_err(cx);
4255
4256 job_tx
4257 }
4258
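    /// Loads the staged (index) text for `repo_path`, either from the local backend
    /// or from the host via `proto::OpenUnstagedDiff`.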
4259 fn load_staged_text(
4260 &mut self,
4261 buffer_id: BufferId,
4262 repo_path: RepoPath,
4263 cx: &App,
4264 ) -> Task<Result<Option<String>>> {
4265 let rx = self.send_job(None, move |state, _| async move {
4266 match state {
4267 RepositoryState::Local { backend, .. } => {
4268 anyhow::Ok(backend.load_index_text(repo_path).await)
4269 }
4270 RepositoryState::Remote { project_id, client } => {
4271 let response = client
4272 .request(proto::OpenUnstagedDiff {
4273 project_id: project_id.to_proto(),
4274 buffer_id: buffer_id.to_proto(),
4275 })
4276 .await?;
4277 Ok(response.staged_text)
4278 }
4279 }
4280 });
4281 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4282 }
4283
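    /// Loads the HEAD and index texts used as diff bases for a buffer. When the two
    /// are identical, a single `SetBoth` value is returned; otherwise the index and
    /// head texts are returned separately.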
4284 fn load_committed_text(
4285 &mut self,
4286 buffer_id: BufferId,
4287 repo_path: RepoPath,
4288 cx: &App,
4289 ) -> Task<Result<DiffBasesChange>> {
4290 let rx = self.send_job(None, move |state, _| async move {
4291 match state {
4292 RepositoryState::Local { backend, .. } => {
4293 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4294 let staged_text = backend.load_index_text(repo_path).await;
4295 let diff_bases_change = if committed_text == staged_text {
4296 DiffBasesChange::SetBoth(committed_text)
4297 } else {
4298 DiffBasesChange::SetEach {
4299 index: staged_text,
4300 head: committed_text,
4301 }
4302 };
4303 anyhow::Ok(diff_bases_change)
4304 }
4305 RepositoryState::Remote { project_id, client } => {
4306 use proto::open_uncommitted_diff_response::Mode;
4307
4308 let response = client
4309 .request(proto::OpenUncommittedDiff {
4310 project_id: project_id.to_proto(),
4311 buffer_id: buffer_id.to_proto(),
4312 })
4313 .await?;
4314 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4315 let bases = match mode {
4316 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4317 Mode::IndexAndHead => DiffBasesChange::SetEach {
4318 head: response.committed_text,
4319 index: response.staged_text,
4320 },
4321 };
4322 Ok(bases)
4323 }
4324 }
4325 });
4326
4327 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4328 }
4329
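    /// Records that `paths` may have changed and schedules a keyed job to refresh
    /// their git statuses, updating the snapshot and notifying downstream listeners
    /// only when a status actually changed.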
4330 fn paths_changed(
4331 &mut self,
4332 paths: Vec<RepoPath>,
4333 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4334 cx: &mut Context<Self>,
4335 ) {
4336 self.paths_needing_status_update.extend(paths);
4337
4338 let this = cx.weak_entity();
4339 let _ = self.send_keyed_job(
4340 Some(GitJobKey::RefreshStatuses),
4341 None,
4342 |state, mut cx| async move {
4343 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4344 (
4345 this.snapshot.clone(),
4346 mem::take(&mut this.paths_needing_status_update),
4347 )
4348 })?;
4349 let RepositoryState::Local { backend, .. } = state else {
4350 bail!("not a local repository")
4351 };
4352
4353 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4354 let statuses = backend.status(&paths).await?;
4355
4356 let changed_path_statuses = cx
4357 .background_spawn(async move {
4358 let mut changed_path_statuses = Vec::new();
4359 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4360 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4361
4362 for (repo_path, status) in &*statuses.entries {
4363 changed_paths.remove(repo_path);
4364 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left) {
4365 if cursor.item().is_some_and(|entry| entry.status == *status) {
4366 continue;
4367 }
4368 }
4369
4370 changed_path_statuses.push(Edit::Insert(StatusEntry {
4371 repo_path: repo_path.clone(),
4372 status: *status,
4373 }));
4374 }
4375 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4376 for path in changed_paths.into_iter() {
4377 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
4378 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4379 }
4380 }
4381 changed_path_statuses
4382 })
4383 .await;
4384
4385 this.update(&mut cx, |this, cx| {
4386 if !changed_path_statuses.is_empty() {
4387 this.snapshot
4388 .statuses_by_path
4389 .edit(changed_path_statuses, &());
4390 this.snapshot.scan_id += 1;
4391 if let Some(updates_tx) = updates_tx {
4392 updates_tx
4393 .unbounded_send(DownstreamUpdate::UpdateRepository(
4394 this.snapshot.clone(),
4395 ))
4396 .ok();
4397 }
4398 }
4399 cx.emit(RepositoryEvent::Updated {
4400 full_scan: false,
4401 new_instance: false,
4402 });
4403 })
4404 },
4405 );
4406 }
4407
4408    /// Returns the currently running git command and when it started, if any.
4409 pub fn current_job(&self) -> Option<JobInfo> {
4410 self.active_jobs.values().next().cloned()
4411 }
4412
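    /// Enqueues a no-op job; the returned receiver resolves once all previously
    /// queued jobs have completed, which makes it useful as a synchronization barrier.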
4413 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4414 self.send_job(None, |_, _| async {})
4415 }
4416}
4417
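/// Builds a permalink for a file vendored in the Cargo registry by reading the
/// `.cargo_vcs_info.json` and `Cargo.toml` shipped with the crate to recover the
/// upstream repository URL and commit SHA.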
4418fn get_permalink_in_rust_registry_src(
4419 provider_registry: Arc<GitHostingProviderRegistry>,
4420 path: PathBuf,
4421 selection: Range<u32>,
4422) -> Result<url::Url> {
4423 #[derive(Deserialize)]
4424 struct CargoVcsGit {
4425 sha1: String,
4426 }
4427
4428 #[derive(Deserialize)]
4429 struct CargoVcsInfo {
4430 git: CargoVcsGit,
4431 path_in_vcs: String,
4432 }
4433
4434 #[derive(Deserialize)]
4435 struct CargoPackage {
4436 repository: String,
4437 }
4438
4439 #[derive(Deserialize)]
4440 struct CargoToml {
4441 package: CargoPackage,
4442 }
4443
4444 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4445 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4446 Some((dir, json))
4447 }) else {
4448 bail!("No .cargo_vcs_info.json found in parent directories")
4449 };
4450 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4451 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4452 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4453 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4454 .context("parsing package.repository field of manifest")?;
4455 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4456 let permalink = provider.build_permalink(
4457 remote,
4458 BuildPermalinkParams {
4459 sha: &cargo_vcs_info.git.sha1,
4460 path: &path.to_string_lossy(),
4461 selection: Some(selection),
4462 },
4463 );
4464 Ok(permalink)
4465}
4466
4467fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4468 let Some(blame) = blame else {
4469 return proto::BlameBufferResponse {
4470 blame_response: None,
4471 };
4472 };
4473
4474 let entries = blame
4475 .entries
4476 .into_iter()
4477 .map(|entry| proto::BlameEntry {
4478 sha: entry.sha.as_bytes().into(),
4479 start_line: entry.range.start,
4480 end_line: entry.range.end,
4481 original_line_number: entry.original_line_number,
4482 author: entry.author,
4483 author_mail: entry.author_mail,
4484 author_time: entry.author_time,
4485 author_tz: entry.author_tz,
4486 committer: entry.committer_name,
4487 committer_mail: entry.committer_email,
4488 committer_time: entry.committer_time,
4489 committer_tz: entry.committer_tz,
4490 summary: entry.summary,
4491 previous: entry.previous,
4492 filename: entry.filename,
4493 })
4494 .collect::<Vec<_>>();
4495
4496 let messages = blame
4497 .messages
4498 .into_iter()
4499 .map(|(oid, message)| proto::CommitMessage {
4500 oid: oid.as_bytes().into(),
4501 message,
4502 })
4503 .collect::<Vec<_>>();
4504
4505 proto::BlameBufferResponse {
4506 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4507 entries,
4508 messages,
4509 remote_url: blame.remote_url,
4510 }),
4511 }
4512}
4513
4514fn deserialize_blame_buffer_response(
4515 response: proto::BlameBufferResponse,
4516) -> Option<git::blame::Blame> {
4517 let response = response.blame_response?;
4518 let entries = response
4519 .entries
4520 .into_iter()
4521 .filter_map(|entry| {
4522 Some(git::blame::BlameEntry {
4523 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4524 range: entry.start_line..entry.end_line,
4525 original_line_number: entry.original_line_number,
4526 committer_name: entry.committer,
4527 committer_time: entry.committer_time,
4528 committer_tz: entry.committer_tz,
4529 committer_email: entry.committer_mail,
4530 author: entry.author,
4531 author_mail: entry.author_mail,
4532 author_time: entry.author_time,
4533 author_tz: entry.author_tz,
4534 summary: entry.summary,
4535 previous: entry.previous,
4536 filename: entry.filename,
4537 })
4538 })
4539 .collect::<Vec<_>>();
4540
4541 let messages = response
4542 .messages
4543 .into_iter()
4544 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4545 .collect::<HashMap<_, _>>();
4546
4547 Some(Blame {
4548 entries,
4549 messages,
4550 remote_url: response.remote_url,
4551 })
4552}
4553
4554fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4555 proto::Branch {
4556 is_head: branch.is_head,
4557 ref_name: branch.ref_name.to_string(),
4558 unix_timestamp: branch
4559 .most_recent_commit
4560 .as_ref()
4561 .map(|commit| commit.commit_timestamp as u64),
4562 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4563 ref_name: upstream.ref_name.to_string(),
4564 tracking: upstream
4565 .tracking
4566 .status()
4567 .map(|upstream| proto::UpstreamTracking {
4568 ahead: upstream.ahead as u64,
4569 behind: upstream.behind as u64,
4570 }),
4571 }),
4572 most_recent_commit: branch
4573 .most_recent_commit
4574 .as_ref()
4575 .map(|commit| proto::CommitSummary {
4576 sha: commit.sha.to_string(),
4577 subject: commit.subject.to_string(),
4578 commit_timestamp: commit.commit_timestamp,
4579 }),
4580 }
4581}
4582
4583fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4584 git::repository::Branch {
4585 is_head: proto.is_head,
4586 ref_name: proto.ref_name.clone().into(),
4587 upstream: proto
4588 .upstream
4589 .as_ref()
4590 .map(|upstream| git::repository::Upstream {
4591 ref_name: upstream.ref_name.to_string().into(),
4592 tracking: upstream
4593 .tracking
4594 .as_ref()
4595 .map(|tracking| {
4596 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4597 ahead: tracking.ahead as u32,
4598 behind: tracking.behind as u32,
4599 })
4600 })
4601 .unwrap_or(git::repository::UpstreamTracking::Gone),
4602 }),
4603 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4604 git::repository::CommitSummary {
4605 sha: commit.sha.to_string().into(),
4606 subject: commit.subject.to_string().into(),
4607 commit_timestamp: commit.commit_timestamp,
4608 has_parent: true,
4609 }
4610 }),
4611 }
4612}
4613
4614fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4615 proto::GitCommitDetails {
4616 sha: commit.sha.to_string(),
4617 message: commit.message.to_string(),
4618 commit_timestamp: commit.commit_timestamp,
4619 author_email: commit.author_email.to_string(),
4620 author_name: commit.author_name.to_string(),
4621 }
4622}
4623
4624fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4625 CommitDetails {
4626 sha: proto.sha.clone().into(),
4627 message: proto.message.clone().into(),
4628 commit_timestamp: proto.commit_timestamp,
4629 author_email: proto.author_email.clone().into(),
4630 author_name: proto.author_name.clone().into(),
4631 }
4632}
4633
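/// Recomputes a repository snapshot from scratch: current branch, head commit,
/// merge state, and the full set of per-path statuses. Also returns the events
/// describing how the snapshot changed relative to `prev_snapshot`.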
4634async fn compute_snapshot(
4635 id: RepositoryId,
4636 work_directory_abs_path: Arc<Path>,
4637 prev_snapshot: RepositorySnapshot,
4638 backend: Arc<dyn GitRepository>,
4639) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4640 let mut events = Vec::new();
4641 let branches = backend.branches().await?;
4642 let branch = branches.into_iter().find(|branch| branch.is_head);
4643 let statuses = backend
4644 .status(std::slice::from_ref(&WORK_DIRECTORY_REPO_PATH))
4645 .await?;
4646 let statuses_by_path = SumTree::from_iter(
4647 statuses
4648 .entries
4649 .iter()
4650 .map(|(repo_path, status)| StatusEntry {
4651 repo_path: repo_path.clone(),
4652 status: *status,
4653 }),
4654 &(),
4655 );
4656 let (merge_details, merge_heads_changed) =
4657 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4658 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4659
4660 if merge_heads_changed
4661 || branch != prev_snapshot.branch
4662 || statuses_by_path != prev_snapshot.statuses_by_path
4663 {
4664 events.push(RepositoryEvent::Updated {
4665 full_scan: true,
4666 new_instance: false,
4667 });
4668 }
4669
4670    // Merge conflict paths are cached so that staging or unstaging files does not
4671    // change them; they are only refreshed when the merge heads change (e.g. when a commit is made).
4672 if merge_heads_changed {
4673 events.push(RepositoryEvent::MergeHeadsChanged);
4674 }
4675
4676    // The head commit is still needed when `branch` is `None`, e.g. in a detached HEAD state.
4677 let head_commit = match backend.head_sha().await {
4678 Some(head_sha) => backend.show(head_sha).await.log_err(),
4679 None => None,
4680 };
4681
4682 let snapshot = RepositorySnapshot {
4683 id,
4684 statuses_by_path,
4685 work_directory_abs_path,
4686 scan_id: prev_snapshot.scan_id + 1,
4687 branch,
4688 head_commit,
4689 merge: merge_details,
4690 };
4691
4692 Ok((snapshot, events))
4693}
4694
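/// Converts a protobuf file status into a `FileStatus`, falling back to the legacy
/// `simple_status` code when no structured variant is present.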
4695fn status_from_proto(
4696 simple_status: i32,
4697 status: Option<proto::GitFileStatus>,
4698) -> anyhow::Result<FileStatus> {
4699 use proto::git_file_status::Variant;
4700
4701 let Some(variant) = status.and_then(|status| status.variant) else {
4702 let code = proto::GitStatus::from_i32(simple_status)
4703 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
4704 let result = match code {
4705 proto::GitStatus::Added => TrackedStatus {
4706 worktree_status: StatusCode::Added,
4707 index_status: StatusCode::Unmodified,
4708 }
4709 .into(),
4710 proto::GitStatus::Modified => TrackedStatus {
4711 worktree_status: StatusCode::Modified,
4712 index_status: StatusCode::Unmodified,
4713 }
4714 .into(),
4715 proto::GitStatus::Conflict => UnmergedStatus {
4716 first_head: UnmergedStatusCode::Updated,
4717 second_head: UnmergedStatusCode::Updated,
4718 }
4719 .into(),
4720 proto::GitStatus::Deleted => TrackedStatus {
4721 worktree_status: StatusCode::Deleted,
4722 index_status: StatusCode::Unmodified,
4723 }
4724 .into(),
4725 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
4726 };
4727 return Ok(result);
4728 };
4729
4730 let result = match variant {
4731 Variant::Untracked(_) => FileStatus::Untracked,
4732 Variant::Ignored(_) => FileStatus::Ignored,
4733 Variant::Unmerged(unmerged) => {
4734 let [first_head, second_head] =
4735 [unmerged.first_head, unmerged.second_head].map(|head| {
4736 let code = proto::GitStatus::from_i32(head)
4737 .with_context(|| format!("Invalid git status code: {head}"))?;
4738 let result = match code {
4739 proto::GitStatus::Added => UnmergedStatusCode::Added,
4740 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4741 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4742 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
4743 };
4744 Ok(result)
4745 });
4746 let [first_head, second_head] = [first_head?, second_head?];
4747 UnmergedStatus {
4748 first_head,
4749 second_head,
4750 }
4751 .into()
4752 }
4753 Variant::Tracked(tracked) => {
4754 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4755 .map(|status| {
4756 let code = proto::GitStatus::from_i32(status)
4757 .with_context(|| format!("Invalid git status code: {status}"))?;
4758 let result = match code {
4759 proto::GitStatus::Modified => StatusCode::Modified,
4760 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4761 proto::GitStatus::Added => StatusCode::Added,
4762 proto::GitStatus::Deleted => StatusCode::Deleted,
4763 proto::GitStatus::Renamed => StatusCode::Renamed,
4764 proto::GitStatus::Copied => StatusCode::Copied,
4765 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4766 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
4767 };
4768 Ok(result)
4769 });
4770 let [index_status, worktree_status] = [index_status?, worktree_status?];
4771 TrackedStatus {
4772 index_status,
4773 worktree_status,
4774 }
4775 .into()
4776 }
4777 };
4778 Ok(result)
4779}
4780
4781fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4782 use proto::git_file_status::{Tracked, Unmerged, Variant};
4783
4784 let variant = match status {
4785 FileStatus::Untracked => Variant::Untracked(Default::default()),
4786 FileStatus::Ignored => Variant::Ignored(Default::default()),
4787 FileStatus::Unmerged(UnmergedStatus {
4788 first_head,
4789 second_head,
4790 }) => Variant::Unmerged(Unmerged {
4791 first_head: unmerged_status_to_proto(first_head),
4792 second_head: unmerged_status_to_proto(second_head),
4793 }),
4794 FileStatus::Tracked(TrackedStatus {
4795 index_status,
4796 worktree_status,
4797 }) => Variant::Tracked(Tracked {
4798 index_status: tracked_status_to_proto(index_status),
4799 worktree_status: tracked_status_to_proto(worktree_status),
4800 }),
4801 };
4802 proto::GitFileStatus {
4803 variant: Some(variant),
4804 }
4805}
4806
4807fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4808 match code {
4809 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4810 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4811 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4812 }
4813}
4814
4815fn tracked_status_to_proto(code: StatusCode) -> i32 {
4816 match code {
4817 StatusCode::Added => proto::GitStatus::Added as _,
4818 StatusCode::Deleted => proto::GitStatus::Deleted as _,
4819 StatusCode::Modified => proto::GitStatus::Modified as _,
4820 StatusCode::Renamed => proto::GitStatus::Renamed as _,
4821 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
4822 StatusCode::Copied => proto::GitStatus::Copied as _,
4823 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
4824 }
4825}