1mod conflict_set;
2pub mod git_traversal;
3
4use crate::{
5 ProjectEnvironment, ProjectItem, ProjectPath,
6 buffer_store::{BufferStore, BufferStoreEvent},
7 worktree_store::{WorktreeStore, WorktreeStoreEvent},
8};
9use anyhow::{Context as _, Result, anyhow, bail};
10use askpass::AskPassDelegate;
11use buffer_diff::{BufferDiff, BufferDiffEvent};
12use client::ProjectId;
13use collections::HashMap;
14pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
15use fs::Fs;
16use futures::{
17 FutureExt, StreamExt as _,
18 channel::{mpsc, oneshot},
19 future::{self, Shared},
20};
21use git::{
22 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
23 blame::Blame,
24 parse_git_remote_url,
25 repository::{
26 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, GitRepository,
27 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
28 UpstreamTrackingStatus,
29 },
30 status::{
31 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
32 },
33};
34use gpui::{
35 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
36 WeakEntity,
37};
38use language::{
39 Buffer, BufferEvent, Language, LanguageRegistry,
40 proto::{deserialize_version, serialize_version},
41};
42use parking_lot::Mutex;
43use postage::stream::Stream as _;
44use rpc::{
45 AnyProtoClient, TypedEnvelope,
46 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
47};
48use serde::Deserialize;
49use std::{
50 cmp::Ordering,
51 collections::{BTreeSet, VecDeque},
52 future::Future,
53 mem,
54 ops::Range,
55 path::{Path, PathBuf},
56 sync::{
57 Arc,
58 atomic::{self, AtomicU64},
59 },
60 time::Instant,
61};
62use sum_tree::{Edit, SumTree, TreeSet};
63use text::{Bias, BufferId};
64use util::{ResultExt, debug_panic, post_inc};
65use worktree::{
66 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet,
67 UpdatedGitRepository, Worktree,
68};
69
70pub struct GitStore {
71 state: GitStoreState,
72 buffer_store: Entity<BufferStore>,
73 worktree_store: Entity<WorktreeStore>,
74 repositories: HashMap<RepositoryId, Entity<Repository>>,
75 active_repo_id: Option<RepositoryId>,
76 #[allow(clippy::type_complexity)]
77 loading_diffs:
78 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
79 diffs: HashMap<BufferId, Entity<BufferGitState>>,
80 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
81 _subscriptions: Vec<Subscription>,
82}
83
84#[derive(Default)]
85struct SharedDiffs {
86 unstaged: Option<Entity<BufferDiff>>,
87 uncommitted: Option<Entity<BufferDiff>>,
88}
89
90struct BufferGitState {
91 unstaged_diff: Option<WeakEntity<BufferDiff>>,
92 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
93 conflict_set: Option<WeakEntity<ConflictSet>>,
94 recalculate_diff_task: Option<Task<Result<()>>>,
95 reparse_conflict_markers_task: Option<Task<Result<()>>>,
96 language: Option<Arc<Language>>,
97 language_registry: Option<Arc<LanguageRegistry>>,
98 conflict_updated_futures: Vec<oneshot::Sender<()>>,
99 recalculating_tx: postage::watch::Sender<bool>,
100
101 /// These operation counts are used to ensure that head and index text
102 /// values read from the git repository are up-to-date with any hunk staging
103 /// operations that have been performed on the BufferDiff.
104 ///
105 /// The operation count is incremented immediately when the user initiates a
106 /// hunk stage/unstage operation. Then, upon finishing writing the new index
107 /// text do disk, the `operation count as of write` is updated to reflect
108 /// the operation count that prompted the write.
109 hunk_staging_operation_count: usize,
110 hunk_staging_operation_count_as_of_write: usize,
111
112 head_text: Option<Arc<String>>,
113 index_text: Option<Arc<String>>,
114 head_changed: bool,
115 index_changed: bool,
116 language_changed: bool,
117}
118
119#[derive(Clone, Debug)]
120enum DiffBasesChange {
121 SetIndex(Option<String>),
122 SetHead(Option<String>),
123 SetEach {
124 index: Option<String>,
125 head: Option<String>,
126 },
127 SetBoth(Option<String>),
128}
129
130#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
131enum DiffKind {
132 Unstaged,
133 Uncommitted,
134}
135
136enum GitStoreState {
137 Local {
138 next_repository_id: Arc<AtomicU64>,
139 downstream: Option<LocalDownstreamState>,
140 project_environment: Entity<ProjectEnvironment>,
141 fs: Arc<dyn Fs>,
142 },
143 Ssh {
144 upstream_client: AnyProtoClient,
145 upstream_project_id: ProjectId,
146 downstream: Option<(AnyProtoClient, ProjectId)>,
147 },
148 Remote {
149 upstream_client: AnyProtoClient,
150 upstream_project_id: ProjectId,
151 },
152}
153
154enum DownstreamUpdate {
155 UpdateRepository(RepositorySnapshot),
156 RemoveRepository(RepositoryId),
157}
158
159struct LocalDownstreamState {
160 client: AnyProtoClient,
161 project_id: ProjectId,
162 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
163 _task: Task<Result<()>>,
164}
165
166#[derive(Clone, Debug)]
167pub struct GitStoreCheckpoint {
168 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
169}
170
171#[derive(Clone, Debug, PartialEq, Eq)]
172pub struct StatusEntry {
173 pub repo_path: RepoPath,
174 pub status: FileStatus,
175}
176
177impl StatusEntry {
178 fn to_proto(&self) -> proto::StatusEntry {
179 let simple_status = match self.status {
180 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
181 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
182 FileStatus::Tracked(TrackedStatus {
183 index_status,
184 worktree_status,
185 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
186 worktree_status
187 } else {
188 index_status
189 }),
190 };
191
192 proto::StatusEntry {
193 repo_path: self.repo_path.as_ref().to_proto(),
194 simple_status,
195 status: Some(status_to_proto(self.status)),
196 }
197 }
198}
199
200impl TryFrom<proto::StatusEntry> for StatusEntry {
201 type Error = anyhow::Error;
202
203 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
204 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
205 let status = status_from_proto(value.simple_status, value.status)?;
206 Ok(Self { repo_path, status })
207 }
208}
209
210impl sum_tree::Item for StatusEntry {
211 type Summary = PathSummary<GitSummary>;
212
213 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
214 PathSummary {
215 max_path: self.repo_path.0.clone(),
216 item_summary: self.status.summary(),
217 }
218 }
219}
220
221impl sum_tree::KeyedItem for StatusEntry {
222 type Key = PathKey;
223
224 fn key(&self) -> Self::Key {
225 PathKey(self.repo_path.0.clone())
226 }
227}
228
229#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
230pub struct RepositoryId(pub u64);
231
232#[derive(Clone, Debug, Default, PartialEq, Eq)]
233pub struct MergeDetails {
234 pub conflicted_paths: TreeSet<RepoPath>,
235 pub message: Option<SharedString>,
236 pub heads: Vec<Option<SharedString>>,
237}
238
239#[derive(Clone, Debug, PartialEq, Eq)]
240pub struct RepositorySnapshot {
241 pub id: RepositoryId,
242 pub statuses_by_path: SumTree<StatusEntry>,
243 pub work_directory_abs_path: Arc<Path>,
244 pub branch: Option<Branch>,
245 pub head_commit: Option<CommitDetails>,
246 pub scan_id: u64,
247 pub merge: MergeDetails,
248}
249
250type JobId = u64;
251
252#[derive(Clone, Debug, PartialEq, Eq)]
253pub struct JobInfo {
254 pub start: Instant,
255 pub message: SharedString,
256}
257
258pub struct Repository {
259 this: WeakEntity<Self>,
260 snapshot: RepositorySnapshot,
261 commit_message_buffer: Option<Entity<Buffer>>,
262 git_store: WeakEntity<GitStore>,
263 // For a local repository, holds paths that have had worktree events since the last status scan completed,
264 // and that should be examined during the next status scan.
265 paths_needing_status_update: BTreeSet<RepoPath>,
266 job_sender: mpsc::UnboundedSender<GitJob>,
267 active_jobs: HashMap<JobId, JobInfo>,
268 job_id: JobId,
269 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
270 latest_askpass_id: u64,
271}
272
273impl std::ops::Deref for Repository {
274 type Target = RepositorySnapshot;
275
276 fn deref(&self) -> &Self::Target {
277 &self.snapshot
278 }
279}
280
281#[derive(Clone)]
282pub enum RepositoryState {
283 Local {
284 backend: Arc<dyn GitRepository>,
285 environment: Arc<HashMap<String, String>>,
286 },
287 Remote {
288 project_id: ProjectId,
289 client: AnyProtoClient,
290 },
291}
292
293#[derive(Clone, Debug)]
294pub enum RepositoryEvent {
295 Updated { full_scan: bool },
296 MergeHeadsChanged,
297}
298
299#[derive(Clone, Debug)]
300pub struct JobsUpdated;
301
302#[derive(Debug)]
303pub enum GitStoreEvent {
304 ActiveRepositoryChanged(Option<RepositoryId>),
305 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
306 RepositoryAdded(RepositoryId),
307 RepositoryRemoved(RepositoryId),
308 IndexWriteError(anyhow::Error),
309 JobsUpdated,
310 ConflictsUpdated,
311}
312
313impl EventEmitter<RepositoryEvent> for Repository {}
314impl EventEmitter<JobsUpdated> for Repository {}
315impl EventEmitter<GitStoreEvent> for GitStore {}
316
317pub struct GitJob {
318 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
319 key: Option<GitJobKey>,
320}
321
322#[derive(PartialEq, Eq)]
323enum GitJobKey {
324 WriteIndex(RepoPath),
325 ReloadBufferDiffBases,
326 RefreshStatuses,
327 ReloadGitState,
328}
329
330impl GitStore {
331 pub fn local(
332 worktree_store: &Entity<WorktreeStore>,
333 buffer_store: Entity<BufferStore>,
334 environment: Entity<ProjectEnvironment>,
335 fs: Arc<dyn Fs>,
336 cx: &mut Context<Self>,
337 ) -> Self {
338 Self::new(
339 worktree_store.clone(),
340 buffer_store,
341 GitStoreState::Local {
342 next_repository_id: Arc::new(AtomicU64::new(1)),
343 downstream: None,
344 project_environment: environment,
345 fs,
346 },
347 cx,
348 )
349 }
350
351 pub fn remote(
352 worktree_store: &Entity<WorktreeStore>,
353 buffer_store: Entity<BufferStore>,
354 upstream_client: AnyProtoClient,
355 project_id: ProjectId,
356 cx: &mut Context<Self>,
357 ) -> Self {
358 Self::new(
359 worktree_store.clone(),
360 buffer_store,
361 GitStoreState::Remote {
362 upstream_client,
363 upstream_project_id: project_id,
364 },
365 cx,
366 )
367 }
368
369 pub fn ssh(
370 worktree_store: &Entity<WorktreeStore>,
371 buffer_store: Entity<BufferStore>,
372 upstream_client: AnyProtoClient,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 Self::new(
376 worktree_store.clone(),
377 buffer_store,
378 GitStoreState::Ssh {
379 upstream_client,
380 upstream_project_id: ProjectId(SSH_PROJECT_ID),
381 downstream: None,
382 },
383 cx,
384 )
385 }
386
387 fn new(
388 worktree_store: Entity<WorktreeStore>,
389 buffer_store: Entity<BufferStore>,
390 state: GitStoreState,
391 cx: &mut Context<Self>,
392 ) -> Self {
393 let _subscriptions = vec![
394 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
395 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
396 ];
397
398 GitStore {
399 state,
400 buffer_store,
401 worktree_store,
402 repositories: HashMap::default(),
403 active_repo_id: None,
404 _subscriptions,
405 loading_diffs: HashMap::default(),
406 shared_diffs: HashMap::default(),
407 diffs: HashMap::default(),
408 }
409 }
410
411 pub fn init(client: &AnyProtoClient) {
412 client.add_entity_request_handler(Self::handle_get_remotes);
413 client.add_entity_request_handler(Self::handle_get_branches);
414 client.add_entity_request_handler(Self::handle_change_branch);
415 client.add_entity_request_handler(Self::handle_create_branch);
416 client.add_entity_request_handler(Self::handle_git_init);
417 client.add_entity_request_handler(Self::handle_push);
418 client.add_entity_request_handler(Self::handle_pull);
419 client.add_entity_request_handler(Self::handle_fetch);
420 client.add_entity_request_handler(Self::handle_stage);
421 client.add_entity_request_handler(Self::handle_unstage);
422 client.add_entity_request_handler(Self::handle_commit);
423 client.add_entity_request_handler(Self::handle_reset);
424 client.add_entity_request_handler(Self::handle_show);
425 client.add_entity_request_handler(Self::handle_load_commit_diff);
426 client.add_entity_request_handler(Self::handle_checkout_files);
427 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
428 client.add_entity_request_handler(Self::handle_set_index_text);
429 client.add_entity_request_handler(Self::handle_askpass);
430 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
431 client.add_entity_request_handler(Self::handle_git_diff);
432 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
433 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
434 client.add_entity_message_handler(Self::handle_update_diff_bases);
435 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
436 client.add_entity_request_handler(Self::handle_blame_buffer);
437 client.add_entity_message_handler(Self::handle_update_repository);
438 client.add_entity_message_handler(Self::handle_remove_repository);
439 }
440
441 pub fn is_local(&self) -> bool {
442 matches!(self.state, GitStoreState::Local { .. })
443 }
444
445 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
446 match &mut self.state {
447 GitStoreState::Ssh {
448 downstream: downstream_client,
449 ..
450 } => {
451 for repo in self.repositories.values() {
452 let update = repo.read(cx).snapshot.initial_update(project_id);
453 for update in split_repository_update(update) {
454 client.send(update).log_err();
455 }
456 }
457 *downstream_client = Some((client, ProjectId(project_id)));
458 }
459 GitStoreState::Local {
460 downstream: downstream_client,
461 ..
462 } => {
463 let mut snapshots = HashMap::default();
464 let (updates_tx, mut updates_rx) = mpsc::unbounded();
465 for repo in self.repositories.values() {
466 updates_tx
467 .unbounded_send(DownstreamUpdate::UpdateRepository(
468 repo.read(cx).snapshot.clone(),
469 ))
470 .ok();
471 }
472 *downstream_client = Some(LocalDownstreamState {
473 client: client.clone(),
474 project_id: ProjectId(project_id),
475 updates_tx,
476 _task: cx.spawn(async move |this, cx| {
477 cx.background_spawn(async move {
478 while let Some(update) = updates_rx.next().await {
479 match update {
480 DownstreamUpdate::UpdateRepository(snapshot) => {
481 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
482 {
483 let update =
484 snapshot.build_update(old_snapshot, project_id);
485 *old_snapshot = snapshot;
486 for update in split_repository_update(update) {
487 client.send(update)?;
488 }
489 } else {
490 let update = snapshot.initial_update(project_id);
491 for update in split_repository_update(update) {
492 client.send(update)?;
493 }
494 snapshots.insert(snapshot.id, snapshot);
495 }
496 }
497 DownstreamUpdate::RemoveRepository(id) => {
498 client.send(proto::RemoveRepository {
499 project_id,
500 id: id.to_proto(),
501 })?;
502 }
503 }
504 }
505 anyhow::Ok(())
506 })
507 .await
508 .ok();
509 this.update(cx, |this, _| {
510 if let GitStoreState::Local {
511 downstream: downstream_client,
512 ..
513 } = &mut this.state
514 {
515 downstream_client.take();
516 } else {
517 unreachable!("unshared called on remote store");
518 }
519 })
520 }),
521 });
522 }
523 GitStoreState::Remote { .. } => {
524 debug_panic!("shared called on remote store");
525 }
526 }
527 }
528
529 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
530 match &mut self.state {
531 GitStoreState::Local {
532 downstream: downstream_client,
533 ..
534 } => {
535 downstream_client.take();
536 }
537 GitStoreState::Ssh {
538 downstream: downstream_client,
539 ..
540 } => {
541 downstream_client.take();
542 }
543 GitStoreState::Remote { .. } => {
544 debug_panic!("unshared called on remote store");
545 }
546 }
547 self.shared_diffs.clear();
548 }
549
550 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
551 self.shared_diffs.remove(peer_id);
552 }
553
554 pub fn active_repository(&self) -> Option<Entity<Repository>> {
555 self.active_repo_id
556 .as_ref()
557 .map(|id| self.repositories[&id].clone())
558 }
559
560 pub fn open_unstaged_diff(
561 &mut self,
562 buffer: Entity<Buffer>,
563 cx: &mut Context<Self>,
564 ) -> Task<Result<Entity<BufferDiff>>> {
565 let buffer_id = buffer.read(cx).remote_id();
566 if let Some(diff_state) = self.diffs.get(&buffer_id) {
567 if let Some(unstaged_diff) = diff_state
568 .read(cx)
569 .unstaged_diff
570 .as_ref()
571 .and_then(|weak| weak.upgrade())
572 {
573 if let Some(task) =
574 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
575 {
576 return cx.background_executor().spawn(async move {
577 task.await;
578 Ok(unstaged_diff)
579 });
580 }
581 return Task::ready(Ok(unstaged_diff));
582 }
583 }
584
585 let Some((repo, repo_path)) =
586 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
587 else {
588 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
589 };
590
591 let task = self
592 .loading_diffs
593 .entry((buffer_id, DiffKind::Unstaged))
594 .or_insert_with(|| {
595 let staged_text = repo.update(cx, |repo, cx| {
596 repo.load_staged_text(buffer_id, repo_path, cx)
597 });
598 cx.spawn(async move |this, cx| {
599 Self::open_diff_internal(
600 this,
601 DiffKind::Unstaged,
602 staged_text.await.map(DiffBasesChange::SetIndex),
603 buffer,
604 cx,
605 )
606 .await
607 .map_err(Arc::new)
608 })
609 .shared()
610 })
611 .clone();
612
613 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
614 }
615
616 pub fn open_uncommitted_diff(
617 &mut self,
618 buffer: Entity<Buffer>,
619 cx: &mut Context<Self>,
620 ) -> Task<Result<Entity<BufferDiff>>> {
621 let buffer_id = buffer.read(cx).remote_id();
622
623 if let Some(diff_state) = self.diffs.get(&buffer_id) {
624 if let Some(uncommitted_diff) = diff_state
625 .read(cx)
626 .uncommitted_diff
627 .as_ref()
628 .and_then(|weak| weak.upgrade())
629 {
630 if let Some(task) =
631 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
632 {
633 return cx.background_executor().spawn(async move {
634 task.await;
635 Ok(uncommitted_diff)
636 });
637 }
638 return Task::ready(Ok(uncommitted_diff));
639 }
640 }
641
642 let Some((repo, repo_path)) =
643 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
644 else {
645 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
646 };
647
648 let task = self
649 .loading_diffs
650 .entry((buffer_id, DiffKind::Uncommitted))
651 .or_insert_with(|| {
652 let changes = repo.update(cx, |repo, cx| {
653 repo.load_committed_text(buffer_id, repo_path, cx)
654 });
655
656 cx.spawn(async move |this, cx| {
657 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
658 .await
659 .map_err(Arc::new)
660 })
661 .shared()
662 })
663 .clone();
664
665 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
666 }
667
668 async fn open_diff_internal(
669 this: WeakEntity<Self>,
670 kind: DiffKind,
671 texts: Result<DiffBasesChange>,
672 buffer_entity: Entity<Buffer>,
673 cx: &mut AsyncApp,
674 ) -> Result<Entity<BufferDiff>> {
675 let diff_bases_change = match texts {
676 Err(e) => {
677 this.update(cx, |this, cx| {
678 let buffer = buffer_entity.read(cx);
679 let buffer_id = buffer.remote_id();
680 this.loading_diffs.remove(&(buffer_id, kind));
681 })?;
682 return Err(e);
683 }
684 Ok(change) => change,
685 };
686
687 this.update(cx, |this, cx| {
688 let buffer = buffer_entity.read(cx);
689 let buffer_id = buffer.remote_id();
690 let language = buffer.language().cloned();
691 let language_registry = buffer.language_registry();
692 let text_snapshot = buffer.text_snapshot();
693 this.loading_diffs.remove(&(buffer_id, kind));
694
695 let git_store = cx.weak_entity();
696 let diff_state = this
697 .diffs
698 .entry(buffer_id)
699 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
700
701 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
702
703 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
704 diff_state.update(cx, |diff_state, cx| {
705 diff_state.language = language;
706 diff_state.language_registry = language_registry;
707
708 match kind {
709 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
710 DiffKind::Uncommitted => {
711 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
712 diff
713 } else {
714 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
715 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
716 unstaged_diff
717 };
718
719 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
720 diff_state.uncommitted_diff = Some(diff.downgrade())
721 }
722 }
723
724 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
725 let rx = diff_state.wait_for_recalculation();
726
727 anyhow::Ok(async move {
728 if let Some(rx) = rx {
729 rx.await;
730 }
731 Ok(diff)
732 })
733 })
734 })??
735 .await
736 }
737
738 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
739 let diff_state = self.diffs.get(&buffer_id)?;
740 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
741 }
742
743 pub fn get_uncommitted_diff(
744 &self,
745 buffer_id: BufferId,
746 cx: &App,
747 ) -> Option<Entity<BufferDiff>> {
748 let diff_state = self.diffs.get(&buffer_id)?;
749 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
750 }
751
752 pub fn open_conflict_set(
753 &mut self,
754 buffer: Entity<Buffer>,
755 cx: &mut Context<Self>,
756 ) -> Entity<ConflictSet> {
757 log::debug!("open conflict set");
758 let buffer_id = buffer.read(cx).remote_id();
759
760 if let Some(git_state) = self.diffs.get(&buffer_id) {
761 if let Some(conflict_set) = git_state
762 .read(cx)
763 .conflict_set
764 .as_ref()
765 .and_then(|weak| weak.upgrade())
766 {
767 let conflict_set = conflict_set.clone();
768 let buffer_snapshot = buffer.read(cx).text_snapshot();
769
770 git_state.update(cx, |state, cx| {
771 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
772 });
773
774 return conflict_set;
775 }
776 }
777
778 let is_unmerged = self
779 .repository_and_path_for_buffer_id(buffer_id, cx)
780 .map_or(false, |(repo, path)| {
781 repo.read(cx)
782 .snapshot
783 .merge
784 .conflicted_paths
785 .contains(&path)
786 });
787 let git_store = cx.weak_entity();
788 let buffer_git_state = self
789 .diffs
790 .entry(buffer_id)
791 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
792 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
793
794 self._subscriptions
795 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
796 cx.emit(GitStoreEvent::ConflictsUpdated);
797 }));
798
799 buffer_git_state.update(cx, |state, cx| {
800 state.conflict_set = Some(conflict_set.downgrade());
801 let buffer_snapshot = buffer.read(cx).text_snapshot();
802 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
803 });
804
805 conflict_set
806 }
807
808 pub fn project_path_git_status(
809 &self,
810 project_path: &ProjectPath,
811 cx: &App,
812 ) -> Option<FileStatus> {
813 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
814 Some(repo.read(cx).status_for_path(&repo_path)?.status)
815 }
816
817 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
818 let mut work_directory_abs_paths = Vec::new();
819 let mut checkpoints = Vec::new();
820 for repository in self.repositories.values() {
821 repository.update(cx, |repository, _| {
822 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
823 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
824 });
825 }
826
827 cx.background_executor().spawn(async move {
828 let checkpoints = future::try_join_all(checkpoints).await?;
829 Ok(GitStoreCheckpoint {
830 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
831 .into_iter()
832 .zip(checkpoints)
833 .collect(),
834 })
835 })
836 }
837
838 pub fn restore_checkpoint(
839 &self,
840 checkpoint: GitStoreCheckpoint,
841 cx: &mut App,
842 ) -> Task<Result<()>> {
843 let repositories_by_work_dir_abs_path = self
844 .repositories
845 .values()
846 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
847 .collect::<HashMap<_, _>>();
848
849 let mut tasks = Vec::new();
850 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
851 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
852 let restore = repository.update(cx, |repository, _| {
853 repository.restore_checkpoint(checkpoint)
854 });
855 tasks.push(async move { restore.await? });
856 }
857 }
858 cx.background_spawn(async move {
859 future::try_join_all(tasks).await?;
860 Ok(())
861 })
862 }
863
864 /// Compares two checkpoints, returning true if they are equal.
865 pub fn compare_checkpoints(
866 &self,
867 left: GitStoreCheckpoint,
868 mut right: GitStoreCheckpoint,
869 cx: &mut App,
870 ) -> Task<Result<bool>> {
871 let repositories_by_work_dir_abs_path = self
872 .repositories
873 .values()
874 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
875 .collect::<HashMap<_, _>>();
876
877 let mut tasks = Vec::new();
878 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
879 if let Some(right_checkpoint) = right
880 .checkpoints_by_work_dir_abs_path
881 .remove(&work_dir_abs_path)
882 {
883 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
884 {
885 let compare = repository.update(cx, |repository, _| {
886 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
887 });
888
889 tasks.push(async move { compare.await? });
890 }
891 } else {
892 return Task::ready(Ok(false));
893 }
894 }
895 cx.background_spawn(async move {
896 Ok(future::try_join_all(tasks)
897 .await?
898 .into_iter()
899 .all(|result| result))
900 })
901 }
902
903 /// Blames a buffer.
904 pub fn blame_buffer(
905 &self,
906 buffer: &Entity<Buffer>,
907 version: Option<clock::Global>,
908 cx: &mut App,
909 ) -> Task<Result<Option<Blame>>> {
910 let buffer = buffer.read(cx);
911 let Some((repo, repo_path)) =
912 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
913 else {
914 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
915 };
916 let content = match &version {
917 Some(version) => buffer.rope_for_version(version).clone(),
918 None => buffer.as_rope().clone(),
919 };
920 let version = version.unwrap_or(buffer.version());
921 let buffer_id = buffer.remote_id();
922
923 let rx = repo.update(cx, |repo, _| {
924 repo.send_job(None, move |state, _| async move {
925 match state {
926 RepositoryState::Local { backend, .. } => backend
927 .blame(repo_path.clone(), content)
928 .await
929 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
930 .map(Some),
931 RepositoryState::Remote { project_id, client } => {
932 let response = client
933 .request(proto::BlameBuffer {
934 project_id: project_id.to_proto(),
935 buffer_id: buffer_id.into(),
936 version: serialize_version(&version),
937 })
938 .await?;
939 Ok(deserialize_blame_buffer_response(response))
940 }
941 }
942 })
943 });
944
945 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
946 }
947
948 pub fn get_permalink_to_line(
949 &self,
950 buffer: &Entity<Buffer>,
951 selection: Range<u32>,
952 cx: &mut App,
953 ) -> Task<Result<url::Url>> {
954 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
955 return Task::ready(Err(anyhow!("buffer has no file")));
956 };
957
958 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
959 &(file.worktree.read(cx).id(), file.path.clone()).into(),
960 cx,
961 ) else {
962 // If we're not in a Git repo, check whether this is a Rust source
963 // file in the Cargo registry (presumably opened with go-to-definition
964 // from a normal Rust file). If so, we can put together a permalink
965 // using crate metadata.
966 if buffer
967 .read(cx)
968 .language()
969 .is_none_or(|lang| lang.name() != "Rust".into())
970 {
971 return Task::ready(Err(anyhow!("no permalink available")));
972 }
973 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
974 return Task::ready(Err(anyhow!("no permalink available")));
975 };
976 return cx.spawn(async move |cx| {
977 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
978 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
979 .map_err(|_| anyhow!("no permalink available"))
980 });
981
982 // TODO remote case
983 };
984
985 let buffer_id = buffer.read(cx).remote_id();
986 let branch = repo.read(cx).branch.clone();
987 let remote = branch
988 .as_ref()
989 .and_then(|b| b.upstream.as_ref())
990 .and_then(|b| b.remote_name())
991 .unwrap_or("origin")
992 .to_string();
993
994 let rx = repo.update(cx, |repo, _| {
995 repo.send_job(None, move |state, cx| async move {
996 match state {
997 RepositoryState::Local { backend, .. } => {
998 let origin_url = backend
999 .remote_url(&remote)
1000 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
1001
1002 let sha = backend
1003 .head_sha()
1004 .await
1005 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
1006
1007 let provider_registry =
1008 cx.update(GitHostingProviderRegistry::default_global)?;
1009
1010 let (provider, remote) =
1011 parse_git_remote_url(provider_registry, &origin_url)
1012 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
1013
1014 let path = repo_path
1015 .to_str()
1016 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
1017
1018 Ok(provider.build_permalink(
1019 remote,
1020 BuildPermalinkParams {
1021 sha: &sha,
1022 path,
1023 selection: Some(selection),
1024 },
1025 ))
1026 }
1027 RepositoryState::Remote { project_id, client } => {
1028 let response = client
1029 .request(proto::GetPermalinkToLine {
1030 project_id: project_id.to_proto(),
1031 buffer_id: buffer_id.into(),
1032 selection: Some(proto::Range {
1033 start: selection.start as u64,
1034 end: selection.end as u64,
1035 }),
1036 })
1037 .await?;
1038
1039 url::Url::parse(&response.permalink).context("failed to parse permalink")
1040 }
1041 }
1042 })
1043 });
1044 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1045 }
1046
1047 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1048 match &self.state {
1049 GitStoreState::Local {
1050 downstream: downstream_client,
1051 ..
1052 } => downstream_client
1053 .as_ref()
1054 .map(|state| (state.client.clone(), state.project_id)),
1055 GitStoreState::Ssh {
1056 downstream: downstream_client,
1057 ..
1058 } => downstream_client.clone(),
1059 GitStoreState::Remote { .. } => None,
1060 }
1061 }
1062
1063 fn upstream_client(&self) -> Option<AnyProtoClient> {
1064 match &self.state {
1065 GitStoreState::Local { .. } => None,
1066 GitStoreState::Ssh {
1067 upstream_client, ..
1068 }
1069 | GitStoreState::Remote {
1070 upstream_client, ..
1071 } => Some(upstream_client.clone()),
1072 }
1073 }
1074
1075 fn on_worktree_store_event(
1076 &mut self,
1077 worktree_store: Entity<WorktreeStore>,
1078 event: &WorktreeStoreEvent,
1079 cx: &mut Context<Self>,
1080 ) {
1081 let GitStoreState::Local {
1082 project_environment,
1083 downstream,
1084 next_repository_id,
1085 fs,
1086 } = &self.state
1087 else {
1088 return;
1089 };
1090
1091 match event {
1092 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1093 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1094 for (relative_path, _, _) in updated_entries.iter() {
1095 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1096 &(*worktree_id, relative_path.clone()).into(),
1097 cx,
1098 ) else {
1099 continue;
1100 };
1101 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1102 }
1103
1104 for (repo, paths) in paths_by_git_repo {
1105 repo.update(cx, |repo, cx| {
1106 repo.paths_changed(
1107 paths,
1108 downstream
1109 .as_ref()
1110 .map(|downstream| downstream.updates_tx.clone()),
1111 cx,
1112 );
1113 });
1114 }
1115 }
1116 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1117 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1118 else {
1119 return;
1120 };
1121 if !worktree.read(cx).is_visible() {
1122 log::debug!(
1123 "not adding repositories for local worktree {:?} because it's not visible",
1124 worktree.read(cx).abs_path()
1125 );
1126 return;
1127 }
1128 self.update_repositories_from_worktree(
1129 project_environment.clone(),
1130 next_repository_id.clone(),
1131 downstream
1132 .as_ref()
1133 .map(|downstream| downstream.updates_tx.clone()),
1134 changed_repos.clone(),
1135 fs.clone(),
1136 cx,
1137 );
1138 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1139 }
1140 _ => {}
1141 }
1142 }
1143
1144 fn on_repository_event(
1145 &mut self,
1146 repo: Entity<Repository>,
1147 event: &RepositoryEvent,
1148 cx: &mut Context<Self>,
1149 ) {
1150 let id = repo.read(cx).id;
1151 let merge_conflicts = repo.read(cx).snapshot.merge.conflicted_paths.clone();
1152 for (buffer_id, diff) in self.diffs.iter() {
1153 if let Some((buffer_repo, repo_path)) =
1154 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1155 {
1156 if buffer_repo == repo {
1157 diff.update(cx, |diff, cx| {
1158 if let Some(conflict_set) = &diff.conflict_set {
1159 let conflict_status_changed =
1160 conflict_set.update(cx, |conflict_set, cx| {
1161 let has_conflict = merge_conflicts.contains(&repo_path);
1162 conflict_set.set_has_conflict(has_conflict, cx)
1163 })?;
1164 if conflict_status_changed {
1165 let buffer_store = self.buffer_store.read(cx);
1166 if let Some(buffer) = buffer_store.get(*buffer_id) {
1167 let _ = diff.reparse_conflict_markers(
1168 buffer.read(cx).text_snapshot(),
1169 cx,
1170 );
1171 }
1172 }
1173 }
1174 anyhow::Ok(())
1175 })
1176 .ok();
1177 }
1178 }
1179 }
1180 cx.emit(GitStoreEvent::RepositoryUpdated(
1181 id,
1182 event.clone(),
1183 self.active_repo_id == Some(id),
1184 ))
1185 }
1186
1187 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1188 cx.emit(GitStoreEvent::JobsUpdated)
1189 }
1190
1191 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1192 fn update_repositories_from_worktree(
1193 &mut self,
1194 project_environment: Entity<ProjectEnvironment>,
1195 next_repository_id: Arc<AtomicU64>,
1196 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1197 updated_git_repositories: UpdatedGitRepositoriesSet,
1198 fs: Arc<dyn Fs>,
1199 cx: &mut Context<Self>,
1200 ) {
1201 let mut removed_ids = Vec::new();
1202 for update in updated_git_repositories.iter() {
1203 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1204 let existing_work_directory_abs_path =
1205 repo.read(cx).work_directory_abs_path.clone();
1206 Some(&existing_work_directory_abs_path)
1207 == update.old_work_directory_abs_path.as_ref()
1208 || Some(&existing_work_directory_abs_path)
1209 == update.new_work_directory_abs_path.as_ref()
1210 }) {
1211 if let Some(new_work_directory_abs_path) =
1212 update.new_work_directory_abs_path.clone()
1213 {
1214 existing.update(cx, |existing, cx| {
1215 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1216 existing.schedule_scan(updates_tx.clone(), cx);
1217 });
1218 } else {
1219 removed_ids.push(*id);
1220 }
1221 } else if let UpdatedGitRepository {
1222 new_work_directory_abs_path: Some(work_directory_abs_path),
1223 dot_git_abs_path: Some(dot_git_abs_path),
1224 repository_dir_abs_path: Some(repository_dir_abs_path),
1225 common_dir_abs_path: Some(common_dir_abs_path),
1226 ..
1227 } = update
1228 {
1229 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1230 let git_store = cx.weak_entity();
1231 let repo = cx.new(|cx| {
1232 let mut repo = Repository::local(
1233 id,
1234 work_directory_abs_path.clone(),
1235 dot_git_abs_path.clone(),
1236 repository_dir_abs_path.clone(),
1237 common_dir_abs_path.clone(),
1238 project_environment.downgrade(),
1239 fs.clone(),
1240 git_store,
1241 cx,
1242 );
1243 repo.schedule_scan(updates_tx.clone(), cx);
1244 repo
1245 });
1246 self._subscriptions
1247 .push(cx.subscribe(&repo, Self::on_repository_event));
1248 self._subscriptions
1249 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1250 self.repositories.insert(id, repo);
1251 cx.emit(GitStoreEvent::RepositoryAdded(id));
1252 self.active_repo_id.get_or_insert_with(|| {
1253 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1254 id
1255 });
1256 }
1257 }
1258
1259 for id in removed_ids {
1260 if self.active_repo_id == Some(id) {
1261 self.active_repo_id = None;
1262 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1263 }
1264 self.repositories.remove(&id);
1265 if let Some(updates_tx) = updates_tx.as_ref() {
1266 updates_tx
1267 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1268 .ok();
1269 }
1270 }
1271 }
1272
1273 fn on_buffer_store_event(
1274 &mut self,
1275 _: Entity<BufferStore>,
1276 event: &BufferStoreEvent,
1277 cx: &mut Context<Self>,
1278 ) {
1279 match event {
1280 BufferStoreEvent::BufferAdded(buffer) => {
1281 cx.subscribe(&buffer, |this, buffer, event, cx| {
1282 if let BufferEvent::LanguageChanged = event {
1283 let buffer_id = buffer.read(cx).remote_id();
1284 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1285 diff_state.update(cx, |diff_state, cx| {
1286 diff_state.buffer_language_changed(buffer, cx);
1287 });
1288 }
1289 }
1290 })
1291 .detach();
1292 }
1293 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1294 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1295 diffs.remove(buffer_id);
1296 }
1297 }
1298 BufferStoreEvent::BufferDropped(buffer_id) => {
1299 self.diffs.remove(&buffer_id);
1300 for diffs in self.shared_diffs.values_mut() {
1301 diffs.remove(buffer_id);
1302 }
1303 }
1304
1305 _ => {}
1306 }
1307 }
1308
1309 pub fn recalculate_buffer_diffs(
1310 &mut self,
1311 buffers: Vec<Entity<Buffer>>,
1312 cx: &mut Context<Self>,
1313 ) -> impl Future<Output = ()> + use<> {
1314 let mut futures = Vec::new();
1315 for buffer in buffers {
1316 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1317 let buffer = buffer.read(cx).text_snapshot();
1318 diff_state.update(cx, |diff_state, cx| {
1319 diff_state.recalculate_diffs(buffer.clone(), cx);
1320 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1321 });
1322 futures.push(diff_state.update(cx, |diff_state, cx| {
1323 diff_state
1324 .reparse_conflict_markers(buffer, cx)
1325 .map(|_| {})
1326 .boxed()
1327 }));
1328 }
1329 }
1330 async move {
1331 futures::future::join_all(futures).await;
1332 }
1333 }
1334
1335 fn on_buffer_diff_event(
1336 &mut self,
1337 diff: Entity<buffer_diff::BufferDiff>,
1338 event: &BufferDiffEvent,
1339 cx: &mut Context<Self>,
1340 ) {
1341 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1342 let buffer_id = diff.read(cx).buffer_id;
1343 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1344 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1345 diff_state.hunk_staging_operation_count += 1;
1346 diff_state.hunk_staging_operation_count
1347 });
1348 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1349 let recv = repo.update(cx, |repo, cx| {
1350 log::debug!("hunks changed for {}", path.display());
1351 repo.spawn_set_index_text_job(
1352 path,
1353 new_index_text.as_ref().map(|rope| rope.to_string()),
1354 Some(hunk_staging_operation_count),
1355 cx,
1356 )
1357 });
1358 let diff = diff.downgrade();
1359 cx.spawn(async move |this, cx| {
1360 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1361 diff.update(cx, |diff, cx| {
1362 diff.clear_pending_hunks(cx);
1363 })
1364 .ok();
1365 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1366 .ok();
1367 }
1368 })
1369 .detach();
1370 }
1371 }
1372 }
1373 }
1374
1375 fn local_worktree_git_repos_changed(
1376 &mut self,
1377 worktree: Entity<Worktree>,
1378 changed_repos: &UpdatedGitRepositoriesSet,
1379 cx: &mut Context<Self>,
1380 ) {
1381 log::debug!("local worktree repos changed");
1382 debug_assert!(worktree.read(cx).is_local());
1383
1384 for repository in self.repositories.values() {
1385 repository.update(cx, |repository, cx| {
1386 let repo_abs_path = &repository.work_directory_abs_path;
1387 if changed_repos.iter().any(|update| {
1388 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1389 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1390 }) {
1391 repository.reload_buffer_diff_bases(cx);
1392 }
1393 });
1394 }
1395 }
1396
1397 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1398 &self.repositories
1399 }
1400
1401 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1402 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1403 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1404 Some(status.status)
1405 }
1406
1407 pub fn repository_and_path_for_buffer_id(
1408 &self,
1409 buffer_id: BufferId,
1410 cx: &App,
1411 ) -> Option<(Entity<Repository>, RepoPath)> {
1412 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1413 let project_path = buffer.read(cx).project_path(cx)?;
1414 self.repository_and_path_for_project_path(&project_path, cx)
1415 }
1416
1417 pub fn repository_and_path_for_project_path(
1418 &self,
1419 path: &ProjectPath,
1420 cx: &App,
1421 ) -> Option<(Entity<Repository>, RepoPath)> {
1422 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1423 self.repositories
1424 .values()
1425 .filter_map(|repo| {
1426 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1427 Some((repo.clone(), repo_path))
1428 })
1429 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1430 }
1431
1432 pub fn git_init(
1433 &self,
1434 path: Arc<Path>,
1435 fallback_branch_name: String,
1436 cx: &App,
1437 ) -> Task<Result<()>> {
1438 match &self.state {
1439 GitStoreState::Local { fs, .. } => {
1440 let fs = fs.clone();
1441 cx.background_executor()
1442 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1443 }
1444 GitStoreState::Ssh {
1445 upstream_client,
1446 upstream_project_id: project_id,
1447 ..
1448 }
1449 | GitStoreState::Remote {
1450 upstream_client,
1451 upstream_project_id: project_id,
1452 ..
1453 } => {
1454 let client = upstream_client.clone();
1455 let project_id = *project_id;
1456 cx.background_executor().spawn(async move {
1457 client
1458 .request(proto::GitInit {
1459 project_id: project_id.0,
1460 abs_path: path.to_string_lossy().to_string(),
1461 fallback_branch_name,
1462 })
1463 .await?;
1464 Ok(())
1465 })
1466 }
1467 }
1468 }
1469
1470 async fn handle_update_repository(
1471 this: Entity<Self>,
1472 envelope: TypedEnvelope<proto::UpdateRepository>,
1473 mut cx: AsyncApp,
1474 ) -> Result<()> {
1475 this.update(&mut cx, |this, cx| {
1476 let mut update = envelope.payload;
1477
1478 let id = RepositoryId::from_proto(update.id);
1479 let client = this
1480 .upstream_client()
1481 .context("no upstream client")?
1482 .clone();
1483
1484 let mut is_new = false;
1485 let repo = this.repositories.entry(id).or_insert_with(|| {
1486 is_new = true;
1487 let git_store = cx.weak_entity();
1488 cx.new(|cx| {
1489 Repository::remote(
1490 id,
1491 Path::new(&update.abs_path).into(),
1492 ProjectId(update.project_id),
1493 client,
1494 git_store,
1495 cx,
1496 )
1497 })
1498 });
1499 if is_new {
1500 this._subscriptions
1501 .push(cx.subscribe(&repo, Self::on_repository_event))
1502 }
1503
1504 repo.update(cx, {
1505 let update = update.clone();
1506 |repo, cx| repo.apply_remote_update(update, cx)
1507 })?;
1508
1509 this.active_repo_id.get_or_insert_with(|| {
1510 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1511 id
1512 });
1513
1514 if let Some((client, project_id)) = this.downstream_client() {
1515 update.project_id = project_id.to_proto();
1516 client.send(update).log_err();
1517 }
1518 Ok(())
1519 })?
1520 }
1521
1522 async fn handle_remove_repository(
1523 this: Entity<Self>,
1524 envelope: TypedEnvelope<proto::RemoveRepository>,
1525 mut cx: AsyncApp,
1526 ) -> Result<()> {
1527 this.update(&mut cx, |this, cx| {
1528 let mut update = envelope.payload;
1529 let id = RepositoryId::from_proto(update.id);
1530 this.repositories.remove(&id);
1531 if let Some((client, project_id)) = this.downstream_client() {
1532 update.project_id = project_id.to_proto();
1533 client.send(update).log_err();
1534 }
1535 if this.active_repo_id == Some(id) {
1536 this.active_repo_id = None;
1537 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1538 }
1539 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1540 })
1541 }
1542
1543 async fn handle_git_init(
1544 this: Entity<Self>,
1545 envelope: TypedEnvelope<proto::GitInit>,
1546 cx: AsyncApp,
1547 ) -> Result<proto::Ack> {
1548 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1549 let name = envelope.payload.fallback_branch_name;
1550 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1551 .await?;
1552
1553 Ok(proto::Ack {})
1554 }
1555
1556 async fn handle_fetch(
1557 this: Entity<Self>,
1558 envelope: TypedEnvelope<proto::Fetch>,
1559 mut cx: AsyncApp,
1560 ) -> Result<proto::RemoteMessageResponse> {
1561 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1562 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1563 let askpass_id = envelope.payload.askpass_id;
1564
1565 let askpass = make_remote_delegate(
1566 this,
1567 envelope.payload.project_id,
1568 repository_id,
1569 askpass_id,
1570 &mut cx,
1571 );
1572
1573 let remote_output = repository_handle
1574 .update(&mut cx, |repository_handle, cx| {
1575 repository_handle.fetch(askpass, cx)
1576 })?
1577 .await??;
1578
1579 Ok(proto::RemoteMessageResponse {
1580 stdout: remote_output.stdout,
1581 stderr: remote_output.stderr,
1582 })
1583 }
1584
1585 async fn handle_push(
1586 this: Entity<Self>,
1587 envelope: TypedEnvelope<proto::Push>,
1588 mut cx: AsyncApp,
1589 ) -> Result<proto::RemoteMessageResponse> {
1590 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1591 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1592
1593 let askpass_id = envelope.payload.askpass_id;
1594 let askpass = make_remote_delegate(
1595 this,
1596 envelope.payload.project_id,
1597 repository_id,
1598 askpass_id,
1599 &mut cx,
1600 );
1601
1602 let options = envelope
1603 .payload
1604 .options
1605 .as_ref()
1606 .map(|_| match envelope.payload.options() {
1607 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1608 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1609 });
1610
1611 let branch_name = envelope.payload.branch_name.into();
1612 let remote_name = envelope.payload.remote_name.into();
1613
1614 let remote_output = repository_handle
1615 .update(&mut cx, |repository_handle, cx| {
1616 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1617 })?
1618 .await??;
1619 Ok(proto::RemoteMessageResponse {
1620 stdout: remote_output.stdout,
1621 stderr: remote_output.stderr,
1622 })
1623 }
1624
1625 async fn handle_pull(
1626 this: Entity<Self>,
1627 envelope: TypedEnvelope<proto::Pull>,
1628 mut cx: AsyncApp,
1629 ) -> Result<proto::RemoteMessageResponse> {
1630 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1631 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1632 let askpass_id = envelope.payload.askpass_id;
1633 let askpass = make_remote_delegate(
1634 this,
1635 envelope.payload.project_id,
1636 repository_id,
1637 askpass_id,
1638 &mut cx,
1639 );
1640
1641 let branch_name = envelope.payload.branch_name.into();
1642 let remote_name = envelope.payload.remote_name.into();
1643
1644 let remote_message = repository_handle
1645 .update(&mut cx, |repository_handle, cx| {
1646 repository_handle.pull(branch_name, remote_name, askpass, cx)
1647 })?
1648 .await??;
1649
1650 Ok(proto::RemoteMessageResponse {
1651 stdout: remote_message.stdout,
1652 stderr: remote_message.stderr,
1653 })
1654 }
1655
1656 async fn handle_stage(
1657 this: Entity<Self>,
1658 envelope: TypedEnvelope<proto::Stage>,
1659 mut cx: AsyncApp,
1660 ) -> Result<proto::Ack> {
1661 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1662 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1663
1664 let entries = envelope
1665 .payload
1666 .paths
1667 .into_iter()
1668 .map(PathBuf::from)
1669 .map(RepoPath::new)
1670 .collect();
1671
1672 repository_handle
1673 .update(&mut cx, |repository_handle, cx| {
1674 repository_handle.stage_entries(entries, cx)
1675 })?
1676 .await?;
1677 Ok(proto::Ack {})
1678 }
1679
1680 async fn handle_unstage(
1681 this: Entity<Self>,
1682 envelope: TypedEnvelope<proto::Unstage>,
1683 mut cx: AsyncApp,
1684 ) -> Result<proto::Ack> {
1685 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1686 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1687
1688 let entries = envelope
1689 .payload
1690 .paths
1691 .into_iter()
1692 .map(PathBuf::from)
1693 .map(RepoPath::new)
1694 .collect();
1695
1696 repository_handle
1697 .update(&mut cx, |repository_handle, cx| {
1698 repository_handle.unstage_entries(entries, cx)
1699 })?
1700 .await?;
1701
1702 Ok(proto::Ack {})
1703 }
1704
1705 async fn handle_set_index_text(
1706 this: Entity<Self>,
1707 envelope: TypedEnvelope<proto::SetIndexText>,
1708 mut cx: AsyncApp,
1709 ) -> Result<proto::Ack> {
1710 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1711 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1712 let repo_path = RepoPath::from_str(&envelope.payload.path);
1713
1714 repository_handle
1715 .update(&mut cx, |repository_handle, cx| {
1716 repository_handle.spawn_set_index_text_job(
1717 repo_path,
1718 envelope.payload.text,
1719 None,
1720 cx,
1721 )
1722 })?
1723 .await??;
1724 Ok(proto::Ack {})
1725 }
1726
1727 async fn handle_commit(
1728 this: Entity<Self>,
1729 envelope: TypedEnvelope<proto::Commit>,
1730 mut cx: AsyncApp,
1731 ) -> Result<proto::Ack> {
1732 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1733 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1734
1735 let message = SharedString::from(envelope.payload.message);
1736 let name = envelope.payload.name.map(SharedString::from);
1737 let email = envelope.payload.email.map(SharedString::from);
1738 let options = envelope.payload.options.unwrap_or_default();
1739
1740 repository_handle
1741 .update(&mut cx, |repository_handle, cx| {
1742 repository_handle.commit(
1743 message,
1744 name.zip(email),
1745 CommitOptions {
1746 amend: options.amend,
1747 },
1748 cx,
1749 )
1750 })?
1751 .await??;
1752 Ok(proto::Ack {})
1753 }
1754
1755 async fn handle_get_remotes(
1756 this: Entity<Self>,
1757 envelope: TypedEnvelope<proto::GetRemotes>,
1758 mut cx: AsyncApp,
1759 ) -> Result<proto::GetRemotesResponse> {
1760 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1761 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1762
1763 let branch_name = envelope.payload.branch_name;
1764
1765 let remotes = repository_handle
1766 .update(&mut cx, |repository_handle, _| {
1767 repository_handle.get_remotes(branch_name)
1768 })?
1769 .await??;
1770
1771 Ok(proto::GetRemotesResponse {
1772 remotes: remotes
1773 .into_iter()
1774 .map(|remotes| proto::get_remotes_response::Remote {
1775 name: remotes.name.to_string(),
1776 })
1777 .collect::<Vec<_>>(),
1778 })
1779 }
1780
1781 async fn handle_get_branches(
1782 this: Entity<Self>,
1783 envelope: TypedEnvelope<proto::GitGetBranches>,
1784 mut cx: AsyncApp,
1785 ) -> Result<proto::GitBranchesResponse> {
1786 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1787 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1788
1789 let branches = repository_handle
1790 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1791 .await??;
1792
1793 Ok(proto::GitBranchesResponse {
1794 branches: branches
1795 .into_iter()
1796 .map(|branch| branch_to_proto(&branch))
1797 .collect::<Vec<_>>(),
1798 })
1799 }
1800 async fn handle_create_branch(
1801 this: Entity<Self>,
1802 envelope: TypedEnvelope<proto::GitCreateBranch>,
1803 mut cx: AsyncApp,
1804 ) -> Result<proto::Ack> {
1805 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1806 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1807 let branch_name = envelope.payload.branch_name;
1808
1809 repository_handle
1810 .update(&mut cx, |repository_handle, _| {
1811 repository_handle.create_branch(branch_name)
1812 })?
1813 .await??;
1814
1815 Ok(proto::Ack {})
1816 }
1817
1818 async fn handle_change_branch(
1819 this: Entity<Self>,
1820 envelope: TypedEnvelope<proto::GitChangeBranch>,
1821 mut cx: AsyncApp,
1822 ) -> Result<proto::Ack> {
1823 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1824 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1825 let branch_name = envelope.payload.branch_name;
1826
1827 repository_handle
1828 .update(&mut cx, |repository_handle, _| {
1829 repository_handle.change_branch(branch_name)
1830 })?
1831 .await??;
1832
1833 Ok(proto::Ack {})
1834 }
1835
1836 async fn handle_show(
1837 this: Entity<Self>,
1838 envelope: TypedEnvelope<proto::GitShow>,
1839 mut cx: AsyncApp,
1840 ) -> Result<proto::GitCommitDetails> {
1841 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1842 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1843
1844 let commit = repository_handle
1845 .update(&mut cx, |repository_handle, _| {
1846 repository_handle.show(envelope.payload.commit)
1847 })?
1848 .await??;
1849 Ok(proto::GitCommitDetails {
1850 sha: commit.sha.into(),
1851 message: commit.message.into(),
1852 commit_timestamp: commit.commit_timestamp,
1853 author_email: commit.author_email.into(),
1854 author_name: commit.author_name.into(),
1855 })
1856 }
1857
1858 async fn handle_load_commit_diff(
1859 this: Entity<Self>,
1860 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1861 mut cx: AsyncApp,
1862 ) -> Result<proto::LoadCommitDiffResponse> {
1863 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1864 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1865
1866 let commit_diff = repository_handle
1867 .update(&mut cx, |repository_handle, _| {
1868 repository_handle.load_commit_diff(envelope.payload.commit)
1869 })?
1870 .await??;
1871 Ok(proto::LoadCommitDiffResponse {
1872 files: commit_diff
1873 .files
1874 .into_iter()
1875 .map(|file| proto::CommitFile {
1876 path: file.path.to_string(),
1877 old_text: file.old_text,
1878 new_text: file.new_text,
1879 })
1880 .collect(),
1881 })
1882 }
1883
1884 async fn handle_reset(
1885 this: Entity<Self>,
1886 envelope: TypedEnvelope<proto::GitReset>,
1887 mut cx: AsyncApp,
1888 ) -> Result<proto::Ack> {
1889 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1890 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1891
1892 let mode = match envelope.payload.mode() {
1893 git_reset::ResetMode::Soft => ResetMode::Soft,
1894 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1895 };
1896
1897 repository_handle
1898 .update(&mut cx, |repository_handle, cx| {
1899 repository_handle.reset(envelope.payload.commit, mode, cx)
1900 })?
1901 .await??;
1902 Ok(proto::Ack {})
1903 }
1904
1905 async fn handle_checkout_files(
1906 this: Entity<Self>,
1907 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1908 mut cx: AsyncApp,
1909 ) -> Result<proto::Ack> {
1910 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1911 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1912 let paths = envelope
1913 .payload
1914 .paths
1915 .iter()
1916 .map(|s| RepoPath::from_str(s))
1917 .collect();
1918
1919 repository_handle
1920 .update(&mut cx, |repository_handle, cx| {
1921 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1922 })?
1923 .await??;
1924 Ok(proto::Ack {})
1925 }
1926
1927 async fn handle_open_commit_message_buffer(
1928 this: Entity<Self>,
1929 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1930 mut cx: AsyncApp,
1931 ) -> Result<proto::OpenBufferResponse> {
1932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1933 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1934 let buffer = repository
1935 .update(&mut cx, |repository, cx| {
1936 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1937 })?
1938 .await?;
1939
1940 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1941 this.update(&mut cx, |this, cx| {
1942 this.buffer_store.update(cx, |buffer_store, cx| {
1943 buffer_store
1944 .create_buffer_for_peer(
1945 &buffer,
1946 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1947 cx,
1948 )
1949 .detach_and_log_err(cx);
1950 })
1951 })?;
1952
1953 Ok(proto::OpenBufferResponse {
1954 buffer_id: buffer_id.to_proto(),
1955 })
1956 }
1957
1958 async fn handle_askpass(
1959 this: Entity<Self>,
1960 envelope: TypedEnvelope<proto::AskPassRequest>,
1961 mut cx: AsyncApp,
1962 ) -> Result<proto::AskPassResponse> {
1963 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1964 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1965
1966 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1967 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1968 debug_panic!("no askpass found");
1969 return Err(anyhow::anyhow!("no askpass found"));
1970 };
1971
1972 let response = askpass.ask_password(envelope.payload.prompt).await?;
1973
1974 delegates
1975 .lock()
1976 .insert(envelope.payload.askpass_id, askpass);
1977
1978 Ok(proto::AskPassResponse { response })
1979 }
1980
1981 async fn handle_check_for_pushed_commits(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1984 mut cx: AsyncApp,
1985 ) -> Result<proto::CheckForPushedCommitsResponse> {
1986 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1987 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1988
1989 let branches = repository_handle
1990 .update(&mut cx, |repository_handle, _| {
1991 repository_handle.check_for_pushed_commits()
1992 })?
1993 .await??;
1994 Ok(proto::CheckForPushedCommitsResponse {
1995 pushed_to: branches
1996 .into_iter()
1997 .map(|commit| commit.to_string())
1998 .collect(),
1999 })
2000 }
2001
2002 async fn handle_git_diff(
2003 this: Entity<Self>,
2004 envelope: TypedEnvelope<proto::GitDiff>,
2005 mut cx: AsyncApp,
2006 ) -> Result<proto::GitDiffResponse> {
2007 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2008 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2009 let diff_type = match envelope.payload.diff_type() {
2010 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2011 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2012 };
2013
2014 let mut diff = repository_handle
2015 .update(&mut cx, |repository_handle, cx| {
2016 repository_handle.diff(diff_type, cx)
2017 })?
2018 .await??;
        // Cap the size of the diff sent back over RPC; truncate on a character
        // boundary so we never split a UTF-8 sequence.
        const ONE_MB: usize = 1_000_000;
        if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
        }
2023
2024 Ok(proto::GitDiffResponse { diff })
2025 }
2026
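    /// Opens (or reuses) the unstaged diff for the requested buffer, records it as
    /// shared with the requesting peer, and replies with the staged (index) text the
    /// diff is based on.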
2027 async fn handle_open_unstaged_diff(
2028 this: Entity<Self>,
2029 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2030 mut cx: AsyncApp,
2031 ) -> Result<proto::OpenUnstagedDiffResponse> {
2032 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2033 let diff = this
2034 .update(&mut cx, |this, cx| {
2035 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2036 Some(this.open_unstaged_diff(buffer, cx))
2037 })?
2038 .ok_or_else(|| anyhow!("no such buffer"))?
2039 .await?;
2040 this.update(&mut cx, |this, _| {
2041 let shared_diffs = this
2042 .shared_diffs
2043 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2044 .or_default();
2045 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2046 })?;
2047 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2048 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2049 }
2050
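    /// Opens (or reuses) the uncommitted diff for the requested buffer, records it as
    /// shared with the requesting peer, and replies with the committed and staged base
    /// texts, using the `IndexMatchesHead` mode to avoid resending the staged text
    /// when the index base matches HEAD.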
2051 async fn handle_open_uncommitted_diff(
2052 this: Entity<Self>,
2053 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::OpenUncommittedDiffResponse> {
2056 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2057 let diff = this
2058 .update(&mut cx, |this, cx| {
2059 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2060 Some(this.open_uncommitted_diff(buffer, cx))
2061 })?
2062 .ok_or_else(|| anyhow!("no such buffer"))?
2063 .await?;
2064 this.update(&mut cx, |this, _| {
2065 let shared_diffs = this
2066 .shared_diffs
2067 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2068 .or_default();
2069 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2070 })?;
2071 diff.read_with(&cx, |diff, cx| {
2072 use proto::open_uncommitted_diff_response::Mode;
2073
2074 let unstaged_diff = diff.secondary_diff();
2075 let index_snapshot = unstaged_diff.and_then(|diff| {
2076 let diff = diff.read(cx);
2077 diff.base_text_exists().then(|| diff.base_text())
2078 });
2079
2080 let mode;
2081 let staged_text;
2082 let committed_text;
2083 if diff.base_text_exists() {
2084 let committed_snapshot = diff.base_text();
2085 committed_text = Some(committed_snapshot.text());
2086 if let Some(index_text) = index_snapshot {
2087 if index_text.remote_id() == committed_snapshot.remote_id() {
2088 mode = Mode::IndexMatchesHead;
2089 staged_text = None;
2090 } else {
2091 mode = Mode::IndexAndHead;
2092 staged_text = Some(index_text.text());
2093 }
2094 } else {
2095 mode = Mode::IndexAndHead;
2096 staged_text = None;
2097 }
2098 } else {
2099 mode = Mode::IndexAndHead;
2100 committed_text = None;
2101 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2102 }
2103
2104 proto::OpenUncommittedDiffResponse {
2105 committed_text,
2106 staged_text,
2107 mode: mode.into(),
2108 }
2109 })
2110 }
2111
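    /// Handles an `UpdateDiffBases` message by forwarding the new index/HEAD base
    /// texts to the buffer's git state, which recalculates its diffs against the
    /// current buffer snapshot.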
2112 async fn handle_update_diff_bases(
2113 this: Entity<Self>,
2114 request: TypedEnvelope<proto::UpdateDiffBases>,
2115 mut cx: AsyncApp,
2116 ) -> Result<()> {
2117 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2118 this.update(&mut cx, |this, cx| {
2119 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2120 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2121 let buffer = buffer.read(cx).text_snapshot();
2122 diff_state.update(cx, |diff_state, cx| {
2123 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2124 })
2125 }
2126 }
2127 })
2128 }
2129
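    /// Waits for the buffer to reach the requested version, computes its blame, and
    /// serializes the result for the requesting peer.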
2130 async fn handle_blame_buffer(
2131 this: Entity<Self>,
2132 envelope: TypedEnvelope<proto::BlameBuffer>,
2133 mut cx: AsyncApp,
2134 ) -> Result<proto::BlameBufferResponse> {
2135 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2136 let version = deserialize_version(&envelope.payload.version);
2137 let buffer = this.read_with(&cx, |this, cx| {
2138 this.buffer_store.read(cx).get_existing(buffer_id)
2139 })??;
2140 buffer
2141 .update(&mut cx, |buffer, _| {
2142 buffer.wait_for_version(version.clone())
2143 })?
2144 .await?;
2145 let blame = this
2146 .update(&mut cx, |this, cx| {
2147 this.blame_buffer(&buffer, Some(version), cx)
2148 })?
2149 .await?;
2150 Ok(serialize_blame_buffer_response(blame))
2151 }
2152
2153 async fn handle_get_permalink_to_line(
2154 this: Entity<Self>,
2155 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2156 mut cx: AsyncApp,
2157 ) -> Result<proto::GetPermalinkToLineResponse> {
2158 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2160 let selection = {
2161 let proto_selection = envelope
2162 .payload
2163 .selection
                .context("no selection defined to get a permalink for")?;
2165 proto_selection.start as u32..proto_selection.end as u32
2166 };
2167 let buffer = this.read_with(&cx, |this, cx| {
2168 this.buffer_store.read(cx).get_existing(buffer_id)
2169 })??;
2170 let permalink = this
2171 .update(&mut cx, |this, cx| {
2172 this.get_permalink_to_line(&buffer, selection, cx)
2173 })?
2174 .await?;
2175 Ok(proto::GetPermalinkToLineResponse {
2176 permalink: permalink.to_string(),
2177 })
2178 }
2179
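    /// Resolves the repository referenced by an incoming request, returning an error
    /// if no repository with the given id is known to this store.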
2180 fn repository_for_request(
2181 this: &Entity<Self>,
2182 id: RepositoryId,
2183 cx: &mut AsyncApp,
2184 ) -> Result<Entity<Repository>> {
2185 this.update(cx, |this, _| {
2186 this.repositories
2187 .get(&id)
2188 .context("missing repository handle")
2189 .cloned()
2190 })?
2191 }
2192
2193 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2194 self.repositories
2195 .iter()
2196 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2197 .collect()
2198 }
2199}
2200
2201impl BufferGitState {
2202 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2203 Self {
2204 unstaged_diff: Default::default(),
2205 uncommitted_diff: Default::default(),
2206 recalculate_diff_task: Default::default(),
2207 language: Default::default(),
2208 language_registry: Default::default(),
2209 recalculating_tx: postage::watch::channel_with(false).0,
2210 hunk_staging_operation_count: 0,
2211 hunk_staging_operation_count_as_of_write: 0,
2212 head_text: Default::default(),
2213 index_text: Default::default(),
2214 head_changed: Default::default(),
2215 index_changed: Default::default(),
2216 language_changed: Default::default(),
2217 conflict_updated_futures: Default::default(),
2218 conflict_set: Default::default(),
2219 reparse_conflict_markers_task: Default::default(),
2220 }
2221 }
2222
2223 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2224 self.language = buffer.read(cx).language().cloned();
2225 self.language_changed = true;
2226 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2227 }
2228
2229 fn reparse_conflict_markers(
2230 &mut self,
2231 buffer: text::BufferSnapshot,
2232 cx: &mut Context<Self>,
2233 ) -> oneshot::Receiver<()> {
2234 let (tx, rx) = oneshot::channel();
2235
2236 let Some(conflict_set) = self
2237 .conflict_set
2238 .as_ref()
2239 .and_then(|conflict_set| conflict_set.upgrade())
2240 else {
2241 return rx;
2242 };
2243
2244 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2245 if conflict_set.has_conflict {
2246 Some(conflict_set.snapshot())
2247 } else {
2248 None
2249 }
2250 });
2251
2252 if let Some(old_snapshot) = old_snapshot {
2253 self.conflict_updated_futures.push(tx);
2254 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2255 let (snapshot, changed_range) = cx
2256 .background_spawn(async move {
2257 let new_snapshot = ConflictSet::parse(&buffer);
2258 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2259 (new_snapshot, changed_range)
2260 })
2261 .await;
2262 this.update(cx, |this, cx| {
2263 if let Some(conflict_set) = &this.conflict_set {
2264 conflict_set
2265 .update(cx, |conflict_set, cx| {
2266 conflict_set.set_snapshot(snapshot, changed_range, cx);
2267 })
2268 .ok();
2269 }
2270 let futures = std::mem::take(&mut this.conflict_updated_futures);
2271 for tx in futures {
2272 tx.send(()).ok();
2273 }
2274 })
2275 }))
2276 }
2277
2278 rx
2279 }
2280
2281 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2282 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2283 }
2284
2285 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2286 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2287 }
2288
2289 fn handle_base_texts_updated(
2290 &mut self,
2291 buffer: text::BufferSnapshot,
2292 message: proto::UpdateDiffBases,
2293 cx: &mut Context<Self>,
2294 ) {
2295 use proto::update_diff_bases::Mode;
2296
2297 let Some(mode) = Mode::from_i32(message.mode) else {
2298 return;
2299 };
2300
2301 let diff_bases_change = match mode {
2302 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2303 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2304 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2305 Mode::IndexAndHead => DiffBasesChange::SetEach {
2306 index: message.staged_text,
2307 head: message.committed_text,
2308 },
2309 };
2310
2311 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2312 }
2313
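    /// Returns a future that resolves once the in-flight diff recalculation finishes,
    /// or `None` if no recalculation is currently running.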
2314 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2315 if *self.recalculating_tx.borrow() {
2316 let mut rx = self.recalculating_tx.subscribe();
2317 return Some(async move {
2318 loop {
2319 let is_recalculating = rx.recv().await;
2320 if is_recalculating != Some(true) {
2321 break;
2322 }
2323 }
2324 });
2325 } else {
2326 None
2327 }
2328 }
2329
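    /// Applies the given change to the stored index and/or HEAD base texts,
    /// normalizing line endings, and then recalculates the diffs against `buffer`
    /// (a `None` change still triggers a recalculation).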
2330 fn diff_bases_changed(
2331 &mut self,
2332 buffer: text::BufferSnapshot,
2333 diff_bases_change: Option<DiffBasesChange>,
2334 cx: &mut Context<Self>,
2335 ) {
2336 match diff_bases_change {
2337 Some(DiffBasesChange::SetIndex(index)) => {
2338 self.index_text = index.map(|mut index| {
2339 text::LineEnding::normalize(&mut index);
2340 Arc::new(index)
2341 });
2342 self.index_changed = true;
2343 }
2344 Some(DiffBasesChange::SetHead(head)) => {
2345 self.head_text = head.map(|mut head| {
2346 text::LineEnding::normalize(&mut head);
2347 Arc::new(head)
2348 });
2349 self.head_changed = true;
2350 }
2351 Some(DiffBasesChange::SetBoth(text)) => {
2352 let text = text.map(|mut text| {
2353 text::LineEnding::normalize(&mut text);
2354 Arc::new(text)
2355 });
2356 self.head_text = text.clone();
2357 self.index_text = text;
2358 self.head_changed = true;
2359 self.index_changed = true;
2360 }
2361 Some(DiffBasesChange::SetEach { index, head }) => {
2362 self.index_text = index.map(|mut index| {
2363 text::LineEnding::normalize(&mut index);
2364 Arc::new(index)
2365 });
2366 self.index_changed = true;
2367 self.head_text = head.map(|mut head| {
2368 text::LineEnding::normalize(&mut head);
2369 Arc::new(head)
2370 });
2371 self.head_changed = true;
2372 }
2373 None => {}
2374 }
2375
2376 self.recalculate_diffs(buffer, cx)
2377 }
2378
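    /// Recomputes the unstaged and uncommitted diffs for `buffer` on a background
    /// task. When the index and HEAD texts are the same, the freshly computed
    /// unstaged snapshot is reused for the uncommitted diff. The result is discarded
    /// if hunk staging operations are still in flight, leaving a later recalculation
    /// to pick up the settled index state.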
2379 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2380 *self.recalculating_tx.borrow_mut() = true;
2381
2382 let language = self.language.clone();
2383 let language_registry = self.language_registry.clone();
2384 let unstaged_diff = self.unstaged_diff();
2385 let uncommitted_diff = self.uncommitted_diff();
2386 let head = self.head_text.clone();
2387 let index = self.index_text.clone();
2388 let index_changed = self.index_changed;
2389 let head_changed = self.head_changed;
2390 let language_changed = self.language_changed;
2391 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2392 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2393 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2394 (None, None) => true,
2395 _ => false,
2396 };
2397 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2398 log::debug!(
2399 "start recalculating diffs for buffer {}",
2400 buffer.remote_id()
2401 );
2402
2403 let mut new_unstaged_diff = None;
2404 if let Some(unstaged_diff) = &unstaged_diff {
2405 new_unstaged_diff = Some(
2406 BufferDiff::update_diff(
2407 unstaged_diff.clone(),
2408 buffer.clone(),
2409 index,
2410 index_changed,
2411 language_changed,
2412 language.clone(),
2413 language_registry.clone(),
2414 cx,
2415 )
2416 .await?,
2417 );
2418 }
2419
2420 let mut new_uncommitted_diff = None;
2421 if let Some(uncommitted_diff) = &uncommitted_diff {
2422 new_uncommitted_diff = if index_matches_head {
2423 new_unstaged_diff.clone()
2424 } else {
2425 Some(
2426 BufferDiff::update_diff(
2427 uncommitted_diff.clone(),
2428 buffer.clone(),
2429 head,
2430 head_changed,
2431 language_changed,
2432 language.clone(),
2433 language_registry.clone(),
2434 cx,
2435 )
2436 .await?,
2437 )
2438 }
2439 }
2440
2441 let cancel = this.update(cx, |this, _| {
2442 // This checks whether all pending stage/unstage operations
2443 // have quiesced (i.e. both the corresponding write and the
2444 // read of that write have completed). If not, then we cancel
2445 // this recalculation attempt to avoid invalidating pending
2446 // state too quickly; another recalculation will come along
2447 // later and clear the pending state once the state of the index has settled.
2448 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2449 *this.recalculating_tx.borrow_mut() = false;
2450 true
2451 } else {
2452 false
2453 }
2454 })?;
2455 if cancel {
                log::debug!(
                    "aborting recalculating diffs for buffer {} due to subsequent hunk operations",
                    buffer.remote_id()
                );
2463 return Ok(());
2464 }
2465
2466 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2467 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2468 {
2469 unstaged_diff.update(cx, |diff, cx| {
2470 if language_changed {
2471 diff.language_changed(cx);
2472 }
2473 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2474 })?
2475 } else {
2476 None
2477 };
2478
2479 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2480 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2481 {
2482 uncommitted_diff.update(cx, |diff, cx| {
2483 if language_changed {
2484 diff.language_changed(cx);
2485 }
2486 diff.set_snapshot_with_secondary(
2487 new_uncommitted_diff,
2488 &buffer,
2489 unstaged_changed_range,
2490 true,
2491 cx,
2492 );
2493 })?;
2494 }
2495
2496 log::debug!(
2497 "finished recalculating diffs for buffer {}",
2498 buffer.remote_id()
2499 );
2500
2501 if let Some(this) = this.upgrade() {
2502 this.update(cx, |this, _| {
2503 this.index_changed = false;
2504 this.head_changed = false;
2505 this.language_changed = false;
2506 *this.recalculating_tx.borrow_mut() = false;
2507 })?;
2508 }
2509
2510 Ok(())
2511 }));
2512 }
2513}
2514
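/// Builds an `AskPassDelegate` that forwards authentication prompts from a git
/// operation to the downstream client identified by `askpass_id` and relays the
/// client's response back to the waiting operation.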
2515fn make_remote_delegate(
2516 this: Entity<GitStore>,
2517 project_id: u64,
2518 repository_id: RepositoryId,
2519 askpass_id: u64,
2520 cx: &mut AsyncApp,
2521) -> AskPassDelegate {
2522 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2523 this.update(cx, |this, cx| {
2524 let Some((client, _)) = this.downstream_client() else {
2525 return;
2526 };
2527 let response = client.request(proto::AskPassRequest {
2528 project_id,
2529 repository_id: repository_id.to_proto(),
2530 askpass_id,
2531 prompt,
2532 });
2533 cx.spawn(async move |_, _| {
2534 tx.send(response.await?.response).ok();
2535 anyhow::Ok(())
2536 })
2537 .detach_and_log_err(cx);
2538 })
2539 .log_err();
2540 })
2541}
2542
2543impl RepositoryId {
2544 pub fn to_proto(self) -> u64 {
2545 self.0
2546 }
2547
2548 pub fn from_proto(id: u64) -> Self {
2549 RepositoryId(id)
2550 }
2551}
2552
2553impl RepositorySnapshot {
2554 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2555 Self {
2556 id,
2557 statuses_by_path: Default::default(),
2558 work_directory_abs_path,
2559 branch: None,
2560 head_commit: None,
2561 scan_id: 0,
2562 merge: Default::default(),
2563 }
2564 }
2565
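    /// Builds the full `UpdateRepository` message used when first sharing this
    /// repository with a downstream client: every status entry is included and no
    /// removals are reported.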
2566 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2567 proto::UpdateRepository {
2568 branch_summary: self.branch.as_ref().map(branch_to_proto),
2569 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2570 updated_statuses: self
2571 .statuses_by_path
2572 .iter()
2573 .map(|entry| entry.to_proto())
2574 .collect(),
2575 removed_statuses: Default::default(),
2576 current_merge_conflicts: self
2577 .merge
2578 .conflicted_paths
2579 .iter()
2580 .map(|repo_path| repo_path.to_proto())
2581 .collect(),
2582 project_id,
2583 id: self.id.to_proto(),
2584 abs_path: self.work_directory_abs_path.to_proto(),
2585 entry_ids: vec![self.id.to_proto()],
2586 scan_id: self.scan_id,
2587 is_last_update: true,
2588 }
2589 }
2590
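    /// Builds an incremental `UpdateRepository` message by walking the old and new
    /// status trees in parallel (both are ordered by repo path) and emitting only
    /// the entries that were added, changed, or removed since `old`.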
2591 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2592 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2593 let mut removed_statuses: Vec<String> = Vec::new();
2594
2595 let mut new_statuses = self.statuses_by_path.iter().peekable();
2596 let mut old_statuses = old.statuses_by_path.iter().peekable();
2597
2598 let mut current_new_entry = new_statuses.next();
2599 let mut current_old_entry = old_statuses.next();
2600 loop {
2601 match (current_new_entry, current_old_entry) {
2602 (Some(new_entry), Some(old_entry)) => {
2603 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2604 Ordering::Less => {
2605 updated_statuses.push(new_entry.to_proto());
2606 current_new_entry = new_statuses.next();
2607 }
2608 Ordering::Equal => {
2609 if new_entry.status != old_entry.status {
2610 updated_statuses.push(new_entry.to_proto());
2611 }
2612 current_old_entry = old_statuses.next();
2613 current_new_entry = new_statuses.next();
2614 }
2615 Ordering::Greater => {
2616 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2617 current_old_entry = old_statuses.next();
2618 }
2619 }
2620 }
2621 (None, Some(old_entry)) => {
2622 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2623 current_old_entry = old_statuses.next();
2624 }
2625 (Some(new_entry), None) => {
2626 updated_statuses.push(new_entry.to_proto());
2627 current_new_entry = new_statuses.next();
2628 }
2629 (None, None) => break,
2630 }
2631 }
2632
2633 proto::UpdateRepository {
2634 branch_summary: self.branch.as_ref().map(branch_to_proto),
2635 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2636 updated_statuses,
2637 removed_statuses,
2638 current_merge_conflicts: self
2639 .merge
2640 .conflicted_paths
2641 .iter()
2642 .map(|path| path.as_ref().to_proto())
2643 .collect(),
2644 project_id,
2645 id: self.id.to_proto(),
2646 abs_path: self.work_directory_abs_path.to_proto(),
2647 entry_ids: vec![],
2648 scan_id: self.scan_id,
2649 is_last_update: true,
2650 }
2651 }
2652
2653 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2654 self.statuses_by_path.iter().cloned()
2655 }
2656
2657 pub fn status_summary(&self) -> GitSummary {
2658 self.statuses_by_path.summary().item_summary
2659 }
2660
2661 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2662 self.statuses_by_path
2663 .get(&PathKey(path.0.clone()), &())
2664 .cloned()
2665 }
2666
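    /// Converts an absolute path into a path relative to this repository's work
    /// directory, or returns `None` if the path lies outside of it.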
2667 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2668 abs_path
2669 .strip_prefix(&self.work_directory_abs_path)
2670 .map(RepoPath::from)
2671 .ok()
2672 }
2673
2674 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2675 self.merge.conflicted_paths.contains(repo_path)
2676 }
2677
2678 /// This is the name that will be displayed in the repository selector for this repository.
2679 pub fn display_name(&self) -> SharedString {
2680 self.work_directory_abs_path
2681 .file_name()
2682 .unwrap_or_default()
2683 .to_string_lossy()
2684 .to_string()
2685 .into()
2686 }
2687}
2688
2689impl MergeDetails {
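    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, and friends) and
    /// the merge message, recomputing the set of conflicted paths when the merge
    /// heads have changed. Returns the updated details together with a flag reporting
    /// whether the tracked merge heads changed; the flag is deliberately left `false`
    /// while a merge is in progress but no conflicts have been reported yet.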
2690 async fn load(
2691 backend: &Arc<dyn GitRepository>,
2692 status: &SumTree<StatusEntry>,
2693 prev_snapshot: &RepositorySnapshot,
2694 ) -> Result<(MergeDetails, bool)> {
2695 log::debug!("load merge details");
2696 let message = backend.merge_message().await;
2697 let heads = backend
2698 .revparse_batch(vec![
2699 "MERGE_HEAD".into(),
2700 "CHERRY_PICK_HEAD".into(),
2701 "REBASE_HEAD".into(),
2702 "REVERT_HEAD".into(),
2703 "APPLY_HEAD".into(),
2704 ])
2705 .await
2706 .log_err()
2707 .unwrap_or_default()
2708 .into_iter()
2709 .map(|opt| opt.map(SharedString::from))
2710 .collect::<Vec<_>>();
2711 let merge_heads_changed = heads != prev_snapshot.merge.heads;
2712 let conflicted_paths = if merge_heads_changed {
2713 let current_conflicted_paths = TreeSet::from_ordered_entries(
2714 status
2715 .iter()
2716 .filter(|entry| entry.status.is_conflicted())
2717 .map(|entry| entry.repo_path.clone()),
2718 );
2719
            // A scan can run while a lengthy merge is in progress, after the merge
            // heads exist but before the resulting conflicts are reported by
            // `git status`. Since we currently only track merge-head state in order
            // to detect conflicts, don't update it until some conflicts show up.
2725 if heads.iter().any(Option::is_some)
2726 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
2727 && current_conflicted_paths.is_empty()
2728 {
2729 log::debug!("not updating merge heads because no conflicts found");
2730 return Ok((
2731 MergeDetails {
2732 message: message.map(SharedString::from),
2733 ..prev_snapshot.merge.clone()
2734 },
2735 false,
2736 ));
2737 }
2738
2739 current_conflicted_paths
2740 } else {
2741 prev_snapshot.merge.conflicted_paths.clone()
2742 };
2743 let details = MergeDetails {
2744 conflicted_paths,
2745 message: message.map(SharedString::from),
2746 heads,
2747 };
2748 Ok((details, merge_heads_changed))
2749 }
2750}
2751
2752impl Repository {
2753 pub fn snapshot(&self) -> RepositorySnapshot {
2754 self.snapshot.clone()
2755 }
2756
2757 fn local(
2758 id: RepositoryId,
2759 work_directory_abs_path: Arc<Path>,
2760 dot_git_abs_path: Arc<Path>,
2761 repository_dir_abs_path: Arc<Path>,
2762 common_dir_abs_path: Arc<Path>,
2763 project_environment: WeakEntity<ProjectEnvironment>,
2764 fs: Arc<dyn Fs>,
2765 git_store: WeakEntity<GitStore>,
2766 cx: &mut Context<Self>,
2767 ) -> Self {
2768 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2769 Repository {
2770 this: cx.weak_entity(),
2771 git_store,
2772 snapshot,
2773 commit_message_buffer: None,
2774 askpass_delegates: Default::default(),
2775 paths_needing_status_update: Default::default(),
2776 latest_askpass_id: 0,
2777 job_sender: Repository::spawn_local_git_worker(
2778 work_directory_abs_path,
2779 dot_git_abs_path,
2780 repository_dir_abs_path,
2781 common_dir_abs_path,
2782 project_environment,
2783 fs,
2784 cx,
2785 ),
2786 job_id: 0,
2787 active_jobs: Default::default(),
2788 }
2789 }
2790
2791 fn remote(
2792 id: RepositoryId,
2793 work_directory_abs_path: Arc<Path>,
2794 project_id: ProjectId,
2795 client: AnyProtoClient,
2796 git_store: WeakEntity<GitStore>,
2797 cx: &mut Context<Self>,
2798 ) -> Self {
2799 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2800 Self {
2801 this: cx.weak_entity(),
2802 snapshot,
2803 commit_message_buffer: None,
2804 git_store,
2805 paths_needing_status_update: Default::default(),
2806 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2807 askpass_delegates: Default::default(),
2808 latest_askpass_id: 0,
2809 active_jobs: Default::default(),
2810 job_id: 0,
2811 }
2812 }
2813
2814 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2815 self.git_store.upgrade()
2816 }
2817
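    /// Reloads the index and HEAD base texts for every open buffer that belongs to
    /// this repository, then notifies each buffer's git state (and any downstream
    /// clients) of the bases that actually changed.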
2818 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2819 let this = cx.weak_entity();
2820 let git_store = self.git_store.clone();
2821 let _ = self.send_keyed_job(
2822 Some(GitJobKey::ReloadBufferDiffBases),
2823 None,
2824 |state, mut cx| async move {
2825 let RepositoryState::Local { backend, .. } = state else {
2826 log::error!("tried to recompute diffs for a non-local repository");
2827 return Ok(());
2828 };
2829
2830 let Some(this) = this.upgrade() else {
2831 return Ok(());
2832 };
2833
2834 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2835 git_store.update(cx, |git_store, cx| {
2836 git_store
2837 .diffs
2838 .iter()
2839 .filter_map(|(buffer_id, diff_state)| {
2840 let buffer_store = git_store.buffer_store.read(cx);
2841 let buffer = buffer_store.get(*buffer_id)?;
2842 let file = File::from_dyn(buffer.read(cx).file())?;
2843 let abs_path =
2844 file.worktree.read(cx).absolutize(&file.path).ok()?;
2845 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2846 log::debug!(
2847 "start reload diff bases for repo path {}",
2848 repo_path.0.display()
2849 );
2850 diff_state.update(cx, |diff_state, _| {
2851 let has_unstaged_diff = diff_state
2852 .unstaged_diff
2853 .as_ref()
2854 .is_some_and(|diff| diff.is_upgradable());
2855 let has_uncommitted_diff = diff_state
2856 .uncommitted_diff
2857 .as_ref()
2858 .is_some_and(|set| set.is_upgradable());
2859
2860 Some((
2861 buffer,
2862 repo_path,
2863 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2864 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2865 ))
2866 })
2867 })
2868 .collect::<Vec<_>>()
2869 })
2870 })??;
2871
2872 let buffer_diff_base_changes = cx
2873 .background_spawn(async move {
2874 let mut changes = Vec::new();
2875 for (buffer, repo_path, current_index_text, current_head_text) in
2876 &repo_diff_state_updates
2877 {
2878 let index_text = if current_index_text.is_some() {
2879 backend.load_index_text(repo_path.clone()).await
2880 } else {
2881 None
2882 };
2883 let head_text = if current_head_text.is_some() {
2884 backend.load_committed_text(repo_path.clone()).await
2885 } else {
2886 None
2887 };
2888
2889 let change =
2890 match (current_index_text.as_ref(), current_head_text.as_ref()) {
2891 (Some(current_index), Some(current_head)) => {
2892 let index_changed =
2893 index_text.as_ref() != current_index.as_deref();
2894 let head_changed =
2895 head_text.as_ref() != current_head.as_deref();
2896 if index_changed && head_changed {
2897 if index_text == head_text {
2898 Some(DiffBasesChange::SetBoth(head_text))
2899 } else {
2900 Some(DiffBasesChange::SetEach {
2901 index: index_text,
2902 head: head_text,
2903 })
2904 }
2905 } else if index_changed {
2906 Some(DiffBasesChange::SetIndex(index_text))
2907 } else if head_changed {
2908 Some(DiffBasesChange::SetHead(head_text))
2909 } else {
2910 None
2911 }
2912 }
2913 (Some(current_index), None) => {
2914 let index_changed =
2915 index_text.as_ref() != current_index.as_deref();
2916 index_changed
2917 .then_some(DiffBasesChange::SetIndex(index_text))
2918 }
2919 (None, Some(current_head)) => {
2920 let head_changed =
2921 head_text.as_ref() != current_head.as_deref();
2922 head_changed.then_some(DiffBasesChange::SetHead(head_text))
2923 }
2924 (None, None) => None,
2925 };
2926
2927 changes.push((buffer.clone(), change))
2928 }
2929 changes
2930 })
2931 .await;
2932
2933 git_store.update(&mut cx, |git_store, cx| {
2934 for (buffer, diff_bases_change) in buffer_diff_base_changes {
2935 let buffer_snapshot = buffer.read(cx).text_snapshot();
2936 let buffer_id = buffer_snapshot.remote_id();
2937 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
2938 continue;
2939 };
2940
2941 let downstream_client = git_store.downstream_client();
2942 diff_state.update(cx, |diff_state, cx| {
2943 use proto::update_diff_bases::Mode;
2944
2945 if let Some((diff_bases_change, (client, project_id))) =
2946 diff_bases_change.clone().zip(downstream_client)
2947 {
2948 let (staged_text, committed_text, mode) = match diff_bases_change {
2949 DiffBasesChange::SetIndex(index) => {
2950 (index, None, Mode::IndexOnly)
2951 }
2952 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
2953 DiffBasesChange::SetEach { index, head } => {
2954 (index, head, Mode::IndexAndHead)
2955 }
2956 DiffBasesChange::SetBoth(text) => {
2957 (None, text, Mode::IndexMatchesHead)
2958 }
2959 };
2960 client
2961 .send(proto::UpdateDiffBases {
2962 project_id: project_id.to_proto(),
2963 buffer_id: buffer_id.to_proto(),
2964 staged_text,
2965 committed_text,
2966 mode: mode as i32,
2967 })
2968 .log_err();
2969 }
2970
2971 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
2972 });
2973 }
2974 })
2975 },
2976 );
2977 }
2978
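    /// Enqueues a job on this repository's background git worker. The job receives
    /// the current `RepositoryState` (local backend or remote client) and its result
    /// is delivered through the returned receiver. If `status` is provided, it is
    /// shown as an active job while the work runs.
    ///
    /// A minimal usage sketch (variable names are illustrative):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git status".into()), |state, _cx| async move {
    ///     matches!(state, RepositoryState::Local { .. })
    /// });
    /// ```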
2979 pub fn send_job<F, Fut, R>(
2980 &mut self,
2981 status: Option<SharedString>,
2982 job: F,
2983 ) -> oneshot::Receiver<R>
2984 where
2985 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2986 Fut: Future<Output = R> + 'static,
2987 R: Send + 'static,
2988 {
2989 self.send_keyed_job(None, status, job)
2990 }
2991
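    /// Like [`Self::send_job`], but additionally tags the job with an optional
    /// `GitJobKey` so the git worker can recognize related jobs, such as repeated
    /// index writes for the same path.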
2992 fn send_keyed_job<F, Fut, R>(
2993 &mut self,
2994 key: Option<GitJobKey>,
2995 status: Option<SharedString>,
2996 job: F,
2997 ) -> oneshot::Receiver<R>
2998 where
2999 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3000 Fut: Future<Output = R> + 'static,
3001 R: Send + 'static,
3002 {
3003 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3004 let job_id = post_inc(&mut self.job_id);
3005 let this = self.this.clone();
3006 self.job_sender
3007 .unbounded_send(GitJob {
3008 key,
3009 job: Box::new(move |state, cx: &mut AsyncApp| {
3010 let job = job(state, cx.clone());
3011 cx.spawn(async move |cx| {
3012 if let Some(s) = status.clone() {
3013 this.update(cx, |this, cx| {
3014 this.active_jobs.insert(
3015 job_id,
3016 JobInfo {
3017 start: Instant::now(),
3018 message: s.clone(),
3019 },
3020 );
3021
3022 cx.notify();
3023 })
3024 .ok();
3025 }
3026 let result = job.await;
3027
3028 this.update(cx, |this, cx| {
3029 this.active_jobs.remove(&job_id);
3030 cx.notify();
3031 })
3032 .ok();
3033
3034 result_tx.send(result).ok();
3035 })
3036 }),
3037 })
3038 .ok();
3039 result_rx
3040 }
3041
3042 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3043 let Some(git_store) = self.git_store.upgrade() else {
3044 return;
3045 };
3046 let entity = cx.entity();
3047 git_store.update(cx, |git_store, cx| {
3048 let Some((&id, _)) = git_store
3049 .repositories
3050 .iter()
3051 .find(|(_, handle)| *handle == &entity)
3052 else {
3053 return;
3054 };
3055 git_store.active_repo_id = Some(id);
3056 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3057 });
3058 }
3059
3060 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3061 self.snapshot.status()
3062 }
3063
3064 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3065 let git_store = self.git_store.upgrade()?;
3066 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3067 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
3068 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3069 Some(ProjectPath {
3070 worktree_id: worktree.read(cx).id(),
3071 path: relative_path.into(),
3072 })
3073 }
3074
3075 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3076 let git_store = self.git_store.upgrade()?;
3077 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3078 let abs_path = worktree_store.absolutize(path, cx)?;
3079 self.snapshot.abs_path_to_repo_path(&abs_path)
3080 }
3081
3082 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3083 other
3084 .read(cx)
3085 .snapshot
3086 .work_directory_abs_path
3087 .starts_with(&self.snapshot.work_directory_abs_path)
3088 }
3089
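    /// Returns the shared commit-message buffer for this repository, creating it on
    /// first use. For local repositories a new buffer is created directly; for remote
    /// ones the host is asked to open it and we wait for the buffer to arrive.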
3090 pub fn open_commit_buffer(
3091 &mut self,
3092 languages: Option<Arc<LanguageRegistry>>,
3093 buffer_store: Entity<BufferStore>,
3094 cx: &mut Context<Self>,
3095 ) -> Task<Result<Entity<Buffer>>> {
3096 let id = self.id;
3097 if let Some(buffer) = self.commit_message_buffer.clone() {
3098 return Task::ready(Ok(buffer));
3099 }
3100 let this = cx.weak_entity();
3101
3102 let rx = self.send_job(None, move |state, mut cx| async move {
3103 let Some(this) = this.upgrade() else {
3104 bail!("git store was dropped");
3105 };
3106 match state {
3107 RepositoryState::Local { .. } => {
3108 this.update(&mut cx, |_, cx| {
3109 Self::open_local_commit_buffer(languages, buffer_store, cx)
3110 })?
3111 .await
3112 }
3113 RepositoryState::Remote { project_id, client } => {
3114 let request = client.request(proto::OpenCommitMessageBuffer {
3115 project_id: project_id.0,
3116 repository_id: id.to_proto(),
3117 });
3118 let response = request.await.context("requesting to open commit buffer")?;
3119 let buffer_id = BufferId::new(response.buffer_id)?;
3120 let buffer = buffer_store
3121 .update(&mut cx, |buffer_store, cx| {
3122 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3123 })?
3124 .await?;
3125 if let Some(language_registry) = languages {
3126 let git_commit_language =
3127 language_registry.language_for_name("Git Commit").await?;
3128 buffer.update(&mut cx, |buffer, cx| {
3129 buffer.set_language(Some(git_commit_language), cx);
3130 })?;
3131 }
3132 this.update(&mut cx, |this, _| {
3133 this.commit_message_buffer = Some(buffer.clone());
3134 })?;
3135 Ok(buffer)
3136 }
3137 }
3138 });
3139
3140 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3141 }
3142
3143 fn open_local_commit_buffer(
3144 language_registry: Option<Arc<LanguageRegistry>>,
3145 buffer_store: Entity<BufferStore>,
3146 cx: &mut Context<Self>,
3147 ) -> Task<Result<Entity<Buffer>>> {
3148 cx.spawn(async move |repository, cx| {
3149 let buffer = buffer_store
3150 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
3151 .await?;
3152
3153 if let Some(language_registry) = language_registry {
3154 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3155 buffer.update(cx, |buffer, cx| {
3156 buffer.set_language(Some(git_commit_language), cx);
3157 })?;
3158 }
3159
3160 repository.update(cx, |repository, _| {
3161 repository.commit_message_buffer = Some(buffer.clone());
3162 })?;
3163 Ok(buffer)
3164 })
3165 }
3166
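    /// Restores the given paths to their contents at `commit`, either via the local
    /// git backend or by forwarding a `GitCheckoutFiles` request over RPC.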
3167 pub fn checkout_files(
3168 &mut self,
3169 commit: &str,
3170 paths: Vec<RepoPath>,
3171 _cx: &mut App,
3172 ) -> oneshot::Receiver<Result<()>> {
3173 let commit = commit.to_string();
3174 let id = self.id;
3175
3176 self.send_job(
3177 Some(format!("git checkout {}", commit).into()),
3178 move |git_repo, _| async move {
3179 match git_repo {
3180 RepositoryState::Local {
3181 backend,
3182 environment,
3183 ..
3184 } => {
3185 backend
3186 .checkout_files(commit, paths, environment.clone())
3187 .await
3188 }
3189 RepositoryState::Remote { project_id, client } => {
3190 client
3191 .request(proto::GitCheckoutFiles {
3192 project_id: project_id.0,
3193 repository_id: id.to_proto(),
3194 commit,
3195 paths: paths
3196 .into_iter()
3197 .map(|p| p.to_string_lossy().to_string())
3198 .collect(),
3199 })
3200 .await?;
3201
3202 Ok(())
3203 }
3204 }
3205 },
3206 )
3207 }
3208
3209 pub fn reset(
3210 &mut self,
3211 commit: String,
3212 reset_mode: ResetMode,
3213 _cx: &mut App,
3214 ) -> oneshot::Receiver<Result<()>> {
3216 let id = self.id;
3217
3218 self.send_job(None, move |git_repo, _| async move {
3219 match git_repo {
3220 RepositoryState::Local {
3221 backend,
3222 environment,
3223 ..
3224 } => backend.reset(commit, reset_mode, environment).await,
3225 RepositoryState::Remote { project_id, client } => {
3226 client
3227 .request(proto::GitReset {
3228 project_id: project_id.0,
3229 repository_id: id.to_proto(),
3230 commit,
3231 mode: match reset_mode {
3232 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3233 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3234 },
3235 })
3236 .await?;
3237
3238 Ok(())
3239 }
3240 }
3241 })
3242 }
3243
3244 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3245 let id = self.id;
3246 self.send_job(None, move |git_repo, _cx| async move {
3247 match git_repo {
3248 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3249 RepositoryState::Remote { project_id, client } => {
3250 let resp = client
3251 .request(proto::GitShow {
3252 project_id: project_id.0,
3253 repository_id: id.to_proto(),
3254 commit,
3255 })
3256 .await?;
3257
3258 Ok(CommitDetails {
3259 sha: resp.sha.into(),
3260 message: resp.message.into(),
3261 commit_timestamp: resp.commit_timestamp,
3262 author_email: resp.author_email.into(),
3263 author_name: resp.author_name.into(),
3264 })
3265 }
3266 }
3267 })
3268 }
3269
3270 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3271 let id = self.id;
3272 self.send_job(None, move |git_repo, cx| async move {
3273 match git_repo {
3274 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3275 RepositoryState::Remote {
3276 client, project_id, ..
3277 } => {
3278 let response = client
3279 .request(proto::LoadCommitDiff {
3280 project_id: project_id.0,
3281 repository_id: id.to_proto(),
3282 commit,
3283 })
3284 .await?;
3285 Ok(CommitDiff {
3286 files: response
3287 .files
3288 .into_iter()
3289 .map(|file| CommitFile {
3290 path: Path::new(&file.path).into(),
3291 old_text: file.old_text,
3292 new_text: file.new_text,
3293 })
3294 .collect(),
3295 })
3296 }
3297 }
3298 })
3299 }
3300
3301 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3302 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3303 }
3304
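    /// Saves any open buffers for the given paths (so the on-disk contents match what
    /// gets staged), then stages those paths through the local backend or by sending
    /// a `Stage` request over RPC.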
3305 pub fn stage_entries(
3306 &self,
3307 entries: Vec<RepoPath>,
3308 cx: &mut Context<Self>,
3309 ) -> Task<anyhow::Result<()>> {
3310 if entries.is_empty() {
3311 return Task::ready(Ok(()));
3312 }
3313 let id = self.id;
3314
3315 let mut save_futures = Vec::new();
3316 if let Some(buffer_store) = self.buffer_store(cx) {
3317 buffer_store.update(cx, |buffer_store, cx| {
3318 for path in &entries {
3319 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3320 continue;
3321 };
3322 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3323 if buffer
3324 .read(cx)
3325 .file()
3326 .map_or(false, |file| file.disk_state().exists())
3327 {
3328 save_futures.push(buffer_store.save_buffer(buffer, cx));
3329 }
3330 }
3331 }
3332 })
3333 }
3334
3335 cx.spawn(async move |this, cx| {
3336 for save_future in save_futures {
3337 save_future.await?;
3338 }
3339
3340 this.update(cx, |this, _| {
3341 this.send_job(None, move |git_repo, _cx| async move {
3342 match git_repo {
3343 RepositoryState::Local {
3344 backend,
3345 environment,
3346 ..
3347 } => backend.stage_paths(entries, environment.clone()).await,
3348 RepositoryState::Remote { project_id, client } => {
3349 client
3350 .request(proto::Stage {
3351 project_id: project_id.0,
3352 repository_id: id.to_proto(),
3353 paths: entries
3354 .into_iter()
3355 .map(|repo_path| repo_path.as_ref().to_proto())
3356 .collect(),
3357 })
3358 .await
3359 .context("sending stage request")?;
3360
3361 Ok(())
3362 }
3363 }
3364 })
3365 })?
3366 .await??;
3367
3368 Ok(())
3369 })
3370 }
3371
3372 pub fn unstage_entries(
3373 &self,
3374 entries: Vec<RepoPath>,
3375 cx: &mut Context<Self>,
3376 ) -> Task<anyhow::Result<()>> {
3377 if entries.is_empty() {
3378 return Task::ready(Ok(()));
3379 }
3380 let id = self.id;
3381
3382 let mut save_futures = Vec::new();
3383 if let Some(buffer_store) = self.buffer_store(cx) {
3384 buffer_store.update(cx, |buffer_store, cx| {
3385 for path in &entries {
3386 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3387 continue;
3388 };
3389 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3390 if buffer
3391 .read(cx)
3392 .file()
3393 .map_or(false, |file| file.disk_state().exists())
3394 {
3395 save_futures.push(buffer_store.save_buffer(buffer, cx));
3396 }
3397 }
3398 }
3399 })
3400 }
3401
3402 cx.spawn(async move |this, cx| {
3403 for save_future in save_futures {
3404 save_future.await?;
3405 }
3406
3407 this.update(cx, |this, _| {
3408 this.send_job(None, move |git_repo, _cx| async move {
3409 match git_repo {
3410 RepositoryState::Local {
3411 backend,
3412 environment,
3413 ..
3414 } => backend.unstage_paths(entries, environment).await,
3415 RepositoryState::Remote { project_id, client } => {
3416 client
3417 .request(proto::Unstage {
3418 project_id: project_id.0,
3419 repository_id: id.to_proto(),
3420 paths: entries
3421 .into_iter()
3422 .map(|repo_path| repo_path.as_ref().to_proto())
3423 .collect(),
3424 })
3425 .await
3426 .context("sending unstage request")?;
3427
3428 Ok(())
3429 }
3430 }
3431 })
3432 })?
3433 .await??;
3434
3435 Ok(())
3436 })
3437 }
3438
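    /// Stages every path whose status is not already fully staged.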
3439 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3440 let to_stage = self
3441 .cached_status()
3442 .filter(|entry| !entry.status.staging().is_fully_staged())
3443 .map(|entry| entry.repo_path.clone())
3444 .collect();
3445 self.stage_entries(to_stage, cx)
3446 }
3447
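    /// Unstages every path that currently has staged changes.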
3448 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3449 let to_unstage = self
3450 .cached_status()
3451 .filter(|entry| entry.status.staging().has_staged())
3452 .map(|entry| entry.repo_path.clone())
3453 .collect();
3454 self.unstage_entries(to_unstage, cx)
3455 }
3456
3457 pub fn commit(
3458 &mut self,
3459 message: SharedString,
3460 name_and_email: Option<(SharedString, SharedString)>,
3461 options: CommitOptions,
3462 _cx: &mut App,
3463 ) -> oneshot::Receiver<Result<()>> {
3464 let id = self.id;
3465
3466 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3467 match git_repo {
3468 RepositoryState::Local {
3469 backend,
3470 environment,
3471 ..
3472 } => {
3473 backend
3474 .commit(message, name_and_email, options, environment)
3475 .await
3476 }
3477 RepositoryState::Remote { project_id, client } => {
3478 let (name, email) = name_and_email.unzip();
3479 client
3480 .request(proto::Commit {
3481 project_id: project_id.0,
3482 repository_id: id.to_proto(),
3483 message: String::from(message),
3484 name: name.map(String::from),
3485 email: email.map(String::from),
3486 options: Some(proto::commit::CommitOptions {
3487 amend: options.amend,
3488 }),
3489 })
3490 .await
3491 .context("sending commit request")?;
3492
3493 Ok(())
3494 }
3495 }
3496 })
3497 }
3498
3499 pub fn fetch(
3500 &mut self,
3501 askpass: AskPassDelegate,
3502 _cx: &mut App,
3503 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3504 let askpass_delegates = self.askpass_delegates.clone();
3505 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3506 let id = self.id;
3507
3508 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3509 match git_repo {
3510 RepositoryState::Local {
3511 backend,
3512 environment,
3513 ..
3514 } => backend.fetch(askpass, environment, cx).await,
3515 RepositoryState::Remote { project_id, client } => {
3516 askpass_delegates.lock().insert(askpass_id, askpass);
3517 let _defer = util::defer(|| {
3518 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3519 debug_assert!(askpass_delegate.is_some());
3520 });
3521
3522 let response = client
3523 .request(proto::Fetch {
3524 project_id: project_id.0,
3525 repository_id: id.to_proto(),
3526 askpass_id,
3527 })
3528 .await
3529 .context("sending fetch request")?;
3530
3531 Ok(RemoteCommandOutput {
3532 stdout: response.stdout,
3533 stderr: response.stderr,
3534 })
3535 }
3536 }
3537 })
3538 }
3539
3540 pub fn push(
3541 &mut self,
3542 branch: SharedString,
3543 remote: SharedString,
3544 options: Option<PushOptions>,
3545 askpass: AskPassDelegate,
3546 cx: &mut Context<Self>,
3547 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3548 let askpass_delegates = self.askpass_delegates.clone();
3549 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3550 let id = self.id;
3551
3552 let args = options
3553 .map(|option| match option {
3554 PushOptions::SetUpstream => " --set-upstream",
3555 PushOptions::Force => " --force",
3556 })
3557 .unwrap_or("");
3558
3559 let updates_tx = self
3560 .git_store()
3561 .and_then(|git_store| match &git_store.read(cx).state {
3562 GitStoreState::Local { downstream, .. } => downstream
3563 .as_ref()
3564 .map(|downstream| downstream.updates_tx.clone()),
3565 _ => None,
3566 });
3567
3568 let this = cx.weak_entity();
3569 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3571 move |git_repo, mut cx| async move {
3572 match git_repo {
3573 RepositoryState::Local {
3574 backend,
3575 environment,
3576 ..
3577 } => {
3578 let result = backend
3579 .push(
3580 branch.to_string(),
3581 remote.to_string(),
3582 options,
3583 askpass,
3584 environment.clone(),
3585 cx.clone(),
3586 )
3587 .await;
3588 if result.is_ok() {
3589 let branches = backend.branches().await?;
3590 let branch = branches.into_iter().find(|branch| branch.is_head);
3591 log::info!("head branch after scan is {branch:?}");
3592 let snapshot = this.update(&mut cx, |this, cx| {
3593 this.snapshot.branch = branch;
3594 let snapshot = this.snapshot.clone();
3595 cx.emit(RepositoryEvent::Updated { full_scan: false });
3596 snapshot
3597 })?;
3598 if let Some(updates_tx) = updates_tx {
3599 updates_tx
3600 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3601 .ok();
3602 }
3603 }
3604 result
3605 }
3606 RepositoryState::Remote { project_id, client } => {
3607 askpass_delegates.lock().insert(askpass_id, askpass);
3608 let _defer = util::defer(|| {
3609 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3610 debug_assert!(askpass_delegate.is_some());
3611 });
3612 let response = client
3613 .request(proto::Push {
3614 project_id: project_id.0,
3615 repository_id: id.to_proto(),
3616 askpass_id,
3617 branch_name: branch.to_string(),
3618 remote_name: remote.to_string(),
3619 options: options.map(|options| match options {
3620 PushOptions::Force => proto::push::PushOptions::Force,
3621 PushOptions::SetUpstream => {
3622 proto::push::PushOptions::SetUpstream
3623 }
3624 }
3625 as i32),
3626 })
3627 .await
3628 .context("sending push request")?;
3629
3630 Ok(RemoteCommandOutput {
3631 stdout: response.stdout,
3632 stderr: response.stderr,
3633 })
3634 }
3635 }
3636 },
3637 )
3638 }
3639
3640 pub fn pull(
3641 &mut self,
3642 branch: SharedString,
3643 remote: SharedString,
3644 askpass: AskPassDelegate,
3645 _cx: &mut App,
3646 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3647 let askpass_delegates = self.askpass_delegates.clone();
3648 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3649 let id = self.id;
3650
3651 self.send_job(
3652 Some(format!("git pull {} {}", remote, branch).into()),
3653 move |git_repo, cx| async move {
3654 match git_repo {
3655 RepositoryState::Local {
3656 backend,
3657 environment,
3658 ..
3659 } => {
3660 backend
3661 .pull(
3662 branch.to_string(),
3663 remote.to_string(),
3664 askpass,
3665 environment.clone(),
3666 cx,
3667 )
3668 .await
3669 }
3670 RepositoryState::Remote { project_id, client } => {
3671 askpass_delegates.lock().insert(askpass_id, askpass);
3672 let _defer = util::defer(|| {
3673 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3674 debug_assert!(askpass_delegate.is_some());
3675 });
3676 let response = client
3677 .request(proto::Pull {
3678 project_id: project_id.0,
3679 repository_id: id.to_proto(),
3680 askpass_id,
3681 branch_name: branch.to_string(),
3682 remote_name: remote.to_string(),
3683 })
3684 .await
3685 .context("sending pull request")?;
3686
3687 Ok(RemoteCommandOutput {
3688 stdout: response.stdout,
3689 stderr: response.stderr,
3690 })
3691 }
3692 }
3693 },
3694 )
3695 }
3696
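    /// Queues a write of `content` as the new index text for `path`, keyed by the
    /// path so the worker can identify successive writes to the same index entry.
    /// Once the write completes, the buffer's hunk staging operation count "as of
    /// write" is updated so that pending diff recalculations know the index has
    /// caught up.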
3697 fn spawn_set_index_text_job(
3698 &mut self,
3699 path: RepoPath,
3700 content: Option<String>,
3701 hunk_staging_operation_count: Option<usize>,
3702 cx: &mut Context<Self>,
3703 ) -> oneshot::Receiver<anyhow::Result<()>> {
3704 let id = self.id;
3705 let this = cx.weak_entity();
3706 let git_store = self.git_store.clone();
3707 self.send_keyed_job(
3708 Some(GitJobKey::WriteIndex(path.clone())),
3709 None,
3710 move |git_repo, mut cx| async move {
3711 log::debug!("start updating index text for buffer {}", path.display());
3712 match git_repo {
3713 RepositoryState::Local {
3714 backend,
3715 environment,
3716 ..
3717 } => {
3718 backend
3719 .set_index_text(path.clone(), content, environment.clone())
3720 .await?;
3721 }
3722 RepositoryState::Remote { project_id, client } => {
3723 client
3724 .request(proto::SetIndexText {
3725 project_id: project_id.0,
3726 repository_id: id.to_proto(),
3727 path: path.as_ref().to_proto(),
3728 text: content,
3729 })
3730 .await?;
3731 }
3732 }
3733 log::debug!("finish updating index text for buffer {}", path.display());
3734
3735 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
3736 let project_path = this
3737 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3738 .ok()
3739 .flatten();
3740 git_store.update(&mut cx, |git_store, cx| {
3741 let buffer_id = git_store
3742 .buffer_store
3743 .read(cx)
3744 .get_by_path(&project_path?, cx)?
3745 .read(cx)
3746 .remote_id();
3747 let diff_state = git_store.diffs.get(&buffer_id)?;
3748 diff_state.update(cx, |diff_state, _| {
3749 diff_state.hunk_staging_operation_count_as_of_write =
3750 hunk_staging_operation_count;
3751 });
3752 Some(())
3753 })?;
3754 }
3755 Ok(())
3756 },
3757 )
3758 }
3759
3760 pub fn get_remotes(
3761 &mut self,
3762 branch_name: Option<String>,
3763 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3764 let id = self.id;
3765 self.send_job(None, move |repo, _cx| async move {
3766 match repo {
3767 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3768 RepositoryState::Remote { project_id, client } => {
3769 let response = client
3770 .request(proto::GetRemotes {
3771 project_id: project_id.0,
3772 repository_id: id.to_proto(),
3773 branch_name,
3774 })
3775 .await?;
3776
3777 let remotes = response
3778 .remotes
3779 .into_iter()
3780 .map(|remotes| git::repository::Remote {
3781 name: remotes.name.into(),
3782 })
3783 .collect();
3784
3785 Ok(remotes)
3786 }
3787 }
3788 })
3789 }
3790
3791 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3792 let id = self.id;
3793 self.send_job(None, move |repo, _| async move {
3794 match repo {
3795 RepositoryState::Local { backend, .. } => backend.branches().await,
3796 RepositoryState::Remote { project_id, client } => {
3797 let response = client
3798 .request(proto::GitGetBranches {
3799 project_id: project_id.0,
3800 repository_id: id.to_proto(),
3801 })
3802 .await?;
3803
3804 let branches = response
3805 .branches
3806 .into_iter()
3807 .map(|branch| proto_to_branch(&branch))
3808 .collect();
3809
3810 Ok(branches)
3811 }
3812 }
3813 })
3814 }
3815
3816 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3817 let id = self.id;
3818 self.send_job(None, move |repo, _cx| async move {
3819 match repo {
3820 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3821 RepositoryState::Remote { project_id, client } => {
3822 let response = client
3823 .request(proto::GitDiff {
3824 project_id: project_id.0,
3825 repository_id: id.to_proto(),
3826 diff_type: match diff_type {
3827 DiffType::HeadToIndex => {
3828 proto::git_diff::DiffType::HeadToIndex.into()
3829 }
3830 DiffType::HeadToWorktree => {
3831 proto::git_diff::DiffType::HeadToWorktree.into()
3832 }
3833 },
3834 })
3835 .await?;
3836
3837 Ok(response.diff)
3838 }
3839 }
3840 })
3841 }
3842
3843 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3844 let id = self.id;
3845 self.send_job(
3846 Some(format!("git switch -c {branch_name}").into()),
3847 move |repo, _cx| async move {
3848 match repo {
3849 RepositoryState::Local { backend, .. } => {
3850 backend.create_branch(branch_name).await
3851 }
3852 RepositoryState::Remote { project_id, client } => {
3853 client
3854 .request(proto::GitCreateBranch {
3855 project_id: project_id.0,
3856 repository_id: id.to_proto(),
3857 branch_name,
3858 })
3859 .await?;
3860
3861 Ok(())
3862 }
3863 }
3864 },
3865 )
3866 }
3867
3868 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3869 let id = self.id;
3870 self.send_job(
3871 Some(format!("git switch {branch_name}").into()),
3872 move |repo, _cx| async move {
3873 match repo {
3874 RepositoryState::Local { backend, .. } => {
3875 backend.change_branch(branch_name).await
3876 }
3877 RepositoryState::Remote { project_id, client } => {
3878 client
3879 .request(proto::GitChangeBranch {
3880 project_id: project_id.0,
3881 repository_id: id.to_proto(),
3882 branch_name,
3883 })
3884 .await?;
3885
3886 Ok(())
3887 }
3888 }
3889 },
3890 )
3891 }
3892
3893 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3894 let id = self.id;
3895 self.send_job(None, move |repo, _cx| async move {
3896 match repo {
3897 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3898 RepositoryState::Remote { project_id, client } => {
3899 let response = client
3900 .request(proto::CheckForPushedCommits {
3901 project_id: project_id.0,
3902 repository_id: id.to_proto(),
3903 })
3904 .await?;
3905
3906 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3907
3908 Ok(branches)
3909 }
3910 }
3911 })
3912 }
3913
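    /// Captures a checkpoint of the repository's current state, which can later
    /// be passed to `restore_checkpoint`, `compare_checkpoints`, or
    /// `diff_checkpoints`. Only implemented for local repositories.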
3914 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3915 self.send_job(None, |repo, _cx| async move {
3916 match repo {
3917 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3918 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3919 }
3920 })
3921 }
3922
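    /// Restores the repository to a previously captured checkpoint. Only
    /// implemented for local repositories.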
3923 pub fn restore_checkpoint(
3924 &mut self,
3925 checkpoint: GitRepositoryCheckpoint,
3926 ) -> oneshot::Receiver<Result<()>> {
3927 self.send_job(None, move |repo, _cx| async move {
3928 match repo {
3929 RepositoryState::Local { backend, .. } => {
3930 backend.restore_checkpoint(checkpoint).await
3931 }
3932 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3933 }
3934 })
3935 }
3936
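    /// Applies an `UpdateRepository` message received from the host, replacing
    /// the snapshot's branch, head commit, and conflicted paths, and applying
    /// the removed/updated statuses as edits to the status tree.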
3937 pub(crate) fn apply_remote_update(
3938 &mut self,
3939 update: proto::UpdateRepository,
3940 cx: &mut Context<Self>,
3941 ) -> Result<()> {
3942 let conflicted_paths = TreeSet::from_ordered_entries(
3943 update
3944 .current_merge_conflicts
3945 .into_iter()
3946 .map(|path| RepoPath(Path::new(&path).into())),
3947 );
3948 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3949 self.snapshot.head_commit = update
3950 .head_commit_details
3951 .as_ref()
3952 .map(proto_to_commit_details);
3953
3954 self.snapshot.merge.conflicted_paths = conflicted_paths;
3955
3956 let edits = update
3957 .removed_statuses
3958 .into_iter()
3959 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3960 .chain(
3961 update
3962 .updated_statuses
3963 .into_iter()
3964 .filter_map(|updated_status| {
3965 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3966 }),
3967 )
3968 .collect::<Vec<_>>();
3969 self.snapshot.statuses_by_path.edit(edits, &());
3970 if update.is_last_update {
3971 self.snapshot.scan_id = update.scan_id;
3972 }
3973 cx.emit(RepositoryEvent::Updated { full_scan: true });
3974 Ok(())
3975 }
3976
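    /// Compares two previously captured checkpoints, resolving to the boolean
    /// result reported by the backend. Only implemented for local repositories.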
3977 pub fn compare_checkpoints(
3978 &mut self,
3979 left: GitRepositoryCheckpoint,
3980 right: GitRepositoryCheckpoint,
3981 ) -> oneshot::Receiver<Result<bool>> {
3982 self.send_job(None, move |repo, _cx| async move {
3983 match repo {
3984 RepositoryState::Local { backend, .. } => {
3985 backend.compare_checkpoints(left, right).await
3986 }
3987 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3988 }
3989 })
3990 }
3991
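    /// Produces a textual diff between two previously captured checkpoints.
    /// Only implemented for local repositories.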
3992 pub fn diff_checkpoints(
3993 &mut self,
3994 base_checkpoint: GitRepositoryCheckpoint,
3995 target_checkpoint: GitRepositoryCheckpoint,
3996 ) -> oneshot::Receiver<Result<String>> {
3997 self.send_job(None, move |repo, _cx| async move {
3998 match repo {
3999 RepositoryState::Local { backend, .. } => {
4000 backend
4001 .diff_checkpoints(base_checkpoint, target_checkpoint)
4002 .await
4003 }
4004 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
4005 }
4006 })
4007 }
4008
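    /// Schedules a full git status scan on the git worker, keyed with
    /// `GitJobKey::ReloadGitState` so that a queued scan is skipped when a
    /// newer one is already waiting behind it. The resulting snapshot replaces
    /// the current one and, when `updates_tx` is provided, is also forwarded
    /// downstream.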
4009 fn schedule_scan(
4010 &mut self,
4011 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4012 cx: &mut Context<Self>,
4013 ) {
4014 let this = cx.weak_entity();
4015 let _ = self.send_keyed_job(
4016 Some(GitJobKey::ReloadGitState),
4017 None,
4018 |state, mut cx| async move {
4019 log::debug!("run scheduled git status scan");
4020
4021 let Some(this) = this.upgrade() else {
4022 return Ok(());
4023 };
4024 let RepositoryState::Local { backend, .. } = state else {
4025 bail!("not a local repository")
4026 };
4027 let (snapshot, events) = this
4028 .update(&mut cx, |this, _| {
4029 compute_snapshot(
4030 this.id,
4031 this.work_directory_abs_path.clone(),
4032 this.snapshot.clone(),
4033 backend.clone(),
4034 )
4035 })?
4036 .await?;
4037 this.update(&mut cx, |this, cx| {
4038 this.snapshot = snapshot.clone();
4039 for event in events {
4040 cx.emit(event);
4041 }
4042 })?;
4043 if let Some(updates_tx) = updates_tx {
4044 updates_tx
4045 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4046 .ok();
4047 }
4048 Ok(())
4049 },
4050 );
4051 }
4052
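    /// Spawns the background task that executes git jobs for a local
    /// repository. It resolves the working directory's environment, opens the
    /// repository backend via the filesystem, and then drains jobs from the
    /// returned channel, skipping any keyed job that has a newer job with the
    /// same key queued behind it.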
4053 fn spawn_local_git_worker(
4054 work_directory_abs_path: Arc<Path>,
4055 dot_git_abs_path: Arc<Path>,
4056 _repository_dir_abs_path: Arc<Path>,
4057 _common_dir_abs_path: Arc<Path>,
4058 project_environment: WeakEntity<ProjectEnvironment>,
4059 fs: Arc<dyn Fs>,
4060 cx: &mut Context<Self>,
4061 ) -> mpsc::UnboundedSender<GitJob> {
4062 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4063
4064 cx.spawn(async move |_, cx| {
4065 let environment = project_environment
4066 .upgrade()
4067 .ok_or_else(|| anyhow!("missing project environment"))?
4068 .update(cx, |project_environment, cx| {
4069 project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
4070 })?
4071 .await
4072 .unwrap_or_else(|| {
4073 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4074 HashMap::default()
4075 });
4076 let backend = cx
4077 .background_spawn(async move {
4078 fs.open_repo(&dot_git_abs_path)
4079                        .ok_or_else(|| anyhow!("failed to open repository {dot_git_abs_path:?}"))
4080 })
4081 .await?;
4082
4083 if let Some(git_hosting_provider_registry) =
4084 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4085 {
4086 git_hosting_providers::register_additional_providers(
4087 git_hosting_provider_registry,
4088 backend.clone(),
4089 );
4090 }
4091
4092 let state = RepositoryState::Local {
4093 backend,
4094 environment: Arc::new(environment),
4095 };
4096 let mut jobs = VecDeque::new();
4097 loop {
4098 while let Ok(Some(next_job)) = job_rx.try_next() {
4099 jobs.push_back(next_job);
4100 }
4101
4102 if let Some(job) = jobs.pop_front() {
4103 if let Some(current_key) = &job.key {
4104 if jobs
4105 .iter()
4106 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4107 {
4108 continue;
4109 }
4110 }
4111 (job.job)(state.clone(), cx).await;
4112 } else if let Some(job) = job_rx.next().await {
4113 jobs.push_back(job);
4114 } else {
4115 break;
4116 }
4117 }
4118 anyhow::Ok(())
4119 })
4120 .detach_and_log_err(cx);
4121
4122 job_tx
4123 }
4124
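    /// Spawns the background task that executes git jobs for a remote
    /// repository by forwarding them over RPC, using the same keyed-job
    /// deduplication as the local worker.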
4125 fn spawn_remote_git_worker(
4126 project_id: ProjectId,
4127 client: AnyProtoClient,
4128 cx: &mut Context<Self>,
4129 ) -> mpsc::UnboundedSender<GitJob> {
4130 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4131
4132 cx.spawn(async move |_, cx| {
4133 let state = RepositoryState::Remote { project_id, client };
4134 let mut jobs = VecDeque::new();
4135 loop {
4136 while let Ok(Some(next_job)) = job_rx.try_next() {
4137 jobs.push_back(next_job);
4138 }
4139
4140 if let Some(job) = jobs.pop_front() {
4141 if let Some(current_key) = &job.key {
4142 if jobs
4143 .iter()
4144 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4145 {
4146 continue;
4147 }
4148 }
4149 (job.job)(state.clone(), cx).await;
4150 } else if let Some(job) = job_rx.next().await {
4151 jobs.push_back(job);
4152 } else {
4153 break;
4154 }
4155 }
4156 anyhow::Ok(())
4157 })
4158 .detach_and_log_err(cx);
4159
4160 job_tx
4161 }
4162
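    /// Loads the staged (index) text for the given path, or `None` if there is
    /// no staged text for it. Remote repositories fetch it via an
    /// `OpenUnstagedDiff` request.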
4163 fn load_staged_text(
4164 &mut self,
4165 buffer_id: BufferId,
4166 repo_path: RepoPath,
4167 cx: &App,
4168 ) -> Task<Result<Option<String>>> {
4169 let rx = self.send_job(None, move |state, _| async move {
4170 match state {
4171 RepositoryState::Local { backend, .. } => {
4172 anyhow::Ok(backend.load_index_text(repo_path).await)
4173 }
4174 RepositoryState::Remote { project_id, client } => {
4175 let response = client
4176 .request(proto::OpenUnstagedDiff {
4177 project_id: project_id.to_proto(),
4178 buffer_id: buffer_id.to_proto(),
4179 })
4180 .await?;
4181 Ok(response.staged_text)
4182 }
4183 }
4184 });
4185 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4186 }
4187
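    /// Loads the committed (HEAD) and staged (index) texts for the given path,
    /// collapsing them into `DiffBasesChange::SetBoth` when the index matches
    /// HEAD, and `DiffBasesChange::SetEach` otherwise.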
4188 fn load_committed_text(
4189 &mut self,
4190 buffer_id: BufferId,
4191 repo_path: RepoPath,
4192 cx: &App,
4193 ) -> Task<Result<DiffBasesChange>> {
4194 let rx = self.send_job(None, move |state, _| async move {
4195 match state {
4196 RepositoryState::Local { backend, .. } => {
4197 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4198 let staged_text = backend.load_index_text(repo_path).await;
4199 let diff_bases_change = if committed_text == staged_text {
4200 DiffBasesChange::SetBoth(committed_text)
4201 } else {
4202 DiffBasesChange::SetEach {
4203 index: staged_text,
4204 head: committed_text,
4205 }
4206 };
4207 anyhow::Ok(diff_bases_change)
4208 }
4209 RepositoryState::Remote { project_id, client } => {
4210 use proto::open_uncommitted_diff_response::Mode;
4211
4212 let response = client
4213 .request(proto::OpenUncommittedDiff {
4214 project_id: project_id.to_proto(),
4215 buffer_id: buffer_id.to_proto(),
4216 })
4217 .await?;
4218 let mode =
4219 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
4220 let bases = match mode {
4221 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4222 Mode::IndexAndHead => DiffBasesChange::SetEach {
4223 head: response.committed_text,
4224 index: response.staged_text,
4225 },
4226 };
4227 Ok(bases)
4228 }
4229 }
4230 });
4231
4232 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4233 }
4234
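    /// Records that `paths` changed on disk and schedules a keyed
    /// `RefreshStatuses` job that re-queries git for just those paths, applies
    /// any resulting status edits to the snapshot, and notifies downstream
    /// clients when something actually changed.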
4235 fn paths_changed(
4236 &mut self,
4237 paths: Vec<RepoPath>,
4238 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4239 cx: &mut Context<Self>,
4240 ) {
4241 self.paths_needing_status_update.extend(paths);
4242
4243 let this = cx.weak_entity();
4244 let _ = self.send_keyed_job(
4245 Some(GitJobKey::RefreshStatuses),
4246 None,
4247 |state, mut cx| async move {
4248 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
4249 (
4250 this.snapshot.clone(),
4251 mem::take(&mut this.paths_needing_status_update),
4252 )
4253 })?;
4254 let RepositoryState::Local { backend, .. } = state else {
4255 bail!("not a local repository")
4256 };
4257
4258 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
4259 let statuses = backend.status(&paths).await?;
4260
4261 let changed_path_statuses = cx
4262 .background_spawn(async move {
4263 let mut changed_path_statuses = Vec::new();
4264 let prev_statuses = prev_snapshot.statuses_by_path.clone();
4265 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4266
4267 for (repo_path, status) in &*statuses.entries {
4268 changed_paths.remove(repo_path);
4269 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
4270 if cursor.item().is_some_and(|entry| entry.status == *status) {
4271 continue;
4272 }
4273 }
4274
4275 changed_path_statuses.push(Edit::Insert(StatusEntry {
4276 repo_path: repo_path.clone(),
4277 status: *status,
4278 }));
4279 }
4280 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
4281 for path in changed_paths.into_iter() {
4282 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
4283 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
4284 }
4285 }
4286 changed_path_statuses
4287 })
4288 .await;
4289
4290 this.update(&mut cx, |this, cx| {
4291 if !changed_path_statuses.is_empty() {
4292 this.snapshot
4293 .statuses_by_path
4294 .edit(changed_path_statuses, &());
4295 this.snapshot.scan_id += 1;
4296 if let Some(updates_tx) = updates_tx {
4297 updates_tx
4298 .unbounded_send(DownstreamUpdate::UpdateRepository(
4299 this.snapshot.clone(),
4300 ))
4301 .ok();
4302 }
4303 }
4304 cx.emit(RepositoryEvent::Updated { full_scan: false });
4305 })
4306 },
4307 );
4308 }
4309
4310    /// Returns information about the currently running git command, if any, together with when it started.
4311 pub fn current_job(&self) -> Option<JobInfo> {
4312 self.active_jobs.values().next().cloned()
4313 }
4314
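    /// Enqueues a no-op job; awaiting the returned receiver therefore waits
    /// until all previously enqueued git jobs have completed.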
4315 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
4316 self.send_job(None, |_, _| async {})
4317 }
4318}
4319
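/// Builds a permalink to a file that lives inside a crate downloaded from the
/// Cargo registry, using the `.cargo_vcs_info.json` and `Cargo.toml` published
/// alongside the source to recover the upstream repository and commit SHA.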
4320fn get_permalink_in_rust_registry_src(
4321 provider_registry: Arc<GitHostingProviderRegistry>,
4322 path: PathBuf,
4323 selection: Range<u32>,
4324) -> Result<url::Url> {
4325 #[derive(Deserialize)]
4326 struct CargoVcsGit {
4327 sha1: String,
4328 }
4329
4330 #[derive(Deserialize)]
4331 struct CargoVcsInfo {
4332 git: CargoVcsGit,
4333 path_in_vcs: String,
4334 }
4335
4336 #[derive(Deserialize)]
4337 struct CargoPackage {
4338 repository: String,
4339 }
4340
4341 #[derive(Deserialize)]
4342 struct CargoToml {
4343 package: CargoPackage,
4344 }
4345
4346 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
4347 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
4348 Some((dir, json))
4349 }) else {
4350 bail!("No .cargo_vcs_info.json found in parent directories")
4351 };
4352 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
4353 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
4354 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
4355 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
4356 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
4357 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
4358 let permalink = provider.build_permalink(
4359 remote,
4360 BuildPermalinkParams {
4361 sha: &cargo_vcs_info.git.sha1,
4362 path: &path.to_string_lossy(),
4363 selection: Some(selection),
4364 },
4365 );
4366 Ok(permalink)
4367}
4368
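/// Converts an optional `git::blame::Blame` into its protobuf representation;
/// `None` becomes a response with no blame data.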
4369fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4370 let Some(blame) = blame else {
4371 return proto::BlameBufferResponse {
4372 blame_response: None,
4373 };
4374 };
4375
4376 let entries = blame
4377 .entries
4378 .into_iter()
4379 .map(|entry| proto::BlameEntry {
4380 sha: entry.sha.as_bytes().into(),
4381 start_line: entry.range.start,
4382 end_line: entry.range.end,
4383 original_line_number: entry.original_line_number,
4384 author: entry.author.clone(),
4385 author_mail: entry.author_mail.clone(),
4386 author_time: entry.author_time,
4387 author_tz: entry.author_tz.clone(),
4388 committer: entry.committer_name.clone(),
4389 committer_mail: entry.committer_email.clone(),
4390 committer_time: entry.committer_time,
4391 committer_tz: entry.committer_tz.clone(),
4392 summary: entry.summary.clone(),
4393 previous: entry.previous.clone(),
4394 filename: entry.filename.clone(),
4395 })
4396 .collect::<Vec<_>>();
4397
4398 let messages = blame
4399 .messages
4400 .into_iter()
4401 .map(|(oid, message)| proto::CommitMessage {
4402 oid: oid.as_bytes().into(),
4403 message,
4404 })
4405 .collect::<Vec<_>>();
4406
4407 proto::BlameBufferResponse {
4408 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4409 entries,
4410 messages,
4411 remote_url: blame.remote_url,
4412 }),
4413 }
4414}
4415
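/// Reconstructs a `git::blame::Blame` from its protobuf representation,
/// dropping any entries or commit messages whose object ids fail to parse.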
4416fn deserialize_blame_buffer_response(
4417 response: proto::BlameBufferResponse,
4418) -> Option<git::blame::Blame> {
4419 let response = response.blame_response?;
4420 let entries = response
4421 .entries
4422 .into_iter()
4423 .filter_map(|entry| {
4424 Some(git::blame::BlameEntry {
4425 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4426 range: entry.start_line..entry.end_line,
4427 original_line_number: entry.original_line_number,
4428 committer_name: entry.committer,
4429 committer_time: entry.committer_time,
4430 committer_tz: entry.committer_tz,
4431 committer_email: entry.committer_mail,
4432 author: entry.author,
4433 author_mail: entry.author_mail,
4434 author_time: entry.author_time,
4435 author_tz: entry.author_tz,
4436 summary: entry.summary,
4437 previous: entry.previous,
4438 filename: entry.filename,
4439 })
4440 })
4441 .collect::<Vec<_>>();
4442
4443 let messages = response
4444 .messages
4445 .into_iter()
4446 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4447 .collect::<HashMap<_, _>>();
4448
4449 Some(Blame {
4450 entries,
4451 messages,
4452 remote_url: response.remote_url,
4453 })
4454}
4455
4456fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4457 proto::Branch {
4458 is_head: branch.is_head,
4459 ref_name: branch.ref_name.to_string(),
4460 unix_timestamp: branch
4461 .most_recent_commit
4462 .as_ref()
4463 .map(|commit| commit.commit_timestamp as u64),
4464 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4465 ref_name: upstream.ref_name.to_string(),
4466 tracking: upstream
4467 .tracking
4468 .status()
4469 .map(|upstream| proto::UpstreamTracking {
4470 ahead: upstream.ahead as u64,
4471 behind: upstream.behind as u64,
4472 }),
4473 }),
4474 most_recent_commit: branch
4475 .most_recent_commit
4476 .as_ref()
4477 .map(|commit| proto::CommitSummary {
4478 sha: commit.sha.to_string(),
4479 subject: commit.subject.to_string(),
4480 commit_timestamp: commit.commit_timestamp,
4481 }),
4482 }
4483}
4484
4485fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4486 git::repository::Branch {
4487 is_head: proto.is_head,
4488 ref_name: proto.ref_name.clone().into(),
4489 upstream: proto
4490 .upstream
4491 .as_ref()
4492 .map(|upstream| git::repository::Upstream {
4493 ref_name: upstream.ref_name.to_string().into(),
4494 tracking: upstream
4495 .tracking
4496 .as_ref()
4497 .map(|tracking| {
4498 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4499 ahead: tracking.ahead as u32,
4500 behind: tracking.behind as u32,
4501 })
4502 })
4503 .unwrap_or(git::repository::UpstreamTracking::Gone),
4504 }),
4505 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4506 git::repository::CommitSummary {
4507 sha: commit.sha.to_string().into(),
4508 subject: commit.subject.to_string().into(),
4509 commit_timestamp: commit.commit_timestamp,
4510 has_parent: true,
4511 }
4512 }),
4513 }
4514}
4515
4516fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
4517 proto::GitCommitDetails {
4518 sha: commit.sha.to_string(),
4519 message: commit.message.to_string(),
4520 commit_timestamp: commit.commit_timestamp,
4521 author_email: commit.author_email.to_string(),
4522 author_name: commit.author_name.to_string(),
4523 }
4524}
4525
4526fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
4527 CommitDetails {
4528 sha: proto.sha.clone().into(),
4529 message: proto.message.clone().into(),
4530 commit_timestamp: proto.commit_timestamp,
4531 author_email: proto.author_email.clone().into(),
4532 author_name: proto.author_name.clone().into(),
4533 }
4534}
4535
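/// Recomputes a repository snapshot by asking the backend for its branches,
/// statuses, merge details, and head commit, returning the new snapshot along
/// with the events to emit for whatever changed relative to `prev_snapshot`.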
4536async fn compute_snapshot(
4537 id: RepositoryId,
4538 work_directory_abs_path: Arc<Path>,
4539 prev_snapshot: RepositorySnapshot,
4540 backend: Arc<dyn GitRepository>,
4541) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
4542 let mut events = Vec::new();
4543 let branches = backend.branches().await?;
4544 let branch = branches.into_iter().find(|branch| branch.is_head);
4545 let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
4546 let statuses_by_path = SumTree::from_iter(
4547 statuses
4548 .entries
4549 .iter()
4550 .map(|(repo_path, status)| StatusEntry {
4551 repo_path: repo_path.clone(),
4552 status: *status,
4553 }),
4554 &(),
4555 );
4556 let (merge_details, merge_heads_changed) =
4557 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
4558 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
4559
4560 if merge_heads_changed
4561 || branch != prev_snapshot.branch
4562 || statuses_by_path != prev_snapshot.statuses_by_path
4563 {
4564 events.push(RepositoryEvent::Updated { full_scan: true });
4565 }
4566
4567 // Cache merge conflict paths so they don't change from staging/unstaging,
4568 // until the merge heads change (at commit time, etc.).
4569 if merge_heads_changed {
4570 events.push(RepositoryEvent::MergeHeadsChanged);
4571 }
4572
4573    // Look up the head commit directly; this covers the case where `branch` is `None`, e.g. a detached HEAD state.
4574 let head_commit = match backend.head_sha().await {
4575 Some(head_sha) => backend.show(head_sha).await.log_err(),
4576 None => None,
4577 };
4578
4579 let snapshot = RepositorySnapshot {
4580 id,
4581 statuses_by_path,
4582 work_directory_abs_path,
4583 scan_id: prev_snapshot.scan_id + 1,
4584 branch,
4585 head_commit,
4586 merge: merge_details,
4587 };
4588
4589 Ok((snapshot, events))
4590}
4591
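/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no detailed variant is present.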
4592fn status_from_proto(
4593 simple_status: i32,
4594 status: Option<proto::GitFileStatus>,
4595) -> anyhow::Result<FileStatus> {
4596 use proto::git_file_status::Variant;
4597
4598 let Some(variant) = status.and_then(|status| status.variant) else {
4599 let code = proto::GitStatus::from_i32(simple_status)
4600 .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
4601 let result = match code {
4602 proto::GitStatus::Added => TrackedStatus {
4603 worktree_status: StatusCode::Added,
4604 index_status: StatusCode::Unmodified,
4605 }
4606 .into(),
4607 proto::GitStatus::Modified => TrackedStatus {
4608 worktree_status: StatusCode::Modified,
4609 index_status: StatusCode::Unmodified,
4610 }
4611 .into(),
4612 proto::GitStatus::Conflict => UnmergedStatus {
4613 first_head: UnmergedStatusCode::Updated,
4614 second_head: UnmergedStatusCode::Updated,
4615 }
4616 .into(),
4617 proto::GitStatus::Deleted => TrackedStatus {
4618 worktree_status: StatusCode::Deleted,
4619 index_status: StatusCode::Unmodified,
4620 }
4621 .into(),
4622 _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
4623 };
4624 return Ok(result);
4625 };
4626
4627 let result = match variant {
4628 Variant::Untracked(_) => FileStatus::Untracked,
4629 Variant::Ignored(_) => FileStatus::Ignored,
4630 Variant::Unmerged(unmerged) => {
4631 let [first_head, second_head] =
4632 [unmerged.first_head, unmerged.second_head].map(|head| {
4633 let code = proto::GitStatus::from_i32(head)
4634 .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
4635 let result = match code {
4636 proto::GitStatus::Added => UnmergedStatusCode::Added,
4637 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4638 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4639 _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
4640 };
4641 Ok(result)
4642 });
4643 let [first_head, second_head] = [first_head?, second_head?];
4644 UnmergedStatus {
4645 first_head,
4646 second_head,
4647 }
4648 .into()
4649 }
4650 Variant::Tracked(tracked) => {
4651 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4652 .map(|status| {
4653 let code = proto::GitStatus::from_i32(status)
4654 .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
4655 let result = match code {
4656 proto::GitStatus::Modified => StatusCode::Modified,
4657 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4658 proto::GitStatus::Added => StatusCode::Added,
4659 proto::GitStatus::Deleted => StatusCode::Deleted,
4660 proto::GitStatus::Renamed => StatusCode::Renamed,
4661 proto::GitStatus::Copied => StatusCode::Copied,
4662 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4663 _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
4664 };
4665 Ok(result)
4666 });
4667 let [index_status, worktree_status] = [index_status?, worktree_status?];
4668 TrackedStatus {
4669 index_status,
4670 worktree_status,
4671 }
4672 .into()
4673 }
4674 };
4675 Ok(result)
4676}
4677
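/// Converts a `FileStatus` into its protobuf representation.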
4678fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4679 use proto::git_file_status::{Tracked, Unmerged, Variant};
4680
4681 let variant = match status {
4682 FileStatus::Untracked => Variant::Untracked(Default::default()),
4683 FileStatus::Ignored => Variant::Ignored(Default::default()),
4684 FileStatus::Unmerged(UnmergedStatus {
4685 first_head,
4686 second_head,
4687 }) => Variant::Unmerged(Unmerged {
4688 first_head: unmerged_status_to_proto(first_head),
4689 second_head: unmerged_status_to_proto(second_head),
4690 }),
4691 FileStatus::Tracked(TrackedStatus {
4692 index_status,
4693 worktree_status,
4694 }) => Variant::Tracked(Tracked {
4695 index_status: tracked_status_to_proto(index_status),
4696 worktree_status: tracked_status_to_proto(worktree_status),
4697 }),
4698 };
4699 proto::GitFileStatus {
4700 variant: Some(variant),
4701 }
4702}
4703
4704fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4705 match code {
4706 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4707 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4708 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4709 }
4710}
4711
4712fn tracked_status_to_proto(code: StatusCode) -> i32 {
4713 match code {
4714 StatusCode::Added => proto::GitStatus::Added as _,
4715 StatusCode::Deleted => proto::GitStatus::Deleted as _,
4716 StatusCode::Modified => proto::GitStatus::Modified as _,
4717 StatusCode::Renamed => proto::GitStatus::Renamed as _,
4718 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
4719 StatusCode::Copied => proto::GitStatus::Copied as _,
4720 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
4721 }
4722}