1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use postage::stream::Stream as _;
42use rpc::{
43 AnyProtoClient, TypedEnvelope,
44 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
45};
46use serde::Deserialize;
47use std::{
48 cmp::Ordering,
49 collections::{BTreeSet, VecDeque},
50 future::Future,
51 mem,
52 ops::Range,
53 path::{Path, PathBuf},
54 sync::{
55 Arc,
56 atomic::{self, AtomicU64},
57 },
58 time::Instant,
59};
60use sum_tree::{Edit, SumTree, TreeSet};
61use text::{Bias, BufferId};
62use util::{ResultExt, debug_panic, post_inc};
63use worktree::{
64 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
65};
66
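/// Tracks the git repositories associated with a project's worktrees, along with
/// the per-buffer unstaged and uncommitted diffs derived from them.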
67pub struct GitStore {
68 state: GitStoreState,
69 buffer_store: Entity<BufferStore>,
70 worktree_store: Entity<WorktreeStore>,
71 repositories: HashMap<RepositoryId, Entity<Repository>>,
72 active_repo_id: Option<RepositoryId>,
73 #[allow(clippy::type_complexity)]
74 loading_diffs:
75 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
76 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
77 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
78 _subscriptions: Vec<Subscription>,
79}
80
81#[derive(Default)]
82struct SharedDiffs {
83 unstaged: Option<Entity<BufferDiff>>,
84 uncommitted: Option<Entity<BufferDiff>>,
85}
86
87struct BufferDiffState {
88 unstaged_diff: Option<WeakEntity<BufferDiff>>,
89 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
90 recalculate_diff_task: Option<Task<Result<()>>>,
91 language: Option<Arc<Language>>,
92 language_registry: Option<Arc<LanguageRegistry>>,
93 recalculating_tx: postage::watch::Sender<bool>,
94
95 /// These operation counts are used to ensure that head and index text
96 /// values read from the git repository are up-to-date with any hunk staging
97 /// operations that have been performed on the BufferDiff.
98 ///
99 /// The operation_count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon writing the new index text to
    /// disk, the `operation_count_as_of_write` is updated to reflect the
    /// operation_count that prompted the write. Finally, when reloading
    /// index/head text from disk in response to a filesystem event, the
    /// `operation_count_as_of_read` is updated to match the count recorded by
    /// the most recent write.
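    ///
    /// For example (an illustrative sequence, not actual call sites): staging a
    /// hunk bumps `hunk_staging_operation_count` to N; once the corresponding
    /// index write reaches disk, `hunk_staging_operation_count_as_of_write`
    /// becomes N; and when the resulting filesystem event is processed,
    /// `hunk_staging_operation_count_as_of_read` catches up to N, at which point
    /// freshly read index text is known to include that staging operation.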
106 hunk_staging_operation_count: usize,
107 hunk_staging_operation_count_as_of_write: usize,
108 hunk_staging_operation_count_as_of_read: usize,
109
110 head_text: Option<Arc<String>>,
111 index_text: Option<Arc<String>>,
112 head_changed: bool,
113 index_changed: bool,
114 language_changed: bool,
115}
116
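/// Describes which diff base texts changed for a buffer: the index text, the HEAD
/// text, each independently, or both set to the same text.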
117#[derive(Clone, Debug)]
118enum DiffBasesChange {
119 SetIndex(Option<String>),
120 SetHead(Option<String>),
121 SetEach {
122 index: Option<String>,
123 head: Option<String>,
124 },
125 SetBoth(Option<String>),
126}
127
128#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
129enum DiffKind {
130 Unstaged,
131 Uncommitted,
132}
133
134enum GitStoreState {
135 Local {
136 next_repository_id: Arc<AtomicU64>,
137 downstream: Option<LocalDownstreamState>,
138 project_environment: Entity<ProjectEnvironment>,
139 fs: Arc<dyn Fs>,
140 },
141 Ssh {
142 upstream_client: AnyProtoClient,
143 upstream_project_id: ProjectId,
144 downstream: Option<(AnyProtoClient, ProjectId)>,
145 },
146 Remote {
147 upstream_client: AnyProtoClient,
148 upstream_project_id: ProjectId,
149 },
150}
151
152enum DownstreamUpdate {
153 UpdateRepository(RepositorySnapshot),
154 RemoveRepository(RepositoryId),
155}
156
157struct LocalDownstreamState {
158 client: AnyProtoClient,
159 project_id: ProjectId,
160 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
161 _task: Task<Result<()>>,
162}
163
164#[derive(Clone)]
165pub struct GitStoreCheckpoint {
166 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
167}
168
169#[derive(Clone, Debug, PartialEq, Eq)]
170pub struct StatusEntry {
171 pub repo_path: RepoPath,
172 pub status: FileStatus,
173}
174
175impl StatusEntry {
176 fn to_proto(&self) -> proto::StatusEntry {
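        // Collapse the two-part tracked status into a single wire-format code,
        // preferring the worktree status over the index status whenever the
        // worktree side has changes.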
177 let simple_status = match self.status {
178 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
179 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
180 FileStatus::Tracked(TrackedStatus {
181 index_status,
182 worktree_status,
183 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
184 worktree_status
185 } else {
186 index_status
187 }),
188 };
189
190 proto::StatusEntry {
191 repo_path: self.repo_path.as_ref().to_proto(),
192 simple_status,
193 status: Some(status_to_proto(self.status)),
194 }
195 }
196}
197
198impl TryFrom<proto::StatusEntry> for StatusEntry {
199 type Error = anyhow::Error;
200
201 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
202 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
203 let status = status_from_proto(value.simple_status, value.status)?;
204 Ok(Self { repo_path, status })
205 }
206}
207
208impl sum_tree::Item for StatusEntry {
209 type Summary = PathSummary<GitSummary>;
210
211 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
212 PathSummary {
213 max_path: self.repo_path.0.clone(),
214 item_summary: self.status.summary(),
215 }
216 }
217}
218
219impl sum_tree::KeyedItem for StatusEntry {
220 type Key = PathKey;
221
222 fn key(&self) -> Self::Key {
223 PathKey(self.repo_path.0.clone())
224 }
225}
226
227#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
228pub struct RepositoryId(pub u64);
229
230#[derive(Clone, Debug, PartialEq, Eq)]
231pub struct RepositorySnapshot {
232 pub id: RepositoryId,
233 pub merge_message: Option<SharedString>,
234 pub statuses_by_path: SumTree<StatusEntry>,
235 pub work_directory_abs_path: Arc<Path>,
236 pub branch: Option<Branch>,
237 pub merge_conflicts: TreeSet<RepoPath>,
238 pub merge_head_shas: Vec<SharedString>,
239 pub scan_id: u64,
240}
241
242type JobId = u64;
243
244#[derive(Clone, Debug, PartialEq, Eq)]
245pub struct JobInfo {
246 pub start: Instant,
247 pub message: SharedString,
248}
249
250pub struct Repository {
251 this: WeakEntity<Self>,
252 snapshot: RepositorySnapshot,
253 commit_message_buffer: Option<Entity<Buffer>>,
254 git_store: WeakEntity<GitStore>,
255 // For a local repository, holds paths that have had worktree events since the last status scan completed,
256 // and that should be examined during the next status scan.
257 paths_needing_status_update: BTreeSet<RepoPath>,
258 job_sender: mpsc::UnboundedSender<GitJob>,
259 active_jobs: HashMap<JobId, JobInfo>,
260 job_id: JobId,
261 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
262 latest_askpass_id: u64,
263}
264
265impl std::ops::Deref for Repository {
266 type Target = RepositorySnapshot;
267
268 fn deref(&self) -> &Self::Target {
269 &self.snapshot
270 }
271}
272
273#[derive(Clone)]
274pub enum RepositoryState {
275 Local {
276 backend: Arc<dyn GitRepository>,
277 environment: Arc<HashMap<String, String>>,
278 },
279 Remote {
280 project_id: ProjectId,
281 client: AnyProtoClient,
282 },
283}
284
285#[derive(Clone, Debug)]
286pub enum RepositoryEvent {
287 Updated { full_scan: bool },
288 MergeHeadsChanged,
289}
290
291#[derive(Clone, Debug)]
292pub struct JobsUpdated;
293
294#[derive(Debug)]
295pub enum GitStoreEvent {
296 ActiveRepositoryChanged(Option<RepositoryId>),
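    /// The `bool` is true when the updated repository is the active repository.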
297 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
298 RepositoryAdded(RepositoryId),
299 RepositoryRemoved(RepositoryId),
300 IndexWriteError(anyhow::Error),
301 JobsUpdated,
302}
303
304impl EventEmitter<RepositoryEvent> for Repository {}
305impl EventEmitter<JobsUpdated> for Repository {}
306impl EventEmitter<GitStoreEvent> for GitStore {}
307
308pub struct GitJob {
309 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
310 key: Option<GitJobKey>,
311}
312
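/// Key identifying git jobs that relate to the same piece of state; presumably used
/// so that redundant queued jobs with the same key can be coalesced or superseded.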
313#[derive(PartialEq, Eq)]
314enum GitJobKey {
315 WriteIndex(RepoPath),
316 ReloadBufferDiffBases,
317 RefreshStatuses,
318 ReloadGitState,
319}
320
321impl GitStore {
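    /// Creates a [`GitStore`] backed by git repositories discovered in the local
    /// project's worktrees.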
322 pub fn local(
323 worktree_store: &Entity<WorktreeStore>,
324 buffer_store: Entity<BufferStore>,
325 environment: Entity<ProjectEnvironment>,
326 fs: Arc<dyn Fs>,
327 cx: &mut Context<Self>,
328 ) -> Self {
329 Self::new(
330 worktree_store.clone(),
331 buffer_store,
332 GitStoreState::Local {
333 next_repository_id: Arc::new(AtomicU64::new(1)),
334 downstream: None,
335 project_environment: environment,
336 fs,
337 },
338 cx,
339 )
340 }
341
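    /// Creates a [`GitStore`] for a collaborator on a remote project; repository
    /// state is received from, and git operations are forwarded to, `upstream_client`.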
342 pub fn remote(
343 worktree_store: &Entity<WorktreeStore>,
344 buffer_store: Entity<BufferStore>,
345 upstream_client: AnyProtoClient,
346 project_id: ProjectId,
347 cx: &mut Context<Self>,
348 ) -> Self {
349 Self::new(
350 worktree_store.clone(),
351 buffer_store,
352 GitStoreState::Remote {
353 upstream_client,
354 upstream_project_id: project_id,
355 },
356 cx,
357 )
358 }
359
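    /// Creates a [`GitStore`] for the client side of an SSH remote project, which
    /// proxies git operations to the remote server and can itself be re-shared
    /// with downstream collaborators.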
360 pub fn ssh(
361 worktree_store: &Entity<WorktreeStore>,
362 buffer_store: Entity<BufferStore>,
363 upstream_client: AnyProtoClient,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Ssh {
370 upstream_client,
371 upstream_project_id: ProjectId(SSH_PROJECT_ID),
372 downstream: None,
373 },
374 cx,
375 )
376 }
377
378 fn new(
379 worktree_store: Entity<WorktreeStore>,
380 buffer_store: Entity<BufferStore>,
381 state: GitStoreState,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 let _subscriptions = vec![
385 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
386 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
387 ];
388
389 GitStore {
390 state,
391 buffer_store,
392 worktree_store,
393 repositories: HashMap::default(),
394 active_repo_id: None,
395 _subscriptions,
396 loading_diffs: HashMap::default(),
397 shared_diffs: HashMap::default(),
398 diffs: HashMap::default(),
399 }
400 }
401
402 pub fn init(client: &AnyProtoClient) {
403 client.add_entity_request_handler(Self::handle_get_remotes);
404 client.add_entity_request_handler(Self::handle_get_branches);
405 client.add_entity_request_handler(Self::handle_change_branch);
406 client.add_entity_request_handler(Self::handle_create_branch);
407 client.add_entity_request_handler(Self::handle_git_init);
408 client.add_entity_request_handler(Self::handle_push);
409 client.add_entity_request_handler(Self::handle_pull);
410 client.add_entity_request_handler(Self::handle_fetch);
411 client.add_entity_request_handler(Self::handle_stage);
412 client.add_entity_request_handler(Self::handle_unstage);
413 client.add_entity_request_handler(Self::handle_commit);
414 client.add_entity_request_handler(Self::handle_reset);
415 client.add_entity_request_handler(Self::handle_show);
416 client.add_entity_request_handler(Self::handle_load_commit_diff);
417 client.add_entity_request_handler(Self::handle_checkout_files);
418 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
419 client.add_entity_request_handler(Self::handle_set_index_text);
420 client.add_entity_request_handler(Self::handle_askpass);
421 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
422 client.add_entity_request_handler(Self::handle_git_diff);
423 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
424 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
425 client.add_entity_message_handler(Self::handle_update_diff_bases);
426 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
427 client.add_entity_request_handler(Self::handle_blame_buffer);
428 client.add_entity_message_handler(Self::handle_update_repository);
429 client.add_entity_message_handler(Self::handle_remove_repository);
430 }
431
432 pub fn is_local(&self) -> bool {
433 matches!(self.state, GitStoreState::Local { .. })
434 }
435
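    /// Starts sharing repository state with a downstream client. For an SSH store
    /// the current snapshots are sent immediately; for a local store a background
    /// task is spawned that streams incremental repository updates as they arrive.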
436 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
437 match &mut self.state {
438 GitStoreState::Ssh {
439 downstream: downstream_client,
440 ..
441 } => {
442 for repo in self.repositories.values() {
443 let update = repo.read(cx).snapshot.initial_update(project_id);
444 for update in split_repository_update(update) {
445 client.send(update).log_err();
446 }
447 }
448 *downstream_client = Some((client, ProjectId(project_id)));
449 }
450 GitStoreState::Local {
451 downstream: downstream_client,
452 ..
453 } => {
454 let mut snapshots = HashMap::default();
455 let (updates_tx, mut updates_rx) = mpsc::unbounded();
456 for repo in self.repositories.values() {
457 updates_tx
458 .unbounded_send(DownstreamUpdate::UpdateRepository(
459 repo.read(cx).snapshot.clone(),
460 ))
461 .ok();
462 }
463 *downstream_client = Some(LocalDownstreamState {
464 client: client.clone(),
465 project_id: ProjectId(project_id),
466 updates_tx,
467 _task: cx.spawn(async move |this, cx| {
468 cx.background_spawn(async move {
469 while let Some(update) = updates_rx.next().await {
470 match update {
471 DownstreamUpdate::UpdateRepository(snapshot) => {
472 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
473 {
474 let update =
475 snapshot.build_update(old_snapshot, project_id);
476 *old_snapshot = snapshot;
477 for update in split_repository_update(update) {
478 client.send(update)?;
479 }
480 } else {
481 let update = snapshot.initial_update(project_id);
482 for update in split_repository_update(update) {
483 client.send(update)?;
484 }
485 snapshots.insert(snapshot.id, snapshot);
486 }
487 }
488 DownstreamUpdate::RemoveRepository(id) => {
489 client.send(proto::RemoveRepository {
490 project_id,
491 id: id.to_proto(),
492 })?;
493 }
494 }
495 }
496 anyhow::Ok(())
497 })
498 .await
499 .ok();
500 this.update(cx, |this, _| {
501 if let GitStoreState::Local {
502 downstream: downstream_client,
503 ..
504 } = &mut this.state
505 {
506 downstream_client.take();
507 } else {
508 unreachable!("unshared called on remote store");
509 }
510 })
511 }),
512 });
513 }
514 GitStoreState::Remote { .. } => {
515 debug_panic!("shared called on remote store");
516 }
517 }
518 }
519
520 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
521 match &mut self.state {
522 GitStoreState::Local {
523 downstream: downstream_client,
524 ..
525 } => {
526 downstream_client.take();
527 }
528 GitStoreState::Ssh {
529 downstream: downstream_client,
530 ..
531 } => {
532 downstream_client.take();
533 }
534 GitStoreState::Remote { .. } => {
535 debug_panic!("unshared called on remote store");
536 }
537 }
538 self.shared_diffs.clear();
539 }
540
541 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
542 self.shared_diffs.remove(peer_id);
543 }
544
545 pub fn active_repository(&self) -> Option<Entity<Repository>> {
546 self.active_repo_id
547 .as_ref()
548 .map(|id| self.repositories[&id].clone())
549 }
550
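    /// Returns the unstaged diff for a buffer (buffer contents vs. the git index),
    /// loading it if it isn't already open.
    ///
    /// A hedged usage sketch; the `git_store` and `buffer` handles and the calling
    /// context are assumptions, not code from this module:
    ///
    /// ```ignore
    /// let diff_task = git_store.update(cx, |store, cx| store.open_unstaged_diff(buffer, cx));
    /// let unstaged_diff = diff_task.await?;
    /// ```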
551 pub fn open_unstaged_diff(
552 &mut self,
553 buffer: Entity<Buffer>,
554 cx: &mut Context<Self>,
555 ) -> Task<Result<Entity<BufferDiff>>> {
556 let buffer_id = buffer.read(cx).remote_id();
557 if let Some(diff_state) = self.diffs.get(&buffer_id) {
558 if let Some(unstaged_diff) = diff_state
559 .read(cx)
560 .unstaged_diff
561 .as_ref()
562 .and_then(|weak| weak.upgrade())
563 {
564 if let Some(task) =
565 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
566 {
567 return cx.background_executor().spawn(async move {
568 task.await;
569 Ok(unstaged_diff)
570 });
571 }
572 return Task::ready(Ok(unstaged_diff));
573 }
574 }
575
576 let Some((repo, repo_path)) =
577 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
578 else {
579 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
580 };
581
582 let task = self
583 .loading_diffs
584 .entry((buffer_id, DiffKind::Unstaged))
585 .or_insert_with(|| {
586 let staged_text = repo.update(cx, |repo, cx| {
587 repo.load_staged_text(buffer_id, repo_path, cx)
588 });
589 cx.spawn(async move |this, cx| {
590 Self::open_diff_internal(
591 this,
592 DiffKind::Unstaged,
593 staged_text.await.map(DiffBasesChange::SetIndex),
594 buffer,
595 cx,
596 )
597 .await
598 .map_err(Arc::new)
599 })
600 .shared()
601 })
602 .clone();
603
604 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
605 }
606
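    /// Returns the uncommitted diff for a buffer (buffer contents vs. HEAD), loading
    /// it if necessary and wiring the unstaged diff in as its secondary diff.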
607 pub fn open_uncommitted_diff(
608 &mut self,
609 buffer: Entity<Buffer>,
610 cx: &mut Context<Self>,
611 ) -> Task<Result<Entity<BufferDiff>>> {
612 let buffer_id = buffer.read(cx).remote_id();
613
614 if let Some(diff_state) = self.diffs.get(&buffer_id) {
615 if let Some(uncommitted_diff) = diff_state
616 .read(cx)
617 .uncommitted_diff
618 .as_ref()
619 .and_then(|weak| weak.upgrade())
620 {
621 if let Some(task) =
622 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
623 {
624 return cx.background_executor().spawn(async move {
625 task.await;
626 Ok(uncommitted_diff)
627 });
628 }
629 return Task::ready(Ok(uncommitted_diff));
630 }
631 }
632
633 let Some((repo, repo_path)) =
634 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
635 else {
636 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
637 };
638
639 let task = self
640 .loading_diffs
641 .entry((buffer_id, DiffKind::Uncommitted))
642 .or_insert_with(|| {
643 let changes = repo.update(cx, |repo, cx| {
644 repo.load_committed_text(buffer_id, repo_path, cx)
645 });
646
647 cx.spawn(async move |this, cx| {
648 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
649 .await
650 .map_err(Arc::new)
651 })
652 .shared()
653 })
654 .clone();
655
656 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
657 }
658
659 async fn open_diff_internal(
660 this: WeakEntity<Self>,
661 kind: DiffKind,
662 texts: Result<DiffBasesChange>,
663 buffer_entity: Entity<Buffer>,
664 cx: &mut AsyncApp,
665 ) -> Result<Entity<BufferDiff>> {
666 let diff_bases_change = match texts {
667 Err(e) => {
668 this.update(cx, |this, cx| {
669 let buffer = buffer_entity.read(cx);
670 let buffer_id = buffer.remote_id();
671 this.loading_diffs.remove(&(buffer_id, kind));
672 })?;
673 return Err(e);
674 }
675 Ok(change) => change,
676 };
677
678 this.update(cx, |this, cx| {
679 let buffer = buffer_entity.read(cx);
680 let buffer_id = buffer.remote_id();
681 let language = buffer.language().cloned();
682 let language_registry = buffer.language_registry();
683 let text_snapshot = buffer.text_snapshot();
684 this.loading_diffs.remove(&(buffer_id, kind));
685
686 let diff_state = this
687 .diffs
688 .entry(buffer_id)
689 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
690
691 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
692
693 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
694 diff_state.update(cx, |diff_state, cx| {
695 diff_state.language = language;
696 diff_state.language_registry = language_registry;
697
698 match kind {
699 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
700 DiffKind::Uncommitted => {
701 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
702 diff
703 } else {
704 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
705 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
706 unstaged_diff
707 };
708
709 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
710 diff_state.uncommitted_diff = Some(diff.downgrade())
711 }
712 }
713
714 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
715 let rx = diff_state.wait_for_recalculation();
716
717 anyhow::Ok(async move {
718 if let Some(rx) = rx {
719 rx.await;
720 }
721 Ok(diff)
722 })
723 })
724 })??
725 .await
726 }
727
728 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
729 let diff_state = self.diffs.get(&buffer_id)?;
730 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
731 }
732
733 pub fn get_uncommitted_diff(
734 &self,
735 buffer_id: BufferId,
736 cx: &App,
737 ) -> Option<Entity<BufferDiff>> {
738 let diff_state = self.diffs.get(&buffer_id)?;
739 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
740 }
741
742 pub fn project_path_git_status(
743 &self,
744 project_path: &ProjectPath,
745 cx: &App,
746 ) -> Option<FileStatus> {
747 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
748 Some(repo.read(cx).status_for_path(&repo_path)?.status)
749 }
750
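    /// Captures a checkpoint of every repository in the store, keyed by work
    /// directory path.
    ///
    /// A minimal sketch of pairing this with [`Self::restore_checkpoint`]; the
    /// entity handle and contexts are assumptions, not code from this module:
    ///
    /// ```ignore
    /// let checkpoint = git_store.update(cx, |store, cx| store.checkpoint(cx)).await?;
    /// // ...perform some operation that may need to be rolled back...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))
    ///     .await?;
    /// ```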
751 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
752 let mut work_directory_abs_paths = Vec::new();
753 let mut checkpoints = Vec::new();
754 for repository in self.repositories.values() {
755 repository.update(cx, |repository, _| {
756 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
757 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
758 });
759 }
760
761 cx.background_executor().spawn(async move {
762 let checkpoints = future::try_join_all(checkpoints).await?;
763 Ok(GitStoreCheckpoint {
764 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
765 .into_iter()
766 .zip(checkpoints)
767 .collect(),
768 })
769 })
770 }
771
772 pub fn restore_checkpoint(
773 &self,
774 checkpoint: GitStoreCheckpoint,
775 cx: &mut App,
776 ) -> Task<Result<()>> {
777 let repositories_by_work_dir_abs_path = self
778 .repositories
779 .values()
780 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
781 .collect::<HashMap<_, _>>();
782
783 let mut tasks = Vec::new();
784 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
785 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
786 let restore = repository.update(cx, |repository, _| {
787 repository.restore_checkpoint(checkpoint)
788 });
789 tasks.push(async move { restore.await? });
790 }
791 }
792 cx.background_spawn(async move {
793 future::try_join_all(tasks).await?;
794 Ok(())
795 })
796 }
797
798 /// Compares two checkpoints, returning true if they are equal.
799 pub fn compare_checkpoints(
800 &self,
801 left: GitStoreCheckpoint,
802 mut right: GitStoreCheckpoint,
803 cx: &mut App,
804 ) -> Task<Result<bool>> {
805 let repositories_by_work_dir_abs_path = self
806 .repositories
807 .values()
808 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
809 .collect::<HashMap<_, _>>();
810
811 let mut tasks = Vec::new();
812 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
813 if let Some(right_checkpoint) = right
814 .checkpoints_by_work_dir_abs_path
815 .remove(&work_dir_abs_path)
816 {
817 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
818 {
819 let compare = repository.update(cx, |repository, _| {
820 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
821 });
822
823 tasks.push(async move { compare.await? });
824 }
825 } else {
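                // The right checkpoint has no entry for this work directory, so the
                // two checkpoints cannot be equal.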
826 return Task::ready(Ok(false));
827 }
828 }
829 cx.background_spawn(async move {
830 Ok(future::try_join_all(tasks)
831 .await?
832 .into_iter()
833 .all(|result| result))
834 })
835 }
836
837 pub fn delete_checkpoint(
838 &self,
839 checkpoint: GitStoreCheckpoint,
840 cx: &mut App,
841 ) -> Task<Result<()>> {
842 let repositories_by_work_directory_abs_path = self
843 .repositories
844 .values()
845 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
846 .collect::<HashMap<_, _>>();
847
848 let mut tasks = Vec::new();
849 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
850 if let Some(repository) =
851 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
852 {
853 let delete = repository.update(cx, |this, _| this.delete_checkpoint(checkpoint));
854 tasks.push(async move { delete.await? });
855 }
856 }
857 cx.background_spawn(async move {
858 future::try_join_all(tasks).await?;
859 Ok(())
860 })
861 }
862
863 /// Blames a buffer.
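    ///
    /// A hedged usage sketch (the entity handle, buffer, and calling context are
    /// assumptions, not code from this module):
    ///
    /// ```ignore
    /// let blame = git_store.update(cx, |store, cx| store.blame_buffer(&buffer, None, cx));
    /// if let Some(blame) = blame.await? {
    ///     // inspect the blame entries for the buffer...
    /// }
    /// ```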
864 pub fn blame_buffer(
865 &self,
866 buffer: &Entity<Buffer>,
867 version: Option<clock::Global>,
868 cx: &mut App,
869 ) -> Task<Result<Option<Blame>>> {
870 let buffer = buffer.read(cx);
871 let Some((repo, repo_path)) =
872 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
873 else {
874 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
875 };
876 let content = match &version {
877 Some(version) => buffer.rope_for_version(version).clone(),
878 None => buffer.as_rope().clone(),
879 };
880 let version = version.unwrap_or(buffer.version());
881 let buffer_id = buffer.remote_id();
882
883 let rx = repo.update(cx, |repo, _| {
884 repo.send_job(None, move |state, _| async move {
885 match state {
886 RepositoryState::Local { backend, .. } => backend
887 .blame(repo_path.clone(), content)
888 .await
889 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
890 .map(Some),
891 RepositoryState::Remote { project_id, client } => {
892 let response = client
893 .request(proto::BlameBuffer {
894 project_id: project_id.to_proto(),
895 buffer_id: buffer_id.into(),
896 version: serialize_version(&version),
897 })
898 .await?;
899 Ok(deserialize_blame_buffer_response(response))
900 }
901 }
902 })
903 });
904
905 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
906 }
907
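    /// Builds a permalink URL to the given line range of a buffer, based on the
    /// repository's remote URL and current HEAD.
    ///
    /// Illustrative only; the surrounding handles are assumptions:
    ///
    /// ```ignore
    /// let permalink = git_store
    ///     .update(cx, |store, cx| store.get_permalink_to_line(&buffer, 10..20, cx))
    ///     .await?;
    /// ```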
908 pub fn get_permalink_to_line(
909 &self,
910 buffer: &Entity<Buffer>,
911 selection: Range<u32>,
912 cx: &mut App,
913 ) -> Task<Result<url::Url>> {
914 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
915 return Task::ready(Err(anyhow!("buffer has no file")));
916 };
917
918 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
919 &(file.worktree.read(cx).id(), file.path.clone()).into(),
920 cx,
921 ) else {
922 // If we're not in a Git repo, check whether this is a Rust source
923 // file in the Cargo registry (presumably opened with go-to-definition
924 // from a normal Rust file). If so, we can put together a permalink
925 // using crate metadata.
926 if buffer
927 .read(cx)
928 .language()
929 .is_none_or(|lang| lang.name() != "Rust".into())
930 {
931 return Task::ready(Err(anyhow!("no permalink available")));
932 }
933 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
934 return Task::ready(Err(anyhow!("no permalink available")));
935 };
936 return cx.spawn(async move |cx| {
937 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
938 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
939 .map_err(|_| anyhow!("no permalink available"))
940 });
941
942 // TODO remote case
943 };
944
945 let buffer_id = buffer.read(cx).remote_id();
946 let branch = repo.read(cx).branch.clone();
947 let remote = branch
948 .as_ref()
949 .and_then(|b| b.upstream.as_ref())
950 .and_then(|b| b.remote_name())
951 .unwrap_or("origin")
952 .to_string();
953
954 let rx = repo.update(cx, |repo, _| {
955 repo.send_job(None, move |state, cx| async move {
956 match state {
957 RepositoryState::Local { backend, .. } => {
958 let origin_url = backend
959 .remote_url(&remote)
960 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
961
962 let sha = backend
963 .head_sha()
964 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
965
966 let provider_registry =
967 cx.update(GitHostingProviderRegistry::default_global)?;
968
969 let (provider, remote) =
970 parse_git_remote_url(provider_registry, &origin_url)
971 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
972
973 let path = repo_path
974 .to_str()
975 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
976
977 Ok(provider.build_permalink(
978 remote,
979 BuildPermalinkParams {
980 sha: &sha,
981 path,
982 selection: Some(selection),
983 },
984 ))
985 }
986 RepositoryState::Remote { project_id, client } => {
987 let response = client
988 .request(proto::GetPermalinkToLine {
989 project_id: project_id.to_proto(),
990 buffer_id: buffer_id.into(),
991 selection: Some(proto::Range {
992 start: selection.start as u64,
993 end: selection.end as u64,
994 }),
995 })
996 .await?;
997
998 url::Url::parse(&response.permalink).context("failed to parse permalink")
999 }
1000 }
1001 })
1002 });
1003 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1004 }
1005
1006 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1007 match &self.state {
1008 GitStoreState::Local {
1009 downstream: downstream_client,
1010 ..
1011 } => downstream_client
1012 .as_ref()
1013 .map(|state| (state.client.clone(), state.project_id)),
1014 GitStoreState::Ssh {
1015 downstream: downstream_client,
1016 ..
1017 } => downstream_client.clone(),
1018 GitStoreState::Remote { .. } => None,
1019 }
1020 }
1021
1022 fn upstream_client(&self) -> Option<AnyProtoClient> {
1023 match &self.state {
1024 GitStoreState::Local { .. } => None,
1025 GitStoreState::Ssh {
1026 upstream_client, ..
1027 }
1028 | GitStoreState::Remote {
1029 upstream_client, ..
1030 } => Some(upstream_client.clone()),
1031 }
1032 }
1033
1034 fn on_worktree_store_event(
1035 &mut self,
1036 worktree_store: Entity<WorktreeStore>,
1037 event: &WorktreeStoreEvent,
1038 cx: &mut Context<Self>,
1039 ) {
1040 let GitStoreState::Local {
1041 project_environment,
1042 downstream,
1043 next_repository_id,
1044 fs,
1045 } = &self.state
1046 else {
1047 return;
1048 };
1049
1050 match event {
1051 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1052 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
1053 for (relative_path, _, _) in updated_entries.iter() {
1054 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1055 &(*worktree_id, relative_path.clone()).into(),
1056 cx,
1057 ) else {
1058 continue;
1059 };
1060 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1061 }
1062
1063 for (repo, paths) in paths_by_git_repo {
1064 repo.update(cx, |repo, cx| {
1065 repo.paths_changed(
1066 paths,
1067 downstream
1068 .as_ref()
1069 .map(|downstream| downstream.updates_tx.clone()),
1070 cx,
1071 );
1072 });
1073 }
1074 }
1075 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1076 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1077 else {
1078 return;
1079 };
1080 if !worktree.read(cx).is_visible() {
1081 log::debug!(
1082 "not adding repositories for local worktree {:?} because it's not visible",
1083 worktree.read(cx).abs_path()
1084 );
1085 return;
1086 }
1087 self.update_repositories_from_worktree(
1088 project_environment.clone(),
1089 next_repository_id.clone(),
1090 downstream
1091 .as_ref()
1092 .map(|downstream| downstream.updates_tx.clone()),
1093 changed_repos.clone(),
1094 fs.clone(),
1095 cx,
1096 );
1097 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1098 }
1099 _ => {}
1100 }
1101 }
1102
1103 fn on_repository_event(
1104 &mut self,
1105 repo: Entity<Repository>,
1106 event: &RepositoryEvent,
1107 cx: &mut Context<Self>,
1108 ) {
1109 let id = repo.read(cx).id;
1110 cx.emit(GitStoreEvent::RepositoryUpdated(
1111 id,
1112 event.clone(),
1113 self.active_repo_id == Some(id),
1114 ))
1115 }
1116
1117 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1118 cx.emit(GitStoreEvent::JobsUpdated)
1119 }
1120
    /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1122 fn update_repositories_from_worktree(
1123 &mut self,
1124 project_environment: Entity<ProjectEnvironment>,
1125 next_repository_id: Arc<AtomicU64>,
1126 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1127 updated_git_repositories: UpdatedGitRepositoriesSet,
1128 fs: Arc<dyn Fs>,
1129 cx: &mut Context<Self>,
1130 ) {
1131 let mut removed_ids = Vec::new();
1132 for update in updated_git_repositories.iter() {
1133 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1134 let existing_work_directory_abs_path =
1135 repo.read(cx).work_directory_abs_path.clone();
1136 Some(&existing_work_directory_abs_path)
1137 == update.old_work_directory_abs_path.as_ref()
1138 || Some(&existing_work_directory_abs_path)
1139 == update.new_work_directory_abs_path.as_ref()
1140 }) {
1141 if let Some(new_work_directory_abs_path) =
1142 update.new_work_directory_abs_path.clone()
1143 {
1144 existing.update(cx, |existing, cx| {
1145 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1146 existing.schedule_scan(updates_tx.clone(), cx);
1147 });
1148 } else {
1149 removed_ids.push(*id);
1150 }
1151 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1152 .new_work_directory_abs_path
1153 .clone()
1154 .zip(update.dot_git_abs_path.clone())
1155 {
1156 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1157 let git_store = cx.weak_entity();
1158 let repo = cx.new(|cx| {
1159 let mut repo = Repository::local(
1160 id,
1161 work_directory_abs_path,
1162 dot_git_abs_path,
1163 project_environment.downgrade(),
1164 fs.clone(),
1165 git_store,
1166 cx,
1167 );
1168 repo.schedule_scan(updates_tx.clone(), cx);
1169 repo
1170 });
1171 self._subscriptions
1172 .push(cx.subscribe(&repo, Self::on_repository_event));
1173 self._subscriptions
1174 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1175 self.repositories.insert(id, repo);
1176 cx.emit(GitStoreEvent::RepositoryAdded(id));
1177 self.active_repo_id.get_or_insert_with(|| {
1178 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1179 id
1180 });
1181 }
1182 }
1183
1184 for id in removed_ids {
1185 if self.active_repo_id == Some(id) {
1186 self.active_repo_id = None;
1187 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1188 }
1189 self.repositories.remove(&id);
1190 if let Some(updates_tx) = updates_tx.as_ref() {
1191 updates_tx
1192 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1193 .ok();
1194 }
1195 }
1196 }
1197
1198 fn on_buffer_store_event(
1199 &mut self,
1200 _: Entity<BufferStore>,
1201 event: &BufferStoreEvent,
1202 cx: &mut Context<Self>,
1203 ) {
1204 match event {
1205 BufferStoreEvent::BufferAdded(buffer) => {
1206 cx.subscribe(&buffer, |this, buffer, event, cx| {
1207 if let BufferEvent::LanguageChanged = event {
1208 let buffer_id = buffer.read(cx).remote_id();
1209 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1210 diff_state.update(cx, |diff_state, cx| {
1211 diff_state.buffer_language_changed(buffer, cx);
1212 });
1213 }
1214 }
1215 })
1216 .detach();
1217 }
1218 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1219 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1220 diffs.remove(buffer_id);
1221 }
1222 }
1223 BufferStoreEvent::BufferDropped(buffer_id) => {
1224 self.diffs.remove(&buffer_id);
1225 for diffs in self.shared_diffs.values_mut() {
1226 diffs.remove(buffer_id);
1227 }
1228 }
1229
1230 _ => {}
1231 }
1232 }
1233
1234 pub fn recalculate_buffer_diffs(
1235 &mut self,
1236 buffers: Vec<Entity<Buffer>>,
1237 cx: &mut Context<Self>,
1238 ) -> impl Future<Output = ()> + use<> {
1239 let mut futures = Vec::new();
1240 for buffer in buffers {
1241 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1242 let buffer = buffer.read(cx).text_snapshot();
1243 diff_state.update(cx, |diff_state, cx| {
1244 diff_state.recalculate_diffs(buffer, cx);
1245 futures.extend(diff_state.wait_for_recalculation());
1246 });
1247 }
1248 }
1249 async move {
1250 futures::future::join_all(futures).await;
1251 }
1252 }
1253
1254 fn on_buffer_diff_event(
1255 &mut self,
1256 diff: Entity<buffer_diff::BufferDiff>,
1257 event: &BufferDiffEvent,
1258 cx: &mut Context<Self>,
1259 ) {
1260 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1261 let buffer_id = diff.read(cx).buffer_id;
1262 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1263 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1264 diff_state.hunk_staging_operation_count += 1;
1265 diff_state.hunk_staging_operation_count
1266 });
1267 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1268 let recv = repo.update(cx, |repo, cx| {
1269 log::debug!("hunks changed for {}", path.display());
1270 repo.spawn_set_index_text_job(
1271 path,
1272 new_index_text.as_ref().map(|rope| rope.to_string()),
1273 hunk_staging_operation_count,
1274 cx,
1275 )
1276 });
1277 let diff = diff.downgrade();
1278 cx.spawn(async move |this, cx| {
1279 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1280 diff.update(cx, |diff, cx| {
1281 diff.clear_pending_hunks(cx);
1282 })
1283 .ok();
1284 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1285 .ok();
1286 }
1287 })
1288 .detach();
1289 }
1290 }
1291 }
1292 }
1293
1294 fn local_worktree_git_repos_changed(
1295 &mut self,
1296 worktree: Entity<Worktree>,
1297 changed_repos: &UpdatedGitRepositoriesSet,
1298 cx: &mut Context<Self>,
1299 ) {
1300 log::debug!("local worktree repos changed");
1301 debug_assert!(worktree.read(cx).is_local());
1302
1303 for repository in self.repositories.values() {
1304 repository.update(cx, |repository, cx| {
1305 let repo_abs_path = &repository.work_directory_abs_path;
1306 if changed_repos.iter().any(|update| {
1307 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1308 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1309 }) {
1310 repository.reload_buffer_diff_bases(cx);
1311 }
1312 });
1313 }
1314 }
1315
1316 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1317 &self.repositories
1318 }
1319
1320 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1321 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1322 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1323 Some(status.status)
1324 }
1325
1326 pub fn repository_and_path_for_buffer_id(
1327 &self,
1328 buffer_id: BufferId,
1329 cx: &App,
1330 ) -> Option<(Entity<Repository>, RepoPath)> {
1331 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1332 let project_path = buffer.read(cx).project_path(cx)?;
1333 self.repository_and_path_for_project_path(&project_path, cx)
1334 }
1335
1336 pub fn repository_and_path_for_project_path(
1337 &self,
1338 path: &ProjectPath,
1339 cx: &App,
1340 ) -> Option<(Entity<Repository>, RepoPath)> {
1341 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1342 self.repositories
1343 .values()
1344 .filter_map(|repo| {
1345 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1346 Some((repo.clone(), repo_path))
1347 })
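            // When nested repositories both contain the path, prefer the innermost
            // one: the longest work directory path sorts greatest.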
1348 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1349 }
1350
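    /// Initializes a new git repository at `path`, either directly on the local
    /// filesystem or by forwarding the request to the upstream project.
    ///
    /// A hedged sketch of a call site, mirroring how the proto handler invokes it:
    ///
    /// ```ignore
    /// git_store
    ///     .read(cx)
    ///     .git_init(path, "main".to_string(), cx)
    ///     .await?;
    /// ```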
1351 pub fn git_init(
1352 &self,
1353 path: Arc<Path>,
1354 fallback_branch_name: String,
1355 cx: &App,
1356 ) -> Task<Result<()>> {
1357 match &self.state {
1358 GitStoreState::Local { fs, .. } => {
1359 let fs = fs.clone();
1360 cx.background_executor()
1361 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1362 }
1363 GitStoreState::Ssh {
1364 upstream_client,
1365 upstream_project_id: project_id,
1366 ..
1367 }
1368 | GitStoreState::Remote {
1369 upstream_client,
1370 upstream_project_id: project_id,
1371 ..
1372 } => {
1373 let client = upstream_client.clone();
1374 let project_id = *project_id;
1375 cx.background_executor().spawn(async move {
1376 client
1377 .request(proto::GitInit {
1378 project_id: project_id.0,
1379 abs_path: path.to_string_lossy().to_string(),
1380 fallback_branch_name,
1381 })
1382 .await?;
1383 Ok(())
1384 })
1385 }
1386 }
1387 }
1388
1389 async fn handle_update_repository(
1390 this: Entity<Self>,
1391 envelope: TypedEnvelope<proto::UpdateRepository>,
1392 mut cx: AsyncApp,
1393 ) -> Result<()> {
1394 this.update(&mut cx, |this, cx| {
1395 let mut update = envelope.payload;
1396
1397 let id = RepositoryId::from_proto(update.id);
1398 let client = this
1399 .upstream_client()
1400 .context("no upstream client")?
1401 .clone();
1402
1403 let mut is_new = false;
1404 let repo = this.repositories.entry(id).or_insert_with(|| {
1405 is_new = true;
1406 let git_store = cx.weak_entity();
1407 cx.new(|cx| {
1408 Repository::remote(
1409 id,
1410 Path::new(&update.abs_path).into(),
1411 ProjectId(update.project_id),
1412 client,
1413 git_store,
1414 cx,
1415 )
1416 })
1417 });
1418 if is_new {
1419 this._subscriptions
1420 .push(cx.subscribe(&repo, Self::on_repository_event))
1421 }
1422
1423 repo.update(cx, {
1424 let update = update.clone();
1425 |repo, cx| repo.apply_remote_update(update, cx)
1426 })?;
1427
1428 this.active_repo_id.get_or_insert_with(|| {
1429 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1430 id
1431 });
1432
1433 if let Some((client, project_id)) = this.downstream_client() {
1434 update.project_id = project_id.to_proto();
1435 client.send(update).log_err();
1436 }
1437 Ok(())
1438 })?
1439 }
1440
1441 async fn handle_remove_repository(
1442 this: Entity<Self>,
1443 envelope: TypedEnvelope<proto::RemoveRepository>,
1444 mut cx: AsyncApp,
1445 ) -> Result<()> {
1446 this.update(&mut cx, |this, cx| {
1447 let mut update = envelope.payload;
1448 let id = RepositoryId::from_proto(update.id);
1449 this.repositories.remove(&id);
1450 if let Some((client, project_id)) = this.downstream_client() {
1451 update.project_id = project_id.to_proto();
1452 client.send(update).log_err();
1453 }
1454 if this.active_repo_id == Some(id) {
1455 this.active_repo_id = None;
1456 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1457 }
1458 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1459 })
1460 }
1461
1462 async fn handle_git_init(
1463 this: Entity<Self>,
1464 envelope: TypedEnvelope<proto::GitInit>,
1465 cx: AsyncApp,
1466 ) -> Result<proto::Ack> {
1467 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1468 let name = envelope.payload.fallback_branch_name;
1469 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1470 .await?;
1471
1472 Ok(proto::Ack {})
1473 }
1474
1475 async fn handle_fetch(
1476 this: Entity<Self>,
1477 envelope: TypedEnvelope<proto::Fetch>,
1478 mut cx: AsyncApp,
1479 ) -> Result<proto::RemoteMessageResponse> {
1480 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1481 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1482 let askpass_id = envelope.payload.askpass_id;
1483
1484 let askpass = make_remote_delegate(
1485 this,
1486 envelope.payload.project_id,
1487 repository_id,
1488 askpass_id,
1489 &mut cx,
1490 );
1491
1492 let remote_output = repository_handle
1493 .update(&mut cx, |repository_handle, cx| {
1494 repository_handle.fetch(askpass, cx)
1495 })?
1496 .await??;
1497
1498 Ok(proto::RemoteMessageResponse {
1499 stdout: remote_output.stdout,
1500 stderr: remote_output.stderr,
1501 })
1502 }
1503
1504 async fn handle_push(
1505 this: Entity<Self>,
1506 envelope: TypedEnvelope<proto::Push>,
1507 mut cx: AsyncApp,
1508 ) -> Result<proto::RemoteMessageResponse> {
1509 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1510 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1511
1512 let askpass_id = envelope.payload.askpass_id;
1513 let askpass = make_remote_delegate(
1514 this,
1515 envelope.payload.project_id,
1516 repository_id,
1517 askpass_id,
1518 &mut cx,
1519 );
1520
1521 let options = envelope
1522 .payload
1523 .options
1524 .as_ref()
1525 .map(|_| match envelope.payload.options() {
1526 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1527 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1528 });
1529
1530 let branch_name = envelope.payload.branch_name.into();
1531 let remote_name = envelope.payload.remote_name.into();
1532
1533 let remote_output = repository_handle
1534 .update(&mut cx, |repository_handle, cx| {
1535 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1536 })?
1537 .await??;
1538 Ok(proto::RemoteMessageResponse {
1539 stdout: remote_output.stdout,
1540 stderr: remote_output.stderr,
1541 })
1542 }
1543
1544 async fn handle_pull(
1545 this: Entity<Self>,
1546 envelope: TypedEnvelope<proto::Pull>,
1547 mut cx: AsyncApp,
1548 ) -> Result<proto::RemoteMessageResponse> {
1549 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1550 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1551 let askpass_id = envelope.payload.askpass_id;
1552 let askpass = make_remote_delegate(
1553 this,
1554 envelope.payload.project_id,
1555 repository_id,
1556 askpass_id,
1557 &mut cx,
1558 );
1559
1560 let branch_name = envelope.payload.branch_name.into();
1561 let remote_name = envelope.payload.remote_name.into();
1562
1563 let remote_message = repository_handle
1564 .update(&mut cx, |repository_handle, cx| {
1565 repository_handle.pull(branch_name, remote_name, askpass, cx)
1566 })?
1567 .await??;
1568
1569 Ok(proto::RemoteMessageResponse {
1570 stdout: remote_message.stdout,
1571 stderr: remote_message.stderr,
1572 })
1573 }
1574
1575 async fn handle_stage(
1576 this: Entity<Self>,
1577 envelope: TypedEnvelope<proto::Stage>,
1578 mut cx: AsyncApp,
1579 ) -> Result<proto::Ack> {
1580 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1581 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1582
1583 let entries = envelope
1584 .payload
1585 .paths
1586 .into_iter()
1587 .map(PathBuf::from)
1588 .map(RepoPath::new)
1589 .collect();
1590
1591 repository_handle
1592 .update(&mut cx, |repository_handle, cx| {
1593 repository_handle.stage_entries(entries, cx)
1594 })?
1595 .await?;
1596 Ok(proto::Ack {})
1597 }
1598
1599 async fn handle_unstage(
1600 this: Entity<Self>,
1601 envelope: TypedEnvelope<proto::Unstage>,
1602 mut cx: AsyncApp,
1603 ) -> Result<proto::Ack> {
1604 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1605 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1606
1607 let entries = envelope
1608 .payload
1609 .paths
1610 .into_iter()
1611 .map(PathBuf::from)
1612 .map(RepoPath::new)
1613 .collect();
1614
1615 repository_handle
1616 .update(&mut cx, |repository_handle, cx| {
1617 repository_handle.unstage_entries(entries, cx)
1618 })?
1619 .await?;
1620
1621 Ok(proto::Ack {})
1622 }
1623
1624 async fn handle_set_index_text(
1625 this: Entity<Self>,
1626 envelope: TypedEnvelope<proto::SetIndexText>,
1627 mut cx: AsyncApp,
1628 ) -> Result<proto::Ack> {
1629 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1630 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1631 let repo_path = RepoPath::from_str(&envelope.payload.path);
1632
1633 let hunk_staging_operation_count = this
1634 .read_with(&cx, |this, cx| {
1635 let project_path = repository_handle
1636 .read(cx)
1637 .repo_path_to_project_path(&repo_path, cx)?;
1638 let buffer_id = this
1639 .buffer_store
1640 .read(cx)
1641 .buffer_id_for_project_path(&project_path)?;
1642 let diff_state = this.diffs.get(buffer_id)?;
1643 Some(diff_state.read(cx).hunk_staging_operation_count)
1644 })?
1645 .ok_or_else(|| anyhow!("unknown buffer"))?;
1646
1647 repository_handle
1648 .update(&mut cx, |repository_handle, cx| {
1649 repository_handle.spawn_set_index_text_job(
1650 repo_path,
1651 envelope.payload.text,
1652 hunk_staging_operation_count,
1653 cx,
1654 )
1655 })?
1656 .await??;
1657 Ok(proto::Ack {})
1658 }
1659
1660 async fn handle_commit(
1661 this: Entity<Self>,
1662 envelope: TypedEnvelope<proto::Commit>,
1663 mut cx: AsyncApp,
1664 ) -> Result<proto::Ack> {
1665 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1666 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1667
1668 let message = SharedString::from(envelope.payload.message);
1669 let name = envelope.payload.name.map(SharedString::from);
1670 let email = envelope.payload.email.map(SharedString::from);
1671
1672 repository_handle
1673 .update(&mut cx, |repository_handle, cx| {
1674 repository_handle.commit(message, name.zip(email), cx)
1675 })?
1676 .await??;
1677 Ok(proto::Ack {})
1678 }
1679
1680 async fn handle_get_remotes(
1681 this: Entity<Self>,
1682 envelope: TypedEnvelope<proto::GetRemotes>,
1683 mut cx: AsyncApp,
1684 ) -> Result<proto::GetRemotesResponse> {
1685 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1686 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1687
1688 let branch_name = envelope.payload.branch_name;
1689
1690 let remotes = repository_handle
1691 .update(&mut cx, |repository_handle, _| {
1692 repository_handle.get_remotes(branch_name)
1693 })?
1694 .await??;
1695
1696 Ok(proto::GetRemotesResponse {
1697 remotes: remotes
1698 .into_iter()
1699 .map(|remotes| proto::get_remotes_response::Remote {
1700 name: remotes.name.to_string(),
1701 })
1702 .collect::<Vec<_>>(),
1703 })
1704 }
1705
1706 async fn handle_get_branches(
1707 this: Entity<Self>,
1708 envelope: TypedEnvelope<proto::GitGetBranches>,
1709 mut cx: AsyncApp,
1710 ) -> Result<proto::GitBranchesResponse> {
1711 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1712 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1713
1714 let branches = repository_handle
1715 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1716 .await??;
1717
1718 Ok(proto::GitBranchesResponse {
1719 branches: branches
1720 .into_iter()
1721 .map(|branch| branch_to_proto(&branch))
1722 .collect::<Vec<_>>(),
1723 })
1724 }
1725 async fn handle_create_branch(
1726 this: Entity<Self>,
1727 envelope: TypedEnvelope<proto::GitCreateBranch>,
1728 mut cx: AsyncApp,
1729 ) -> Result<proto::Ack> {
1730 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1731 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1732 let branch_name = envelope.payload.branch_name;
1733
1734 repository_handle
1735 .update(&mut cx, |repository_handle, _| {
1736 repository_handle.create_branch(branch_name)
1737 })?
1738 .await??;
1739
1740 Ok(proto::Ack {})
1741 }
1742
1743 async fn handle_change_branch(
1744 this: Entity<Self>,
1745 envelope: TypedEnvelope<proto::GitChangeBranch>,
1746 mut cx: AsyncApp,
1747 ) -> Result<proto::Ack> {
1748 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1749 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1750 let branch_name = envelope.payload.branch_name;
1751
1752 repository_handle
1753 .update(&mut cx, |repository_handle, _| {
1754 repository_handle.change_branch(branch_name)
1755 })?
1756 .await??;
1757
1758 Ok(proto::Ack {})
1759 }
1760
1761 async fn handle_show(
1762 this: Entity<Self>,
1763 envelope: TypedEnvelope<proto::GitShow>,
1764 mut cx: AsyncApp,
1765 ) -> Result<proto::GitCommitDetails> {
1766 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1767 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1768
1769 let commit = repository_handle
1770 .update(&mut cx, |repository_handle, _| {
1771 repository_handle.show(envelope.payload.commit)
1772 })?
1773 .await??;
1774 Ok(proto::GitCommitDetails {
1775 sha: commit.sha.into(),
1776 message: commit.message.into(),
1777 commit_timestamp: commit.commit_timestamp,
1778 author_email: commit.author_email.into(),
1779 author_name: commit.author_name.into(),
1780 })
1781 }
1782
1783 async fn handle_load_commit_diff(
1784 this: Entity<Self>,
1785 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1786 mut cx: AsyncApp,
1787 ) -> Result<proto::LoadCommitDiffResponse> {
1788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1790
1791 let commit_diff = repository_handle
1792 .update(&mut cx, |repository_handle, _| {
1793 repository_handle.load_commit_diff(envelope.payload.commit)
1794 })?
1795 .await??;
1796 Ok(proto::LoadCommitDiffResponse {
1797 files: commit_diff
1798 .files
1799 .into_iter()
1800 .map(|file| proto::CommitFile {
1801 path: file.path.to_string(),
1802 old_text: file.old_text,
1803 new_text: file.new_text,
1804 })
1805 .collect(),
1806 })
1807 }
1808
1809 async fn handle_reset(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::GitReset>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::Ack> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816
1817 let mode = match envelope.payload.mode() {
1818 git_reset::ResetMode::Soft => ResetMode::Soft,
1819 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1820 };
1821
1822 repository_handle
1823 .update(&mut cx, |repository_handle, cx| {
1824 repository_handle.reset(envelope.payload.commit, mode, cx)
1825 })?
1826 .await??;
1827 Ok(proto::Ack {})
1828 }
1829
1830 async fn handle_checkout_files(
1831 this: Entity<Self>,
1832 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1833 mut cx: AsyncApp,
1834 ) -> Result<proto::Ack> {
1835 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1836 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1837 let paths = envelope
1838 .payload
1839 .paths
1840 .iter()
1841 .map(|s| RepoPath::from_str(s))
1842 .collect();
1843
1844 repository_handle
1845 .update(&mut cx, |repository_handle, cx| {
1846 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1847 })?
1848 .await??;
1849 Ok(proto::Ack {})
1850 }
1851
1852 async fn handle_open_commit_message_buffer(
1853 this: Entity<Self>,
1854 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1855 mut cx: AsyncApp,
1856 ) -> Result<proto::OpenBufferResponse> {
1857 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1858 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1859 let buffer = repository
1860 .update(&mut cx, |repository, cx| {
1861 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1862 })?
1863 .await?;
1864
1865 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1866 this.update(&mut cx, |this, cx| {
1867 this.buffer_store.update(cx, |buffer_store, cx| {
1868 buffer_store
1869 .create_buffer_for_peer(
1870 &buffer,
1871 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1872 cx,
1873 )
1874 .detach_and_log_err(cx);
1875 })
1876 })?;
1877
1878 Ok(proto::OpenBufferResponse {
1879 buffer_id: buffer_id.to_proto(),
1880 })
1881 }
1882
1883 async fn handle_askpass(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::AskPassRequest>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::AskPassResponse> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890
1891 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1892 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1893 debug_panic!("no askpass found");
1894 return Err(anyhow::anyhow!("no askpass found"));
1895 };
1896
1897 let response = askpass.ask_password(envelope.payload.prompt).await?;
1898
1899 delegates
1900 .lock()
1901 .insert(envelope.payload.askpass_id, askpass);
1902
1903 Ok(proto::AskPassResponse { response })
1904 }
1905
1906 async fn handle_check_for_pushed_commits(
1907 this: Entity<Self>,
1908 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1909 mut cx: AsyncApp,
1910 ) -> Result<proto::CheckForPushedCommitsResponse> {
1911 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1912 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1913
1914 let branches = repository_handle
1915 .update(&mut cx, |repository_handle, _| {
1916 repository_handle.check_for_pushed_commits()
1917 })?
1918 .await??;
1919 Ok(proto::CheckForPushedCommitsResponse {
1920 pushed_to: branches
1921 .into_iter()
1922 .map(|commit| commit.to_string())
1923 .collect(),
1924 })
1925 }
1926
1927 async fn handle_git_diff(
1928 this: Entity<Self>,
1929 envelope: TypedEnvelope<proto::GitDiff>,
1930 mut cx: AsyncApp,
1931 ) -> Result<proto::GitDiffResponse> {
1932 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1933 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1934 let diff_type = match envelope.payload.diff_type() {
1935 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
1936 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
1937 };
1938
1939 let mut diff = repository_handle
1940 .update(&mut cx, |repository_handle, cx| {
1941 repository_handle.diff(diff_type, cx)
1942 })?
1943 .await??;
1944 const ONE_MB: usize = 1_000_000;
1945 if diff.len() > ONE_MB {
1946 diff = diff.chars().take(ONE_MB).collect()
1947 }
1948
1949 Ok(proto::GitDiffResponse { diff })
1950 }
1951
1952 async fn handle_open_unstaged_diff(
1953 this: Entity<Self>,
1954 request: TypedEnvelope<proto::OpenUnstagedDiff>,
1955 mut cx: AsyncApp,
1956 ) -> Result<proto::OpenUnstagedDiffResponse> {
1957 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1958 let diff = this
1959 .update(&mut cx, |this, cx| {
1960 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1961 Some(this.open_unstaged_diff(buffer, cx))
1962 })?
1963 .ok_or_else(|| anyhow!("no such buffer"))?
1964 .await?;
1965 this.update(&mut cx, |this, _| {
1966 let shared_diffs = this
1967 .shared_diffs
1968 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1969 .or_default();
1970 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
1971 })?;
1972 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
1973 Ok(proto::OpenUnstagedDiffResponse { staged_text })
1974 }
1975
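    /// Opens the uncommitted diff for a buffer on behalf of a remote peer and
    /// replies with the base texts: the committed text when HEAD contains the
    /// file, and the staged text unless the index snapshot is the same as the
    /// HEAD snapshot, in which case `Mode::IndexMatchesHead` is reported
    /// instead.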
1976 async fn handle_open_uncommitted_diff(
1977 this: Entity<Self>,
1978 request: TypedEnvelope<proto::OpenUncommittedDiff>,
1979 mut cx: AsyncApp,
1980 ) -> Result<proto::OpenUncommittedDiffResponse> {
1981 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1982 let diff = this
1983 .update(&mut cx, |this, cx| {
1984 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1985 Some(this.open_uncommitted_diff(buffer, cx))
1986 })?
1987 .ok_or_else(|| anyhow!("no such buffer"))?
1988 .await?;
1989 this.update(&mut cx, |this, _| {
1990 let shared_diffs = this
1991 .shared_diffs
1992 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1993 .or_default();
1994 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
1995 })?;
1996 diff.read_with(&cx, |diff, cx| {
1997 use proto::open_uncommitted_diff_response::Mode;
1998
1999 let unstaged_diff = diff.secondary_diff();
2000 let index_snapshot = unstaged_diff.and_then(|diff| {
2001 let diff = diff.read(cx);
2002 diff.base_text_exists().then(|| diff.base_text())
2003 });
2004
2005 let mode;
2006 let staged_text;
2007 let committed_text;
2008 if diff.base_text_exists() {
2009 let committed_snapshot = diff.base_text();
2010 committed_text = Some(committed_snapshot.text());
2011 if let Some(index_text) = index_snapshot {
2012 if index_text.remote_id() == committed_snapshot.remote_id() {
2013 mode = Mode::IndexMatchesHead;
2014 staged_text = None;
2015 } else {
2016 mode = Mode::IndexAndHead;
2017 staged_text = Some(index_text.text());
2018 }
2019 } else {
2020 mode = Mode::IndexAndHead;
2021 staged_text = None;
2022 }
2023 } else {
2024 mode = Mode::IndexAndHead;
2025 committed_text = None;
2026 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2027 }
2028
2029 proto::OpenUncommittedDiffResponse {
2030 committed_text,
2031 staged_text,
2032 mode: mode.into(),
2033 }
2034 })
2035 }
2036
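    /// Applies diff base texts pushed by the host to the corresponding
    /// buffer's diff state, first recording that the most recent index write
    /// has now been observed.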
2037 async fn handle_update_diff_bases(
2038 this: Entity<Self>,
2039 request: TypedEnvelope<proto::UpdateDiffBases>,
2040 mut cx: AsyncApp,
2041 ) -> Result<()> {
2042 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2043 this.update(&mut cx, |this, cx| {
2044 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2045 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2046 let buffer = buffer.read(cx).text_snapshot();
2047 diff_state.update(cx, |diff_state, cx| {
2048 diff_state.hunk_staging_operation_count_as_of_read =
2049 diff_state.hunk_staging_operation_count_as_of_write;
2050 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2051 })
2052 }
2053 }
2054 })
2055 }
2056
2057 async fn handle_blame_buffer(
2058 this: Entity<Self>,
2059 envelope: TypedEnvelope<proto::BlameBuffer>,
2060 mut cx: AsyncApp,
2061 ) -> Result<proto::BlameBufferResponse> {
2062 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2063 let version = deserialize_version(&envelope.payload.version);
2064 let buffer = this.read_with(&cx, |this, cx| {
2065 this.buffer_store.read(cx).get_existing(buffer_id)
2066 })??;
2067 buffer
2068 .update(&mut cx, |buffer, _| {
2069 buffer.wait_for_version(version.clone())
2070 })?
2071 .await?;
2072 let blame = this
2073 .update(&mut cx, |this, cx| {
2074 this.blame_buffer(&buffer, Some(version), cx)
2075 })?
2076 .await?;
2077 Ok(serialize_blame_buffer_response(blame))
2078 }
2079
2080 async fn handle_get_permalink_to_line(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2083 mut cx: AsyncApp,
2084 ) -> Result<proto::GetPermalinkToLineResponse> {
2085 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2086 // let version = deserialize_version(&envelope.payload.version);
2087 let selection = {
2088 let proto_selection = envelope
2089 .payload
2090 .selection
                .context("no selection defined to get permalink for")?;
2092 proto_selection.start as u32..proto_selection.end as u32
2093 };
2094 let buffer = this.read_with(&cx, |this, cx| {
2095 this.buffer_store.read(cx).get_existing(buffer_id)
2096 })??;
2097 let permalink = this
2098 .update(&mut cx, |this, cx| {
2099 this.get_permalink_to_line(&buffer, selection, cx)
2100 })?
2101 .await?;
2102 Ok(proto::GetPermalinkToLineResponse {
2103 permalink: permalink.to_string(),
2104 })
2105 }
2106
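    /// Resolves the repository referenced by an incoming request, returning
    /// an error if no repository with that id is known to this store.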
2107 fn repository_for_request(
2108 this: &Entity<Self>,
2109 id: RepositoryId,
2110 cx: &mut AsyncApp,
2111 ) -> Result<Entity<Repository>> {
2112 this.update(cx, |this, _| {
2113 this.repositories
2114 .get(&id)
2115 .context("missing repository handle")
2116 .cloned()
2117 })?
2118 }
2119
2120 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2121 self.repositories
2122 .iter()
2123 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2124 .collect()
2125 }
2126}
2127
2128impl BufferDiffState {
2129 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2130 self.language = buffer.read(cx).language().cloned();
2131 self.language_changed = true;
2132 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2133 }
2134
2135 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2136 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2137 }
2138
2139 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2140 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2141 }
2142
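    /// Translates an `UpdateDiffBases` message from the host into a
    /// `DiffBasesChange` and applies it to this diff state.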
2143 fn handle_base_texts_updated(
2144 &mut self,
2145 buffer: text::BufferSnapshot,
2146 message: proto::UpdateDiffBases,
2147 cx: &mut Context<Self>,
2148 ) {
2149 use proto::update_diff_bases::Mode;
2150
2151 let Some(mode) = Mode::from_i32(message.mode) else {
2152 return;
2153 };
2154
2155 let diff_bases_change = match mode {
2156 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2157 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2158 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2159 Mode::IndexAndHead => DiffBasesChange::SetEach {
2160 index: message.staged_text,
2161 head: message.committed_text,
2162 },
2163 };
2164
2165 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2166 }
2167
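    /// Returns a future that resolves once the in-progress diff
    /// recalculation (if any) has finished, or `None` when no recalculation
    /// is currently running.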
2168 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2169 if *self.recalculating_tx.borrow() {
2170 let mut rx = self.recalculating_tx.subscribe();
2171 return Some(async move {
2172 loop {
2173 let is_recalculating = rx.recv().await;
2174 if is_recalculating != Some(true) {
2175 break;
2176 }
2177 }
2178 });
2179 } else {
2180 None
2181 }
2182 }
2183
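    /// Stores new index and/or HEAD base texts (normalizing line endings),
    /// marks the corresponding dirty flags, and schedules a diff
    /// recalculation against the given buffer snapshot.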
2184 fn diff_bases_changed(
2185 &mut self,
2186 buffer: text::BufferSnapshot,
2187 diff_bases_change: Option<DiffBasesChange>,
2188 cx: &mut Context<Self>,
2189 ) {
2190 match diff_bases_change {
2191 Some(DiffBasesChange::SetIndex(index)) => {
2192 self.index_text = index.map(|mut index| {
2193 text::LineEnding::normalize(&mut index);
2194 Arc::new(index)
2195 });
2196 self.index_changed = true;
2197 }
2198 Some(DiffBasesChange::SetHead(head)) => {
2199 self.head_text = head.map(|mut head| {
2200 text::LineEnding::normalize(&mut head);
2201 Arc::new(head)
2202 });
2203 self.head_changed = true;
2204 }
2205 Some(DiffBasesChange::SetBoth(text)) => {
2206 let text = text.map(|mut text| {
2207 text::LineEnding::normalize(&mut text);
2208 Arc::new(text)
2209 });
2210 self.head_text = text.clone();
2211 self.index_text = text;
2212 self.head_changed = true;
2213 self.index_changed = true;
2214 }
2215 Some(DiffBasesChange::SetEach { index, head }) => {
2216 self.index_text = index.map(|mut index| {
2217 text::LineEnding::normalize(&mut index);
2218 Arc::new(index)
2219 });
2220 self.index_changed = true;
2221 self.head_text = head.map(|mut head| {
2222 text::LineEnding::normalize(&mut head);
2223 Arc::new(head)
2224 });
2225 self.head_changed = true;
2226 }
2227 None => {}
2228 }
2229
2230 self.recalculate_diffs(buffer, cx)
2231 }
2232
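    /// Recomputes the unstaged and uncommitted diffs on a background task.
    /// When the index matches HEAD, the unstaged snapshot is reused for the
    /// uncommitted diff, and the task aborts early if further hunk staging
    /// operations were initiated while it was running.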
2233 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2234 *self.recalculating_tx.borrow_mut() = true;
2235
2236 let language = self.language.clone();
2237 let language_registry = self.language_registry.clone();
2238 let unstaged_diff = self.unstaged_diff();
2239 let uncommitted_diff = self.uncommitted_diff();
2240 let head = self.head_text.clone();
2241 let index = self.index_text.clone();
2242 let index_changed = self.index_changed;
2243 let head_changed = self.head_changed;
2244 let language_changed = self.language_changed;
2245 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_read;
2246 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2247 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2248 (None, None) => true,
2249 _ => false,
2250 };
2251 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2252 log::debug!(
2253 "start recalculating diffs for buffer {}",
2254 buffer.remote_id()
2255 );
2256
2257 let mut new_unstaged_diff = None;
2258 if let Some(unstaged_diff) = &unstaged_diff {
2259 new_unstaged_diff = Some(
2260 BufferDiff::update_diff(
2261 unstaged_diff.clone(),
2262 buffer.clone(),
2263 index,
2264 index_changed,
2265 language_changed,
2266 language.clone(),
2267 language_registry.clone(),
2268 cx,
2269 )
2270 .await?,
2271 );
2272 }
2273
2274 let mut new_uncommitted_diff = None;
2275 if let Some(uncommitted_diff) = &uncommitted_diff {
2276 new_uncommitted_diff = if index_matches_head {
2277 new_unstaged_diff.clone()
2278 } else {
2279 Some(
2280 BufferDiff::update_diff(
2281 uncommitted_diff.clone(),
2282 buffer.clone(),
2283 head,
2284 head_changed,
2285 language_changed,
2286 language.clone(),
2287 language_registry.clone(),
2288 cx,
2289 )
2290 .await?,
2291 )
2292 }
2293 }
2294
2295 let cancel = this.update(cx, |this, _| {
2296 // This checks whether all pending stage/unstage operations
2297 // have quiesced (i.e. both the corresponding write and the
2298 // read of that write have completed). If not, then we cancel
2299 // this recalculation attempt to avoid invalidating pending
2300 // state too quickly; another recalculation will come along
2301 // later and clear the pending state once the state of the index has settled.
2302 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2303 *this.recalculating_tx.borrow_mut() = false;
2304 true
2305 } else {
2306 false
2307 }
2308 })?;
2309 if cancel {
2310 log::debug!(
2311 concat!(
                        "aborting recalculating diffs for buffer {} ",
2313 "due to subsequent hunk operations",
2314 ),
2315 buffer.remote_id()
2316 );
2317 return Ok(());
2318 }
2319
2320 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2321 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2322 {
2323 unstaged_diff.update(cx, |diff, cx| {
2324 if language_changed {
2325 diff.language_changed(cx);
2326 }
2327 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2328 })?
2329 } else {
2330 None
2331 };
2332
2333 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2334 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2335 {
2336 uncommitted_diff.update(cx, |diff, cx| {
2337 if language_changed {
2338 diff.language_changed(cx);
2339 }
2340 diff.set_snapshot_with_secondary(
2341 new_uncommitted_diff,
2342 &buffer,
2343 unstaged_changed_range,
2344 true,
2345 cx,
2346 );
2347 })?;
2348 }
2349
2350 log::debug!(
2351 "finished recalculating diffs for buffer {}",
2352 buffer.remote_id()
2353 );
2354
2355 if let Some(this) = this.upgrade() {
2356 this.update(cx, |this, _| {
2357 this.index_changed = false;
2358 this.head_changed = false;
2359 this.language_changed = false;
2360 *this.recalculating_tx.borrow_mut() = false;
2361 })?;
2362 }
2363
2364 Ok(())
2365 }));
2366 }
2367}
2368
2369impl Default for BufferDiffState {
2370 fn default() -> Self {
2371 Self {
2372 unstaged_diff: Default::default(),
2373 uncommitted_diff: Default::default(),
2374 recalculate_diff_task: Default::default(),
2375 language: Default::default(),
2376 language_registry: Default::default(),
2377 recalculating_tx: postage::watch::channel_with(false).0,
2378 hunk_staging_operation_count: 0,
2379 hunk_staging_operation_count_as_of_read: 0,
2380 hunk_staging_operation_count_as_of_write: 0,
2381 head_text: Default::default(),
2382 index_text: Default::default(),
2383 head_changed: Default::default(),
2384 index_changed: Default::default(),
2385 language_changed: Default::default(),
2386 }
2387 }
2388}
2389
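/// Builds an askpass delegate that forwards credential prompts to the
/// downstream client over RPC and relays the response back through the
/// provided channel.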
2390fn make_remote_delegate(
2391 this: Entity<GitStore>,
2392 project_id: u64,
2393 repository_id: RepositoryId,
2394 askpass_id: u64,
2395 cx: &mut AsyncApp,
2396) -> AskPassDelegate {
2397 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2398 this.update(cx, |this, cx| {
2399 let Some((client, _)) = this.downstream_client() else {
2400 return;
2401 };
2402 let response = client.request(proto::AskPassRequest {
2403 project_id,
2404 repository_id: repository_id.to_proto(),
2405 askpass_id,
2406 prompt,
2407 });
2408 cx.spawn(async move |_, _| {
2409 tx.send(response.await?.response).ok();
2410 anyhow::Ok(())
2411 })
2412 .detach_and_log_err(cx);
2413 })
2414 .log_err();
2415 })
2416}
2417
2418impl RepositoryId {
2419 pub fn to_proto(self) -> u64 {
2420 self.0
2421 }
2422
2423 pub fn from_proto(id: u64) -> Self {
2424 RepositoryId(id)
2425 }
2426}
2427
2428impl RepositorySnapshot {
2429 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2430 Self {
2431 id,
2432 merge_message: None,
2433 statuses_by_path: Default::default(),
2434 work_directory_abs_path,
2435 branch: None,
2436 merge_conflicts: Default::default(),
2437 merge_head_shas: Default::default(),
2438 scan_id: 0,
2439 }
2440 }
2441
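    /// Builds the `UpdateRepository` message that carries the full snapshot
    /// (every status entry) for an initial sync with a downstream client.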
2442 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2443 proto::UpdateRepository {
2444 branch_summary: self.branch.as_ref().map(branch_to_proto),
2445 updated_statuses: self
2446 .statuses_by_path
2447 .iter()
2448 .map(|entry| entry.to_proto())
2449 .collect(),
2450 removed_statuses: Default::default(),
2451 current_merge_conflicts: self
2452 .merge_conflicts
2453 .iter()
2454 .map(|repo_path| repo_path.to_proto())
2455 .collect(),
2456 project_id,
2457 id: self.id.to_proto(),
2458 abs_path: self.work_directory_abs_path.to_proto(),
2459 entry_ids: vec![self.id.to_proto()],
2460 scan_id: self.scan_id,
2461 is_last_update: true,
2462 }
2463 }
2464
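    /// Builds an incremental `UpdateRepository` message by merging the two
    /// sorted status lists and emitting only entries that were added,
    /// changed, or removed relative to `old`.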
2465 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2466 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2467 let mut removed_statuses: Vec<String> = Vec::new();
2468
2469 let mut new_statuses = self.statuses_by_path.iter().peekable();
2470 let mut old_statuses = old.statuses_by_path.iter().peekable();
2471
2472 let mut current_new_entry = new_statuses.next();
2473 let mut current_old_entry = old_statuses.next();
2474 loop {
2475 match (current_new_entry, current_old_entry) {
2476 (Some(new_entry), Some(old_entry)) => {
2477 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2478 Ordering::Less => {
2479 updated_statuses.push(new_entry.to_proto());
2480 current_new_entry = new_statuses.next();
2481 }
2482 Ordering::Equal => {
2483 if new_entry.status != old_entry.status {
2484 updated_statuses.push(new_entry.to_proto());
2485 }
2486 current_old_entry = old_statuses.next();
2487 current_new_entry = new_statuses.next();
2488 }
2489 Ordering::Greater => {
2490 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2491 current_old_entry = old_statuses.next();
2492 }
2493 }
2494 }
2495 (None, Some(old_entry)) => {
2496 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2497 current_old_entry = old_statuses.next();
2498 }
2499 (Some(new_entry), None) => {
2500 updated_statuses.push(new_entry.to_proto());
2501 current_new_entry = new_statuses.next();
2502 }
2503 (None, None) => break,
2504 }
2505 }
2506
2507 proto::UpdateRepository {
2508 branch_summary: self.branch.as_ref().map(branch_to_proto),
2509 updated_statuses,
2510 removed_statuses,
2511 current_merge_conflicts: self
2512 .merge_conflicts
2513 .iter()
2514 .map(|path| path.as_ref().to_proto())
2515 .collect(),
2516 project_id,
2517 id: self.id.to_proto(),
2518 abs_path: self.work_directory_abs_path.to_proto(),
2519 entry_ids: vec![],
2520 scan_id: self.scan_id,
2521 is_last_update: true,
2522 }
2523 }
2524
2525 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2526 self.statuses_by_path.iter().cloned()
2527 }
2528
2529 pub fn status_summary(&self) -> GitSummary {
2530 self.statuses_by_path.summary().item_summary
2531 }
2532
2533 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2534 self.statuses_by_path
2535 .get(&PathKey(path.0.clone()), &())
2536 .cloned()
2537 }
2538
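    /// Converts an absolute path into a path relative to the repository's
    /// work directory, or `None` if it lies outside the work directory.
    /// For example, with a work directory of `/repo`, `/repo/src/main.rs`
    /// maps to `src/main.rs`.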
2539 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2540 abs_path
2541 .strip_prefix(&self.work_directory_abs_path)
2542 .map(RepoPath::from)
2543 .ok()
2544 }
2545
2546 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2547 self.merge_conflicts.contains(repo_path)
2548 }
2549
2550 /// This is the name that will be displayed in the repository selector for this repository.
2551 pub fn display_name(&self) -> SharedString {
2552 self.work_directory_abs_path
2553 .file_name()
2554 .unwrap_or_default()
2555 .to_string_lossy()
2556 .to_string()
2557 .into()
2558 }
2559}
2560
2561impl Repository {
2562 fn local(
2563 id: RepositoryId,
2564 work_directory_abs_path: Arc<Path>,
2565 dot_git_abs_path: Arc<Path>,
2566 project_environment: WeakEntity<ProjectEnvironment>,
2567 fs: Arc<dyn Fs>,
2568 git_store: WeakEntity<GitStore>,
2569 cx: &mut Context<Self>,
2570 ) -> Self {
2571 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2572 Repository {
2573 this: cx.weak_entity(),
2574 git_store,
2575 snapshot,
2576 commit_message_buffer: None,
2577 askpass_delegates: Default::default(),
2578 paths_needing_status_update: Default::default(),
2579 latest_askpass_id: 0,
2580 job_sender: Repository::spawn_local_git_worker(
2581 work_directory_abs_path,
2582 dot_git_abs_path,
2583 project_environment,
2584 fs,
2585 cx,
2586 ),
2587 job_id: 0,
2588 active_jobs: Default::default(),
2589 }
2590 }
2591
2592 fn remote(
2593 id: RepositoryId,
2594 work_directory_abs_path: Arc<Path>,
2595 project_id: ProjectId,
2596 client: AnyProtoClient,
2597 git_store: WeakEntity<GitStore>,
2598 cx: &mut Context<Self>,
2599 ) -> Self {
2600 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2601 Self {
2602 this: cx.weak_entity(),
2603 snapshot,
2604 commit_message_buffer: None,
2605 git_store,
2606 paths_needing_status_update: Default::default(),
2607 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2608 askpass_delegates: Default::default(),
2609 latest_askpass_id: 0,
2610 active_jobs: Default::default(),
2611 job_id: 0,
2612 }
2613 }
2614
2615 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2616 self.git_store.upgrade()
2617 }
2618
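    /// Reloads index and HEAD texts for every open buffer that belongs to
    /// this repository, forwards any changed bases to the downstream client,
    /// and triggers diff recalculation. Only supported for local
    /// repositories.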
2619 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
2620 let this = cx.weak_entity();
2621 let git_store = self.git_store.clone();
2622 let _ = self.send_keyed_job(
2623 Some(GitJobKey::ReloadBufferDiffBases),
2624 None,
2625 |state, mut cx| async move {
2626 let RepositoryState::Local { backend, .. } = state else {
2627 log::error!("tried to recompute diffs for a non-local repository");
2628 return Ok(());
2629 };
2630
2631 let Some(this) = this.upgrade() else {
2632 return Ok(());
2633 };
2634
2635 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
2636 git_store.update(cx, |git_store, cx| {
2637 git_store
2638 .diffs
2639 .iter()
2640 .filter_map(|(buffer_id, diff_state)| {
2641 let buffer_store = git_store.buffer_store.read(cx);
2642 let buffer = buffer_store.get(*buffer_id)?;
2643 let file = File::from_dyn(buffer.read(cx).file())?;
2644 let abs_path =
2645 file.worktree.read(cx).absolutize(&file.path).ok()?;
2646 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
2647 log::debug!(
2648 "start reload diff bases for repo path {}",
2649 repo_path.0.display()
2650 );
2651 diff_state.update(cx, |diff_state, _| {
2652 diff_state.hunk_staging_operation_count_as_of_read =
2653 diff_state.hunk_staging_operation_count_as_of_write;
2654
2655 let has_unstaged_diff = diff_state
2656 .unstaged_diff
2657 .as_ref()
2658 .is_some_and(|diff| diff.is_upgradable());
2659 let has_uncommitted_diff = diff_state
2660 .uncommitted_diff
2661 .as_ref()
2662 .is_some_and(|set| set.is_upgradable());
2663
2664 Some((
2665 buffer,
2666 repo_path,
2667 has_unstaged_diff.then(|| diff_state.index_text.clone()),
2668 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
2669 ))
2670 })
2671 })
2672 .collect::<Vec<_>>()
2673 })
2674 })??;
2675
2676 let buffer_diff_base_changes = cx
2677 .background_spawn(async move {
2678 let mut changes = Vec::new();
2679 for (buffer, repo_path, current_index_text, current_head_text) in
2680 &repo_diff_state_updates
2681 {
2682 let index_text = if current_index_text.is_some() {
2683 backend.load_index_text(repo_path.clone()).await
2684 } else {
2685 None
2686 };
2687 let head_text = if current_head_text.is_some() {
2688 backend.load_committed_text(repo_path.clone()).await
2689 } else {
2690 None
2691 };
2692
2693 let change =
2694 match (current_index_text.as_ref(), current_head_text.as_ref()) {
2695 (Some(current_index), Some(current_head)) => {
2696 let index_changed =
2697 index_text.as_ref() != current_index.as_deref();
2698 let head_changed =
2699 head_text.as_ref() != current_head.as_deref();
2700 if index_changed && head_changed {
2701 if index_text == head_text {
2702 Some(DiffBasesChange::SetBoth(head_text))
2703 } else {
2704 Some(DiffBasesChange::SetEach {
2705 index: index_text,
2706 head: head_text,
2707 })
2708 }
2709 } else if index_changed {
2710 Some(DiffBasesChange::SetIndex(index_text))
2711 } else if head_changed {
2712 Some(DiffBasesChange::SetHead(head_text))
2713 } else {
2714 None
2715 }
2716 }
2717 (Some(current_index), None) => {
2718 let index_changed =
2719 index_text.as_ref() != current_index.as_deref();
2720 index_changed
2721 .then_some(DiffBasesChange::SetIndex(index_text))
2722 }
2723 (None, Some(current_head)) => {
2724 let head_changed =
2725 head_text.as_ref() != current_head.as_deref();
2726 head_changed.then_some(DiffBasesChange::SetHead(head_text))
2727 }
2728 (None, None) => None,
2729 };
2730
2731 changes.push((buffer.clone(), change))
2732 }
2733 changes
2734 })
2735 .await;
2736
2737 git_store.update(&mut cx, |git_store, cx| {
2738 for (buffer, diff_bases_change) in buffer_diff_base_changes {
2739 let buffer_snapshot = buffer.read(cx).text_snapshot();
2740 let buffer_id = buffer_snapshot.remote_id();
2741 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
2742 continue;
2743 };
2744
2745 let downstream_client = git_store.downstream_client();
2746 diff_state.update(cx, |diff_state, cx| {
2747 use proto::update_diff_bases::Mode;
2748
2749 if let Some((diff_bases_change, (client, project_id))) =
2750 diff_bases_change.clone().zip(downstream_client)
2751 {
2752 let (staged_text, committed_text, mode) = match diff_bases_change {
2753 DiffBasesChange::SetIndex(index) => {
2754 (index, None, Mode::IndexOnly)
2755 }
2756 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
2757 DiffBasesChange::SetEach { index, head } => {
2758 (index, head, Mode::IndexAndHead)
2759 }
2760 DiffBasesChange::SetBoth(text) => {
2761 (None, text, Mode::IndexMatchesHead)
2762 }
2763 };
2764 client
2765 .send(proto::UpdateDiffBases {
2766 project_id: project_id.to_proto(),
2767 buffer_id: buffer_id.to_proto(),
2768 staged_text,
2769 committed_text,
2770 mode: mode as i32,
2771 })
2772 .log_err();
2773 }
2774
2775 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
2776 });
2777 }
2778 })
2779 },
2780 );
2781 }
2782
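    /// Enqueues a job on this repository's serialized git worker and returns
    /// a receiver for its result. A minimal usage sketch, mirroring how
    /// `checkpoint` below uses it:
    ///
    /// ```ignore
    /// let rx = self.send_job(None, |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.checkpoint().await,
    ///         RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
    ///     }
    /// });
    /// ```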
2783 pub fn send_job<F, Fut, R>(
2784 &mut self,
2785 status: Option<SharedString>,
2786 job: F,
2787 ) -> oneshot::Receiver<R>
2788 where
2789 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2790 Fut: Future<Output = R> + 'static,
2791 R: Send + 'static,
2792 {
2793 self.send_keyed_job(None, status, job)
2794 }
2795
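    /// Like `send_job`, but tags the job with an optional key: the worker
    /// skips a keyed job whenever a newer job with the same key is already
    /// queued behind it. When a status message is given, it is surfaced via
    /// `active_jobs` for the duration of the job.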
2796 fn send_keyed_job<F, Fut, R>(
2797 &mut self,
2798 key: Option<GitJobKey>,
2799 status: Option<SharedString>,
2800 job: F,
2801 ) -> oneshot::Receiver<R>
2802 where
2803 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2804 Fut: Future<Output = R> + 'static,
2805 R: Send + 'static,
2806 {
2807 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2808 let job_id = post_inc(&mut self.job_id);
2809 let this = self.this.clone();
2810 self.job_sender
2811 .unbounded_send(GitJob {
2812 key,
2813 job: Box::new(move |state, cx: &mut AsyncApp| {
2814 let job = job(state, cx.clone());
2815 cx.spawn(async move |cx| {
2816 if let Some(s) = status.clone() {
2817 this.update(cx, |this, cx| {
2818 this.active_jobs.insert(
2819 job_id,
2820 JobInfo {
2821 start: Instant::now(),
2822 message: s.clone(),
2823 },
2824 );
2825
2826 cx.notify();
2827 })
2828 .ok();
2829 }
2830 let result = job.await;
2831
2832 this.update(cx, |this, cx| {
2833 this.active_jobs.remove(&job_id);
2834 cx.notify();
2835 })
2836 .ok();
2837
2838 result_tx.send(result).ok();
2839 })
2840 }),
2841 })
2842 .ok();
2843 result_rx
2844 }
2845
2846 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2847 let Some(git_store) = self.git_store.upgrade() else {
2848 return;
2849 };
2850 let entity = cx.entity();
2851 git_store.update(cx, |git_store, cx| {
2852 let Some((&id, _)) = git_store
2853 .repositories
2854 .iter()
2855 .find(|(_, handle)| *handle == &entity)
2856 else {
2857 return;
2858 };
2859 git_store.active_repo_id = Some(id);
2860 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2861 });
2862 }
2863
2864 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2865 self.snapshot.status()
2866 }
2867
2868 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2869 let git_store = self.git_store.upgrade()?;
2870 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2871 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2872 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2873 Some(ProjectPath {
2874 worktree_id: worktree.read(cx).id(),
2875 path: relative_path.into(),
2876 })
2877 }
2878
2879 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2880 let git_store = self.git_store.upgrade()?;
2881 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2882 let abs_path = worktree_store.absolutize(path, cx)?;
2883 self.snapshot.abs_path_to_repo_path(&abs_path)
2884 }
2885
2886 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2887 other
2888 .read(cx)
2889 .snapshot
2890 .work_directory_abs_path
2891 .starts_with(&self.snapshot.work_directory_abs_path)
2892 }
2893
2894 pub fn open_commit_buffer(
2895 &mut self,
2896 languages: Option<Arc<LanguageRegistry>>,
2897 buffer_store: Entity<BufferStore>,
2898 cx: &mut Context<Self>,
2899 ) -> Task<Result<Entity<Buffer>>> {
2900 let id = self.id;
2901 if let Some(buffer) = self.commit_message_buffer.clone() {
2902 return Task::ready(Ok(buffer));
2903 }
2904 let this = cx.weak_entity();
2905
2906 let rx = self.send_job(None, move |state, mut cx| async move {
2907 let Some(this) = this.upgrade() else {
2908 bail!("git store was dropped");
2909 };
2910 match state {
2911 RepositoryState::Local { .. } => {
2912 this.update(&mut cx, |_, cx| {
2913 Self::open_local_commit_buffer(languages, buffer_store, cx)
2914 })?
2915 .await
2916 }
2917 RepositoryState::Remote { project_id, client } => {
2918 let request = client.request(proto::OpenCommitMessageBuffer {
2919 project_id: project_id.0,
2920 repository_id: id.to_proto(),
2921 });
2922 let response = request.await.context("requesting to open commit buffer")?;
2923 let buffer_id = BufferId::new(response.buffer_id)?;
2924 let buffer = buffer_store
2925 .update(&mut cx, |buffer_store, cx| {
2926 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2927 })?
2928 .await?;
2929 if let Some(language_registry) = languages {
2930 let git_commit_language =
2931 language_registry.language_for_name("Git Commit").await?;
2932 buffer.update(&mut cx, |buffer, cx| {
2933 buffer.set_language(Some(git_commit_language), cx);
2934 })?;
2935 }
2936 this.update(&mut cx, |this, _| {
2937 this.commit_message_buffer = Some(buffer.clone());
2938 })?;
2939 Ok(buffer)
2940 }
2941 }
2942 });
2943
2944 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2945 }
2946
2947 fn open_local_commit_buffer(
2948 language_registry: Option<Arc<LanguageRegistry>>,
2949 buffer_store: Entity<BufferStore>,
2950 cx: &mut Context<Self>,
2951 ) -> Task<Result<Entity<Buffer>>> {
2952 cx.spawn(async move |repository, cx| {
2953 let buffer = buffer_store
2954 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2955 .await?;
2956
2957 if let Some(language_registry) = language_registry {
2958 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2959 buffer.update(cx, |buffer, cx| {
2960 buffer.set_language(Some(git_commit_language), cx);
2961 })?;
2962 }
2963
2964 repository.update(cx, |repository, _| {
2965 repository.commit_message_buffer = Some(buffer.clone());
2966 })?;
2967 Ok(buffer)
2968 })
2969 }
2970
2971 pub fn checkout_files(
2972 &mut self,
2973 commit: &str,
2974 paths: Vec<RepoPath>,
2975 _cx: &mut App,
2976 ) -> oneshot::Receiver<Result<()>> {
2977 let commit = commit.to_string();
2978 let id = self.id;
2979
2980 self.send_job(
2981 Some(format!("git checkout {}", commit).into()),
2982 move |git_repo, _| async move {
2983 match git_repo {
2984 RepositoryState::Local {
2985 backend,
2986 environment,
2987 ..
2988 } => {
2989 backend
2990 .checkout_files(commit, paths, environment.clone())
2991 .await
2992 }
2993 RepositoryState::Remote { project_id, client } => {
2994 client
2995 .request(proto::GitCheckoutFiles {
2996 project_id: project_id.0,
2997 repository_id: id.to_proto(),
2998 commit,
2999 paths: paths
3000 .into_iter()
3001 .map(|p| p.to_string_lossy().to_string())
3002 .collect(),
3003 })
3004 .await?;
3005
3006 Ok(())
3007 }
3008 }
3009 },
3010 )
3011 }
3012
3013 pub fn reset(
3014 &mut self,
3015 commit: String,
3016 reset_mode: ResetMode,
3017 _cx: &mut App,
3018 ) -> oneshot::Receiver<Result<()>> {
3020 let id = self.id;
3021
3022 self.send_job(None, move |git_repo, _| async move {
3023 match git_repo {
3024 RepositoryState::Local {
3025 backend,
3026 environment,
3027 ..
3028 } => backend.reset(commit, reset_mode, environment).await,
3029 RepositoryState::Remote { project_id, client } => {
3030 client
3031 .request(proto::GitReset {
3032 project_id: project_id.0,
3033 repository_id: id.to_proto(),
3034 commit,
3035 mode: match reset_mode {
3036 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3037 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3038 },
3039 })
3040 .await?;
3041
3042 Ok(())
3043 }
3044 }
3045 })
3046 }
3047
3048 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3049 let id = self.id;
3050 self.send_job(None, move |git_repo, _cx| async move {
3051 match git_repo {
3052 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3053 RepositoryState::Remote { project_id, client } => {
3054 let resp = client
3055 .request(proto::GitShow {
3056 project_id: project_id.0,
3057 repository_id: id.to_proto(),
3058 commit,
3059 })
3060 .await?;
3061
3062 Ok(CommitDetails {
3063 sha: resp.sha.into(),
3064 message: resp.message.into(),
3065 commit_timestamp: resp.commit_timestamp,
3066 author_email: resp.author_email.into(),
3067 author_name: resp.author_name.into(),
3068 })
3069 }
3070 }
3071 })
3072 }
3073
3074 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3075 let id = self.id;
3076 self.send_job(None, move |git_repo, cx| async move {
3077 match git_repo {
3078 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3079 RepositoryState::Remote {
3080 client, project_id, ..
3081 } => {
3082 let response = client
3083 .request(proto::LoadCommitDiff {
3084 project_id: project_id.0,
3085 repository_id: id.to_proto(),
3086 commit,
3087 })
3088 .await?;
3089 Ok(CommitDiff {
3090 files: response
3091 .files
3092 .into_iter()
3093 .map(|file| CommitFile {
3094 path: Path::new(&file.path).into(),
3095 old_text: file.old_text,
3096 new_text: file.new_text,
3097 })
3098 .collect(),
3099 })
3100 }
3101 }
3102 })
3103 }
3104
3105 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3106 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3107 }
3108
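    /// Stages the given paths, first saving any open buffers for those paths
    /// so that the index picks up their current contents.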
3109 pub fn stage_entries(
3110 &self,
3111 entries: Vec<RepoPath>,
3112 cx: &mut Context<Self>,
3113 ) -> Task<anyhow::Result<()>> {
3114 if entries.is_empty() {
3115 return Task::ready(Ok(()));
3116 }
3117 let id = self.id;
3118
3119 let mut save_futures = Vec::new();
3120 if let Some(buffer_store) = self.buffer_store(cx) {
3121 buffer_store.update(cx, |buffer_store, cx| {
3122 for path in &entries {
3123 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3124 continue;
3125 };
3126 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3127 if buffer
3128 .read(cx)
3129 .file()
                            .is_some_and(|file| file.disk_state().exists())
3131 {
3132 save_futures.push(buffer_store.save_buffer(buffer, cx));
3133 }
3134 }
3135 }
3136 })
3137 }
3138
3139 cx.spawn(async move |this, cx| {
3140 for save_future in save_futures {
3141 save_future.await?;
3142 }
3143
3144 this.update(cx, |this, _| {
3145 this.send_job(None, move |git_repo, _cx| async move {
3146 match git_repo {
3147 RepositoryState::Local {
3148 backend,
3149 environment,
3150 ..
3151 } => backend.stage_paths(entries, environment.clone()).await,
3152 RepositoryState::Remote { project_id, client } => {
3153 client
3154 .request(proto::Stage {
3155 project_id: project_id.0,
3156 repository_id: id.to_proto(),
3157 paths: entries
3158 .into_iter()
3159 .map(|repo_path| repo_path.as_ref().to_proto())
3160 .collect(),
3161 })
3162 .await
3163 .context("sending stage request")?;
3164
3165 Ok(())
3166 }
3167 }
3168 })
3169 })?
3170 .await??;
3171
3172 Ok(())
3173 })
3174 }
3175
3176 pub fn unstage_entries(
3177 &self,
3178 entries: Vec<RepoPath>,
3179 cx: &mut Context<Self>,
3180 ) -> Task<anyhow::Result<()>> {
3181 if entries.is_empty() {
3182 return Task::ready(Ok(()));
3183 }
3184 let id = self.id;
3185
3186 let mut save_futures = Vec::new();
3187 if let Some(buffer_store) = self.buffer_store(cx) {
3188 buffer_store.update(cx, |buffer_store, cx| {
3189 for path in &entries {
3190 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3191 continue;
3192 };
3193 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3194 if buffer
3195 .read(cx)
3196 .file()
                            .is_some_and(|file| file.disk_state().exists())
3198 {
3199 save_futures.push(buffer_store.save_buffer(buffer, cx));
3200 }
3201 }
3202 }
3203 })
3204 }
3205
3206 cx.spawn(async move |this, cx| {
3207 for save_future in save_futures {
3208 save_future.await?;
3209 }
3210
3211 this.update(cx, |this, _| {
3212 this.send_job(None, move |git_repo, _cx| async move {
3213 match git_repo {
3214 RepositoryState::Local {
3215 backend,
3216 environment,
3217 ..
3218 } => backend.unstage_paths(entries, environment).await,
3219 RepositoryState::Remote { project_id, client } => {
3220 client
3221 .request(proto::Unstage {
3222 project_id: project_id.0,
3223 repository_id: id.to_proto(),
3224 paths: entries
3225 .into_iter()
3226 .map(|repo_path| repo_path.as_ref().to_proto())
3227 .collect(),
3228 })
3229 .await
3230 .context("sending unstage request")?;
3231
3232 Ok(())
3233 }
3234 }
3235 })
3236 })?
3237 .await??;
3238
3239 Ok(())
3240 })
3241 }
3242
3243 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3244 let to_stage = self
3245 .cached_status()
3246 .filter(|entry| !entry.status.staging().is_fully_staged())
3247 .map(|entry| entry.repo_path.clone())
3248 .collect();
3249 self.stage_entries(to_stage, cx)
3250 }
3251
3252 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3253 let to_unstage = self
3254 .cached_status()
3255 .filter(|entry| entry.status.staging().has_staged())
3256 .map(|entry| entry.repo_path.clone())
3257 .collect();
3258 self.unstage_entries(to_unstage, cx)
3259 }
3260
3261 pub fn commit(
3262 &mut self,
3263 message: SharedString,
3264 name_and_email: Option<(SharedString, SharedString)>,
3265 _cx: &mut App,
3266 ) -> oneshot::Receiver<Result<()>> {
3267 let id = self.id;
3268
3269 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
3270 match git_repo {
3271 RepositoryState::Local {
3272 backend,
3273 environment,
3274 ..
3275 } => backend.commit(message, name_and_email, environment).await,
3276 RepositoryState::Remote { project_id, client } => {
3277 let (name, email) = name_and_email.unzip();
3278 client
3279 .request(proto::Commit {
3280 project_id: project_id.0,
3281 repository_id: id.to_proto(),
3282 message: String::from(message),
3283 name: name.map(String::from),
3284 email: email.map(String::from),
3285 })
3286 .await
3287 .context("sending commit request")?;
3288
3289 Ok(())
3290 }
3291 }
3292 })
3293 }
3294
3295 pub fn fetch(
3296 &mut self,
3297 askpass: AskPassDelegate,
3298 _cx: &mut App,
3299 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3300 let askpass_delegates = self.askpass_delegates.clone();
3301 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3302 let id = self.id;
3303
3304 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
3305 match git_repo {
3306 RepositoryState::Local {
3307 backend,
3308 environment,
3309 ..
3310 } => backend.fetch(askpass, environment, cx).await,
3311 RepositoryState::Remote { project_id, client } => {
3312 askpass_delegates.lock().insert(askpass_id, askpass);
3313 let _defer = util::defer(|| {
3314 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3315 debug_assert!(askpass_delegate.is_some());
3316 });
3317
3318 let response = client
3319 .request(proto::Fetch {
3320 project_id: project_id.0,
3321 repository_id: id.to_proto(),
3322 askpass_id,
3323 })
3324 .await
3325 .context("sending fetch request")?;
3326
3327 Ok(RemoteCommandOutput {
3328 stdout: response.stdout,
3329 stderr: response.stderr,
3330 })
3331 }
3332 }
3333 })
3334 }
3335
3336 pub fn push(
3337 &mut self,
3338 branch: SharedString,
3339 remote: SharedString,
3340 options: Option<PushOptions>,
3341 askpass: AskPassDelegate,
3342 cx: &mut Context<Self>,
3343 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3344 let askpass_delegates = self.askpass_delegates.clone();
3345 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3346 let id = self.id;
3347
3348 let args = options
3349 .map(|option| match option {
3350 PushOptions::SetUpstream => " --set-upstream",
3351 PushOptions::Force => " --force",
3352 })
3353 .unwrap_or("");
3354
3355 let updates_tx = self
3356 .git_store()
3357 .and_then(|git_store| match &git_store.read(cx).state {
3358 GitStoreState::Local { downstream, .. } => downstream
3359 .as_ref()
3360 .map(|downstream| downstream.updates_tx.clone()),
3361 _ => None,
3362 });
3363
3364 let this = cx.weak_entity();
3365 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
3367 move |git_repo, mut cx| async move {
3368 match git_repo {
3369 RepositoryState::Local {
3370 backend,
3371 environment,
3372 ..
3373 } => {
3374 let result = backend
3375 .push(
3376 branch.to_string(),
3377 remote.to_string(),
3378 options,
3379 askpass,
3380 environment.clone(),
3381 cx.clone(),
3382 )
3383 .await;
3384 if result.is_ok() {
3385 let branches = backend.branches().await?;
3386 let branch = branches.into_iter().find(|branch| branch.is_head);
3387 log::info!("head branch after scan is {branch:?}");
3388 let snapshot = this.update(&mut cx, |this, cx| {
3389 this.snapshot.branch = branch;
3390 let snapshot = this.snapshot.clone();
3391 cx.emit(RepositoryEvent::Updated { full_scan: false });
3392 snapshot
3393 })?;
3394 if let Some(updates_tx) = updates_tx {
3395 updates_tx
3396 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3397 .ok();
3398 }
3399 }
3400 result
3401 }
3402 RepositoryState::Remote { project_id, client } => {
3403 askpass_delegates.lock().insert(askpass_id, askpass);
3404 let _defer = util::defer(|| {
3405 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3406 debug_assert!(askpass_delegate.is_some());
3407 });
3408 let response = client
3409 .request(proto::Push {
3410 project_id: project_id.0,
3411 repository_id: id.to_proto(),
3412 askpass_id,
3413 branch_name: branch.to_string(),
3414 remote_name: remote.to_string(),
3415 options: options.map(|options| match options {
3416 PushOptions::Force => proto::push::PushOptions::Force,
3417 PushOptions::SetUpstream => {
3418 proto::push::PushOptions::SetUpstream
3419 }
3420 }
3421 as i32),
3422 })
3423 .await
3424 .context("sending push request")?;
3425
3426 Ok(RemoteCommandOutput {
3427 stdout: response.stdout,
3428 stderr: response.stderr,
3429 })
3430 }
3431 }
3432 },
3433 )
3434 }
3435
3436 pub fn pull(
3437 &mut self,
3438 branch: SharedString,
3439 remote: SharedString,
3440 askpass: AskPassDelegate,
3441 _cx: &mut App,
3442 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3443 let askpass_delegates = self.askpass_delegates.clone();
3444 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3445 let id = self.id;
3446
3447 self.send_job(
3448 Some(format!("git pull {} {}", remote, branch).into()),
3449 move |git_repo, cx| async move {
3450 match git_repo {
3451 RepositoryState::Local {
3452 backend,
3453 environment,
3454 ..
3455 } => {
3456 backend
3457 .pull(
3458 branch.to_string(),
3459 remote.to_string(),
3460 askpass,
3461 environment.clone(),
3462 cx,
3463 )
3464 .await
3465 }
3466 RepositoryState::Remote { project_id, client } => {
3467 askpass_delegates.lock().insert(askpass_id, askpass);
3468 let _defer = util::defer(|| {
3469 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3470 debug_assert!(askpass_delegate.is_some());
3471 });
3472 let response = client
3473 .request(proto::Pull {
3474 project_id: project_id.0,
3475 repository_id: id.to_proto(),
3476 askpass_id,
3477 branch_name: branch.to_string(),
3478 remote_name: remote.to_string(),
3479 })
3480 .await
3481 .context("sending pull request")?;
3482
3483 Ok(RemoteCommandOutput {
3484 stdout: response.stdout,
3485 stderr: response.stderr,
3486 })
3487 }
3488 }
3489 },
3490 )
3491 }
3492
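    /// Writes new index contents for a single path. The job is keyed on the
    /// path so that superseded writes are skipped, and on completion the
    /// buffer's diff state records the hunk staging operation count as of
    /// this write.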
3493 fn spawn_set_index_text_job(
3494 &mut self,
3495 path: RepoPath,
3496 content: Option<String>,
3497 hunk_staging_operation_count: usize,
3498 cx: &mut Context<Self>,
3499 ) -> oneshot::Receiver<anyhow::Result<()>> {
3500 let id = self.id;
3501 let this = cx.weak_entity();
3502 let git_store = self.git_store.clone();
3503 self.send_keyed_job(
3504 Some(GitJobKey::WriteIndex(path.clone())),
3505 None,
3506 move |git_repo, mut cx| async move {
3507 log::debug!("start updating index text for buffer {}", path.display());
3508 match git_repo {
3509 RepositoryState::Local {
3510 backend,
3511 environment,
3512 ..
3513 } => {
3514 backend
3515 .set_index_text(path.clone(), content, environment.clone())
3516 .await?;
3517 }
3518 RepositoryState::Remote { project_id, client } => {
3519 client
3520 .request(proto::SetIndexText {
3521 project_id: project_id.0,
3522 repository_id: id.to_proto(),
3523 path: path.as_ref().to_proto(),
3524 text: content,
3525 })
3526 .await?;
3527 }
3528 }
3529 log::debug!("finish updating index text for buffer {}", path.display());
3530
3531 let project_path = this
3532 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
3533 .ok()
3534 .flatten();
3535 git_store.update(&mut cx, |git_store, cx| {
3536 let buffer_id = git_store
3537 .buffer_store
3538 .read(cx)
3539 .buffer_id_for_project_path(&project_path?)?;
3540 let diff_state = git_store.diffs.get(buffer_id)?;
3541 diff_state.update(cx, |diff_state, _| {
3542 diff_state.hunk_staging_operation_count_as_of_write =
3543 hunk_staging_operation_count;
3544 });
3545 Some(())
3546 })?;
3547 Ok(())
3548 },
3549 )
3550 }
3551
3552 pub fn get_remotes(
3553 &mut self,
3554 branch_name: Option<String>,
3555 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3556 let id = self.id;
3557 self.send_job(None, move |repo, _cx| async move {
3558 match repo {
3559 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3560 RepositoryState::Remote { project_id, client } => {
3561 let response = client
3562 .request(proto::GetRemotes {
3563 project_id: project_id.0,
3564 repository_id: id.to_proto(),
3565 branch_name,
3566 })
3567 .await?;
3568
3569 let remotes = response
3570 .remotes
3571 .into_iter()
3572 .map(|remotes| git::repository::Remote {
3573 name: remotes.name.into(),
3574 })
3575 .collect();
3576
3577 Ok(remotes)
3578 }
3579 }
3580 })
3581 }
3582
3583 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3584 let id = self.id;
3585 self.send_job(None, move |repo, cx| async move {
3586 match repo {
3587 RepositoryState::Local { backend, .. } => {
3588 let backend = backend.clone();
3589 cx.background_spawn(async move { backend.branches().await })
3590 .await
3591 }
3592 RepositoryState::Remote { project_id, client } => {
3593 let response = client
3594 .request(proto::GitGetBranches {
3595 project_id: project_id.0,
3596 repository_id: id.to_proto(),
3597 })
3598 .await?;
3599
3600 let branches = response
3601 .branches
3602 .into_iter()
3603 .map(|branch| proto_to_branch(&branch))
3604 .collect();
3605
3606 Ok(branches)
3607 }
3608 }
3609 })
3610 }
3611
3612 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3613 let id = self.id;
3614 self.send_job(None, move |repo, _cx| async move {
3615 match repo {
3616 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3617 RepositoryState::Remote { project_id, client } => {
3618 let response = client
3619 .request(proto::GitDiff {
3620 project_id: project_id.0,
3621 repository_id: id.to_proto(),
3622 diff_type: match diff_type {
3623 DiffType::HeadToIndex => {
3624 proto::git_diff::DiffType::HeadToIndex.into()
3625 }
3626 DiffType::HeadToWorktree => {
3627 proto::git_diff::DiffType::HeadToWorktree.into()
3628 }
3629 },
3630 })
3631 .await?;
3632
3633 Ok(response.diff)
3634 }
3635 }
3636 })
3637 }
3638
3639 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3640 let id = self.id;
3641 self.send_job(
3642 Some(format!("git switch -c {branch_name}").into()),
3643 move |repo, _cx| async move {
3644 match repo {
3645 RepositoryState::Local { backend, .. } => {
3646 backend.create_branch(branch_name).await
3647 }
3648 RepositoryState::Remote { project_id, client } => {
3649 client
3650 .request(proto::GitCreateBranch {
3651 project_id: project_id.0,
3652 repository_id: id.to_proto(),
3653 branch_name,
3654 })
3655 .await?;
3656
3657 Ok(())
3658 }
3659 }
3660 },
3661 )
3662 }
3663
3664 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3665 let id = self.id;
3666 self.send_job(
3667 Some(format!("git switch {branch_name}").into()),
3668 move |repo, _cx| async move {
3669 match repo {
3670 RepositoryState::Local { backend, .. } => {
3671 backend.change_branch(branch_name).await
3672 }
3673 RepositoryState::Remote { project_id, client } => {
3674 client
3675 .request(proto::GitChangeBranch {
3676 project_id: project_id.0,
3677 repository_id: id.to_proto(),
3678 branch_name,
3679 })
3680 .await?;
3681
3682 Ok(())
3683 }
3684 }
3685 },
3686 )
3687 }
3688
3689 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3690 let id = self.id;
3691 self.send_job(None, move |repo, _cx| async move {
3692 match repo {
3693 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3694 RepositoryState::Remote { project_id, client } => {
3695 let response = client
3696 .request(proto::CheckForPushedCommits {
3697 project_id: project_id.0,
3698 repository_id: id.to_proto(),
3699 })
3700 .await?;
3701
3702 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3703
3704 Ok(branches)
3705 }
3706 }
3707 })
3708 }
3709
3710 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3711 self.send_job(None, |repo, _cx| async move {
3712 match repo {
3713 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3714 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3715 }
3716 })
3717 }
3718
3719 pub fn restore_checkpoint(
3720 &mut self,
3721 checkpoint: GitRepositoryCheckpoint,
3722 ) -> oneshot::Receiver<Result<()>> {
3723 self.send_job(None, move |repo, _cx| async move {
3724 match repo {
3725 RepositoryState::Local { backend, .. } => {
3726 backend.restore_checkpoint(checkpoint).await
3727 }
3728 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3729 }
3730 })
3731 }
3732
3733 pub(crate) fn apply_remote_update(
3734 &mut self,
3735 update: proto::UpdateRepository,
3736 cx: &mut Context<Self>,
3737 ) -> Result<()> {
3738 let conflicted_paths = TreeSet::from_ordered_entries(
3739 update
3740 .current_merge_conflicts
3741 .into_iter()
3742 .map(|path| RepoPath(Path::new(&path).into())),
3743 );
3744 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3745 self.snapshot.merge_conflicts = conflicted_paths;
3746
3747 let edits = update
3748 .removed_statuses
3749 .into_iter()
3750 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3751 .chain(
3752 update
3753 .updated_statuses
3754 .into_iter()
3755 .filter_map(|updated_status| {
3756 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3757 }),
3758 )
3759 .collect::<Vec<_>>();
3760 self.snapshot.statuses_by_path.edit(edits, &());
3761 if update.is_last_update {
3762 self.snapshot.scan_id = update.scan_id;
3763 }
3764 cx.emit(RepositoryEvent::Updated { full_scan: true });
3765 Ok(())
3766 }
3767
3768 pub fn compare_checkpoints(
3769 &mut self,
3770 left: GitRepositoryCheckpoint,
3771 right: GitRepositoryCheckpoint,
3772 ) -> oneshot::Receiver<Result<bool>> {
3773 self.send_job(None, move |repo, _cx| async move {
3774 match repo {
3775 RepositoryState::Local { backend, .. } => {
3776 backend.compare_checkpoints(left, right).await
3777 }
3778 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3779 }
3780 })
3781 }
3782
3783 pub fn delete_checkpoint(
3784 &mut self,
3785 checkpoint: GitRepositoryCheckpoint,
3786 ) -> oneshot::Receiver<Result<()>> {
3787 self.send_job(None, move |repo, _cx| async move {
3788 match repo {
3789 RepositoryState::Local { backend, .. } => {
3790 backend.delete_checkpoint(checkpoint).await
3791 }
3792 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3793 }
3794 })
3795 }
3796
3797 pub fn diff_checkpoints(
3798 &mut self,
3799 base_checkpoint: GitRepositoryCheckpoint,
3800 target_checkpoint: GitRepositoryCheckpoint,
3801 ) -> oneshot::Receiver<Result<String>> {
3802 self.send_job(None, move |repo, _cx| async move {
3803 match repo {
3804 RepositoryState::Local { backend, .. } => {
3805 backend
3806 .diff_checkpoints(base_checkpoint, target_checkpoint)
3807 .await
3808 }
3809 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3810 }
3811 })
3812 }
3813
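    /// Schedules a status refresh for the work directory and enqueues a full
    /// rescan of the repository's git state, forwarding the resulting
    /// snapshot downstream when an update sender is provided.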
3814 fn schedule_scan(
3815 &mut self,
3816 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3817 cx: &mut Context<Self>,
3818 ) {
3819 self.paths_changed(
3820 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3821 updates_tx.clone(),
3822 cx,
3823 );
3824
3825 let this = cx.weak_entity();
3826 let _ = self.send_keyed_job(
3827 Some(GitJobKey::ReloadGitState),
3828 None,
3829 |state, mut cx| async move {
3830 let Some(this) = this.upgrade() else {
3831 return Ok(());
3832 };
3833 let RepositoryState::Local { backend, .. } = state else {
3834 bail!("not a local repository")
3835 };
3836 let (snapshot, events) = this
3837 .update(&mut cx, |this, _| {
3838 compute_snapshot(
3839 this.id,
3840 this.work_directory_abs_path.clone(),
3841 this.snapshot.clone(),
3842 backend.clone(),
3843 )
3844 })?
3845 .await?;
3846 this.update(&mut cx, |this, cx| {
3847 this.snapshot = snapshot.clone();
3848 for event in events {
3849 cx.emit(event);
3850 }
3851 })?;
3852 if let Some(updates_tx) = updates_tx {
3853 updates_tx
3854 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3855 .ok();
3856 }
3857 Ok(())
3858 },
3859 );
3860 }
3861
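    /// Spawns the background worker that owns the local git backend: it
    /// resolves the directory environment, opens the repository, and then
    /// drains jobs one at a time, skipping keyed jobs that have been
    /// superseded by a newer job with the same key.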
    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let environment = project_environment
                .upgrade()
                .ok_or_else(|| anyhow!("missing project environment"))?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .ok_or_else(|| anyhow!("failed to build repository"))
                })
                .await?;

            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
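                    // A keyed job is skipped when a newer job with the same key
                    // is still queued; only the most recent one needs to run.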
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

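    /// Spawns the background worker for a repository hosted by a remote peer.
    /// Jobs are run against a `RepositoryState::Remote` holding the project id
    /// and RPC client; queueing and keyed-job deduplication mirror the local
    /// worker.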
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }

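    /// Loads the staged (index) text for the given path. For a local repository
    /// this reads straight from the index; for a remote project it requests the
    /// staged text from the host via `OpenUnstagedDiff`.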
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

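    /// Loads the HEAD and index texts for the given path and collapses them into
    /// a single `DiffBasesChange`: `SetBoth` when the index matches HEAD,
    /// otherwise `SetEach` with the two texts. Remote projects fetch the same
    /// information via an `OpenUncommittedDiff` request.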
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode =
                        Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

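    /// Records the given paths as needing a status refresh and schedules a keyed
    /// `RefreshStatuses` job. The job re-queries git for just those paths, diffs
    /// the result against the previous snapshot, applies the edits, forwards an
    /// updated snapshot downstream when anything changed, and emits a
    /// non-full-scan `Updated` event.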
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
                                if &cursor.item().unwrap().status == status {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated { full_scan: false });
                })
            },
        );
    }

    /// Returns the currently running git command and when it started, if any.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
}

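/// Builds a permalink for a file inside a crate's registry source checkout by
/// reading the `.cargo_vcs_info.json` left by `cargo publish` (for the commit
/// SHA and the crate's path within its repository) together with the
/// `repository` field of the crate's `Cargo.toml`.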
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}

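/// Converts a blame result into its protobuf representation; `None` maps to a
/// response with no `blame_response` payload.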
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

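/// Builds a fresh `RepositorySnapshot` from the backend (current branch, full
/// status scan, merge message, and merge heads) and compares it against the
/// previous snapshot to decide which `RepositoryEvent`s to emit. Merge-conflict
/// paths are carried over from the previous snapshot and only recomputed when
/// the merge heads change.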
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().nth(0)?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated { full_scan: true });
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

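/// Decodes a file status from its protobuf form. When the structured `variant`
/// is absent, the plain `simple_status` code is used as a fallback and mapped
/// onto the equivalent tracked or unmerged status.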
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}