1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use postage::stream::Stream as _;
42use rpc::{
43 AnyProtoClient, TypedEnvelope,
44 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
45};
46use serde::Deserialize;
47use std::{
48 cmp::Ordering,
49 collections::{BTreeSet, VecDeque},
50 future::Future,
51 mem,
52 ops::Range,
53 path::{Path, PathBuf},
54 sync::{
55 Arc,
56 atomic::{self, AtomicU64},
57 },
58 time::Instant,
59};
60use sum_tree::{Edit, SumTree, TreeSet};
61use text::{Bias, BufferId};
62use util::{ResultExt, debug_panic, post_inc};
63use worktree::{
64 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
65};
66
/// Central store for all git repository state in a project, bridging the
/// worktree/buffer stores with local or remote git backends.
pub struct GitStore {
    // Whether this store runs against a local repo, an SSH remote, or a
    // collaborator's upstream project.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    // All known repositories, keyed by their store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    // The repository currently selected as "active", if any.
    active_repo_id: Option<RepositoryId>,
    #[allow(clippy::type_complexity)]
    // In-flight diff-loading tasks, shared so concurrent requests for the same
    // (buffer, kind) pair await a single computation.
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    // Per-buffer diff state (unstaged/uncommitted diffs and recalculation bookkeeping).
    diffs: HashMap<BufferId, Entity<BufferDiffState>>,
    // Diffs that have been shared with downstream peers; cleared on unshare.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    // Keeps the worktree-store and buffer-store subscriptions alive.
    _subscriptions: Vec<Subscription>,
}
80
/// The diffs for a single buffer that have been shared with a downstream peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
86
/// Per-buffer diff bookkeeping: weak handles to the buffer's diffs, the task
/// recalculating them, and the base texts (HEAD and index) they compare against.
struct BufferDiffState {
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    // Watch channel used to signal when a recalculation is in progress / done.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    // Base texts; `None` typically means the file is absent from HEAD/index.
    head_text: Option<Arc<String>>,
    index_text: Option<Arc<String>>,
    // Dirty flags indicating which inputs changed since the last recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
112
/// Describes which diff base texts (index and/or HEAD) changed and their new
/// values. `None` means the corresponding base text is absent.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the HEAD (committed) text changed.
    SetHead(Option<String>),
    /// Index and HEAD changed to different values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and HEAD changed to the same value.
    SetBoth(Option<String>),
}
123
/// Which kind of diff is being loaded for a buffer: against the index
/// (unstaged changes) or against HEAD (uncommitted changes).
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    Unstaged,
    Uncommitted,
}
129
/// The store's relationship to the underlying git data.
enum GitStoreState {
    /// Repositories live on this machine; we may also forward updates downstream.
    Local {
        // Source of unique ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        // Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    /// Repositories live on an SSH host; we proxy requests upstream and may
    /// also forward updates downstream.
    Ssh {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
    /// We are a guest in a collaborative project; all git data comes from upstream.
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: ProjectId,
    },
}
147
/// A repository change queued for forwarding to downstream collaborators.
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}
152
/// Connection state for forwarding local repository updates to a downstream
/// client; dropping `_task` stops the forwarding loop.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    // Feeds updates to the background task that serializes and sends them.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
159
/// A checkpoint across all repositories in the store, keyed by each
/// repository's work-directory absolute path.
#[derive(Clone)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}
164
/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}
170
171impl StatusEntry {
172 fn to_proto(&self) -> proto::StatusEntry {
173 let simple_status = match self.status {
174 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
175 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
176 FileStatus::Tracked(TrackedStatus {
177 index_status,
178 worktree_status,
179 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
180 worktree_status
181 } else {
182 index_status
183 }),
184 };
185
186 proto::StatusEntry {
187 repo_path: self.repo_path.as_ref().to_proto(),
188 simple_status,
189 status: Some(status_to_proto(self.status)),
190 }
191 }
192}
193
194impl TryFrom<proto::StatusEntry> for StatusEntry {
195 type Error = anyhow::Error;
196
197 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
198 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
199 let status = status_from_proto(value.simple_status, value.status)?;
200 Ok(Self { repo_path, status })
201 }
202}
203
// Allows status entries to live in a `SumTree` ordered by repo path, with git
// status summaries aggregated over subtrees.
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.0.clone(),
            item_summary: self.status.summary(),
        }
    }
}
214
// Entries are keyed (and deduplicated) by their repository-relative path.
impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.0.clone())
    }
}
222
/// Store-assigned identifier for a repository (newtype over the wire id).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);
225
/// An immutable snapshot of a repository's observable state, cheap to clone
/// and suitable for diffing/serializing to collaborators.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    // In-progress merge commit message, if a merge is underway.
    pub merge_message: Option<SharedString>,
    // Git statuses for all paths, ordered by repo-relative path.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    pub branch: Option<Branch>,
    pub merge_conflicts: TreeSet<RepoPath>,
    pub merge_head_shas: Vec<SharedString>,
    // Monotonic counter bumped on each scan; used to detect staleness.
    pub scan_id: u64,
}
237
/// Identifier for a queued git job within a repository.
type JobId = u64;

/// Metadata about a running git job, used for progress reporting.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}
245
/// A single git repository tracked by the `GitStore`, wrapping a snapshot of
/// its state plus the machinery for running git jobs against it.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    // Buffer used for composing commit messages, created lazily.
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: BTreeSet<RepoPath>,
    // Queue of serialized git operations executed against the backend.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    // Next job id to hand out.
    job_id: JobId,
    // Askpass delegates for in-flight credential prompts, keyed by request id.
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
}
260
// Expose the snapshot's fields directly on `Repository` (e.g. `repo.branch`).
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
268
/// Handle passed to git jobs describing how to execute them: directly against
/// a local backend, or via RPC to an upstream project.
#[derive(Clone)]
pub enum RepositoryState {
    Local {
        backend: Arc<dyn GitRepository>,
        // Environment variables to use when invoking git for this repository.
        environment: Arc<HashMap<String, String>>,
    },
    Remote {
        project_id: ProjectId,
        client: AnyProtoClient,
    },
}
280
/// Events emitted by a `Repository` when its snapshot changes.
#[derive(Clone, Debug)]
pub enum RepositoryEvent {
    /// The snapshot was updated; `full_scan` indicates a complete rescan
    /// rather than an incremental one.
    Updated { full_scan: bool },
    /// The set of merge head SHAs changed (e.g. a merge started or finished).
    MergeHeadsChanged,
}

/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
289
/// Events emitted by the `GitStore` for the surrounding project layer.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    // The bool is true when the updated repository is the active one.
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded(RepositoryId),
    RepositoryRemoved(RepositoryId),
    // An error occurred while writing to the git index (e.g. staging a hunk).
    IndexWriteError(anyhow::Error),
    JobsUpdated,
}
299
// `Repository` emits both snapshot events and job-queue updates; `GitStore`
// re-broadcasts aggregate `GitStoreEvent`s to its own subscribers.
impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}
303
/// A unit of git work queued on a repository's job channel.
pub struct GitJob {
    // The work itself, given access to the repository backend state.
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // Optional dedup key; presumably used to coalesce redundant queued jobs —
    // TODO(review): confirm against the job-queue consumer.
    key: Option<GitJobKey>,
}
308
/// Keys identifying classes of git jobs (see `GitJob::key`).
#[derive(PartialEq, Eq)]
enum GitJobKey {
    /// Writing the index entry for a specific path.
    WriteIndex(RepoPath),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
316
317impl GitStore {
    /// Creates a store backed by repositories on the local filesystem.
    /// Repository ids start at 1 and are assigned from a shared counter.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }
337
    /// Creates a store for a collaborative guest project; all git data is
    /// fetched from the upstream host via `upstream_client`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: ProjectId,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
            },
            cx,
        )
    }
355
    /// Creates a store proxying to an SSH remote project (which uses the
    /// well-known `SSH_PROJECT_ID`); it may later be shared downstream too.
    pub fn ssh(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Ssh {
                upstream_client,
                upstream_project_id: ProjectId(SSH_PROJECT_ID),
                downstream: None,
            },
            cx,
        )
    }
373
374 fn new(
375 worktree_store: Entity<WorktreeStore>,
376 buffer_store: Entity<BufferStore>,
377 state: GitStoreState,
378 cx: &mut Context<Self>,
379 ) -> Self {
380 let _subscriptions = vec![
381 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
382 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
383 ];
384
385 GitStore {
386 state,
387 buffer_store,
388 worktree_store,
389 repositories: HashMap::default(),
390 active_repo_id: None,
391 _subscriptions,
392 loading_diffs: HashMap::default(),
393 shared_diffs: HashMap::default(),
394 diffs: HashMap::default(),
395 }
396 }
397
    /// Registers all git-related RPC handlers on the given client so that
    /// remote peers can drive this store's operations.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
    }
427
    /// Returns true if this store operates on local repositories.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }
431
    /// Starts sharing this store with a downstream client.
    ///
    /// For SSH stores, the current repository snapshots are sent immediately
    /// and the client handle is stored for future forwarding. For local
    /// stores, a background task is spawned that serializes repository
    /// snapshots into incremental updates and streams them downstream; the
    /// task clears the downstream state when the channel closes (e.g. the
    /// client disconnects). Calling this on a `Remote` store is a bug.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => {
                // Send a full initial snapshot of every repository, split into
                // size-bounded messages.
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                // `snapshots` tracks the last state sent per repository so
                // subsequent updates can be deltas.
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Seed the channel with the current state of every repository.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        // Serialize and send updates off the main thread.
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Known repository: send a delta
                                            // against the last-sent snapshot.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // New repository: send the full state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The stream ended (or sending failed): drop the
                        // downstream state so we stop queueing updates.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
            GitStoreState::Remote { .. } => {
                debug_panic!("shared called on remote store");
            }
        }
    }
515
516 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
517 match &mut self.state {
518 GitStoreState::Local {
519 downstream: downstream_client,
520 ..
521 } => {
522 downstream_client.take();
523 }
524 GitStoreState::Ssh {
525 downstream: downstream_client,
526 ..
527 } => {
528 downstream_client.take();
529 }
530 GitStoreState::Remote { .. } => {
531 debug_panic!("unshared called on remote store");
532 }
533 }
534 self.shared_diffs.clear();
535 }
536
    /// Drops all diffs that were shared with the given peer (e.g. when the
    /// peer disconnects).
    pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
        self.shared_diffs.remove(peer_id);
    }
540
541 pub fn active_repository(&self) -> Option<Entity<Repository>> {
542 self.active_repo_id
543 .as_ref()
544 .map(|id| self.repositories[&id].clone())
545 }
546
547 pub fn open_unstaged_diff(
548 &mut self,
549 buffer: Entity<Buffer>,
550 cx: &mut Context<Self>,
551 ) -> Task<Result<Entity<BufferDiff>>> {
552 let buffer_id = buffer.read(cx).remote_id();
553 if let Some(diff_state) = self.diffs.get(&buffer_id) {
554 if let Some(unstaged_diff) = diff_state
555 .read(cx)
556 .unstaged_diff
557 .as_ref()
558 .and_then(|weak| weak.upgrade())
559 {
560 if let Some(task) =
561 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
562 {
563 return cx.background_executor().spawn(async move {
564 task.await;
565 Ok(unstaged_diff)
566 });
567 }
568 return Task::ready(Ok(unstaged_diff));
569 }
570 }
571
572 let Some((repo, repo_path)) =
573 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
574 else {
575 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
576 };
577
578 let task = self
579 .loading_diffs
580 .entry((buffer_id, DiffKind::Unstaged))
581 .or_insert_with(|| {
582 let staged_text = repo.update(cx, |repo, cx| {
583 repo.load_staged_text(buffer_id, repo_path, cx)
584 });
585 cx.spawn(async move |this, cx| {
586 Self::open_diff_internal(
587 this,
588 DiffKind::Unstaged,
589 staged_text.await.map(DiffBasesChange::SetIndex),
590 buffer,
591 cx,
592 )
593 .await
594 .map_err(Arc::new)
595 })
596 .shared()
597 })
598 .clone();
599
600 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
601 }
602
603 pub fn open_uncommitted_diff(
604 &mut self,
605 buffer: Entity<Buffer>,
606 cx: &mut Context<Self>,
607 ) -> Task<Result<Entity<BufferDiff>>> {
608 let buffer_id = buffer.read(cx).remote_id();
609
610 if let Some(diff_state) = self.diffs.get(&buffer_id) {
611 if let Some(uncommitted_diff) = diff_state
612 .read(cx)
613 .uncommitted_diff
614 .as_ref()
615 .and_then(|weak| weak.upgrade())
616 {
617 if let Some(task) =
618 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
619 {
620 return cx.background_executor().spawn(async move {
621 task.await;
622 Ok(uncommitted_diff)
623 });
624 }
625 return Task::ready(Ok(uncommitted_diff));
626 }
627 }
628
629 let Some((repo, repo_path)) =
630 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
631 else {
632 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
633 };
634
635 let task = self
636 .loading_diffs
637 .entry((buffer_id, DiffKind::Uncommitted))
638 .or_insert_with(|| {
639 let changes = repo.update(cx, |repo, cx| {
640 repo.load_committed_text(buffer_id, repo_path, cx)
641 });
642
643 cx.spawn(async move |this, cx| {
644 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
645 .await
646 .map_err(Arc::new)
647 })
648 .shared()
649 })
650 .clone();
651
652 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
653 }
654
    /// Shared tail of diff loading: records the loaded base texts on the
    /// buffer's `BufferDiffState`, creates the `BufferDiff` entity, and waits
    /// for the initial recalculation to complete.
    ///
    /// On error, the `loading_diffs` entry is removed so a later call can
    /// retry. For an uncommitted diff, an unstaged diff is created (if absent)
    /// and attached as the secondary diff.
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        let diff_bases_change = match texts {
            Err(e) => {
                // Clear the in-flight entry so future calls can retry the load.
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferDiffState::default()));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; reuse an existing one if alive.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                }

                // Kick off the first recalculation with the freshly loaded bases.
                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
723
724 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
725 let diff_state = self.diffs.get(&buffer_id)?;
726 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
727 }
728
729 pub fn get_uncommitted_diff(
730 &self,
731 buffer_id: BufferId,
732 cx: &App,
733 ) -> Option<Entity<BufferDiff>> {
734 let diff_state = self.diffs.get(&buffer_id)?;
735 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
736 }
737
738 pub fn project_path_git_status(
739 &self,
740 project_path: &ProjectPath,
741 cx: &App,
742 ) -> Option<FileStatus> {
743 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
744 Some(repo.read(cx).status_for_path(&repo_path)?.status)
745 }
746
    /// Captures a checkpoint of every repository in the store, keyed by work
    /// directory. Fails if any single repository checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // `map(|checkpoint| checkpoint?)` flattens the nested Result
                // produced by the repository job — TODO(review): confirm the
                // exact return type of `Repository::checkpoint`.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                // Paths and checkpoints were pushed in lockstep above.
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
767
    /// Restores a previously captured checkpoint on every repository that is
    /// still present (matched by work-directory path); checkpoints for
    /// repositories that no longer exist are silently skipped.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by work directory for matching.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // Flatten the nested Result from the repository job.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
793
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// A pair of checkpoints is only compared when the left checkpoint's
    /// repository exists in the right checkpoint; if the right side is missing
    /// one of the left side's repositories, the result is immediately `false`.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        // Index current repositories by work directory for matching.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // Flatten the nested Result from the repository job.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // Left has a repository that right does not: not equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            // Equal only if every per-repository comparison reports equality.
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
832
    /// Deletes a previously captured checkpoint from every repository that is
    /// still present (matched by work-directory path); entries for missing
    /// repositories are silently skipped.
    pub fn delete_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index current repositories by work directory for matching.
        let repositories_by_work_directory_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) =
                repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
            {
                let delete = repository.update(cx, |this, _| this.delete_checkpoint(checkpoint));
                // Flatten the nested Result from the repository job.
                tasks.push(async move { delete.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
858
    /// Blames a buffer.
    ///
    /// Uses the buffer content at `version` (or the current content when
    /// `None`). Runs locally against the git backend, or forwards a
    /// `BlameBuffer` request upstream for remote repositories. Fails if the
    /// buffer does not belong to a git repository.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut App,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the content to blame before entering the async job.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version).clone(),
            None => buffer.as_rope().clone(),
        };
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, _| async move {
                match state {
                    RepositoryState::Local { backend, .. } => backend
                        .blame(repo_path.clone(), content)
                        .await
                        .with_context(|| format!("Failed to blame {:?}", repo_path.0))
                        .map(Some),
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::BlameBuffer {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                version: serialize_version(&version),
                            })
                            .await?;
                        Ok(deserialize_blame_buffer_response(response))
                    }
                }
            })
        });

        // Flatten the job-channel Result around the blame Result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
903
    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider (e.g. GitHub).
    ///
    /// Locally, this resolves the upstream remote's URL and HEAD SHA and asks
    /// the matching hosting provider to construct the link; remotely it
    /// forwards a `GetPermalinkToLine` request upstream. As a special case,
    /// Rust files outside any repository (e.g. Cargo registry sources opened
    /// via go-to-definition) get a permalink derived from crate metadata.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust".into())
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
                return Task::ready(Err(anyhow!("no permalink available")));
            };
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .map_err(|_| anyhow!("no permalink available"))
            });

            // TODO remote case
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Use the branch's upstream remote when known, defaulting to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local { backend, .. } => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;

                        let sha = backend
                            .head_sha()
                            .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global)?;

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;

                        let path = repo_path
                            .to_str()
                            .ok_or_else(|| anyhow!("failed to convert path to string"))?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams {
                                sha: &sha,
                                path,
                                selection: Some(selection),
                            },
                        ))
                    }
                    RepositoryState::Remote { project_id, client } => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        // Flatten the job-channel Result around the permalink Result.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1001
    /// Returns the client and project id for the downstream collaborator we
    /// are currently sharing with, if any. `Remote` stores never share.
    fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
        match &self.state {
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => downstream_client
                .as_ref()
                .map(|state| (state.client.clone(), state.project_id)),
            GitStoreState::Ssh {
                downstream: downstream_client,
                ..
            } => downstream_client.clone(),
            GitStoreState::Remote { .. } => None,
        }
    }
1017
    /// Returns the client used to reach the upstream project, if this store
    /// is an SSH or collaborative-guest store. Local stores have no upstream.
    fn upstream_client(&self) -> Option<AnyProtoClient> {
        match &self.state {
            GitStoreState::Local { .. } => None,
            GitStoreState::Ssh {
                upstream_client, ..
            }
            | GitStoreState::Remote {
                upstream_client, ..
            } => Some(upstream_client.clone()),
        }
    }
1029
    /// Reacts to worktree-store events for local stores only.
    ///
    /// Entry updates are grouped by repository and forwarded as path changes;
    /// git-repository updates (e.g. a `.git` directory appearing/moving)
    /// trigger repository list maintenance and diff-base refreshes. Hidden
    /// worktrees are ignored for repository discovery.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        // Only local stores scan worktrees; remote/SSH stores receive git
        // state from upstream instead.
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                // Group the changed paths by the repository containing them.
                let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
                for (relative_path, _, _) in updated_entries.iter() {
                    let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
                        &(*worktree_id, relative_path.clone()).into(),
                        cx,
                    ) else {
                        continue;
                    };
                    paths_by_git_repo.entry(repo).or_default().push(repo_path)
                }

                for (repo, paths) in paths_by_git_repo {
                    repo.update(cx, |repo, cx| {
                        repo.paths_changed(
                            paths,
                            downstream
                                .as_ref()
                                .map(|downstream| downstream.updates_tx.clone()),
                            cx,
                        );
                    });
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            _ => {}
        }
    }
1098
1099 fn on_repository_event(
1100 &mut self,
1101 repo: Entity<Repository>,
1102 event: &RepositoryEvent,
1103 cx: &mut Context<Self>,
1104 ) {
1105 let id = repo.read(cx).id;
1106 cx.emit(GitStoreEvent::RepositoryUpdated(
1107 id,
1108 event.clone(),
1109 self.active_repo_id == Some(id),
1110 ))
1111 }
1112
    /// Re-broadcasts a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1116
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
    ///
    /// Each reported entry is matched against tracked repositories by old or
    /// new work-directory path: a match with a new path is a move/refresh, a
    /// match without one is a removal, and an unmatched entry carrying both a
    /// work directory and a `.git` path is a newly discovered repository.
    fn update_repositories_from_worktree(
        &mut self,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Known repository still exists (possibly moved): record
                    // its current path and rescan it.
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Known repository with no new path: it was removed.
                    removed_ids.push(*id);
                }
            } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
                .new_work_directory_abs_path
                .clone()
                .zip(update.dot_git_abs_path.clone())
            {
                // Newly discovered repository: allocate an id, create the
                // entity, and kick off its initial scan.
                // NOTE(review): `Release` ordering on a plain id counter is
                // unusual — `Relaxed` would normally suffice; confirm intent.
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path,
                        dot_git_abs_path,
                        project_environment.downgrade(),
                        fs.clone(),
                        git_store,
                        cx,
                    );
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                cx.emit(GitStoreEvent::RepositoryAdded(id));
                // The first repository ever added becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            // Tell downstream collaborators to drop this repository too;
            // send failures are deliberately ignored (best-effort).
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1193
1194 fn on_buffer_store_event(
1195 &mut self,
1196 _: Entity<BufferStore>,
1197 event: &BufferStoreEvent,
1198 cx: &mut Context<Self>,
1199 ) {
1200 match event {
1201 BufferStoreEvent::BufferAdded(buffer) => {
1202 cx.subscribe(&buffer, |this, buffer, event, cx| {
1203 if let BufferEvent::LanguageChanged = event {
1204 let buffer_id = buffer.read(cx).remote_id();
1205 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1206 diff_state.update(cx, |diff_state, cx| {
1207 diff_state.buffer_language_changed(buffer, cx);
1208 });
1209 }
1210 }
1211 })
1212 .detach();
1213 }
1214 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1215 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1216 diffs.remove(buffer_id);
1217 }
1218 }
1219 BufferStoreEvent::BufferDropped(buffer_id) => {
1220 self.diffs.remove(&buffer_id);
1221 for diffs in self.shared_diffs.values_mut() {
1222 diffs.remove(buffer_id);
1223 }
1224 }
1225
1226 _ => {}
1227 }
1228 }
1229
1230 pub fn recalculate_buffer_diffs(
1231 &mut self,
1232 buffers: Vec<Entity<Buffer>>,
1233 cx: &mut Context<Self>,
1234 ) -> impl Future<Output = ()> + use<> {
1235 let mut futures = Vec::new();
1236 for buffer in buffers {
1237 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1238 let buffer = buffer.read(cx).text_snapshot();
1239 diff_state.update(cx, |diff_state, cx| {
1240 diff_state.recalculate_diffs(buffer, cx);
1241 futures.extend(diff_state.wait_for_recalculation());
1242 });
1243 }
1244 }
1245 async move {
1246 futures::future::join_all(futures).await;
1247 }
1248 }
1249
    /// Responds to hunks being staged or unstaged in a buffer's diff by
    /// writing the new index text back to the repository.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the per-buffer operation counter; the index-write job
                // carries it so newer operations can supersede older ones.
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.display());
                        repo.spawn_set_index_text_job(
                            path,
                            // presumably `None` index text removes the entry —
                            // TODO confirm against spawn_set_index_text_job
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // If the index write failed, roll back the pending
                        // hunk state and surface the error to subscribers.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1289
1290 fn local_worktree_git_repos_changed(
1291 &mut self,
1292 worktree: Entity<Worktree>,
1293 changed_repos: &UpdatedGitRepositoriesSet,
1294 cx: &mut Context<Self>,
1295 ) {
1296 log::debug!("local worktree repos changed");
1297 debug_assert!(worktree.read(cx).is_local());
1298
1299 for repository in self.repositories.values() {
1300 repository.update(cx, |repository, cx| {
1301 let repo_abs_path = &repository.work_directory_abs_path;
1302 if changed_repos.iter().any(|update| {
1303 update.old_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1304 || update.new_work_directory_abs_path.as_ref() == Some(&repo_abs_path)
1305 }) {
1306 repository.reload_buffer_diff_bases(cx);
1307 }
1308 });
1309 }
1310 }
1311
    /// All repositories currently tracked by this store, keyed by id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1315
1316 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1317 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1318 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1319 Some(status.status)
1320 }
1321
1322 pub fn repository_and_path_for_buffer_id(
1323 &self,
1324 buffer_id: BufferId,
1325 cx: &App,
1326 ) -> Option<(Entity<Repository>, RepoPath)> {
1327 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1328 let project_path = buffer.read(cx).project_path(cx)?;
1329 self.repository_and_path_for_project_path(&project_path, cx)
1330 }
1331
1332 pub fn repository_and_path_for_project_path(
1333 &self,
1334 path: &ProjectPath,
1335 cx: &App,
1336 ) -> Option<(Entity<Repository>, RepoPath)> {
1337 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1338 self.repositories
1339 .values()
1340 .filter_map(|repo| {
1341 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1342 Some((repo.clone(), repo_path))
1343 })
1344 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1345 }
1346
1347 pub fn git_init(
1348 &self,
1349 path: Arc<Path>,
1350 fallback_branch_name: String,
1351 cx: &App,
1352 ) -> Task<Result<()>> {
1353 match &self.state {
1354 GitStoreState::Local { fs, .. } => {
1355 let fs = fs.clone();
1356 cx.background_executor()
1357 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1358 }
1359 GitStoreState::Ssh {
1360 upstream_client,
1361 upstream_project_id: project_id,
1362 ..
1363 }
1364 | GitStoreState::Remote {
1365 upstream_client,
1366 upstream_project_id: project_id,
1367 ..
1368 } => {
1369 let client = upstream_client.clone();
1370 let project_id = *project_id;
1371 cx.background_executor().spawn(async move {
1372 client
1373 .request(proto::GitInit {
1374 project_id: project_id.0,
1375 abs_path: path.to_string_lossy().to_string(),
1376 fallback_branch_name,
1377 })
1378 .await?;
1379 Ok(())
1380 })
1381 }
1382 }
1383 }
1384
    /// Applies an upstream `UpdateRepository` message: creates the repository
    /// entity on first sight, applies the update, and relays it downstream.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this
                .upstream_client()
                .context("no upstream client")?
                .clone();

            // Create the remote repository entity lazily on first update.
            let mut is_new = false;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                is_new = true;
                let git_store = cx.weak_entity();
                cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                })
            });
            // NOTE(review): unlike locally-discovered repositories, remote
            // ones are only subscribed for repository events, not
            // `on_jobs_updated` — confirm this asymmetry is intentional.
            if is_new {
                this._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event))
            }

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository seen becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update to our own downstream collaborators, rewriting
            // the project id to ours.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })?
    }
1436
1437 async fn handle_remove_repository(
1438 this: Entity<Self>,
1439 envelope: TypedEnvelope<proto::RemoveRepository>,
1440 mut cx: AsyncApp,
1441 ) -> Result<()> {
1442 this.update(&mut cx, |this, cx| {
1443 let mut update = envelope.payload;
1444 let id = RepositoryId::from_proto(update.id);
1445 this.repositories.remove(&id);
1446 if let Some((client, project_id)) = this.downstream_client() {
1447 update.project_id = project_id.to_proto();
1448 client.send(update).log_err();
1449 }
1450 if this.active_repo_id == Some(id) {
1451 this.active_repo_id = None;
1452 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1453 }
1454 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1455 })
1456 }
1457
1458 async fn handle_git_init(
1459 this: Entity<Self>,
1460 envelope: TypedEnvelope<proto::GitInit>,
1461 cx: AsyncApp,
1462 ) -> Result<proto::Ack> {
1463 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1464 let name = envelope.payload.fallback_branch_name;
1465 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1466 .await?;
1467
1468 Ok(proto::Ack {})
1469 }
1470
1471 async fn handle_fetch(
1472 this: Entity<Self>,
1473 envelope: TypedEnvelope<proto::Fetch>,
1474 mut cx: AsyncApp,
1475 ) -> Result<proto::RemoteMessageResponse> {
1476 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1477 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1478 let askpass_id = envelope.payload.askpass_id;
1479
1480 let askpass = make_remote_delegate(
1481 this,
1482 envelope.payload.project_id,
1483 repository_id,
1484 askpass_id,
1485 &mut cx,
1486 );
1487
1488 let remote_output = repository_handle
1489 .update(&mut cx, |repository_handle, cx| {
1490 repository_handle.fetch(askpass, cx)
1491 })?
1492 .await??;
1493
1494 Ok(proto::RemoteMessageResponse {
1495 stdout: remote_output.stdout,
1496 stderr: remote_output.stderr,
1497 })
1498 }
1499
1500 async fn handle_push(
1501 this: Entity<Self>,
1502 envelope: TypedEnvelope<proto::Push>,
1503 mut cx: AsyncApp,
1504 ) -> Result<proto::RemoteMessageResponse> {
1505 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1506 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1507
1508 let askpass_id = envelope.payload.askpass_id;
1509 let askpass = make_remote_delegate(
1510 this,
1511 envelope.payload.project_id,
1512 repository_id,
1513 askpass_id,
1514 &mut cx,
1515 );
1516
1517 let options = envelope
1518 .payload
1519 .options
1520 .as_ref()
1521 .map(|_| match envelope.payload.options() {
1522 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1523 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1524 });
1525
1526 let branch_name = envelope.payload.branch_name.into();
1527 let remote_name = envelope.payload.remote_name.into();
1528
1529 let remote_output = repository_handle
1530 .update(&mut cx, |repository_handle, cx| {
1531 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1532 })?
1533 .await??;
1534 Ok(proto::RemoteMessageResponse {
1535 stdout: remote_output.stdout,
1536 stderr: remote_output.stderr,
1537 })
1538 }
1539
1540 async fn handle_pull(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::Pull>,
1543 mut cx: AsyncApp,
1544 ) -> Result<proto::RemoteMessageResponse> {
1545 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1546 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1547 let askpass_id = envelope.payload.askpass_id;
1548 let askpass = make_remote_delegate(
1549 this,
1550 envelope.payload.project_id,
1551 repository_id,
1552 askpass_id,
1553 &mut cx,
1554 );
1555
1556 let branch_name = envelope.payload.branch_name.into();
1557 let remote_name = envelope.payload.remote_name.into();
1558
1559 let remote_message = repository_handle
1560 .update(&mut cx, |repository_handle, cx| {
1561 repository_handle.pull(branch_name, remote_name, askpass, cx)
1562 })?
1563 .await??;
1564
1565 Ok(proto::RemoteMessageResponse {
1566 stdout: remote_message.stdout,
1567 stderr: remote_message.stderr,
1568 })
1569 }
1570
1571 async fn handle_stage(
1572 this: Entity<Self>,
1573 envelope: TypedEnvelope<proto::Stage>,
1574 mut cx: AsyncApp,
1575 ) -> Result<proto::Ack> {
1576 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1577 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1578
1579 let entries = envelope
1580 .payload
1581 .paths
1582 .into_iter()
1583 .map(PathBuf::from)
1584 .map(RepoPath::new)
1585 .collect();
1586
1587 repository_handle
1588 .update(&mut cx, |repository_handle, cx| {
1589 repository_handle.stage_entries(entries, cx)
1590 })?
1591 .await?;
1592 Ok(proto::Ack {})
1593 }
1594
1595 async fn handle_unstage(
1596 this: Entity<Self>,
1597 envelope: TypedEnvelope<proto::Unstage>,
1598 mut cx: AsyncApp,
1599 ) -> Result<proto::Ack> {
1600 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1601 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1602
1603 let entries = envelope
1604 .payload
1605 .paths
1606 .into_iter()
1607 .map(PathBuf::from)
1608 .map(RepoPath::new)
1609 .collect();
1610
1611 repository_handle
1612 .update(&mut cx, |repository_handle, cx| {
1613 repository_handle.unstage_entries(entries, cx)
1614 })?
1615 .await?;
1616
1617 Ok(proto::Ack {})
1618 }
1619
1620 async fn handle_set_index_text(
1621 this: Entity<Self>,
1622 envelope: TypedEnvelope<proto::SetIndexText>,
1623 mut cx: AsyncApp,
1624 ) -> Result<proto::Ack> {
1625 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1626 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1627 let repo_path = RepoPath::from_str(&envelope.payload.path);
1628
1629 repository_handle
1630 .update(&mut cx, |repository_handle, cx| {
1631 repository_handle.spawn_set_index_text_job(
1632 repo_path,
1633 envelope.payload.text,
1634 None,
1635 cx,
1636 )
1637 })?
1638 .await??;
1639 Ok(proto::Ack {})
1640 }
1641
1642 async fn handle_commit(
1643 this: Entity<Self>,
1644 envelope: TypedEnvelope<proto::Commit>,
1645 mut cx: AsyncApp,
1646 ) -> Result<proto::Ack> {
1647 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1648 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1649
1650 let message = SharedString::from(envelope.payload.message);
1651 let name = envelope.payload.name.map(SharedString::from);
1652 let email = envelope.payload.email.map(SharedString::from);
1653
1654 repository_handle
1655 .update(&mut cx, |repository_handle, cx| {
1656 repository_handle.commit(message, name.zip(email), cx)
1657 })?
1658 .await??;
1659 Ok(proto::Ack {})
1660 }
1661
1662 async fn handle_get_remotes(
1663 this: Entity<Self>,
1664 envelope: TypedEnvelope<proto::GetRemotes>,
1665 mut cx: AsyncApp,
1666 ) -> Result<proto::GetRemotesResponse> {
1667 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1668 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1669
1670 let branch_name = envelope.payload.branch_name;
1671
1672 let remotes = repository_handle
1673 .update(&mut cx, |repository_handle, _| {
1674 repository_handle.get_remotes(branch_name)
1675 })?
1676 .await??;
1677
1678 Ok(proto::GetRemotesResponse {
1679 remotes: remotes
1680 .into_iter()
1681 .map(|remotes| proto::get_remotes_response::Remote {
1682 name: remotes.name.to_string(),
1683 })
1684 .collect::<Vec<_>>(),
1685 })
1686 }
1687
1688 async fn handle_get_branches(
1689 this: Entity<Self>,
1690 envelope: TypedEnvelope<proto::GitGetBranches>,
1691 mut cx: AsyncApp,
1692 ) -> Result<proto::GitBranchesResponse> {
1693 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1694 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1695
1696 let branches = repository_handle
1697 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1698 .await??;
1699
1700 Ok(proto::GitBranchesResponse {
1701 branches: branches
1702 .into_iter()
1703 .map(|branch| branch_to_proto(&branch))
1704 .collect::<Vec<_>>(),
1705 })
1706 }
1707 async fn handle_create_branch(
1708 this: Entity<Self>,
1709 envelope: TypedEnvelope<proto::GitCreateBranch>,
1710 mut cx: AsyncApp,
1711 ) -> Result<proto::Ack> {
1712 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1713 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1714 let branch_name = envelope.payload.branch_name;
1715
1716 repository_handle
1717 .update(&mut cx, |repository_handle, _| {
1718 repository_handle.create_branch(branch_name)
1719 })?
1720 .await??;
1721
1722 Ok(proto::Ack {})
1723 }
1724
1725 async fn handle_change_branch(
1726 this: Entity<Self>,
1727 envelope: TypedEnvelope<proto::GitChangeBranch>,
1728 mut cx: AsyncApp,
1729 ) -> Result<proto::Ack> {
1730 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1731 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1732 let branch_name = envelope.payload.branch_name;
1733
1734 repository_handle
1735 .update(&mut cx, |repository_handle, _| {
1736 repository_handle.change_branch(branch_name)
1737 })?
1738 .await??;
1739
1740 Ok(proto::Ack {})
1741 }
1742
1743 async fn handle_show(
1744 this: Entity<Self>,
1745 envelope: TypedEnvelope<proto::GitShow>,
1746 mut cx: AsyncApp,
1747 ) -> Result<proto::GitCommitDetails> {
1748 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1749 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1750
1751 let commit = repository_handle
1752 .update(&mut cx, |repository_handle, _| {
1753 repository_handle.show(envelope.payload.commit)
1754 })?
1755 .await??;
1756 Ok(proto::GitCommitDetails {
1757 sha: commit.sha.into(),
1758 message: commit.message.into(),
1759 commit_timestamp: commit.commit_timestamp,
1760 author_email: commit.author_email.into(),
1761 author_name: commit.author_name.into(),
1762 })
1763 }
1764
1765 async fn handle_load_commit_diff(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1768 mut cx: AsyncApp,
1769 ) -> Result<proto::LoadCommitDiffResponse> {
1770 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1771 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1772
1773 let commit_diff = repository_handle
1774 .update(&mut cx, |repository_handle, _| {
1775 repository_handle.load_commit_diff(envelope.payload.commit)
1776 })?
1777 .await??;
1778 Ok(proto::LoadCommitDiffResponse {
1779 files: commit_diff
1780 .files
1781 .into_iter()
1782 .map(|file| proto::CommitFile {
1783 path: file.path.to_string(),
1784 old_text: file.old_text,
1785 new_text: file.new_text,
1786 })
1787 .collect(),
1788 })
1789 }
1790
1791 async fn handle_reset(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::GitReset>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::Ack> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798
1799 let mode = match envelope.payload.mode() {
1800 git_reset::ResetMode::Soft => ResetMode::Soft,
1801 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1802 };
1803
1804 repository_handle
1805 .update(&mut cx, |repository_handle, cx| {
1806 repository_handle.reset(envelope.payload.commit, mode, cx)
1807 })?
1808 .await??;
1809 Ok(proto::Ack {})
1810 }
1811
1812 async fn handle_checkout_files(
1813 this: Entity<Self>,
1814 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1815 mut cx: AsyncApp,
1816 ) -> Result<proto::Ack> {
1817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1819 let paths = envelope
1820 .payload
1821 .paths
1822 .iter()
1823 .map(|s| RepoPath::from_str(s))
1824 .collect();
1825
1826 repository_handle
1827 .update(&mut cx, |repository_handle, cx| {
1828 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1829 })?
1830 .await??;
1831 Ok(proto::Ack {})
1832 }
1833
1834 async fn handle_open_commit_message_buffer(
1835 this: Entity<Self>,
1836 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1837 mut cx: AsyncApp,
1838 ) -> Result<proto::OpenBufferResponse> {
1839 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1840 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1841 let buffer = repository
1842 .update(&mut cx, |repository, cx| {
1843 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1844 })?
1845 .await?;
1846
1847 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1848 this.update(&mut cx, |this, cx| {
1849 this.buffer_store.update(cx, |buffer_store, cx| {
1850 buffer_store
1851 .create_buffer_for_peer(
1852 &buffer,
1853 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1854 cx,
1855 )
1856 .detach_and_log_err(cx);
1857 })
1858 })?;
1859
1860 Ok(proto::OpenBufferResponse {
1861 buffer_id: buffer_id.to_proto(),
1862 })
1863 }
1864
1865 async fn handle_askpass(
1866 this: Entity<Self>,
1867 envelope: TypedEnvelope<proto::AskPassRequest>,
1868 mut cx: AsyncApp,
1869 ) -> Result<proto::AskPassResponse> {
1870 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1871 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1872
1873 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1874 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1875 debug_panic!("no askpass found");
1876 return Err(anyhow::anyhow!("no askpass found"));
1877 };
1878
1879 let response = askpass.ask_password(envelope.payload.prompt).await?;
1880
1881 delegates
1882 .lock()
1883 .insert(envelope.payload.askpass_id, askpass);
1884
1885 Ok(proto::AskPassResponse { response })
1886 }
1887
1888 async fn handle_check_for_pushed_commits(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::CheckForPushedCommitsResponse> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let branches = repository_handle
1897 .update(&mut cx, |repository_handle, _| {
1898 repository_handle.check_for_pushed_commits()
1899 })?
1900 .await??;
1901 Ok(proto::CheckForPushedCommitsResponse {
1902 pushed_to: branches
1903 .into_iter()
1904 .map(|commit| commit.to_string())
1905 .collect(),
1906 })
1907 }
1908
1909 async fn handle_git_diff(
1910 this: Entity<Self>,
1911 envelope: TypedEnvelope<proto::GitDiff>,
1912 mut cx: AsyncApp,
1913 ) -> Result<proto::GitDiffResponse> {
1914 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1915 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1916 let diff_type = match envelope.payload.diff_type() {
1917 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
1918 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
1919 };
1920
1921 let mut diff = repository_handle
1922 .update(&mut cx, |repository_handle, cx| {
1923 repository_handle.diff(diff_type, cx)
1924 })?
1925 .await??;
1926 const ONE_MB: usize = 1_000_000;
1927 if diff.len() > ONE_MB {
1928 diff = diff.chars().take(ONE_MB).collect()
1929 }
1930
1931 Ok(proto::GitDiffResponse { diff })
1932 }
1933
1934 async fn handle_open_unstaged_diff(
1935 this: Entity<Self>,
1936 request: TypedEnvelope<proto::OpenUnstagedDiff>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::OpenUnstagedDiffResponse> {
1939 let buffer_id = BufferId::new(request.payload.buffer_id)?;
1940 let diff = this
1941 .update(&mut cx, |this, cx| {
1942 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
1943 Some(this.open_unstaged_diff(buffer, cx))
1944 })?
1945 .ok_or_else(|| anyhow!("no such buffer"))?
1946 .await?;
1947 this.update(&mut cx, |this, _| {
1948 let shared_diffs = this
1949 .shared_diffs
1950 .entry(request.original_sender_id.unwrap_or(request.sender_id))
1951 .or_default();
1952 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
1953 })?;
1954 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
1955 Ok(proto::OpenUnstagedDiffResponse { staged_text })
1956 }
1957
    /// Protocol handler that opens a buffer's uncommitted diff, records it as
    /// shared with the requesting peer, and encodes the base texts in the
    /// most compact mode the peer can reconstruct them from.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })?
            .ok_or_else(|| anyhow!("no such buffer"))?
            .await?;
        // Keep the diff alive for as long as the peer has it open.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        })?;
        diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary diff holds the index ("staged") base text.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text())
            });

            // Choose the response mode:
            // - IndexMatchesHead: index and HEAD share the same base snapshot,
            //   so only the committed text is sent.
            // - IndexAndHead: each side is sent explicitly, with `None` for a
            //   base text that doesn't exist.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text();
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        })
    }
2018
2019 async fn handle_update_diff_bases(
2020 this: Entity<Self>,
2021 request: TypedEnvelope<proto::UpdateDiffBases>,
2022 mut cx: AsyncApp,
2023 ) -> Result<()> {
2024 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2025 this.update(&mut cx, |this, cx| {
2026 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2027 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2028 let buffer = buffer.read(cx).text_snapshot();
2029 diff_state.update(cx, |diff_state, cx| {
2030 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2031 })
2032 }
2033 }
2034 })
2035 }
2036
2037 async fn handle_blame_buffer(
2038 this: Entity<Self>,
2039 envelope: TypedEnvelope<proto::BlameBuffer>,
2040 mut cx: AsyncApp,
2041 ) -> Result<proto::BlameBufferResponse> {
2042 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2043 let version = deserialize_version(&envelope.payload.version);
2044 let buffer = this.read_with(&cx, |this, cx| {
2045 this.buffer_store.read(cx).get_existing(buffer_id)
2046 })??;
2047 buffer
2048 .update(&mut cx, |buffer, _| {
2049 buffer.wait_for_version(version.clone())
2050 })?
2051 .await?;
2052 let blame = this
2053 .update(&mut cx, |this, cx| {
2054 this.blame_buffer(&buffer, Some(version), cx)
2055 })?
2056 .await?;
2057 Ok(serialize_blame_buffer_response(blame))
2058 }
2059
2060 async fn handle_get_permalink_to_line(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2063 mut cx: AsyncApp,
2064 ) -> Result<proto::GetPermalinkToLineResponse> {
2065 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2066 // let version = deserialize_version(&envelope.payload.version);
2067 let selection = {
2068 let proto_selection = envelope
2069 .payload
2070 .selection
2071 .context("no selection to get permalink for defined")?;
2072 proto_selection.start as u32..proto_selection.end as u32
2073 };
2074 let buffer = this.read_with(&cx, |this, cx| {
2075 this.buffer_store.read(cx).get_existing(buffer_id)
2076 })??;
2077 let permalink = this
2078 .update(&mut cx, |this, cx| {
2079 this.get_permalink_to_line(&buffer, selection, cx)
2080 })?
2081 .await?;
2082 Ok(proto::GetPermalinkToLineResponse {
2083 permalink: permalink.to_string(),
2084 })
2085 }
2086
2087 fn repository_for_request(
2088 this: &Entity<Self>,
2089 id: RepositoryId,
2090 cx: &mut AsyncApp,
2091 ) -> Result<Entity<Repository>> {
2092 this.update(cx, |this, _| {
2093 this.repositories
2094 .get(&id)
2095 .context("missing repository handle")
2096 .cloned()
2097 })?
2098 }
2099
2100 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2101 self.repositories
2102 .iter()
2103 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2104 .collect()
2105 }
2106}
2107
2108impl BufferDiffState {
2109 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2110 self.language = buffer.read(cx).language().cloned();
2111 self.language_changed = true;
2112 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2113 }
2114
2115 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2116 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2117 }
2118
2119 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2120 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2121 }
2122
2123 fn handle_base_texts_updated(
2124 &mut self,
2125 buffer: text::BufferSnapshot,
2126 message: proto::UpdateDiffBases,
2127 cx: &mut Context<Self>,
2128 ) {
2129 use proto::update_diff_bases::Mode;
2130
2131 let Some(mode) = Mode::from_i32(message.mode) else {
2132 return;
2133 };
2134
2135 let diff_bases_change = match mode {
2136 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2137 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2138 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2139 Mode::IndexAndHead => DiffBasesChange::SetEach {
2140 index: message.staged_text,
2141 head: message.committed_text,
2142 },
2143 };
2144
2145 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2146 }
2147
2148 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2149 if *self.recalculating_tx.borrow() {
2150 let mut rx = self.recalculating_tx.subscribe();
2151 return Some(async move {
2152 loop {
2153 let is_recalculating = rx.recv().await;
2154 if is_recalculating != Some(true) {
2155 break;
2156 }
2157 }
2158 });
2159 } else {
2160 None
2161 }
2162 }
2163
2164 fn diff_bases_changed(
2165 &mut self,
2166 buffer: text::BufferSnapshot,
2167 diff_bases_change: Option<DiffBasesChange>,
2168 cx: &mut Context<Self>,
2169 ) {
2170 match diff_bases_change {
2171 Some(DiffBasesChange::SetIndex(index)) => {
2172 self.index_text = index.map(|mut index| {
2173 text::LineEnding::normalize(&mut index);
2174 Arc::new(index)
2175 });
2176 self.index_changed = true;
2177 }
2178 Some(DiffBasesChange::SetHead(head)) => {
2179 self.head_text = head.map(|mut head| {
2180 text::LineEnding::normalize(&mut head);
2181 Arc::new(head)
2182 });
2183 self.head_changed = true;
2184 }
2185 Some(DiffBasesChange::SetBoth(text)) => {
2186 let text = text.map(|mut text| {
2187 text::LineEnding::normalize(&mut text);
2188 Arc::new(text)
2189 });
2190 self.head_text = text.clone();
2191 self.index_text = text;
2192 self.head_changed = true;
2193 self.index_changed = true;
2194 }
2195 Some(DiffBasesChange::SetEach { index, head }) => {
2196 self.index_text = index.map(|mut index| {
2197 text::LineEnding::normalize(&mut index);
2198 Arc::new(index)
2199 });
2200 self.index_changed = true;
2201 self.head_text = head.map(|mut head| {
2202 text::LineEnding::normalize(&mut head);
2203 Arc::new(head)
2204 });
2205 self.head_changed = true;
2206 }
2207 None => {}
2208 }
2209
2210 self.recalculate_diffs(buffer, cx)
2211 }
2212
2213 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2214 *self.recalculating_tx.borrow_mut() = true;
2215
2216 let language = self.language.clone();
2217 let language_registry = self.language_registry.clone();
2218 let unstaged_diff = self.unstaged_diff();
2219 let uncommitted_diff = self.uncommitted_diff();
2220 let head = self.head_text.clone();
2221 let index = self.index_text.clone();
2222 let index_changed = self.index_changed;
2223 let head_changed = self.head_changed;
2224 let language_changed = self.language_changed;
2225 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2226 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2227 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2228 (None, None) => true,
2229 _ => false,
2230 };
2231 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2232 log::debug!(
2233 "start recalculating diffs for buffer {}",
2234 buffer.remote_id()
2235 );
2236
2237 let mut new_unstaged_diff = None;
2238 if let Some(unstaged_diff) = &unstaged_diff {
2239 new_unstaged_diff = Some(
2240 BufferDiff::update_diff(
2241 unstaged_diff.clone(),
2242 buffer.clone(),
2243 index,
2244 index_changed,
2245 language_changed,
2246 language.clone(),
2247 language_registry.clone(),
2248 cx,
2249 )
2250 .await?,
2251 );
2252 }
2253
2254 let mut new_uncommitted_diff = None;
2255 if let Some(uncommitted_diff) = &uncommitted_diff {
2256 new_uncommitted_diff = if index_matches_head {
2257 new_unstaged_diff.clone()
2258 } else {
2259 Some(
2260 BufferDiff::update_diff(
2261 uncommitted_diff.clone(),
2262 buffer.clone(),
2263 head,
2264 head_changed,
2265 language_changed,
2266 language.clone(),
2267 language_registry.clone(),
2268 cx,
2269 )
2270 .await?,
2271 )
2272 }
2273 }
2274
2275 let cancel = this.update(cx, |this, _| {
2276 // This checks whether all pending stage/unstage operations
2277 // have quiesced (i.e. both the corresponding write and the
2278 // read of that write have completed). If not, then we cancel
2279 // this recalculation attempt to avoid invalidating pending
2280 // state too quickly; another recalculation will come along
2281 // later and clear the pending state once the state of the index has settled.
2282 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2283 *this.recalculating_tx.borrow_mut() = false;
2284 true
2285 } else {
2286 false
2287 }
2288 })?;
2289 if cancel {
2290 log::debug!(
2291 concat!(
2292 "aborting recalculating diffs for buffer {}",
2293 "due to subsequent hunk operations",
2294 ),
2295 buffer.remote_id()
2296 );
2297 return Ok(());
2298 }
2299
2300 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2301 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2302 {
2303 unstaged_diff.update(cx, |diff, cx| {
2304 if language_changed {
2305 diff.language_changed(cx);
2306 }
2307 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2308 })?
2309 } else {
2310 None
2311 };
2312
2313 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2314 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2315 {
2316 uncommitted_diff.update(cx, |diff, cx| {
2317 if language_changed {
2318 diff.language_changed(cx);
2319 }
2320 diff.set_snapshot_with_secondary(
2321 new_uncommitted_diff,
2322 &buffer,
2323 unstaged_changed_range,
2324 true,
2325 cx,
2326 );
2327 })?;
2328 }
2329
2330 log::debug!(
2331 "finished recalculating diffs for buffer {}",
2332 buffer.remote_id()
2333 );
2334
2335 if let Some(this) = this.upgrade() {
2336 this.update(cx, |this, _| {
2337 this.index_changed = false;
2338 this.head_changed = false;
2339 this.language_changed = false;
2340 *this.recalculating_tx.borrow_mut() = false;
2341 })?;
2342 }
2343
2344 Ok(())
2345 }));
2346 }
2347}
2348
impl Default for BufferDiffState {
    fn default() -> Self {
        Self {
            unstaged_diff: Default::default(),
            uncommitted_diff: Default::default(),
            recalculate_diff_task: Default::default(),
            language: Default::default(),
            language_registry: Default::default(),
            // Start in the "not recalculating" state; wait_for_recalculation
            // observes transitions through this watch channel.
            recalculating_tx: postage::watch::channel_with(false).0,
            hunk_staging_operation_count: 0,
            hunk_staging_operation_count_as_of_write: 0,
            head_text: Default::default(),
            index_text: Default::default(),
            head_changed: Default::default(),
            index_changed: Default::default(),
            language_changed: Default::default(),
        }
    }
}
2368
2369fn make_remote_delegate(
2370 this: Entity<GitStore>,
2371 project_id: u64,
2372 repository_id: RepositoryId,
2373 askpass_id: u64,
2374 cx: &mut AsyncApp,
2375) -> AskPassDelegate {
2376 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2377 this.update(cx, |this, cx| {
2378 let Some((client, _)) = this.downstream_client() else {
2379 return;
2380 };
2381 let response = client.request(proto::AskPassRequest {
2382 project_id,
2383 repository_id: repository_id.to_proto(),
2384 askpass_id,
2385 prompt,
2386 });
2387 cx.spawn(async move |_, _| {
2388 tx.send(response.await?.response).ok();
2389 anyhow::Ok(())
2390 })
2391 .detach_and_log_err(cx);
2392 })
2393 .log_err();
2394 })
2395}
2396
2397impl RepositoryId {
2398 pub fn to_proto(self) -> u64 {
2399 self.0
2400 }
2401
2402 pub fn from_proto(id: u64) -> Self {
2403 RepositoryId(id)
2404 }
2405}
2406
impl RepositorySnapshot {
    /// Creates an empty snapshot for a repository rooted at the given
    /// absolute work-directory path.
    fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
        Self {
            id,
            merge_message: None,
            statuses_by_path: Default::default(),
            work_directory_abs_path,
            branch: None,
            merge_conflicts: Default::default(),
            merge_head_shas: Default::default(),
            scan_id: 0,
        }
    }

    /// Builds the first `UpdateRepository` message sent downstream: every
    /// status entry is included and nothing is marked removed.
    fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            updated_statuses: self
                .statuses_by_path
                .iter()
                .map(|entry| entry.to_proto())
                .collect(),
            removed_statuses: Default::default(),
            current_merge_conflicts: self
                .merge_conflicts
                .iter()
                .map(|repo_path| repo_path.to_proto())
                .collect(),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_proto(),
            entry_ids: vec![self.id.to_proto()],
            scan_id: self.scan_id,
            is_last_update: true,
        }
    }

    /// Builds an incremental `UpdateRepository` message by diffing this
    /// snapshot's statuses against `old`'s.
    ///
    /// Both status lists are ordered by repo path, so a two-pointer merge
    /// walk classifies each entry as added/changed (`updated_statuses`) or
    /// removed (`removed_statuses`).
    fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
        let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
        let mut removed_statuses: Vec<String> = Vec::new();

        let mut new_statuses = self.statuses_by_path.iter().peekable();
        let mut old_statuses = old.statuses_by_path.iter().peekable();

        let mut current_new_entry = new_statuses.next();
        let mut current_old_entry = old_statuses.next();
        loop {
            match (current_new_entry, current_old_entry) {
                (Some(new_entry), Some(old_entry)) => {
                    match new_entry.repo_path.cmp(&old_entry.repo_path) {
                        Ordering::Less => {
                            // Present only in the new snapshot: added.
                            updated_statuses.push(new_entry.to_proto());
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Equal => {
                            // Present in both: include only if it changed.
                            if new_entry.status != old_entry.status {
                                updated_statuses.push(new_entry.to_proto());
                            }
                            current_old_entry = old_statuses.next();
                            current_new_entry = new_statuses.next();
                        }
                        Ordering::Greater => {
                            // Present only in the old snapshot: removed.
                            removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                            current_old_entry = old_statuses.next();
                        }
                    }
                }
                (None, Some(old_entry)) => {
                    removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
                    current_old_entry = old_statuses.next();
                }
                (Some(new_entry), None) => {
                    updated_statuses.push(new_entry.to_proto());
                    current_new_entry = new_statuses.next();
                }
                (None, None) => break,
            }
        }

        proto::UpdateRepository {
            branch_summary: self.branch.as_ref().map(branch_to_proto),
            updated_statuses,
            removed_statuses,
            current_merge_conflicts: self
                .merge_conflicts
                .iter()
                .map(|path| path.as_ref().to_proto())
                .collect(),
            project_id,
            id: self.id.to_proto(),
            abs_path: self.work_directory_abs_path.to_proto(),
            entry_ids: vec![],
            scan_id: self.scan_id,
            is_last_update: true,
        }
    }

    /// Iterates over all cached status entries, in repo-path order.
    pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
        self.statuses_by_path.iter().cloned()
    }

    /// Returns the aggregate git summary over all status entries.
    pub fn status_summary(&self) -> GitSummary {
        self.statuses_by_path.summary().item_summary
    }

    /// Looks up the cached status for a single repo-relative path.
    pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
        self.statuses_by_path
            .get(&PathKey(path.0.clone()), &())
            .cloned()
    }

    /// Converts an absolute path into a repo-relative path, or `None` if it
    /// does not lie inside this repository's work directory.
    pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
        abs_path
            .strip_prefix(&self.work_directory_abs_path)
            .map(RepoPath::from)
            .ok()
    }

    /// Reports whether the given path currently has a merge conflict.
    pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
        self.merge_conflicts.contains(repo_path)
    }

    /// This is the name that will be displayed in the repository selector for this repository.
    pub fn display_name(&self) -> SharedString {
        self.work_directory_abs_path
            .file_name()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string()
            .into()
    }
}
2539
2540impl Repository {
2541 fn local(
2542 id: RepositoryId,
2543 work_directory_abs_path: Arc<Path>,
2544 dot_git_abs_path: Arc<Path>,
2545 project_environment: WeakEntity<ProjectEnvironment>,
2546 fs: Arc<dyn Fs>,
2547 git_store: WeakEntity<GitStore>,
2548 cx: &mut Context<Self>,
2549 ) -> Self {
2550 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2551 Repository {
2552 this: cx.weak_entity(),
2553 git_store,
2554 snapshot,
2555 commit_message_buffer: None,
2556 askpass_delegates: Default::default(),
2557 paths_needing_status_update: Default::default(),
2558 latest_askpass_id: 0,
2559 job_sender: Repository::spawn_local_git_worker(
2560 work_directory_abs_path,
2561 dot_git_abs_path,
2562 project_environment,
2563 fs,
2564 cx,
2565 ),
2566 job_id: 0,
2567 active_jobs: Default::default(),
2568 }
2569 }
2570
2571 fn remote(
2572 id: RepositoryId,
2573 work_directory_abs_path: Arc<Path>,
2574 project_id: ProjectId,
2575 client: AnyProtoClient,
2576 git_store: WeakEntity<GitStore>,
2577 cx: &mut Context<Self>,
2578 ) -> Self {
2579 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2580 Self {
2581 this: cx.weak_entity(),
2582 snapshot,
2583 commit_message_buffer: None,
2584 git_store,
2585 paths_needing_status_update: Default::default(),
2586 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2587 askpass_delegates: Default::default(),
2588 latest_askpass_id: 0,
2589 active_jobs: Default::default(),
2590 job_id: 0,
2591 }
2592 }
2593
    /// Upgrades the weak handle to the owning `GitStore`, if it is still alive.
    pub fn git_store(&self) -> Option<Entity<GitStore>> {
        self.git_store.upgrade()
    }
2597
    /// Reloads the index and HEAD base texts for every open buffer belonging
    /// to this repository, then feeds any changes into the per-buffer diff
    /// state (mirroring them to the downstream client, if any).
    ///
    /// Keyed with `GitJobKey::ReloadBufferDiffBases` so queued reloads can be
    /// coalesced by the worker.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                // Only local repositories have a backend to read from.
                let RepositoryState::Local { backend, .. } = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Phase 1 (main thread): collect, for each open buffer in this
                // repository, which base texts (index and/or HEAD) are being
                // watched, along with their current values.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path =
                                    file.worktree.read(cx).absolutize(&file.path).ok()?;
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.0.display()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })??;

                // Phase 2 (background): read the fresh base texts from git and
                // compute the minimal change description for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Compare new texts to the currently-held ones and
                            // describe only what actually changed.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_ref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_ref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Phase 3 (main thread): apply each change to its diff state,
                // forwarding the new base texts downstream first.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
2758
    /// Enqueues an unkeyed job on this repository's worker; see
    /// `send_keyed_job` for the details of scheduling and status reporting.
    ///
    /// The returned receiver yields the job's result, or is dropped if the
    /// worker shuts down before running it.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
2771
    /// Enqueues a job on this repository's sequential worker.
    ///
    /// `key` lets the worker identify related jobs (e.g. for coalescing);
    /// `status` is a human-readable label recorded in `active_jobs` while the
    /// job runs so the UI can display it. The closure receives the current
    /// `RepositoryState` (local backend or remote client). The result is
    /// delivered through the returned oneshot receiver.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job (with its start time and status
                        // message) so observers can show progress.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Deregister regardless of the job's outcome.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; ignore errors.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
2821
2822 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2823 let Some(git_store) = self.git_store.upgrade() else {
2824 return;
2825 };
2826 let entity = cx.entity();
2827 git_store.update(cx, |git_store, cx| {
2828 let Some((&id, _)) = git_store
2829 .repositories
2830 .iter()
2831 .find(|(_, handle)| *handle == &entity)
2832 else {
2833 return;
2834 };
2835 git_store.active_repo_id = Some(id);
2836 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2837 });
2838 }
2839
    /// Iterates over the cached (last-scanned) git status entries for this
    /// repository.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
2843
2844 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2845 let git_store = self.git_store.upgrade()?;
2846 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2847 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2848 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2849 Some(ProjectPath {
2850 worktree_id: worktree.read(cx).id(),
2851 path: relative_path.into(),
2852 })
2853 }
2854
2855 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2856 let git_store = self.git_store.upgrade()?;
2857 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2858 let abs_path = worktree_store.absolutize(path, cx)?;
2859 self.snapshot.abs_path_to_repo_path(&abs_path)
2860 }
2861
2862 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2863 other
2864 .read(cx)
2865 .snapshot
2866 .work_directory_abs_path
2867 .starts_with(&self.snapshot.work_directory_abs_path)
2868 }
2869
    /// Returns the shared commit-message buffer for this repository, creating
    /// it on first use.
    ///
    /// For local repositories the buffer is created in-process; for remote
    /// repositories it is opened on the host and replicated here. In both
    /// cases the buffer is assigned the "Git Commit" language when a registry
    /// is provided.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Reuse the cached buffer if one was already created.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local { .. } => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })?
                    .await
                }
                RepositoryState::Remote { project_id, client } => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })?
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        })?;
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    })?;
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
2922
    /// Creates a fresh local buffer for composing a commit message, assigns
    /// it the "Git Commit" language when a registry is provided, and caches
    /// it on the repository.
    fn open_local_commit_buffer(
        language_registry: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        cx.spawn(async move |repository, cx| {
            let buffer = buffer_store
                .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
                .await?;

            if let Some(language_registry) = language_registry {
                let git_commit_language = language_registry.language_for_name("Git Commit").await?;
                buffer.update(cx, |buffer, cx| {
                    buffer.set_language(Some(git_commit_language), cx);
                })?;
            }

            repository.update(cx, |repository, _| {
                repository.commit_message_buffer = Some(buffer.clone());
            })?;
            Ok(buffer)
        })
    }
2946
2947 pub fn checkout_files(
2948 &mut self,
2949 commit: &str,
2950 paths: Vec<RepoPath>,
2951 _cx: &mut App,
2952 ) -> oneshot::Receiver<Result<()>> {
2953 let commit = commit.to_string();
2954 let id = self.id;
2955
2956 self.send_job(
2957 Some(format!("git checkout {}", commit).into()),
2958 move |git_repo, _| async move {
2959 match git_repo {
2960 RepositoryState::Local {
2961 backend,
2962 environment,
2963 ..
2964 } => {
2965 backend
2966 .checkout_files(commit, paths, environment.clone())
2967 .await
2968 }
2969 RepositoryState::Remote { project_id, client } => {
2970 client
2971 .request(proto::GitCheckoutFiles {
2972 project_id: project_id.0,
2973 repository_id: id.to_proto(),
2974 commit,
2975 paths: paths
2976 .into_iter()
2977 .map(|p| p.to_string_lossy().to_string())
2978 .collect(),
2979 })
2980 .await?;
2981
2982 Ok(())
2983 }
2984 }
2985 },
2986 )
2987 }
2988
2989 pub fn reset(
2990 &mut self,
2991 commit: String,
2992 reset_mode: ResetMode,
2993 _cx: &mut App,
2994 ) -> oneshot::Receiver<Result<()>> {
2995 let commit = commit.to_string();
2996 let id = self.id;
2997
2998 self.send_job(None, move |git_repo, _| async move {
2999 match git_repo {
3000 RepositoryState::Local {
3001 backend,
3002 environment,
3003 ..
3004 } => backend.reset(commit, reset_mode, environment).await,
3005 RepositoryState::Remote { project_id, client } => {
3006 client
3007 .request(proto::GitReset {
3008 project_id: project_id.0,
3009 repository_id: id.to_proto(),
3010 commit,
3011 mode: match reset_mode {
3012 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3013 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3014 },
3015 })
3016 .await?;
3017
3018 Ok(())
3019 }
3020 }
3021 })
3022 }
3023
3024 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3025 let id = self.id;
3026 self.send_job(None, move |git_repo, _cx| async move {
3027 match git_repo {
3028 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3029 RepositoryState::Remote { project_id, client } => {
3030 let resp = client
3031 .request(proto::GitShow {
3032 project_id: project_id.0,
3033 repository_id: id.to_proto(),
3034 commit,
3035 })
3036 .await?;
3037
3038 Ok(CommitDetails {
3039 sha: resp.sha.into(),
3040 message: resp.message.into(),
3041 commit_timestamp: resp.commit_timestamp,
3042 author_email: resp.author_email.into(),
3043 author_name: resp.author_name.into(),
3044 })
3045 }
3046 }
3047 })
3048 }
3049
    /// Loads the full diff (old/new text per file) introduced by a commit.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
                RepositoryState::Remote {
                    client, project_id, ..
                } => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| CommitFile {
                                path: Path::new(&file.path).into(),
                                old_text: file.old_text,
                                new_text: file.new_text,
                            })
                            .collect(),
                    })
                }
            }
        })
    }
3080
3081 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3082 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3083 }
3084
3085 pub fn stage_entries(
3086 &self,
3087 entries: Vec<RepoPath>,
3088 cx: &mut Context<Self>,
3089 ) -> Task<anyhow::Result<()>> {
3090 if entries.is_empty() {
3091 return Task::ready(Ok(()));
3092 }
3093 let id = self.id;
3094
3095 let mut save_futures = Vec::new();
3096 if let Some(buffer_store) = self.buffer_store(cx) {
3097 buffer_store.update(cx, |buffer_store, cx| {
3098 for path in &entries {
3099 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3100 continue;
3101 };
3102 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3103 if buffer
3104 .read(cx)
3105 .file()
3106 .map_or(false, |file| file.disk_state().exists())
3107 {
3108 save_futures.push(buffer_store.save_buffer(buffer, cx));
3109 }
3110 }
3111 }
3112 })
3113 }
3114
3115 cx.spawn(async move |this, cx| {
3116 for save_future in save_futures {
3117 save_future.await?;
3118 }
3119
3120 this.update(cx, |this, _| {
3121 this.send_job(None, move |git_repo, _cx| async move {
3122 match git_repo {
3123 RepositoryState::Local {
3124 backend,
3125 environment,
3126 ..
3127 } => backend.stage_paths(entries, environment.clone()).await,
3128 RepositoryState::Remote { project_id, client } => {
3129 client
3130 .request(proto::Stage {
3131 project_id: project_id.0,
3132 repository_id: id.to_proto(),
3133 paths: entries
3134 .into_iter()
3135 .map(|repo_path| repo_path.as_ref().to_proto())
3136 .collect(),
3137 })
3138 .await
3139 .context("sending stage request")?;
3140
3141 Ok(())
3142 }
3143 }
3144 })
3145 })?
3146 .await??;
3147
3148 Ok(())
3149 })
3150 }
3151
3152 pub fn unstage_entries(
3153 &self,
3154 entries: Vec<RepoPath>,
3155 cx: &mut Context<Self>,
3156 ) -> Task<anyhow::Result<()>> {
3157 if entries.is_empty() {
3158 return Task::ready(Ok(()));
3159 }
3160 let id = self.id;
3161
3162 let mut save_futures = Vec::new();
3163 if let Some(buffer_store) = self.buffer_store(cx) {
3164 buffer_store.update(cx, |buffer_store, cx| {
3165 for path in &entries {
3166 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3167 continue;
3168 };
3169 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3170 if buffer
3171 .read(cx)
3172 .file()
3173 .map_or(false, |file| file.disk_state().exists())
3174 {
3175 save_futures.push(buffer_store.save_buffer(buffer, cx));
3176 }
3177 }
3178 }
3179 })
3180 }
3181
3182 cx.spawn(async move |this, cx| {
3183 for save_future in save_futures {
3184 save_future.await?;
3185 }
3186
3187 this.update(cx, |this, _| {
3188 this.send_job(None, move |git_repo, _cx| async move {
3189 match git_repo {
3190 RepositoryState::Local {
3191 backend,
3192 environment,
3193 ..
3194 } => backend.unstage_paths(entries, environment).await,
3195 RepositoryState::Remote { project_id, client } => {
3196 client
3197 .request(proto::Unstage {
3198 project_id: project_id.0,
3199 repository_id: id.to_proto(),
3200 paths: entries
3201 .into_iter()
3202 .map(|repo_path| repo_path.as_ref().to_proto())
3203 .collect(),
3204 })
3205 .await
3206 .context("sending unstage request")?;
3207
3208 Ok(())
3209 }
3210 }
3211 })
3212 })?
3213 .await??;
3214
3215 Ok(())
3216 })
3217 }
3218
3219 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3220 let to_stage = self
3221 .cached_status()
3222 .filter(|entry| !entry.status.staging().is_fully_staged())
3223 .map(|entry| entry.repo_path.clone())
3224 .collect();
3225 self.stage_entries(to_stage, cx)
3226 }
3227
3228 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3229 let to_unstage = self
3230 .cached_status()
3231 .filter(|entry| entry.status.staging().has_staged())
3232 .map(|entry| entry.repo_path.clone())
3233 .collect();
3234 self.unstage_entries(to_unstage, cx)
3235 }
3236
    /// Creates a commit from the currently staged changes.
    ///
    /// `name_and_email` optionally overrides the author identity; when
    /// `None`, the repository's configured identity is used.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local {
                    backend,
                    environment,
                    ..
                } => backend.commit(message, name_and_email, environment).await,
                RepositoryState::Remote { project_id, client } => {
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                        })
                        .await
                        .context("sending commit request")?;

                    Ok(())
                }
            }
        })
    }
3270
    /// Runs `git fetch` for this repository.
    ///
    /// Local repositories fetch directly through the backend. Remote
    /// repositories proxy the request over RPC; `askpass` is registered under
    /// a fresh id beforehand so credential prompts from the remote side can be
    /// routed back to this delegate, and unregistered when the request ends.
    pub fn fetch(
        &mut self,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local {
                    backend,
                    environment,
                    ..
                } => backend.fetch(askpass, environment, cx).await,
                RepositoryState::Remote { project_id, client } => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    // Remove the delegate no matter how the request exits.
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                        })
                        .await
                        .context("sending fetch request")?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
3311
3312 pub fn push(
3313 &mut self,
3314 branch: SharedString,
3315 remote: SharedString,
3316 options: Option<PushOptions>,
3317 askpass: AskPassDelegate,
3318 cx: &mut Context<Self>,
3319 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3320 let askpass_delegates = self.askpass_delegates.clone();
3321 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3322 let id = self.id;
3323
3324 let args = options
3325 .map(|option| match option {
3326 PushOptions::SetUpstream => " --set-upstream",
3327 PushOptions::Force => " --force",
3328 })
3329 .unwrap_or("");
3330
3331 let updates_tx = self
3332 .git_store()
3333 .and_then(|git_store| match &git_store.read(cx).state {
3334 GitStoreState::Local { downstream, .. } => downstream
3335 .as_ref()
3336 .map(|downstream| downstream.updates_tx.clone()),
3337 _ => None,
3338 });
3339
3340 let this = cx.weak_entity();
3341 self.send_job(
3342 Some(format!("git push {} {} {}", args, branch, remote).into()),
3343 move |git_repo, mut cx| async move {
3344 match git_repo {
3345 RepositoryState::Local {
3346 backend,
3347 environment,
3348 ..
3349 } => {
3350 let result = backend
3351 .push(
3352 branch.to_string(),
3353 remote.to_string(),
3354 options,
3355 askpass,
3356 environment.clone(),
3357 cx.clone(),
3358 )
3359 .await;
3360 if result.is_ok() {
3361 let branches = backend.branches().await?;
3362 let branch = branches.into_iter().find(|branch| branch.is_head);
3363 log::info!("head branch after scan is {branch:?}");
3364 let snapshot = this.update(&mut cx, |this, cx| {
3365 this.snapshot.branch = branch;
3366 let snapshot = this.snapshot.clone();
3367 cx.emit(RepositoryEvent::Updated { full_scan: false });
3368 snapshot
3369 })?;
3370 if let Some(updates_tx) = updates_tx {
3371 updates_tx
3372 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3373 .ok();
3374 }
3375 }
3376 result
3377 }
3378 RepositoryState::Remote { project_id, client } => {
3379 askpass_delegates.lock().insert(askpass_id, askpass);
3380 let _defer = util::defer(|| {
3381 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3382 debug_assert!(askpass_delegate.is_some());
3383 });
3384 let response = client
3385 .request(proto::Push {
3386 project_id: project_id.0,
3387 repository_id: id.to_proto(),
3388 askpass_id,
3389 branch_name: branch.to_string(),
3390 remote_name: remote.to_string(),
3391 options: options.map(|options| match options {
3392 PushOptions::Force => proto::push::PushOptions::Force,
3393 PushOptions::SetUpstream => {
3394 proto::push::PushOptions::SetUpstream
3395 }
3396 }
3397 as i32),
3398 })
3399 .await
3400 .context("sending push request")?;
3401
3402 Ok(RemoteCommandOutput {
3403 stdout: response.stdout,
3404 stderr: response.stderr,
3405 })
3406 }
3407 }
3408 },
3409 )
3410 }
3411
    /// Runs `git pull <remote> <branch>` for this repository.
    ///
    /// Local repositories pull directly through the backend; remote
    /// repositories proxy over RPC with `askpass` registered for the duration
    /// of the request so credential prompts can round-trip to this delegate.
    pub fn pull(
        &mut self,
        branch: SharedString,
        remote: SharedString,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(
            Some(format!("git pull {} {}", remote, branch).into()),
            move |git_repo, cx| async move {
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .pull(
                                branch.to_string(),
                                remote.to_string(),
                                askpass,
                                environment.clone(),
                                cx,
                            )
                            .await
                    }
                    RepositoryState::Remote { project_id, client } => {
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        // Remove the delegate no matter how the request exits.
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Pull {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_name: remote.to_string(),
                            })
                            .await
                            .context("sending pull request")?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
3468
    /// Queues a job that overwrites the index (staged) content for `path`.
    ///
    /// Jobs are keyed by `GitJobKey::WriteIndex(path)`, so the worker loop
    /// drops a queued write for a path when a newer write for the same path is
    /// already waiting. `content: None` presumably clears the staged text for
    /// the path — confirm against the backend's `set_index_text`.
    ///
    /// When `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state after the write lands, so later diff recomputation
    /// can tell which staging operations this index write already reflects.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(path.clone())),
            None,
            move |git_repo, mut cx| async move {
                log::debug!("start updating index text for buffer {}", path.display());
                match git_repo {
                    RepositoryState::Local {
                        backend,
                        environment,
                        ..
                    } => {
                        backend
                            .set_index_text(path.clone(), content, environment.clone())
                            .await?;
                    }
                    RepositoryState::Remote { project_id, client } => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.as_ref().to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!("finish updating index text for buffer {}", path.display());

                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    // Map the repo path back to an open buffer (if any) and
                    // stamp its diff state with the operation count as of this
                    // write. Missing buffer/diff state short-circuits via `?`
                    // inside the Option-returning closure — that's not an error.
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store.update(&mut cx, |git_store, cx| {
                        let buffer_id = git_store
                            .buffer_store
                            .read(cx)
                            .get_by_path(&project_path?, cx)?
                            .read(cx)
                            .remote_id();
                        let diff_state = git_store.diffs.get(&buffer_id)?;
                        diff_state.update(cx, |diff_state, _| {
                            diff_state.hunk_staging_operation_count_as_of_write =
                                hunk_staging_operation_count;
                        });
                        Some(())
                    })?;
                }
                Ok(())
            },
        )
    }
3531
3532 pub fn get_remotes(
3533 &mut self,
3534 branch_name: Option<String>,
3535 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3536 let id = self.id;
3537 self.send_job(None, move |repo, _cx| async move {
3538 match repo {
3539 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3540 RepositoryState::Remote { project_id, client } => {
3541 let response = client
3542 .request(proto::GetRemotes {
3543 project_id: project_id.0,
3544 repository_id: id.to_proto(),
3545 branch_name,
3546 })
3547 .await?;
3548
3549 let remotes = response
3550 .remotes
3551 .into_iter()
3552 .map(|remotes| git::repository::Remote {
3553 name: remotes.name.into(),
3554 })
3555 .collect();
3556
3557 Ok(remotes)
3558 }
3559 }
3560 })
3561 }
3562
    /// Lists all branches in the repository.
    ///
    /// For local repositories the (potentially slow) branch enumeration is
    /// moved onto the background executor rather than blocking the git worker.
    pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
        let id = self.id;
        self.send_job(None, move |repo, cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => {
                    let backend = backend.clone();
                    cx.background_spawn(async move { backend.branches().await })
                        .await
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GitGetBranches {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                        })
                        .await?;

                    let branches = response
                        .branches
                        .into_iter()
                        .map(|branch| proto_to_branch(&branch))
                        .collect();

                    Ok(branches)
                }
            }
        })
    }
3591
    /// Produces the repository's diff text, either HEAD→index (staged changes)
    /// or HEAD→worktree (all changes), depending on `diff_type`.
    pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
        let id = self.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::GitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            // Translate the local enum into its proto twin.
                            diff_type: match diff_type {
                                DiffType::HeadToIndex => {
                                    proto::git_diff::DiffType::HeadToIndex.into()
                                }
                                DiffType::HeadToWorktree => {
                                    proto::git_diff::DiffType::HeadToWorktree.into()
                                }
                            },
                        })
                        .await?;

                    Ok(response.diff)
                }
            }
        })
    }
3618
3619 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3620 let id = self.id;
3621 self.send_job(
3622 Some(format!("git switch -c {branch_name}").into()),
3623 move |repo, _cx| async move {
3624 match repo {
3625 RepositoryState::Local { backend, .. } => {
3626 backend.create_branch(branch_name).await
3627 }
3628 RepositoryState::Remote { project_id, client } => {
3629 client
3630 .request(proto::GitCreateBranch {
3631 project_id: project_id.0,
3632 repository_id: id.to_proto(),
3633 branch_name,
3634 })
3635 .await?;
3636
3637 Ok(())
3638 }
3639 }
3640 },
3641 )
3642 }
3643
3644 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3645 let id = self.id;
3646 self.send_job(
3647 Some(format!("git switch {branch_name}").into()),
3648 move |repo, _cx| async move {
3649 match repo {
3650 RepositoryState::Local { backend, .. } => {
3651 backend.change_branch(branch_name).await
3652 }
3653 RepositoryState::Remote { project_id, client } => {
3654 client
3655 .request(proto::GitChangeBranch {
3656 project_id: project_id.0,
3657 repository_id: id.to_proto(),
3658 branch_name,
3659 })
3660 .await?;
3661
3662 Ok(())
3663 }
3664 }
3665 },
3666 )
3667 }
3668
3669 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3670 let id = self.id;
3671 self.send_job(None, move |repo, _cx| async move {
3672 match repo {
3673 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3674 RepositoryState::Remote { project_id, client } => {
3675 let response = client
3676 .request(proto::CheckForPushedCommits {
3677 project_id: project_id.0,
3678 repository_id: id.to_proto(),
3679 })
3680 .await?;
3681
3682 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3683
3684 Ok(branches)
3685 }
3686 }
3687 })
3688 }
3689
3690 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3691 self.send_job(None, |repo, _cx| async move {
3692 match repo {
3693 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3694 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3695 }
3696 })
3697 }
3698
3699 pub fn restore_checkpoint(
3700 &mut self,
3701 checkpoint: GitRepositoryCheckpoint,
3702 ) -> oneshot::Receiver<Result<()>> {
3703 self.send_job(None, move |repo, _cx| async move {
3704 match repo {
3705 RepositoryState::Local { backend, .. } => {
3706 backend.restore_checkpoint(checkpoint).await
3707 }
3708 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3709 }
3710 })
3711 }
3712
    /// Applies a `proto::UpdateRepository` received from the host to this
    /// replica's snapshot: branch summary, merge-conflict set, and the
    /// incremental status-entry edits (removals then upserts).
    ///
    /// Large updates arrive split across several messages; the scan id is
    /// only advanced once the final chunk (`is_last_update`) lands.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        let conflicted_paths = TreeSet::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .map(|path| RepoPath(Path::new(&path).into())),
        );
        self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
        self.snapshot.merge_conflicts = conflicted_paths;

        // Removals first, then inserts; entries that fail to deserialize are
        // logged and skipped rather than aborting the whole update.
        let edits = update
            .removed_statuses
            .into_iter()
            .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        self.snapshot.statuses_by_path.edit(edits, &());
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        cx.emit(RepositoryEvent::Updated { full_scan: true });
        Ok(())
    }
3747
3748 pub fn compare_checkpoints(
3749 &mut self,
3750 left: GitRepositoryCheckpoint,
3751 right: GitRepositoryCheckpoint,
3752 ) -> oneshot::Receiver<Result<bool>> {
3753 self.send_job(None, move |repo, _cx| async move {
3754 match repo {
3755 RepositoryState::Local { backend, .. } => {
3756 backend.compare_checkpoints(left, right).await
3757 }
3758 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3759 }
3760 })
3761 }
3762
3763 pub fn delete_checkpoint(
3764 &mut self,
3765 checkpoint: GitRepositoryCheckpoint,
3766 ) -> oneshot::Receiver<Result<()>> {
3767 self.send_job(None, move |repo, _cx| async move {
3768 match repo {
3769 RepositoryState::Local { backend, .. } => {
3770 backend.delete_checkpoint(checkpoint).await
3771 }
3772 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3773 }
3774 })
3775 }
3776
3777 pub fn diff_checkpoints(
3778 &mut self,
3779 base_checkpoint: GitRepositoryCheckpoint,
3780 target_checkpoint: GitRepositoryCheckpoint,
3781 ) -> oneshot::Receiver<Result<String>> {
3782 self.send_job(None, move |repo, _cx| async move {
3783 match repo {
3784 RepositoryState::Local { backend, .. } => {
3785 backend
3786 .diff_checkpoints(base_checkpoint, target_checkpoint)
3787 .await
3788 }
3789 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3790 }
3791 })
3792 }
3793
    /// Schedules a full re-scan of this repository's git state.
    ///
    /// Keyed with `GitJobKey::ReloadGitState`, so the worker loop collapses a
    /// queued scan when a newer one is already waiting. The job recomputes the
    /// snapshot via `compute_snapshot`, stores it, emits the resulting events,
    /// and forwards the new snapshot downstream when sharing.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };
                let (snapshot, events) = this
                    .update(&mut cx, |this, _| {
                        compute_snapshot(
                            this.id,
                            this.work_directory_abs_path.clone(),
                            this.snapshot.clone(),
                            backend.clone(),
                        )
                    })?
                    .await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    for event in events {
                        cx.emit(event);
                    }
                })?;
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
3835
    /// Spawns the serial job worker for a local repository and returns the
    /// sender used to enqueue jobs onto it.
    ///
    /// Setup: resolve the working directory's shell environment (falling back
    /// to an empty map with a logged error), open the git backend on the
    /// background executor, and register extra git hosting providers if a
    /// registry is available. Then runs the job loop: jobs execute one at a
    /// time in FIFO order, except that a keyed job is skipped when a newer job
    /// with the same key is already queued behind it (coalescing redundant
    /// scans/writes). The loop ends when the sender side is dropped.
    fn spawn_local_git_worker(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let environment = project_environment
                .upgrade()
                .ok_or_else(|| anyhow!("missing project environment"))?
                .update(cx, |project_environment, cx| {
                    project_environment.get_directory_environment(work_directory_abs_path.clone(), cx)
                })?
                .await
                .unwrap_or_else(|| {
                    log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                    HashMap::default()
                });
            let backend = cx
                .background_spawn(async move {
                    fs.open_repo(&dot_git_abs_path)
                        .ok_or_else(|| anyhow!("failed to build repository"))
                })
                .await?;

            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    backend.clone(),
                );
            }

            let state = RepositoryState::Local {
                backend,
                environment: Arc::new(environment),
            };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued without blocking, so the
                // key-based coalescing below can see all pending jobs.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        // A newer job with the same key supersedes this one.
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // Channel closed: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
3905
    /// Spawns the serial job worker for a remote (RPC-backed) repository and
    /// returns the sender used to enqueue jobs onto it.
    ///
    /// Same loop as `spawn_local_git_worker` — FIFO with key-based coalescing
    /// (a keyed job is skipped when a newer job with the same key is queued) —
    /// but jobs run against a `RepositoryState::Remote` proto client instead
    /// of a local backend.
    fn spawn_remote_git_worker(
        project_id: ProjectId,
        client: AnyProtoClient,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote { project_id, client };
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything already queued without blocking.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    if let Some(current_key) = &job.key {
                        // A newer job with the same key supersedes this one.
                        if jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                        {
                            continue;
                        }
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    jobs.push_back(job);
                } else {
                    // Channel closed: shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
3943
    /// Loads the staged (index) text for `repo_path`.
    ///
    /// Returns `None` when the path has no index entry (as reported by the
    /// backend, or by the host's `OpenUnstagedDiff` response for remote
    /// repositories).
    fn load_staged_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<Option<String>>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    anyhow::Ok(backend.load_index_text(repo_path).await)
                }
                RepositoryState::Remote { project_id, client } => {
                    let response = client
                        .request(proto::OpenUnstagedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    Ok(response.staged_text)
                }
            }
        });
        // Flatten the job receiver into a Task (outer `?` is the cancelled
        // channel, inner is the job's own result).
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
3968
    /// Loads both diff bases (HEAD text and index text) for `repo_path`.
    ///
    /// When the two are identical, collapses them into
    /// `DiffBasesChange::SetBoth` so callers can share one base; otherwise
    /// returns both via `SetEach`. Remote repositories get the same
    /// distinction from the host via the response's `Mode`.
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local { backend, .. } => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote { project_id, client } => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode =
                        Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        // Flatten the job receiver into a Task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
4015
    /// Records that `paths` may have changed on disk and schedules an
    /// incremental status refresh for them.
    ///
    /// Pending paths accumulate in `paths_needing_status_update`; the job is
    /// keyed with `GitJobKey::RefreshStatuses`, so repeated calls coalesce
    /// into a single refresh that picks up the whole accumulated set. The job
    /// diffs fresh `git status` output for those paths against the previous
    /// snapshot and applies only the entries that actually changed.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of the accumulated path set so concurrent
                // callers start a fresh batch.
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local { backend, .. } = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                let statuses = backend.status(&paths).await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());

                        // Upsert entries whose status differs from the
                        // previous snapshot; drop unchanged ones. Paths git
                        // reported are removed from `changed_paths` so that…
                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
                                if &cursor.item().unwrap().status == status {
                                    continue;
                                }
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        // …the leftovers are paths git no longer reports:
                        // remove their stale entries from the status tree.
                        let mut cursor = prev_statuses.cursor::<PathProgress>(&());
                        for path in changed_paths.iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
                                changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if !changed_path_statuses.is_empty() {
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, &());
                        this.snapshot.scan_id += 1;
                        if let Some(updates_tx) = updates_tx {
                            updates_tx
                                .unbounded_send(DownstreamUpdate::UpdateRepository(
                                    this.snapshot.clone(),
                                ))
                                .ok();
                        }
                    }
                    cx.emit(RepositoryEvent::Updated { full_scan: false });
                })
            },
        );
    }
4090
    /// Returns info (label and start time) about a currently running git
    /// command, if any. When several jobs are active, an arbitrary one is
    /// returned.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }
4095}
4096
/// Builds a web permalink for a file inside a cargo registry source checkout
/// (e.g. `~/.cargo/registry/src/...`).
///
/// Walks up from `path` looking for the `.cargo_vcs_info.json` that cargo
/// embeds in published crates, reads the exact commit sha and the crate's
/// location within its repository from it, resolves the repository URL from
/// the adjacent `Cargo.toml`, and asks the matching hosting provider to build
/// a permalink for `selection` at that sha.
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    // Minimal projections of the two metadata files; unrelated fields are ignored.
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    // `skip(1)` skips `path` itself; search only true ancestors for the
    // crate root containing .cargo_vcs_info.json.
    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
    // `unwrap` is safe: `dir` is an ancestor of `path` by construction.
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams {
            sha: &cargo_vcs_info.git.sha1,
            path: &path.to_string_lossy(),
            selection: Some(selection),
        },
    );
    Ok(permalink)
}
4145
4146fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
4147 let Some(blame) = blame else {
4148 return proto::BlameBufferResponse {
4149 blame_response: None,
4150 };
4151 };
4152
4153 let entries = blame
4154 .entries
4155 .into_iter()
4156 .map(|entry| proto::BlameEntry {
4157 sha: entry.sha.as_bytes().into(),
4158 start_line: entry.range.start,
4159 end_line: entry.range.end,
4160 original_line_number: entry.original_line_number,
4161 author: entry.author.clone(),
4162 author_mail: entry.author_mail.clone(),
4163 author_time: entry.author_time,
4164 author_tz: entry.author_tz.clone(),
4165 committer: entry.committer_name.clone(),
4166 committer_mail: entry.committer_email.clone(),
4167 committer_time: entry.committer_time,
4168 committer_tz: entry.committer_tz.clone(),
4169 summary: entry.summary.clone(),
4170 previous: entry.previous.clone(),
4171 filename: entry.filename.clone(),
4172 })
4173 .collect::<Vec<_>>();
4174
4175 let messages = blame
4176 .messages
4177 .into_iter()
4178 .map(|(oid, message)| proto::CommitMessage {
4179 oid: oid.as_bytes().into(),
4180 message,
4181 })
4182 .collect::<Vec<_>>();
4183
4184 proto::BlameBufferResponse {
4185 blame_response: Some(proto::blame_buffer_response::BlameResponse {
4186 entries,
4187 messages,
4188 remote_url: blame.remote_url,
4189 }),
4190 }
4191}
4192
4193fn deserialize_blame_buffer_response(
4194 response: proto::BlameBufferResponse,
4195) -> Option<git::blame::Blame> {
4196 let response = response.blame_response?;
4197 let entries = response
4198 .entries
4199 .into_iter()
4200 .filter_map(|entry| {
4201 Some(git::blame::BlameEntry {
4202 sha: git::Oid::from_bytes(&entry.sha).ok()?,
4203 range: entry.start_line..entry.end_line,
4204 original_line_number: entry.original_line_number,
4205 committer_name: entry.committer,
4206 committer_time: entry.committer_time,
4207 committer_tz: entry.committer_tz,
4208 committer_email: entry.committer_mail,
4209 author: entry.author,
4210 author_mail: entry.author_mail,
4211 author_time: entry.author_time,
4212 author_tz: entry.author_tz,
4213 summary: entry.summary,
4214 previous: entry.previous,
4215 filename: entry.filename,
4216 })
4217 })
4218 .collect::<Vec<_>>();
4219
4220 let messages = response
4221 .messages
4222 .into_iter()
4223 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
4224 .collect::<HashMap<_, _>>();
4225
4226 Some(Blame {
4227 entries,
4228 messages,
4229 remote_url: response.remote_url,
4230 })
4231}
4232
4233fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
4234 proto::Branch {
4235 is_head: branch.is_head,
4236 name: branch.name.to_string(),
4237 unix_timestamp: branch
4238 .most_recent_commit
4239 .as_ref()
4240 .map(|commit| commit.commit_timestamp as u64),
4241 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
4242 ref_name: upstream.ref_name.to_string(),
4243 tracking: upstream
4244 .tracking
4245 .status()
4246 .map(|upstream| proto::UpstreamTracking {
4247 ahead: upstream.ahead as u64,
4248 behind: upstream.behind as u64,
4249 }),
4250 }),
4251 most_recent_commit: branch
4252 .most_recent_commit
4253 .as_ref()
4254 .map(|commit| proto::CommitSummary {
4255 sha: commit.sha.to_string(),
4256 subject: commit.subject.to_string(),
4257 commit_timestamp: commit.commit_timestamp,
4258 }),
4259 }
4260}
4261
4262fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
4263 git::repository::Branch {
4264 is_head: proto.is_head,
4265 name: proto.name.clone().into(),
4266 upstream: proto
4267 .upstream
4268 .as_ref()
4269 .map(|upstream| git::repository::Upstream {
4270 ref_name: upstream.ref_name.to_string().into(),
4271 tracking: upstream
4272 .tracking
4273 .as_ref()
4274 .map(|tracking| {
4275 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
4276 ahead: tracking.ahead as u32,
4277 behind: tracking.behind as u32,
4278 })
4279 })
4280 .unwrap_or(git::repository::UpstreamTracking::Gone),
4281 }),
4282 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
4283 git::repository::CommitSummary {
4284 sha: commit.sha.to_string().into(),
4285 subject: commit.subject.to_string().into(),
4286 commit_timestamp: commit.commit_timestamp,
4287 has_parent: true,
4288 }
4289 }),
4290 }
4291}
4292
/// Recomputes a full repository snapshot from the backend and diffs it
/// against `prev_snapshot` to decide which events to emit.
///
/// Emits `Updated { full_scan: true }` when the merge heads, head branch, or
/// status tree changed, and `MergeHeadsChanged` when the merge heads changed.
/// The scan id is always advanced by one relative to the previous snapshot.
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // Status for the whole working directory (WORK_DIRECTORY_REPO_PATH).
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    // Keep only the first line of the merge message.
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().nth(0)?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated { full_scan: true });
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}
4361
4362fn status_from_proto(
4363 simple_status: i32,
4364 status: Option<proto::GitFileStatus>,
4365) -> anyhow::Result<FileStatus> {
4366 use proto::git_file_status::Variant;
4367
4368 let Some(variant) = status.and_then(|status| status.variant) else {
4369 let code = proto::GitStatus::from_i32(simple_status)
4370 .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
4371 let result = match code {
4372 proto::GitStatus::Added => TrackedStatus {
4373 worktree_status: StatusCode::Added,
4374 index_status: StatusCode::Unmodified,
4375 }
4376 .into(),
4377 proto::GitStatus::Modified => TrackedStatus {
4378 worktree_status: StatusCode::Modified,
4379 index_status: StatusCode::Unmodified,
4380 }
4381 .into(),
4382 proto::GitStatus::Conflict => UnmergedStatus {
4383 first_head: UnmergedStatusCode::Updated,
4384 second_head: UnmergedStatusCode::Updated,
4385 }
4386 .into(),
4387 proto::GitStatus::Deleted => TrackedStatus {
4388 worktree_status: StatusCode::Deleted,
4389 index_status: StatusCode::Unmodified,
4390 }
4391 .into(),
4392 _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
4393 };
4394 return Ok(result);
4395 };
4396
4397 let result = match variant {
4398 Variant::Untracked(_) => FileStatus::Untracked,
4399 Variant::Ignored(_) => FileStatus::Ignored,
4400 Variant::Unmerged(unmerged) => {
4401 let [first_head, second_head] =
4402 [unmerged.first_head, unmerged.second_head].map(|head| {
4403 let code = proto::GitStatus::from_i32(head)
4404 .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
4405 let result = match code {
4406 proto::GitStatus::Added => UnmergedStatusCode::Added,
4407 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
4408 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
4409 _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
4410 };
4411 Ok(result)
4412 });
4413 let [first_head, second_head] = [first_head?, second_head?];
4414 UnmergedStatus {
4415 first_head,
4416 second_head,
4417 }
4418 .into()
4419 }
4420 Variant::Tracked(tracked) => {
4421 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
4422 .map(|status| {
4423 let code = proto::GitStatus::from_i32(status)
4424 .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
4425 let result = match code {
4426 proto::GitStatus::Modified => StatusCode::Modified,
4427 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
4428 proto::GitStatus::Added => StatusCode::Added,
4429 proto::GitStatus::Deleted => StatusCode::Deleted,
4430 proto::GitStatus::Renamed => StatusCode::Renamed,
4431 proto::GitStatus::Copied => StatusCode::Copied,
4432 proto::GitStatus::Unmodified => StatusCode::Unmodified,
4433 _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
4434 };
4435 Ok(result)
4436 });
4437 let [index_status, worktree_status] = [index_status?, worktree_status?];
4438 TrackedStatus {
4439 index_status,
4440 worktree_status,
4441 }
4442 .into()
4443 }
4444 };
4445 Ok(result)
4446}
4447
4448fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
4449 use proto::git_file_status::{Tracked, Unmerged, Variant};
4450
4451 let variant = match status {
4452 FileStatus::Untracked => Variant::Untracked(Default::default()),
4453 FileStatus::Ignored => Variant::Ignored(Default::default()),
4454 FileStatus::Unmerged(UnmergedStatus {
4455 first_head,
4456 second_head,
4457 }) => Variant::Unmerged(Unmerged {
4458 first_head: unmerged_status_to_proto(first_head),
4459 second_head: unmerged_status_to_proto(second_head),
4460 }),
4461 FileStatus::Tracked(TrackedStatus {
4462 index_status,
4463 worktree_status,
4464 }) => Variant::Tracked(Tracked {
4465 index_status: tracked_status_to_proto(index_status),
4466 worktree_status: tracked_status_to_proto(worktree_status),
4467 }),
4468 };
4469 proto::GitFileStatus {
4470 variant: Some(variant),
4471 }
4472}
4473
4474fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
4475 match code {
4476 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
4477 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
4478 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
4479 }
4480}
4481
4482fn tracked_status_to_proto(code: StatusCode) -> i32 {
4483 match code {
4484 StatusCode::Added => proto::GitStatus::Added as _,
4485 StatusCode::Deleted => proto::GitStatus::Deleted as _,
4486 StatusCode::Modified => proto::GitStatus::Modified as _,
4487 StatusCode::Renamed => proto::GitStatus::Renamed as _,
4488 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
4489 StatusCode::Copied => proto::GitStatus::Copied as _,
4490 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
4491 }
4492}