1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
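/// Tracks the git repositories discovered in a project's worktrees, along with the
/// per-buffer diffs and conflict sets derived from them, and mirrors that state to and
/// from remote collaborators.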
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
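/// The diffs for a buffer that have been shared with a remote peer.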
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
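/// Git-related state for a single buffer: weak handles to its unstaged and uncommitted
/// diffs and its conflict set, plus the tasks and bookkeeping used to keep them in sync
/// with the repository's head and index texts.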
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
127 /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
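/// Describes which diff base texts changed for a buffer: the index text, the HEAD text,
/// each of them independently, or both to the same value.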
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
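/// The kind of diff tracked for a buffer: `Unstaged` diffs against the index text, while
/// `Uncommitted` diffs against the committed (HEAD) text.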
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
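/// Whether this store operates on local repositories directly or forwards requests to an
/// upstream project, along with any downstream client the project is shared with.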
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
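/// A repository update queued for forwarding to the downstream client.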
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
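/// A checkpoint of every repository in the store, keyed by working directory path.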
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
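/// A point-in-time view of a repository's state: path statuses, pending operations,
/// branch and head commit, merge and stash details, and remote URLs.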
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub pending_ops_by_path: SumTree<PendingOps>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
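/// Metadata for an in-flight git job: when it started and a human-readable description.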
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
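/// A git repository tracked by the [`GitStore`], pairing its latest [`RepositorySnapshot`]
/// with the machinery for running git jobs against it.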
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
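/// How git operations are executed for a repository: directly against a local backend and
/// its environment, or via RPC requests to a remote project.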
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
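/// Events emitted by a [`Repository`] when parts of its snapshot change.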
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged {
316 // TODO could report which statuses changed here
317 full_scan: bool,
318 },
319 MergeHeadsChanged,
320 BranchChanged,
321 StashEntriesChanged,
322 PendingOpsChanged {
323 pending_ops: SumTree<pending_op::PendingOps>,
324 },
325}
326
327#[derive(Clone, Debug)]
328pub struct JobsUpdated;
329
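/// Events emitted by the [`GitStore`] as repositories and their derived state change.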
330#[derive(Debug)]
331pub enum GitStoreEvent {
332 ActiveRepositoryChanged(Option<RepositoryId>),
333 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
334 RepositoryAdded,
335 RepositoryRemoved(RepositoryId),
336 IndexWriteError(anyhow::Error),
337 JobsUpdated,
338 ConflictsUpdated,
339}
340
341impl EventEmitter<RepositoryEvent> for Repository {}
342impl EventEmitter<JobsUpdated> for Repository {}
343impl EventEmitter<GitStoreEvent> for GitStore {}
344
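/// A unit of git work to run against a repository's `RepositoryState`, optionally tagged
/// with a `GitJobKey`.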
345pub struct GitJob {
346 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
347 key: Option<GitJobKey>,
348}
349
350#[derive(PartialEq, Eq)]
351enum GitJobKey {
352 WriteIndex(Vec<RepoPath>),
353 ReloadBufferDiffBases,
354 RefreshStatuses,
355 ReloadGitState,
356}
357
358impl GitStore {
359 pub fn local(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 environment: Entity<ProjectEnvironment>,
363 fs: Arc<dyn Fs>,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Local {
370 next_repository_id: Arc::new(AtomicU64::new(1)),
371 downstream: None,
372 project_environment: environment,
373 fs,
374 },
375 cx,
376 )
377 }
378
379 pub fn remote(
380 worktree_store: &Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 upstream_client: AnyProtoClient,
383 project_id: u64,
384 cx: &mut Context<Self>,
385 ) -> Self {
386 Self::new(
387 worktree_store.clone(),
388 buffer_store,
389 GitStoreState::Remote {
390 upstream_client,
391 upstream_project_id: project_id,
392 downstream: None,
393 },
394 cx,
395 )
396 }
397
398 fn new(
399 worktree_store: Entity<WorktreeStore>,
400 buffer_store: Entity<BufferStore>,
401 state: GitStoreState,
402 cx: &mut Context<Self>,
403 ) -> Self {
404 let _subscriptions = vec![
405 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
406 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
407 ];
408
409 GitStore {
410 state,
411 buffer_store,
412 worktree_store,
413 repositories: HashMap::default(),
414 worktree_ids: HashMap::default(),
415 active_repo_id: None,
416 _subscriptions,
417 loading_diffs: HashMap::default(),
418 shared_diffs: HashMap::default(),
419 diffs: HashMap::default(),
420 }
421 }
422
423 pub fn init(client: &AnyProtoClient) {
424 client.add_entity_request_handler(Self::handle_get_remotes);
425 client.add_entity_request_handler(Self::handle_get_branches);
426 client.add_entity_request_handler(Self::handle_get_default_branch);
427 client.add_entity_request_handler(Self::handle_change_branch);
428 client.add_entity_request_handler(Self::handle_create_branch);
429 client.add_entity_request_handler(Self::handle_rename_branch);
430 client.add_entity_request_handler(Self::handle_git_init);
431 client.add_entity_request_handler(Self::handle_push);
432 client.add_entity_request_handler(Self::handle_pull);
433 client.add_entity_request_handler(Self::handle_fetch);
434 client.add_entity_request_handler(Self::handle_stage);
435 client.add_entity_request_handler(Self::handle_unstage);
436 client.add_entity_request_handler(Self::handle_stash);
437 client.add_entity_request_handler(Self::handle_stash_pop);
438 client.add_entity_request_handler(Self::handle_stash_apply);
439 client.add_entity_request_handler(Self::handle_stash_drop);
440 client.add_entity_request_handler(Self::handle_commit);
441 client.add_entity_request_handler(Self::handle_reset);
442 client.add_entity_request_handler(Self::handle_show);
443 client.add_entity_request_handler(Self::handle_load_commit_diff);
444 client.add_entity_request_handler(Self::handle_checkout_files);
445 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
446 client.add_entity_request_handler(Self::handle_set_index_text);
447 client.add_entity_request_handler(Self::handle_askpass);
448 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
449 client.add_entity_request_handler(Self::handle_git_diff);
450 client.add_entity_request_handler(Self::handle_tree_diff);
451 client.add_entity_request_handler(Self::handle_get_blob_content);
452 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
453 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
454 client.add_entity_message_handler(Self::handle_update_diff_bases);
455 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
456 client.add_entity_request_handler(Self::handle_blame_buffer);
457 client.add_entity_message_handler(Self::handle_update_repository);
458 client.add_entity_message_handler(Self::handle_remove_repository);
459 client.add_entity_request_handler(Self::handle_git_clone);
460 client.add_entity_request_handler(Self::handle_get_worktrees);
461 client.add_entity_request_handler(Self::handle_create_worktree);
462 }
463
464 pub fn is_local(&self) -> bool {
465 matches!(self.state, GitStoreState::Local { .. })
466 }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
468 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
469 let id = repo.read(cx).id;
470 if self.active_repo_id != Some(id) {
471 self.active_repo_id = Some(id);
472 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
473 }
474 }
475 }
476
477 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
478 match &mut self.state {
479 GitStoreState::Remote {
480 downstream: downstream_client,
481 ..
482 } => {
483 for repo in self.repositories.values() {
484 let update = repo.read(cx).snapshot.initial_update(project_id);
485 for update in split_repository_update(update) {
486 client.send(update).log_err();
487 }
488 }
489 *downstream_client = Some((client, ProjectId(project_id)));
490 }
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 let mut snapshots = HashMap::default();
496 let (updates_tx, mut updates_rx) = mpsc::unbounded();
497 for repo in self.repositories.values() {
498 updates_tx
499 .unbounded_send(DownstreamUpdate::UpdateRepository(
500 repo.read(cx).snapshot.clone(),
501 ))
502 .ok();
503 }
504 *downstream_client = Some(LocalDownstreamState {
505 client: client.clone(),
506 project_id: ProjectId(project_id),
507 updates_tx,
508 _task: cx.spawn(async move |this, cx| {
509 cx.background_spawn(async move {
510 while let Some(update) = updates_rx.next().await {
511 match update {
512 DownstreamUpdate::UpdateRepository(snapshot) => {
513 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
514 {
515 let update =
516 snapshot.build_update(old_snapshot, project_id);
517 *old_snapshot = snapshot;
518 for update in split_repository_update(update) {
519 client.send(update)?;
520 }
521 } else {
522 let update = snapshot.initial_update(project_id);
523 for update in split_repository_update(update) {
524 client.send(update)?;
525 }
526 snapshots.insert(snapshot.id, snapshot);
527 }
528 }
529 DownstreamUpdate::RemoveRepository(id) => {
530 client.send(proto::RemoveRepository {
531 project_id,
532 id: id.to_proto(),
533 })?;
534 }
535 }
536 }
537 anyhow::Ok(())
538 })
539 .await
540 .ok();
541 this.update(cx, |this, _| {
542 if let GitStoreState::Local {
543 downstream: downstream_client,
544 ..
545 } = &mut this.state
546 {
547 downstream_client.take();
548 } else {
549 unreachable!("unshared called on remote store");
550 }
551 })
552 }),
553 });
554 }
555 }
556 }
557
558 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
559 match &mut self.state {
560 GitStoreState::Local {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 GitStoreState::Remote {
567 downstream: downstream_client,
568 ..
569 } => {
570 downstream_client.take();
571 }
572 }
573 self.shared_diffs.clear();
574 }
575
576 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
577 self.shared_diffs.remove(peer_id);
578 }
579
580 pub fn active_repository(&self) -> Option<Entity<Repository>> {
581 self.active_repo_id
582 .as_ref()
583 .map(|id| self.repositories[id].clone())
584 }
585
586 pub fn open_unstaged_diff(
587 &mut self,
588 buffer: Entity<Buffer>,
589 cx: &mut Context<Self>,
590 ) -> Task<Result<Entity<BufferDiff>>> {
591 let buffer_id = buffer.read(cx).remote_id();
592 if let Some(diff_state) = self.diffs.get(&buffer_id)
593 && let Some(unstaged_diff) = diff_state
594 .read(cx)
595 .unstaged_diff
596 .as_ref()
597 .and_then(|weak| weak.upgrade())
598 {
599 if let Some(task) =
600 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
601 {
602 return cx.background_executor().spawn(async move {
603 task.await;
604 Ok(unstaged_diff)
605 });
606 }
607 return Task::ready(Ok(unstaged_diff));
608 }
609
610 let Some((repo, repo_path)) =
611 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
612 else {
613 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
614 };
615
616 let task = self
617 .loading_diffs
618 .entry((buffer_id, DiffKind::Unstaged))
619 .or_insert_with(|| {
620 let staged_text = repo.update(cx, |repo, cx| {
621 repo.load_staged_text(buffer_id, repo_path, cx)
622 });
623 cx.spawn(async move |this, cx| {
624 Self::open_diff_internal(
625 this,
626 DiffKind::Unstaged,
627 staged_text.await.map(DiffBasesChange::SetIndex),
628 buffer,
629 cx,
630 )
631 .await
632 .map_err(Arc::new)
633 })
634 .shared()
635 })
636 .clone();
637
638 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
639 }
640
641 pub fn open_diff_since(
642 &mut self,
643 oid: Option<git::Oid>,
644 buffer: Entity<Buffer>,
645 repo: Entity<Repository>,
646 languages: Arc<LanguageRegistry>,
647 cx: &mut Context<Self>,
648 ) -> Task<Result<Entity<BufferDiff>>> {
649 cx.spawn(async move |this, cx| {
650 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
651 let content = match oid {
652 None => None,
653 Some(oid) => Some(
654 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
655 .await?,
656 ),
657 };
658 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
659
660 buffer_diff
661 .update(cx, |buffer_diff, cx| {
662 buffer_diff.set_base_text(
663 content.map(Arc::new),
664 buffer_snapshot.language().cloned(),
665 Some(languages.clone()),
666 buffer_snapshot.text,
667 cx,
668 )
669 })?
670 .await?;
671 let unstaged_diff = this
672 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
673 .await?;
674 buffer_diff.update(cx, |buffer_diff, _| {
675 buffer_diff.set_secondary_diff(unstaged_diff);
676 })?;
677
678 this.update(cx, |_, cx| {
679 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
680 .detach();
681 })?;
682
683 Ok(buffer_diff)
684 })
685 }
686
687 pub fn open_uncommitted_diff(
688 &mut self,
689 buffer: Entity<Buffer>,
690 cx: &mut Context<Self>,
691 ) -> Task<Result<Entity<BufferDiff>>> {
692 let buffer_id = buffer.read(cx).remote_id();
693
694 if let Some(diff_state) = self.diffs.get(&buffer_id)
695 && let Some(uncommitted_diff) = diff_state
696 .read(cx)
697 .uncommitted_diff
698 .as_ref()
699 .and_then(|weak| weak.upgrade())
700 {
701 if let Some(task) =
702 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
703 {
704 return cx.background_executor().spawn(async move {
705 task.await;
706 Ok(uncommitted_diff)
707 });
708 }
709 return Task::ready(Ok(uncommitted_diff));
710 }
711
712 let Some((repo, repo_path)) =
713 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
714 else {
715 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
716 };
717
718 let task = self
719 .loading_diffs
720 .entry((buffer_id, DiffKind::Uncommitted))
721 .or_insert_with(|| {
722 let changes = repo.update(cx, |repo, cx| {
723 repo.load_committed_text(buffer_id, repo_path, cx)
724 });
725
726 // todo(lw): hot foreground spawn
727 cx.spawn(async move |this, cx| {
728 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
729 .await
730 .map_err(Arc::new)
731 })
732 .shared()
733 })
734 .clone();
735
736 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
737 }
738
739 async fn open_diff_internal(
740 this: WeakEntity<Self>,
741 kind: DiffKind,
742 texts: Result<DiffBasesChange>,
743 buffer_entity: Entity<Buffer>,
744 cx: &mut AsyncApp,
745 ) -> Result<Entity<BufferDiff>> {
746 let diff_bases_change = match texts {
747 Err(e) => {
748 this.update(cx, |this, cx| {
749 let buffer = buffer_entity.read(cx);
750 let buffer_id = buffer.remote_id();
751 this.loading_diffs.remove(&(buffer_id, kind));
752 })?;
753 return Err(e);
754 }
755 Ok(change) => change,
756 };
757
758 this.update(cx, |this, cx| {
759 let buffer = buffer_entity.read(cx);
760 let buffer_id = buffer.remote_id();
761 let language = buffer.language().cloned();
762 let language_registry = buffer.language_registry();
763 let text_snapshot = buffer.text_snapshot();
764 this.loading_diffs.remove(&(buffer_id, kind));
765
766 let git_store = cx.weak_entity();
767 let diff_state = this
768 .diffs
769 .entry(buffer_id)
770 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
771
772 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
773
774 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
775 diff_state.update(cx, |diff_state, cx| {
776 diff_state.language = language;
777 diff_state.language_registry = language_registry;
778
779 match kind {
780 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
781 DiffKind::Uncommitted => {
782 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
783 diff
784 } else {
785 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
786 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
787 unstaged_diff
788 };
789
790 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
791 diff_state.uncommitted_diff = Some(diff.downgrade())
792 }
793 }
794
795 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
796 let rx = diff_state.wait_for_recalculation();
797
798 anyhow::Ok(async move {
799 if let Some(rx) = rx {
800 rx.await;
801 }
802 Ok(diff)
803 })
804 })
805 })??
806 .await
807 }
808
809 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
810 let diff_state = self.diffs.get(&buffer_id)?;
811 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
812 }
813
814 pub fn get_uncommitted_diff(
815 &self,
816 buffer_id: BufferId,
817 cx: &App,
818 ) -> Option<Entity<BufferDiff>> {
819 let diff_state = self.diffs.get(&buffer_id)?;
820 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
821 }
822
823 pub fn open_conflict_set(
824 &mut self,
825 buffer: Entity<Buffer>,
826 cx: &mut Context<Self>,
827 ) -> Entity<ConflictSet> {
828 log::debug!("open conflict set");
829 let buffer_id = buffer.read(cx).remote_id();
830
831 if let Some(git_state) = self.diffs.get(&buffer_id)
832 && let Some(conflict_set) = git_state
833 .read(cx)
834 .conflict_set
835 .as_ref()
836 .and_then(|weak| weak.upgrade())
837 {
839 let buffer_snapshot = buffer.read(cx).text_snapshot();
840
841 git_state.update(cx, |state, cx| {
842 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
843 });
844
845 return conflict_set;
846 }
847
848 let is_unmerged = self
849 .repository_and_path_for_buffer_id(buffer_id, cx)
850 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
851 let git_store = cx.weak_entity();
852 let buffer_git_state = self
853 .diffs
854 .entry(buffer_id)
855 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
856 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
857
858 self._subscriptions
859 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
860 cx.emit(GitStoreEvent::ConflictsUpdated);
861 }));
862
863 buffer_git_state.update(cx, |state, cx| {
864 state.conflict_set = Some(conflict_set.downgrade());
865 let buffer_snapshot = buffer.read(cx).text_snapshot();
866 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
867 });
868
869 conflict_set
870 }
871
872 pub fn project_path_git_status(
873 &self,
874 project_path: &ProjectPath,
875 cx: &App,
876 ) -> Option<FileStatus> {
877 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
878 Some(repo.read(cx).status_for_path(&repo_path)?.status)
879 }
880
881 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
882 let mut work_directory_abs_paths = Vec::new();
883 let mut checkpoints = Vec::new();
884 for repository in self.repositories.values() {
885 repository.update(cx, |repository, _| {
886 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
887 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
888 });
889 }
890
891 cx.background_executor().spawn(async move {
892 let checkpoints = future::try_join_all(checkpoints).await?;
893 Ok(GitStoreCheckpoint {
894 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
895 .into_iter()
896 .zip(checkpoints)
897 .collect(),
898 })
899 })
900 }
901
902 pub fn restore_checkpoint(
903 &self,
904 checkpoint: GitStoreCheckpoint,
905 cx: &mut App,
906 ) -> Task<Result<()>> {
907 let repositories_by_work_dir_abs_path = self
908 .repositories
909 .values()
910 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
911 .collect::<HashMap<_, _>>();
912
913 let mut tasks = Vec::new();
914 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
915 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
916 let restore = repository.update(cx, |repository, _| {
917 repository.restore_checkpoint(checkpoint)
918 });
919 tasks.push(async move { restore.await? });
920 }
921 }
922 cx.background_spawn(async move {
923 future::try_join_all(tasks).await?;
924 Ok(())
925 })
926 }
927
928 /// Compares two checkpoints, returning true if they are equal.
929 pub fn compare_checkpoints(
930 &self,
931 left: GitStoreCheckpoint,
932 mut right: GitStoreCheckpoint,
933 cx: &mut App,
934 ) -> Task<Result<bool>> {
935 let repositories_by_work_dir_abs_path = self
936 .repositories
937 .values()
938 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
939 .collect::<HashMap<_, _>>();
940
941 let mut tasks = Vec::new();
942 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
943 if let Some(right_checkpoint) = right
944 .checkpoints_by_work_dir_abs_path
945 .remove(&work_dir_abs_path)
946 {
947 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
948 {
949 let compare = repository.update(cx, |repository, _| {
950 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
951 });
952
953 tasks.push(async move { compare.await? });
954 }
955 } else {
956 return Task::ready(Ok(false));
957 }
958 }
959 cx.background_spawn(async move {
960 Ok(future::try_join_all(tasks)
961 .await?
962 .into_iter()
963 .all(|result| result))
964 })
965 }
966
967 /// Blames a buffer.
968 pub fn blame_buffer(
969 &self,
970 buffer: &Entity<Buffer>,
971 version: Option<clock::Global>,
972 cx: &mut App,
973 ) -> Task<Result<Option<Blame>>> {
974 let buffer = buffer.read(cx);
975 let Some((repo, repo_path)) =
976 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
977 else {
978 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
979 };
980 let content = match &version {
981 Some(version) => buffer.rope_for_version(version),
982 None => buffer.as_rope().clone(),
983 };
984 let version = version.unwrap_or(buffer.version());
985 let buffer_id = buffer.remote_id();
986
987 let rx = repo.update(cx, |repo, _| {
988 repo.send_job(None, move |state, _| async move {
989 match state {
990 RepositoryState::Local { backend, .. } => backend
991 .blame(repo_path.clone(), content)
992 .await
993 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
994 .map(Some),
995 RepositoryState::Remote { project_id, client } => {
996 let response = client
997 .request(proto::BlameBuffer {
998 project_id: project_id.to_proto(),
999 buffer_id: buffer_id.into(),
1000 version: serialize_version(&version),
1001 })
1002 .await?;
1003 Ok(deserialize_blame_buffer_response(response))
1004 }
1005 }
1006 })
1007 });
1008
1009 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1010 }
1011
1012 pub fn get_permalink_to_line(
1013 &self,
1014 buffer: &Entity<Buffer>,
1015 selection: Range<u32>,
1016 cx: &mut App,
1017 ) -> Task<Result<url::Url>> {
1018 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1019 return Task::ready(Err(anyhow!("buffer has no file")));
1020 };
1021
1022 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1023 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1024 cx,
1025 ) else {
1026 // If we're not in a Git repo, check whether this is a Rust source
1027 // file in the Cargo registry (presumably opened with go-to-definition
1028 // from a normal Rust file). If so, we can put together a permalink
1029 // using crate metadata.
1030 if buffer
1031 .read(cx)
1032 .language()
1033 .is_none_or(|lang| lang.name() != "Rust".into())
1034 {
1035 return Task::ready(Err(anyhow!("no permalink available")));
1036 }
1037 let file_path = file.worktree.read(cx).absolutize(&file.path);
1038 return cx.spawn(async move |cx| {
1039 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1040 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1041 .context("no permalink available")
1042 });
1043 };
1044
1045 let buffer_id = buffer.read(cx).remote_id();
1046 let branch = repo.read(cx).branch.clone();
1047 let remote = branch
1048 .as_ref()
1049 .and_then(|b| b.upstream.as_ref())
1050 .and_then(|b| b.remote_name())
1051 .unwrap_or("origin")
1052 .to_string();
1053
1054 let rx = repo.update(cx, |repo, _| {
1055 repo.send_job(None, move |state, cx| async move {
1056 match state {
1057 RepositoryState::Local { backend, .. } => {
1058 let origin_url = backend
1059 .remote_url(&remote)
1060 .with_context(|| format!("remote \"{remote}\" not found"))?;
1061
1062 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1063
1064 let provider_registry =
1065 cx.update(GitHostingProviderRegistry::default_global)?;
1066
1067 let (provider, remote) =
1068 parse_git_remote_url(provider_registry, &origin_url)
1069 .context("parsing Git remote URL")?;
1070
1071 Ok(provider.build_permalink(
1072 remote,
1073 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1074 ))
1075 }
1076 RepositoryState::Remote { project_id, client } => {
1077 let response = client
1078 .request(proto::GetPermalinkToLine {
1079 project_id: project_id.to_proto(),
1080 buffer_id: buffer_id.into(),
1081 selection: Some(proto::Range {
1082 start: selection.start as u64,
1083 end: selection.end as u64,
1084 }),
1085 })
1086 .await?;
1087
1088 url::Url::parse(&response.permalink).context("failed to parse permalink")
1089 }
1090 }
1091 })
1092 });
1093 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1094 }
1095
1096 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1097 match &self.state {
1098 GitStoreState::Local {
1099 downstream: downstream_client,
1100 ..
1101 } => downstream_client
1102 .as_ref()
1103 .map(|state| (state.client.clone(), state.project_id)),
1104 GitStoreState::Remote {
1105 downstream: downstream_client,
1106 ..
1107 } => downstream_client.clone(),
1108 }
1109 }
1110
1111 fn upstream_client(&self) -> Option<AnyProtoClient> {
1112 match &self.state {
1113 GitStoreState::Local { .. } => None,
1114 GitStoreState::Remote {
1115 upstream_client, ..
1116 } => Some(upstream_client.clone()),
1117 }
1118 }
1119
1120 fn on_worktree_store_event(
1121 &mut self,
1122 worktree_store: Entity<WorktreeStore>,
1123 event: &WorktreeStoreEvent,
1124 cx: &mut Context<Self>,
1125 ) {
1126 let GitStoreState::Local {
1127 project_environment,
1128 downstream,
1129 next_repository_id,
1130 fs,
1131 } = &self.state
1132 else {
1133 return;
1134 };
1135
1136 match event {
1137 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1138 if let Some(worktree) = self
1139 .worktree_store
1140 .read(cx)
1141 .worktree_for_id(*worktree_id, cx)
1142 {
1143 let paths_by_git_repo =
1144 self.process_updated_entries(&worktree, updated_entries, cx);
1145 let downstream = downstream
1146 .as_ref()
1147 .map(|downstream| downstream.updates_tx.clone());
1148 cx.spawn(async move |_, cx| {
1149 let paths_by_git_repo = paths_by_git_repo.await;
1150 for (repo, paths) in paths_by_git_repo {
1151 repo.update(cx, |repo, cx| {
1152 repo.paths_changed(paths, downstream.clone(), cx);
1153 })
1154 .ok();
1155 }
1156 })
1157 .detach();
1158 }
1159 }
1160 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1161 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1162 else {
1163 return;
1164 };
1165 if !worktree.read(cx).is_visible() {
1166 log::debug!(
1167 "not adding repositories for local worktree {:?} because it's not visible",
1168 worktree.read(cx).abs_path()
1169 );
1170 return;
1171 }
1172 self.update_repositories_from_worktree(
1173 *worktree_id,
1174 project_environment.clone(),
1175 next_repository_id.clone(),
1176 downstream
1177 .as_ref()
1178 .map(|downstream| downstream.updates_tx.clone()),
1179 changed_repos.clone(),
1180 fs.clone(),
1181 cx,
1182 );
1183 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1184 }
1185 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1186 let repos_without_worktree: Vec<RepositoryId> = self
1187 .worktree_ids
1188 .iter_mut()
1189 .filter_map(|(repo_id, worktree_ids)| {
1190 worktree_ids.remove(worktree_id);
1191 if worktree_ids.is_empty() {
1192 Some(*repo_id)
1193 } else {
1194 None
1195 }
1196 })
1197 .collect();
1198 let is_active_repo_removed = repos_without_worktree
1199 .iter()
1200 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1201
1202 for repo_id in repos_without_worktree {
1203 self.repositories.remove(&repo_id);
1204 self.worktree_ids.remove(&repo_id);
1205 if let Some(updates_tx) =
1206 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1207 {
1208 updates_tx
1209 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1210 .ok();
1211 }
1212 }
1213
1214 if is_active_repo_removed {
1215 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1216 self.active_repo_id = Some(repo_id);
1217 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1218 } else {
1219 self.active_repo_id = None;
1220 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1221 }
1222 }
1223 }
1224 _ => {}
1225 }
1226 }

    fn on_repository_event(
1228 &mut self,
1229 repo: Entity<Repository>,
1230 event: &RepositoryEvent,
1231 cx: &mut Context<Self>,
1232 ) {
1233 let id = repo.read(cx).id;
1234 let repo_snapshot = repo.read(cx).snapshot.clone();
1235 for (buffer_id, diff) in self.diffs.iter() {
1236 if let Some((buffer_repo, repo_path)) =
1237 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1238 && buffer_repo == repo
1239 {
1240 diff.update(cx, |diff, cx| {
1241 if let Some(conflict_set) = &diff.conflict_set {
1242 let conflict_status_changed =
1243 conflict_set.update(cx, |conflict_set, cx| {
1244 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1245 conflict_set.set_has_conflict(has_conflict, cx)
1246 })?;
1247 if conflict_status_changed {
1248 let buffer_store = self.buffer_store.read(cx);
1249 if let Some(buffer) = buffer_store.get(*buffer_id) {
1250 let _ = diff
1251 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1252 }
1253 }
1254 }
1255 anyhow::Ok(())
1256 })
1257 .ok();
1258 }
1259 }
1260 cx.emit(GitStoreEvent::RepositoryUpdated(
1261 id,
1262 event.clone(),
1263 self.active_repo_id == Some(id),
1264 ))
1265 }
1266
1267 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1268 cx.emit(GitStoreEvent::JobsUpdated)
1269 }
1270
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
1272 fn update_repositories_from_worktree(
1273 &mut self,
1274 worktree_id: WorktreeId,
1275 project_environment: Entity<ProjectEnvironment>,
1276 next_repository_id: Arc<AtomicU64>,
1277 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1278 updated_git_repositories: UpdatedGitRepositoriesSet,
1279 fs: Arc<dyn Fs>,
1280 cx: &mut Context<Self>,
1281 ) {
1282 let mut removed_ids = Vec::new();
1283 for update in updated_git_repositories.iter() {
1284 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1285 let existing_work_directory_abs_path =
1286 repo.read(cx).work_directory_abs_path.clone();
1287 Some(&existing_work_directory_abs_path)
1288 == update.old_work_directory_abs_path.as_ref()
1289 || Some(&existing_work_directory_abs_path)
1290 == update.new_work_directory_abs_path.as_ref()
1291 }) {
1292 let repo_id = *id;
1293 if let Some(new_work_directory_abs_path) =
1294 update.new_work_directory_abs_path.clone()
1295 {
1296 self.worktree_ids
1297 .entry(repo_id)
1298 .or_insert_with(HashSet::new)
1299 .insert(worktree_id);
1300 existing.update(cx, |existing, cx| {
1301 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1302 existing.schedule_scan(updates_tx.clone(), cx);
1303 });
1304 } else {
1305 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1306 worktree_ids.remove(&worktree_id);
1307 if worktree_ids.is_empty() {
1308 removed_ids.push(repo_id);
1309 }
1310 }
1311 }
1312 } else if let UpdatedGitRepository {
1313 new_work_directory_abs_path: Some(work_directory_abs_path),
1314 dot_git_abs_path: Some(dot_git_abs_path),
1315 repository_dir_abs_path: Some(repository_dir_abs_path),
1316 common_dir_abs_path: Some(common_dir_abs_path),
1317 ..
1318 } = update
1319 {
1320 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1321 let git_store = cx.weak_entity();
1322 let repo = cx.new(|cx| {
1323 let mut repo = Repository::local(
1324 id,
1325 work_directory_abs_path.clone(),
1326 dot_git_abs_path.clone(),
1327 repository_dir_abs_path.clone(),
1328 common_dir_abs_path.clone(),
1329 project_environment.downgrade(),
1330 fs.clone(),
1331 git_store,
1332 cx,
1333 );
1334 if let Some(updates_tx) = updates_tx.as_ref() {
1335 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1336 updates_tx
1337 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1338 .ok();
1339 }
1340 repo.schedule_scan(updates_tx.clone(), cx);
1341 repo
1342 });
1343 self._subscriptions
1344 .push(cx.subscribe(&repo, Self::on_repository_event));
1345 self._subscriptions
1346 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1347 self.repositories.insert(id, repo);
1348 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1349 cx.emit(GitStoreEvent::RepositoryAdded);
1350 self.active_repo_id.get_or_insert_with(|| {
1351 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1352 id
1353 });
1354 }
1355 }
1356
1357 for id in removed_ids {
1358 if self.active_repo_id == Some(id) {
1359 self.active_repo_id = None;
1360 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1361 }
1362 self.repositories.remove(&id);
1363 if let Some(updates_tx) = updates_tx.as_ref() {
1364 updates_tx
1365 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1366 .ok();
1367 }
1368 }
1369 }
1370
1371 fn on_buffer_store_event(
1372 &mut self,
1373 _: Entity<BufferStore>,
1374 event: &BufferStoreEvent,
1375 cx: &mut Context<Self>,
1376 ) {
1377 match event {
1378 BufferStoreEvent::BufferAdded(buffer) => {
1379 cx.subscribe(buffer, |this, buffer, event, cx| {
1380 if let BufferEvent::LanguageChanged = event {
1381 let buffer_id = buffer.read(cx).remote_id();
1382 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1383 diff_state.update(cx, |diff_state, cx| {
1384 diff_state.buffer_language_changed(buffer, cx);
1385 });
1386 }
1387 }
1388 })
1389 .detach();
1390 }
1391 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1392 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1393 diffs.remove(buffer_id);
1394 }
1395 }
1396 BufferStoreEvent::BufferDropped(buffer_id) => {
1397 self.diffs.remove(buffer_id);
1398 for diffs in self.shared_diffs.values_mut() {
1399 diffs.remove(buffer_id);
1400 }
1401 }
1402 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1403 // Whenever a buffer's file path changes, it's possible that the
1404 // new path is actually a path that is being tracked by a git
1405 // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1407 let buffer_id = buffer.read(cx).remote_id();
1408 let diff_state = self.diffs.get(&buffer_id);
1409 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1410
1411 if let Some(diff_state) = diff_state
1412 && let Some((repo, repo_path)) = repo
1413 {
1414 let buffer = buffer.clone();
1415 let diff_state = diff_state.clone();
1416
1417 cx.spawn(async move |_git_store, cx| {
1418 async {
1419 let diff_bases_change = repo
1420 .update(cx, |repo, cx| {
1421 repo.load_committed_text(buffer_id, repo_path, cx)
1422 })?
1423 .await?;
1424
1425 diff_state.update(cx, |diff_state, cx| {
1426 let buffer_snapshot = buffer.read(cx).text_snapshot();
1427 diff_state.diff_bases_changed(
1428 buffer_snapshot,
1429 Some(diff_bases_change),
1430 cx,
1431 );
1432 })
1433 }
1434 .await
1435 .log_err();
1436 })
1437 .detach();
1438 }
1439 }
1440 _ => {}
1441 }
1442 }
1443
1444 pub fn recalculate_buffer_diffs(
1445 &mut self,
1446 buffers: Vec<Entity<Buffer>>,
1447 cx: &mut Context<Self>,
1448 ) -> impl Future<Output = ()> + use<> {
1449 let mut futures = Vec::new();
1450 for buffer in buffers {
1451 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1452 let buffer = buffer.read(cx).text_snapshot();
1453 diff_state.update(cx, |diff_state, cx| {
1454 diff_state.recalculate_diffs(buffer.clone(), cx);
1455 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1456 });
1457 futures.push(diff_state.update(cx, |diff_state, cx| {
1458 diff_state
1459 .reparse_conflict_markers(buffer, cx)
1460 .map(|_| {})
1461 .boxed()
1462 }));
1463 }
1464 }
1465 async move {
1466 futures::future::join_all(futures).await;
1467 }
1468 }
1469
1470 fn on_buffer_diff_event(
1471 &mut self,
1472 diff: Entity<buffer_diff::BufferDiff>,
1473 event: &BufferDiffEvent,
1474 cx: &mut Context<Self>,
1475 ) {
1476 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1477 let buffer_id = diff.read(cx).buffer_id;
1478 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1479 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1480 diff_state.hunk_staging_operation_count += 1;
1481 diff_state.hunk_staging_operation_count
1482 });
1483 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1484 let recv = repo.update(cx, |repo, cx| {
1485 log::debug!("hunks changed for {}", path.as_unix_str());
1486 repo.spawn_set_index_text_job(
1487 path,
1488 new_index_text.as_ref().map(|rope| rope.to_string()),
1489 Some(hunk_staging_operation_count),
1490 cx,
1491 )
1492 });
1493 let diff = diff.downgrade();
1494 cx.spawn(async move |this, cx| {
1495 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1496 diff.update(cx, |diff, cx| {
1497 diff.clear_pending_hunks(cx);
1498 })
1499 .ok();
1500 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1501 .ok();
1502 }
1503 })
1504 .detach();
1505 }
1506 }
1507 }
1508 }
1509
1510 fn local_worktree_git_repos_changed(
1511 &mut self,
1512 worktree: Entity<Worktree>,
1513 changed_repos: &UpdatedGitRepositoriesSet,
1514 cx: &mut Context<Self>,
1515 ) {
1516 log::debug!("local worktree repos changed");
1517 debug_assert!(worktree.read(cx).is_local());
1518
1519 for repository in self.repositories.values() {
1520 repository.update(cx, |repository, cx| {
1521 let repo_abs_path = &repository.work_directory_abs_path;
1522 if changed_repos.iter().any(|update| {
1523 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1524 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1525 }) {
1526 repository.reload_buffer_diff_bases(cx);
1527 }
1528 });
1529 }
1530 }
1531
1532 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1533 &self.repositories
1534 }
1535
1536 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1537 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1538 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1539 Some(status.status)
1540 }
1541
1542 pub fn repository_and_path_for_buffer_id(
1543 &self,
1544 buffer_id: BufferId,
1545 cx: &App,
1546 ) -> Option<(Entity<Repository>, RepoPath)> {
1547 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1548 let project_path = buffer.read(cx).project_path(cx)?;
1549 self.repository_and_path_for_project_path(&project_path, cx)
1550 }
1551
1552 pub fn repository_and_path_for_project_path(
1553 &self,
1554 path: &ProjectPath,
1555 cx: &App,
1556 ) -> Option<(Entity<Repository>, RepoPath)> {
1557 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1558 self.repositories
1559 .values()
1560 .filter_map(|repo| {
1561 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1562 Some((repo.clone(), repo_path))
1563 })
1564 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1565 }
1566
1567 pub fn git_init(
1568 &self,
1569 path: Arc<Path>,
1570 fallback_branch_name: String,
1571 cx: &App,
1572 ) -> Task<Result<()>> {
1573 match &self.state {
1574 GitStoreState::Local { fs, .. } => {
1575 let fs = fs.clone();
1576 cx.background_executor()
1577 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1578 }
1579 GitStoreState::Remote {
1580 upstream_client,
1581 upstream_project_id: project_id,
1582 ..
1583 } => {
1584 let client = upstream_client.clone();
1585 let project_id = *project_id;
1586 cx.background_executor().spawn(async move {
1587 client
1588 .request(proto::GitInit {
                            project_id,
1590 abs_path: path.to_string_lossy().into_owned(),
1591 fallback_branch_name,
1592 })
1593 .await?;
1594 Ok(())
1595 })
1596 }
1597 }
1598 }
1599
1600 pub fn git_clone(
1601 &self,
1602 repo: String,
1603 path: impl Into<Arc<std::path::Path>>,
1604 cx: &App,
1605 ) -> Task<Result<()>> {
1606 let path = path.into();
1607 match &self.state {
1608 GitStoreState::Local { fs, .. } => {
1609 let fs = fs.clone();
1610 cx.background_executor()
1611 .spawn(async move { fs.git_clone(&repo, &path).await })
1612 }
1613 GitStoreState::Remote {
1614 upstream_client,
1615 upstream_project_id,
1616 ..
1617 } => {
1618 if upstream_client.is_via_collab() {
1619 return Task::ready(Err(anyhow!(
1620 "Git Clone isn't supported for project guests"
1621 )));
1622 }
1623 let request = upstream_client.request(proto::GitClone {
1624 project_id: *upstream_project_id,
1625 abs_path: path.to_string_lossy().into_owned(),
1626 remote_repo: repo,
1627 });
1628
1629 cx.background_spawn(async move {
1630 let result = request.await?;
1631
1632 match result.success {
1633 true => Ok(()),
1634 false => Err(anyhow!("Git Clone failed")),
1635 }
1636 })
1637 }
1638 }
1639 }
1640
1641 async fn handle_update_repository(
1642 this: Entity<Self>,
1643 envelope: TypedEnvelope<proto::UpdateRepository>,
1644 mut cx: AsyncApp,
1645 ) -> Result<()> {
1646 this.update(&mut cx, |this, cx| {
1647 let path_style = this.worktree_store.read(cx).path_style();
1648 let mut update = envelope.payload;
1649
1650 let id = RepositoryId::from_proto(update.id);
1651 let client = this.upstream_client().context("no upstream client")?;
1652
1653 let mut repo_subscription = None;
1654 let repo = this.repositories.entry(id).or_insert_with(|| {
1655 let git_store = cx.weak_entity();
1656 let repo = cx.new(|cx| {
1657 Repository::remote(
1658 id,
1659 Path::new(&update.abs_path).into(),
1660 path_style,
1661 ProjectId(update.project_id),
1662 client,
1663 git_store,
1664 cx,
1665 )
1666 });
1667 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1668 cx.emit(GitStoreEvent::RepositoryAdded);
1669 repo
1670 });
1671 this._subscriptions.extend(repo_subscription);
1672
1673 repo.update(cx, {
1674 let update = update.clone();
1675 |repo, cx| repo.apply_remote_update(update, cx)
1676 })?;
1677
1678 this.active_repo_id.get_or_insert_with(|| {
1679 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1680 id
1681 });
1682
1683 if let Some((client, project_id)) = this.downstream_client() {
1684 update.project_id = project_id.to_proto();
1685 client.send(update).log_err();
1686 }
1687 Ok(())
1688 })?
1689 }
1690
1691 async fn handle_remove_repository(
1692 this: Entity<Self>,
1693 envelope: TypedEnvelope<proto::RemoveRepository>,
1694 mut cx: AsyncApp,
1695 ) -> Result<()> {
1696 this.update(&mut cx, |this, cx| {
1697 let mut update = envelope.payload;
1698 let id = RepositoryId::from_proto(update.id);
1699 this.repositories.remove(&id);
1700 if let Some((client, project_id)) = this.downstream_client() {
1701 update.project_id = project_id.to_proto();
1702 client.send(update).log_err();
1703 }
1704 if this.active_repo_id == Some(id) {
1705 this.active_repo_id = None;
1706 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1707 }
1708 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1709 })
1710 }
1711
1712 async fn handle_git_init(
1713 this: Entity<Self>,
1714 envelope: TypedEnvelope<proto::GitInit>,
1715 cx: AsyncApp,
1716 ) -> Result<proto::Ack> {
1717 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1718 let name = envelope.payload.fallback_branch_name;
1719 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1720 .await?;
1721
1722 Ok(proto::Ack {})
1723 }
1724
1725 async fn handle_git_clone(
1726 this: Entity<Self>,
1727 envelope: TypedEnvelope<proto::GitClone>,
1728 cx: AsyncApp,
1729 ) -> Result<proto::GitCloneResponse> {
1730 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1731 let repo_name = envelope.payload.remote_repo;
1732 let result = cx
1733 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1734 .await;
1735
1736 Ok(proto::GitCloneResponse {
1737 success: result.is_ok(),
1738 })
1739 }
1740
1741 async fn handle_fetch(
1742 this: Entity<Self>,
1743 envelope: TypedEnvelope<proto::Fetch>,
1744 mut cx: AsyncApp,
1745 ) -> Result<proto::RemoteMessageResponse> {
1746 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1747 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1748 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1749 let askpass_id = envelope.payload.askpass_id;
1750
1751 let askpass = make_remote_delegate(
1752 this,
1753 envelope.payload.project_id,
1754 repository_id,
1755 askpass_id,
1756 &mut cx,
1757 );
1758
1759 let remote_output = repository_handle
1760 .update(&mut cx, |repository_handle, cx| {
1761 repository_handle.fetch(fetch_options, askpass, cx)
1762 })?
1763 .await??;
1764
1765 Ok(proto::RemoteMessageResponse {
1766 stdout: remote_output.stdout,
1767 stderr: remote_output.stderr,
1768 })
1769 }
1770
1771 async fn handle_push(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::Push>,
1774 mut cx: AsyncApp,
1775 ) -> Result<proto::RemoteMessageResponse> {
1776 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1777 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1778
1779 let askpass_id = envelope.payload.askpass_id;
1780 let askpass = make_remote_delegate(
1781 this,
1782 envelope.payload.project_id,
1783 repository_id,
1784 askpass_id,
1785 &mut cx,
1786 );
1787
1788 let options = envelope
1789 .payload
1790 .options
1791 .as_ref()
1792 .map(|_| match envelope.payload.options() {
1793 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1794 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1795 });
1796
1797 let branch_name = envelope.payload.branch_name.into();
1798 let remote_name = envelope.payload.remote_name.into();
1799
1800 let remote_output = repository_handle
1801 .update(&mut cx, |repository_handle, cx| {
1802 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1803 })?
1804 .await??;
1805 Ok(proto::RemoteMessageResponse {
1806 stdout: remote_output.stdout,
1807 stderr: remote_output.stderr,
1808 })
1809 }
1810
1811 async fn handle_pull(
1812 this: Entity<Self>,
1813 envelope: TypedEnvelope<proto::Pull>,
1814 mut cx: AsyncApp,
1815 ) -> Result<proto::RemoteMessageResponse> {
1816 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1817 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1818 let askpass_id = envelope.payload.askpass_id;
1819 let askpass = make_remote_delegate(
1820 this,
1821 envelope.payload.project_id,
1822 repository_id,
1823 askpass_id,
1824 &mut cx,
1825 );
1826
1827 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1828 let remote_name = envelope.payload.remote_name.into();
1829 let rebase = envelope.payload.rebase;
1830
1831 let remote_message = repository_handle
1832 .update(&mut cx, |repository_handle, cx| {
1833 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1834 })?
1835 .await??;
1836
1837 Ok(proto::RemoteMessageResponse {
1838 stdout: remote_message.stdout,
1839 stderr: remote_message.stderr,
1840 })
1841 }
1842
1843 async fn handle_stage(
1844 this: Entity<Self>,
1845 envelope: TypedEnvelope<proto::Stage>,
1846 mut cx: AsyncApp,
1847 ) -> Result<proto::Ack> {
1848 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1849 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1850
1851 let entries = envelope
1852 .payload
1853 .paths
1854 .into_iter()
1855 .map(|path| RepoPath::new(&path))
1856 .collect::<Result<Vec<_>>>()?;
1857
1858 repository_handle
1859 .update(&mut cx, |repository_handle, cx| {
1860 repository_handle.stage_entries(entries, cx)
1861 })?
1862 .await?;
1863 Ok(proto::Ack {})
1864 }
1865
1866 async fn handle_unstage(
1867 this: Entity<Self>,
1868 envelope: TypedEnvelope<proto::Unstage>,
1869 mut cx: AsyncApp,
1870 ) -> Result<proto::Ack> {
1871 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1872 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1873
1874 let entries = envelope
1875 .payload
1876 .paths
1877 .into_iter()
1878 .map(|path| RepoPath::new(&path))
1879 .collect::<Result<Vec<_>>>()?;
1880
1881 repository_handle
1882 .update(&mut cx, |repository_handle, cx| {
1883 repository_handle.unstage_entries(entries, cx)
1884 })?
1885 .await?;
1886
1887 Ok(proto::Ack {})
1888 }
1889
1890 async fn handle_stash(
1891 this: Entity<Self>,
1892 envelope: TypedEnvelope<proto::Stash>,
1893 mut cx: AsyncApp,
1894 ) -> Result<proto::Ack> {
1895 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1896 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1897
1898 let entries = envelope
1899 .payload
1900 .paths
1901 .into_iter()
1902 .map(|path| RepoPath::new(&path))
1903 .collect::<Result<Vec<_>>>()?;
1904
1905 repository_handle
1906 .update(&mut cx, |repository_handle, cx| {
1907 repository_handle.stash_entries(entries, cx)
1908 })?
1909 .await?;
1910
1911 Ok(proto::Ack {})
1912 }
1913
1914 async fn handle_stash_pop(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::StashPop>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::Ack> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1922
1923 repository_handle
1924 .update(&mut cx, |repository_handle, cx| {
1925 repository_handle.stash_pop(stash_index, cx)
1926 })?
1927 .await?;
1928
1929 Ok(proto::Ack {})
1930 }
1931
1932 async fn handle_stash_apply(
1933 this: Entity<Self>,
1934 envelope: TypedEnvelope<proto::StashApply>,
1935 mut cx: AsyncApp,
1936 ) -> Result<proto::Ack> {
1937 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1938 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1939 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1940
1941 repository_handle
1942 .update(&mut cx, |repository_handle, cx| {
1943 repository_handle.stash_apply(stash_index, cx)
1944 })?
1945 .await?;
1946
1947 Ok(proto::Ack {})
1948 }
1949
1950 async fn handle_stash_drop(
1951 this: Entity<Self>,
1952 envelope: TypedEnvelope<proto::StashDrop>,
1953 mut cx: AsyncApp,
1954 ) -> Result<proto::Ack> {
1955 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1956 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1957 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1958
1959 repository_handle
1960 .update(&mut cx, |repository_handle, cx| {
1961 repository_handle.stash_drop(stash_index, cx)
1962 })?
1963 .await??;
1964
1965 Ok(proto::Ack {})
1966 }
1967
1968 async fn handle_set_index_text(
1969 this: Entity<Self>,
1970 envelope: TypedEnvelope<proto::SetIndexText>,
1971 mut cx: AsyncApp,
1972 ) -> Result<proto::Ack> {
1973 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1974 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1975 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1976
1977 repository_handle
1978 .update(&mut cx, |repository_handle, cx| {
1979 repository_handle.spawn_set_index_text_job(
1980 repo_path,
1981 envelope.payload.text,
1982 None,
1983 cx,
1984 )
1985 })?
1986 .await??;
1987 Ok(proto::Ack {})
1988 }
1989
1990 async fn handle_commit(
1991 this: Entity<Self>,
1992 envelope: TypedEnvelope<proto::Commit>,
1993 mut cx: AsyncApp,
1994 ) -> Result<proto::Ack> {
1995 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1996 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1997 let askpass_id = envelope.payload.askpass_id;
1998
1999 let askpass = make_remote_delegate(
2000 this,
2001 envelope.payload.project_id,
2002 repository_id,
2003 askpass_id,
2004 &mut cx,
2005 );
2006
2007 let message = SharedString::from(envelope.payload.message);
2008 let name = envelope.payload.name.map(SharedString::from);
2009 let email = envelope.payload.email.map(SharedString::from);
2010 let options = envelope.payload.options.unwrap_or_default();
2011
2012 repository_handle
2013 .update(&mut cx, |repository_handle, cx| {
2014 repository_handle.commit(
2015 message,
2016 name.zip(email),
2017 CommitOptions {
2018 amend: options.amend,
2019 signoff: options.signoff,
2020 },
2021 askpass,
2022 cx,
2023 )
2024 })?
2025 .await??;
2026 Ok(proto::Ack {})
2027 }
2028
2029 async fn handle_get_remotes(
2030 this: Entity<Self>,
2031 envelope: TypedEnvelope<proto::GetRemotes>,
2032 mut cx: AsyncApp,
2033 ) -> Result<proto::GetRemotesResponse> {
2034 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2035 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2036
2037 let branch_name = envelope.payload.branch_name;
2038
2039 let remotes = repository_handle
2040 .update(&mut cx, |repository_handle, _| {
2041 repository_handle.get_remotes(branch_name)
2042 })?
2043 .await??;
2044
2045 Ok(proto::GetRemotesResponse {
2046 remotes: remotes
2047 .into_iter()
2048 .map(|remotes| proto::get_remotes_response::Remote {
2049 name: remotes.name.to_string(),
2050 })
2051 .collect::<Vec<_>>(),
2052 })
2053 }
2054
2055 async fn handle_get_worktrees(
2056 this: Entity<Self>,
2057 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2058 mut cx: AsyncApp,
2059 ) -> Result<proto::GitWorktreesResponse> {
2060 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2061 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2062
2063 let worktrees = repository_handle
2064 .update(&mut cx, |repository_handle, _| {
2065 repository_handle.worktrees()
2066 })?
2067 .await??;
2068
2069 Ok(proto::GitWorktreesResponse {
2070 worktrees: worktrees
2071 .into_iter()
2072 .map(|worktree| worktree_to_proto(&worktree))
2073 .collect::<Vec<_>>(),
2074 })
2075 }
2076
2077 async fn handle_create_worktree(
2078 this: Entity<Self>,
2079 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2080 mut cx: AsyncApp,
2081 ) -> Result<proto::Ack> {
2082 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2083 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2084 let directory = PathBuf::from(envelope.payload.directory);
2085 let name = envelope.payload.name;
2086 let commit = envelope.payload.commit;
2087
2088 repository_handle
2089 .update(&mut cx, |repository_handle, _| {
2090 repository_handle.create_worktree(name, directory, commit)
2091 })?
2092 .await??;
2093
2094 Ok(proto::Ack {})
2095 }
2096
2097 async fn handle_get_branches(
2098 this: Entity<Self>,
2099 envelope: TypedEnvelope<proto::GitGetBranches>,
2100 mut cx: AsyncApp,
2101 ) -> Result<proto::GitBranchesResponse> {
2102 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2103 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2104
2105 let branches = repository_handle
2106 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2107 .await??;
2108
2109 Ok(proto::GitBranchesResponse {
2110 branches: branches
2111 .into_iter()
2112 .map(|branch| branch_to_proto(&branch))
2113 .collect::<Vec<_>>(),
2114 })
    }

    async fn handle_get_default_branch(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::GetDefaultBranchResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123
2124 let branch = repository_handle
2125 .update(&mut cx, |repository_handle, _| {
2126 repository_handle.default_branch()
2127 })?
2128 .await??
2129 .map(Into::into);
2130
2131 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2134 this: Entity<Self>,
2135 envelope: TypedEnvelope<proto::GitCreateBranch>,
2136 mut cx: AsyncApp,
2137 ) -> Result<proto::Ack> {
2138 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2139 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2140 let branch_name = envelope.payload.branch_name;
2141
2142 repository_handle
2143 .update(&mut cx, |repository_handle, _| {
2144 repository_handle.create_branch(branch_name, None)
2145 })?
2146 .await??;
2147
2148 Ok(proto::Ack {})
2149 }
2150
2151 async fn handle_change_branch(
2152 this: Entity<Self>,
2153 envelope: TypedEnvelope<proto::GitChangeBranch>,
2154 mut cx: AsyncApp,
2155 ) -> Result<proto::Ack> {
2156 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2157 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2158 let branch_name = envelope.payload.branch_name;
2159
2160 repository_handle
2161 .update(&mut cx, |repository_handle, _| {
2162 repository_handle.change_branch(branch_name)
2163 })?
2164 .await??;
2165
2166 Ok(proto::Ack {})
2167 }
2168
2169 async fn handle_rename_branch(
2170 this: Entity<Self>,
2171 envelope: TypedEnvelope<proto::GitRenameBranch>,
2172 mut cx: AsyncApp,
2173 ) -> Result<proto::Ack> {
2174 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2175 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2176 let branch = envelope.payload.branch;
2177 let new_name = envelope.payload.new_name;
2178
2179 repository_handle
2180 .update(&mut cx, |repository_handle, _| {
2181 repository_handle.rename_branch(branch, new_name)
2182 })?
2183 .await??;
2184
2185 Ok(proto::Ack {})
2186 }
2187
2188 async fn handle_show(
2189 this: Entity<Self>,
2190 envelope: TypedEnvelope<proto::GitShow>,
2191 mut cx: AsyncApp,
2192 ) -> Result<proto::GitCommitDetails> {
2193 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2194 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2195
2196 let commit = repository_handle
2197 .update(&mut cx, |repository_handle, _| {
2198 repository_handle.show(envelope.payload.commit)
2199 })?
2200 .await??;
2201 Ok(proto::GitCommitDetails {
2202 sha: commit.sha.into(),
2203 message: commit.message.into(),
2204 commit_timestamp: commit.commit_timestamp,
2205 author_email: commit.author_email.into(),
2206 author_name: commit.author_name.into(),
2207 })
2208 }
2209
2210 async fn handle_load_commit_diff(
2211 this: Entity<Self>,
2212 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2213 mut cx: AsyncApp,
2214 ) -> Result<proto::LoadCommitDiffResponse> {
2215 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2216 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2217
2218 let commit_diff = repository_handle
2219 .update(&mut cx, |repository_handle, _| {
2220 repository_handle.load_commit_diff(envelope.payload.commit)
2221 })?
2222 .await??;
2223 Ok(proto::LoadCommitDiffResponse {
2224 files: commit_diff
2225 .files
2226 .into_iter()
2227 .map(|file| proto::CommitFile {
2228 path: file.path.to_proto(),
2229 old_text: file.old_text,
2230 new_text: file.new_text,
2231 })
2232 .collect(),
2233 })
2234 }
2235
2236 async fn handle_reset(
2237 this: Entity<Self>,
2238 envelope: TypedEnvelope<proto::GitReset>,
2239 mut cx: AsyncApp,
2240 ) -> Result<proto::Ack> {
2241 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2242 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2243
2244 let mode = match envelope.payload.mode() {
2245 git_reset::ResetMode::Soft => ResetMode::Soft,
2246 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2247 };
2248
2249 repository_handle
2250 .update(&mut cx, |repository_handle, cx| {
2251 repository_handle.reset(envelope.payload.commit, mode, cx)
2252 })?
2253 .await??;
2254 Ok(proto::Ack {})
2255 }
2256
2257 async fn handle_checkout_files(
2258 this: Entity<Self>,
2259 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2260 mut cx: AsyncApp,
2261 ) -> Result<proto::Ack> {
2262 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2263 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2264 let paths = envelope
2265 .payload
2266 .paths
2267 .iter()
2268 .map(|s| RepoPath::from_proto(s))
2269 .collect::<Result<Vec<_>>>()?;
2270
2271 repository_handle
2272 .update(&mut cx, |repository_handle, cx| {
2273 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2274 })?
2275 .await?;
2276 Ok(proto::Ack {})
2277 }
2278
2279 async fn handle_open_commit_message_buffer(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::OpenBufferResponse> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let buffer = repository
2287 .update(&mut cx, |repository, cx| {
2288 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2289 })?
2290 .await?;
2291
2292 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2293 this.update(&mut cx, |this, cx| {
2294 this.buffer_store.update(cx, |buffer_store, cx| {
2295 buffer_store
2296 .create_buffer_for_peer(
2297 &buffer,
2298 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2299 cx,
2300 )
2301 .detach_and_log_err(cx);
2302 })
2303 })?;
2304
2305 Ok(proto::OpenBufferResponse {
2306 buffer_id: buffer_id.to_proto(),
2307 })
2308 }
2309
2310 async fn handle_askpass(
2311 this: Entity<Self>,
2312 envelope: TypedEnvelope<proto::AskPassRequest>,
2313 mut cx: AsyncApp,
2314 ) -> Result<proto::AskPassResponse> {
2315 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2316 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2317
2318 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2319 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2320 debug_panic!("no askpass found");
2321 anyhow::bail!("no askpass found");
2322 };
2323
2324 let response = askpass
2325 .ask_password(envelope.payload.prompt)
2326 .await
2327 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2328
2329 delegates
2330 .lock()
2331 .insert(envelope.payload.askpass_id, askpass);
2332
        // Note: despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker below, the
        // askpass password is sent back unencrypted in this response.
2334 Ok(proto::AskPassResponse {
2335 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2336 })
2337 }
2338
2339 async fn handle_check_for_pushed_commits(
2340 this: Entity<Self>,
2341 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2342 mut cx: AsyncApp,
2343 ) -> Result<proto::CheckForPushedCommitsResponse> {
2344 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2345 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2346
2347 let branches = repository_handle
2348 .update(&mut cx, |repository_handle, _| {
2349 repository_handle.check_for_pushed_commits()
2350 })?
2351 .await??;
2352 Ok(proto::CheckForPushedCommitsResponse {
2353 pushed_to: branches
2354 .into_iter()
2355 .map(|commit| commit.to_string())
2356 .collect(),
2357 })
2358 }
2359
2360 async fn handle_git_diff(
2361 this: Entity<Self>,
2362 envelope: TypedEnvelope<proto::GitDiff>,
2363 mut cx: AsyncApp,
2364 ) -> Result<proto::GitDiffResponse> {
2365 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2366 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2367 let diff_type = match envelope.payload.diff_type() {
2368 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2369 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2370 };
2371
2372 let mut diff = repository_handle
2373 .update(&mut cx, |repository_handle, cx| {
2374 repository_handle.diff(diff_type, cx)
2375 })?
2376 .await??;
2377 const ONE_MB: usize = 1_000_000;
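        // Cap the diff sent over RPC. Note that `len()` measures bytes while
        // `take(ONE_MB)` counts chars, so multi-byte content may still slightly
        // exceed one megabyte after truncation.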
2378 if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
2380 }
2381
2382 Ok(proto::GitDiffResponse { diff })
2383 }
2384
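    /// Computes a tree-level diff for the requested repository, either directly
    /// between `base` and `head` or relative to their merge base, and returns each
    /// changed path's status along with the old blob OID for modified and deleted
    /// entries.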
2385 async fn handle_tree_diff(
2386 this: Entity<Self>,
2387 request: TypedEnvelope<proto::GetTreeDiff>,
2388 mut cx: AsyncApp,
2389 ) -> Result<proto::GetTreeDiffResponse> {
2390 let repository_id = RepositoryId(request.payload.repository_id);
2391 let diff_type = if request.payload.is_merge {
2392 DiffTreeType::MergeBase {
2393 base: request.payload.base.into(),
2394 head: request.payload.head.into(),
2395 }
2396 } else {
2397 DiffTreeType::Since {
2398 base: request.payload.base.into(),
2399 head: request.payload.head.into(),
2400 }
2401 };
2402
2403 let diff = this
2404 .update(&mut cx, |this, cx| {
2405 let repository = this.repositories().get(&repository_id)?;
2406 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2407 })?
2408 .context("missing repository")?
2409 .await??;
2410
2411 Ok(proto::GetTreeDiffResponse {
2412 entries: diff
2413 .entries
2414 .into_iter()
2415 .map(|(path, status)| proto::TreeDiffStatus {
2416 path: path.as_ref().to_proto(),
2417 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2419 TreeDiffStatus::Modified { .. } => {
2420 proto::tree_diff_status::Status::Modified.into()
2421 }
2422 TreeDiffStatus::Deleted { .. } => {
2423 proto::tree_diff_status::Status::Deleted.into()
2424 }
2425 },
2426 oid: match status {
2427 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2428 Some(old.to_string())
2429 }
2430 TreeDiffStatus::Added => None,
2431 },
2432 })
2433 .collect(),
2434 })
2435 }
2436
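    /// Loads the contents of a single git blob, addressed by OID, from the
    /// requested repository.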
2437 async fn handle_get_blob_content(
2438 this: Entity<Self>,
2439 request: TypedEnvelope<proto::GetBlobContent>,
2440 mut cx: AsyncApp,
2441 ) -> Result<proto::GetBlobContentResponse> {
2442 let oid = git::Oid::from_str(&request.payload.oid)?;
2443 let repository_id = RepositoryId(request.payload.repository_id);
2444 let content = this
2445 .update(&mut cx, |this, cx| {
2446 let repository = this.repositories().get(&repository_id)?;
2447 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2448 })?
2449 .context("missing repository")?
2450 .await?;
2451 Ok(proto::GetBlobContentResponse { content })
2452 }
2453
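    /// Opens the unstaged diff for a buffer on behalf of a remote peer, retains it
    /// in `shared_diffs` so it stays alive for that peer, and returns the staged
    /// (index) base text.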
2454 async fn handle_open_unstaged_diff(
2455 this: Entity<Self>,
2456 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2457 mut cx: AsyncApp,
2458 ) -> Result<proto::OpenUnstagedDiffResponse> {
2459 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2460 let diff = this
2461 .update(&mut cx, |this, cx| {
2462 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2463 Some(this.open_unstaged_diff(buffer, cx))
2464 })?
2465 .context("missing buffer")?
2466 .await?;
2467 this.update(&mut cx, |this, _| {
2468 let shared_diffs = this
2469 .shared_diffs
2470 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2471 .or_default();
2472 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2473 })?;
2474 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2475 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2476 }
2477
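    /// Opens the uncommitted diff for a buffer on behalf of a remote peer and
    /// reports its base texts: `IndexMatchesHead` when the index base is the same
    /// snapshot as the HEAD base (only the committed text is sent), otherwise
    /// `IndexAndHead` with each existing base sent separately.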
2478 async fn handle_open_uncommitted_diff(
2479 this: Entity<Self>,
2480 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2481 mut cx: AsyncApp,
2482 ) -> Result<proto::OpenUncommittedDiffResponse> {
2483 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2484 let diff = this
2485 .update(&mut cx, |this, cx| {
2486 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2487 Some(this.open_uncommitted_diff(buffer, cx))
2488 })?
2489 .context("missing buffer")?
2490 .await?;
2491 this.update(&mut cx, |this, _| {
2492 let shared_diffs = this
2493 .shared_diffs
2494 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2495 .or_default();
2496 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2497 })?;
2498 diff.read_with(&cx, |diff, cx| {
2499 use proto::open_uncommitted_diff_response::Mode;
2500
2501 let unstaged_diff = diff.secondary_diff();
2502 let index_snapshot = unstaged_diff.and_then(|diff| {
2503 let diff = diff.read(cx);
2504 diff.base_text_exists().then(|| diff.base_text())
2505 });
2506
2507 let mode;
2508 let staged_text;
2509 let committed_text;
2510 if diff.base_text_exists() {
2511 let committed_snapshot = diff.base_text();
2512 committed_text = Some(committed_snapshot.text());
2513 if let Some(index_text) = index_snapshot {
2514 if index_text.remote_id() == committed_snapshot.remote_id() {
2515 mode = Mode::IndexMatchesHead;
2516 staged_text = None;
2517 } else {
2518 mode = Mode::IndexAndHead;
2519 staged_text = Some(index_text.text());
2520 }
2521 } else {
2522 mode = Mode::IndexAndHead;
2523 staged_text = None;
2524 }
2525 } else {
2526 mode = Mode::IndexAndHead;
2527 committed_text = None;
2528 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2529 }
2530
2531 proto::OpenUncommittedDiffResponse {
2532 committed_text,
2533 staged_text,
2534 mode: mode.into(),
2535 }
2536 })
2537 }
2538
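    /// Applies index/HEAD base texts pushed by the host to the local
    /// `BufferGitState` of the given buffer, if that buffer is still open.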
2539 async fn handle_update_diff_bases(
2540 this: Entity<Self>,
2541 request: TypedEnvelope<proto::UpdateDiffBases>,
2542 mut cx: AsyncApp,
2543 ) -> Result<()> {
2544 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2545 this.update(&mut cx, |this, cx| {
2546 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2547 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2548 {
2549 let buffer = buffer.read(cx).text_snapshot();
2550 diff_state.update(cx, |diff_state, cx| {
2551 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2552 })
2553 }
2554 })
2555 }
2556
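    /// Waits for the buffer to catch up to the requested version, then computes
    /// and serializes its git blame.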
2557 async fn handle_blame_buffer(
2558 this: Entity<Self>,
2559 envelope: TypedEnvelope<proto::BlameBuffer>,
2560 mut cx: AsyncApp,
2561 ) -> Result<proto::BlameBufferResponse> {
2562 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2563 let version = deserialize_version(&envelope.payload.version);
2564 let buffer = this.read_with(&cx, |this, cx| {
2565 this.buffer_store.read(cx).get_existing(buffer_id)
2566 })??;
2567 buffer
2568 .update(&mut cx, |buffer, _| {
2569 buffer.wait_for_version(version.clone())
2570 })?
2571 .await?;
2572 let blame = this
2573 .update(&mut cx, |this, cx| {
2574 this.blame_buffer(&buffer, Some(version), cx)
2575 })?
2576 .await?;
2577 Ok(serialize_blame_buffer_response(blame))
2578 }
2579
2580 async fn handle_get_permalink_to_line(
2581 this: Entity<Self>,
2582 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2583 mut cx: AsyncApp,
2584 ) -> Result<proto::GetPermalinkToLineResponse> {
2585 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2586 // let version = deserialize_version(&envelope.payload.version);
2587 let selection = {
2588 let proto_selection = envelope
2589 .payload
2590 .selection
                .context("no selection provided to get permalink for")?;
2592 proto_selection.start as u32..proto_selection.end as u32
2593 };
2594 let buffer = this.read_with(&cx, |this, cx| {
2595 this.buffer_store.read(cx).get_existing(buffer_id)
2596 })??;
2597 let permalink = this
2598 .update(&mut cx, |this, cx| {
2599 this.get_permalink_to_line(&buffer, selection, cx)
2600 })?
2601 .await?;
2602 Ok(proto::GetPermalinkToLineResponse {
2603 permalink: permalink.to_string(),
2604 })
2605 }
2606
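    /// Resolves the repository referenced by an incoming request, failing if it is
    /// no longer present in this store.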
2607 fn repository_for_request(
2608 this: &Entity<Self>,
2609 id: RepositoryId,
2610 cx: &mut AsyncApp,
2611 ) -> Result<Entity<Repository>> {
2612 this.read_with(cx, |this, _| {
2613 this.repositories
2614 .get(&id)
2615 .context("missing repository handle")
2616 .cloned()
2617 })?
2618 }
2619
2620 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2621 self.repositories
2622 .iter()
2623 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2624 .collect()
2625 }
2626
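    /// Groups a batch of updated worktree entries by the repository whose work
    /// directory contains them, converting each absolute path into a `RepoPath`.
    /// Paths claimed by a nested repository are not also attributed to its
    /// ancestors. The grouping itself runs on the background executor.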
2627 fn process_updated_entries(
2628 &self,
2629 worktree: &Entity<Worktree>,
2630 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2631 cx: &mut App,
2632 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2633 let path_style = worktree.read(cx).path_style();
2634 let mut repo_paths = self
2635 .repositories
2636 .values()
2637 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2638 .collect::<Vec<_>>();
2639 let mut entries: Vec<_> = updated_entries
2640 .iter()
2641 .map(|(path, _, _)| path.clone())
2642 .collect();
2643 entries.sort();
2644 let worktree = worktree.read(cx);
2645
2646 let entries = entries
2647 .into_iter()
2648 .map(|path| worktree.absolutize(&path))
2649 .collect::<Arc<[_]>>();
2650
2651 let executor = cx.background_executor().clone();
2652 cx.background_executor().spawn(async move {
2653 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2654 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2655 let mut tasks = FuturesOrdered::new();
2656 for (repo_path, repo) in repo_paths.into_iter().rev() {
2657 let entries = entries.clone();
2658 let task = executor.spawn(async move {
                    // Find all updated paths that fall inside this repository's work directory.
2660 let mut ix = entries.partition_point(|path| path < &*repo_path);
2661 if ix == entries.len() {
2662 return None;
2663 };
2664
2665 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form a contiguous
                    // range in the sorted entry list.
2667 while let Some(path) = entries.get(ix)
2668 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2669 &repo_path, path, path_style,
2670 )
2671 {
2672 paths.push((repo_path, ix));
2673 ix += 1;
2674 }
2675 if paths.is_empty() {
2676 None
2677 } else {
2678 Some((repo, paths))
2679 }
2680 });
2681 tasks.push_back(task);
2682 }
2683
2684 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2685 let mut path_was_used = vec![false; entries.len()];
2686 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned over repositories in reverse path order, so more-specific
            // (deeper) repositories come first. We always want to assign a path to its
            // innermost repository.
2689 for t in tasks {
2690 let Some((repo, paths)) = t else {
2691 continue;
2692 };
2693 let entry = paths_by_git_repo.entry(repo).or_default();
2694 for (repo_path, ix) in paths {
2695 if path_was_used[ix] {
2696 continue;
2697 }
2698 path_was_used[ix] = true;
2699 entry.push(repo_path);
2700 }
2701 }
2702
2703 paths_by_git_repo
2704 })
2705 }
2706}
2707
2708impl BufferGitState {
2709 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2710 Self {
2711 unstaged_diff: Default::default(),
2712 uncommitted_diff: Default::default(),
2713 recalculate_diff_task: Default::default(),
2714 language: Default::default(),
2715 language_registry: Default::default(),
2716 recalculating_tx: postage::watch::channel_with(false).0,
2717 hunk_staging_operation_count: 0,
2718 hunk_staging_operation_count_as_of_write: 0,
2719 head_text: Default::default(),
2720 index_text: Default::default(),
2721 head_changed: Default::default(),
2722 index_changed: Default::default(),
2723 language_changed: Default::default(),
2724 conflict_updated_futures: Default::default(),
2725 conflict_set: Default::default(),
2726 reparse_conflict_markers_task: Default::default(),
2727 }
2728 }
2729
2730 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2731 self.language = buffer.read(cx).language().cloned();
2732 self.language_changed = true;
2733 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2734 }
2735
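    /// Re-parses conflict markers for the given buffer snapshot on a background
    /// task and applies the result to the conflict set. The returned receiver
    /// resolves once the update lands; if there is no conflict set, or it has no
    /// conflicts, the sender is dropped immediately instead.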
2736 fn reparse_conflict_markers(
2737 &mut self,
2738 buffer: text::BufferSnapshot,
2739 cx: &mut Context<Self>,
2740 ) -> oneshot::Receiver<()> {
2741 let (tx, rx) = oneshot::channel();
2742
2743 let Some(conflict_set) = self
2744 .conflict_set
2745 .as_ref()
2746 .and_then(|conflict_set| conflict_set.upgrade())
2747 else {
2748 return rx;
2749 };
2750
2751 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2752 if conflict_set.has_conflict {
2753 Some(conflict_set.snapshot())
2754 } else {
2755 None
2756 }
2757 });
2758
2759 if let Some(old_snapshot) = old_snapshot {
2760 self.conflict_updated_futures.push(tx);
2761 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2762 let (snapshot, changed_range) = cx
2763 .background_spawn(async move {
2764 let new_snapshot = ConflictSet::parse(&buffer);
2765 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2766 (new_snapshot, changed_range)
2767 })
2768 .await;
2769 this.update(cx, |this, cx| {
2770 if let Some(conflict_set) = &this.conflict_set {
2771 conflict_set
2772 .update(cx, |conflict_set, cx| {
2773 conflict_set.set_snapshot(snapshot, changed_range, cx);
2774 })
2775 .ok();
2776 }
2777 let futures = std::mem::take(&mut this.conflict_updated_futures);
2778 for tx in futures {
2779 tx.send(()).ok();
2780 }
2781 })
2782 }))
2783 }
2784
2785 rx
2786 }
2787
2788 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2789 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2790 }
2791
2792 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2793 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2794 }
2795
2796 fn handle_base_texts_updated(
2797 &mut self,
2798 buffer: text::BufferSnapshot,
2799 message: proto::UpdateDiffBases,
2800 cx: &mut Context<Self>,
2801 ) {
2802 use proto::update_diff_bases::Mode;
2803
2804 let Some(mode) = Mode::from_i32(message.mode) else {
2805 return;
2806 };
2807
2808 let diff_bases_change = match mode {
2809 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2810 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2811 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2812 Mode::IndexAndHead => DiffBasesChange::SetEach {
2813 index: message.staged_text,
2814 head: message.committed_text,
2815 },
2816 };
2817
2818 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2819 }
2820
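    /// Returns a future that resolves once the in-flight diff recalculation for
    /// this buffer finishes, or `None` if no recalculation is currently running.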
2821 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2822 if *self.recalculating_tx.borrow() {
2823 let mut rx = self.recalculating_tx.subscribe();
2824 Some(async move {
2825 loop {
2826 let is_recalculating = rx.recv().await;
2827 if is_recalculating != Some(true) {
2828 break;
2829 }
2830 }
2831 })
2832 } else {
2833 None
2834 }
2835 }
2836
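    /// Records new index and/or HEAD base texts (normalizing their line endings)
    /// and kicks off a diff recalculation against the given buffer snapshot.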
2837 fn diff_bases_changed(
2838 &mut self,
2839 buffer: text::BufferSnapshot,
2840 diff_bases_change: Option<DiffBasesChange>,
2841 cx: &mut Context<Self>,
2842 ) {
2843 match diff_bases_change {
2844 Some(DiffBasesChange::SetIndex(index)) => {
2845 self.index_text = index.map(|mut index| {
2846 text::LineEnding::normalize(&mut index);
2847 Arc::new(index)
2848 });
2849 self.index_changed = true;
2850 }
2851 Some(DiffBasesChange::SetHead(head)) => {
2852 self.head_text = head.map(|mut head| {
2853 text::LineEnding::normalize(&mut head);
2854 Arc::new(head)
2855 });
2856 self.head_changed = true;
2857 }
2858 Some(DiffBasesChange::SetBoth(text)) => {
2859 let text = text.map(|mut text| {
2860 text::LineEnding::normalize(&mut text);
2861 Arc::new(text)
2862 });
2863 self.head_text = text.clone();
2864 self.index_text = text;
2865 self.head_changed = true;
2866 self.index_changed = true;
2867 }
2868 Some(DiffBasesChange::SetEach { index, head }) => {
2869 self.index_text = index.map(|mut index| {
2870 text::LineEnding::normalize(&mut index);
2871 Arc::new(index)
2872 });
2873 self.index_changed = true;
2874 self.head_text = head.map(|mut head| {
2875 text::LineEnding::normalize(&mut head);
2876 Arc::new(head)
2877 });
2878 self.head_changed = true;
2879 }
2880 None => {}
2881 }
2882
2883 self.recalculate_diffs(buffer, cx)
2884 }
2885
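    /// Recomputes the unstaged diff against the index text and the uncommitted
    /// diff against HEAD, reusing the unstaged result when the index matches HEAD.
    /// The update is abandoned if additional hunk staging operations began while
    /// it was being computed.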
2886 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2887 *self.recalculating_tx.borrow_mut() = true;
2888
2889 let language = self.language.clone();
2890 let language_registry = self.language_registry.clone();
2891 let unstaged_diff = self.unstaged_diff();
2892 let uncommitted_diff = self.uncommitted_diff();
2893 let head = self.head_text.clone();
2894 let index = self.index_text.clone();
2895 let index_changed = self.index_changed;
2896 let head_changed = self.head_changed;
2897 let language_changed = self.language_changed;
2898 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2899 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2900 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2901 (None, None) => true,
2902 _ => false,
2903 };
2904 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2905 log::debug!(
2906 "start recalculating diffs for buffer {}",
2907 buffer.remote_id()
2908 );
2909
2910 let mut new_unstaged_diff = None;
2911 if let Some(unstaged_diff) = &unstaged_diff {
2912 new_unstaged_diff = Some(
2913 BufferDiff::update_diff(
2914 unstaged_diff.clone(),
2915 buffer.clone(),
2916 index,
2917 index_changed,
2918 language_changed,
2919 language.clone(),
2920 language_registry.clone(),
2921 cx,
2922 )
2923 .await?,
2924 );
2925 }
2926
2927 let mut new_uncommitted_diff = None;
2928 if let Some(uncommitted_diff) = &uncommitted_diff {
2929 new_uncommitted_diff = if index_matches_head {
2930 new_unstaged_diff.clone()
2931 } else {
2932 Some(
2933 BufferDiff::update_diff(
2934 uncommitted_diff.clone(),
2935 buffer.clone(),
2936 head,
2937 head_changed,
2938 language_changed,
2939 language.clone(),
2940 language_registry.clone(),
2941 cx,
2942 )
2943 .await?,
2944 )
2945 }
2946 }
2947
2948 let cancel = this.update(cx, |this, _| {
2949 // This checks whether all pending stage/unstage operations
2950 // have quiesced (i.e. both the corresponding write and the
2951 // read of that write have completed). If not, then we cancel
2952 // this recalculation attempt to avoid invalidating pending
2953 // state too quickly; another recalculation will come along
2954 // later and clear the pending state once the state of the index has settled.
2955 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2956 *this.recalculating_tx.borrow_mut() = false;
2957 true
2958 } else {
2959 false
2960 }
2961 })?;
2962 if cancel {
2963 log::debug!(
2964 concat!(
2965 "aborting recalculating diffs for buffer {}",
2966 "due to subsequent hunk operations",
2967 ),
2968 buffer.remote_id()
2969 );
2970 return Ok(());
2971 }
2972
2973 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2974 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2975 {
2976 unstaged_diff.update(cx, |diff, cx| {
2977 if language_changed {
2978 diff.language_changed(cx);
2979 }
2980 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2981 })?
2982 } else {
2983 None
2984 };
2985
2986 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2987 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2988 {
2989 uncommitted_diff.update(cx, |diff, cx| {
2990 if language_changed {
2991 diff.language_changed(cx);
2992 }
2993 diff.set_snapshot_with_secondary(
2994 new_uncommitted_diff,
2995 &buffer,
2996 unstaged_changed_range,
2997 true,
2998 cx,
2999 );
3000 })?;
3001 }
3002
3003 log::debug!(
3004 "finished recalculating diffs for buffer {}",
3005 buffer.remote_id()
3006 );
3007
3008 if let Some(this) = this.upgrade() {
3009 this.update(cx, |this, _| {
3010 this.index_changed = false;
3011 this.head_changed = false;
3012 this.language_changed = false;
3013 *this.recalculating_tx.borrow_mut() = false;
3014 })?;
3015 }
3016
3017 Ok(())
3018 }));
3019 }
3020}
3021
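/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream
/// client over RPC, zeroizing the plaintext response once it has been handed off.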
3022fn make_remote_delegate(
3023 this: Entity<GitStore>,
3024 project_id: u64,
3025 repository_id: RepositoryId,
3026 askpass_id: u64,
3027 cx: &mut AsyncApp,
3028) -> AskPassDelegate {
3029 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3030 this.update(cx, |this, cx| {
3031 let Some((client, _)) = this.downstream_client() else {
3032 return;
3033 };
3034 let response = client.request(proto::AskPassRequest {
3035 project_id,
3036 repository_id: repository_id.to_proto(),
3037 askpass_id,
3038 prompt,
3039 });
3040 cx.spawn(async move |_, _| {
3041 let mut response = response.await?.response;
3042 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3043 .ok();
3044 response.zeroize();
3045 anyhow::Ok(())
3046 })
3047 .detach_and_log_err(cx);
3048 })
3049 .log_err();
3050 })
3051}
3052
3053impl RepositoryId {
3054 pub fn to_proto(self) -> u64 {
3055 self.0
3056 }
3057
3058 pub fn from_proto(id: u64) -> Self {
3059 RepositoryId(id)
3060 }
3061}
3062
3063impl RepositorySnapshot {
3064 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3065 Self {
3066 id,
3067 statuses_by_path: Default::default(),
3068 pending_ops_by_path: Default::default(),
3069 work_directory_abs_path,
3070 branch: None,
3071 head_commit: None,
3072 scan_id: 0,
3073 merge: Default::default(),
3074 remote_origin_url: None,
3075 remote_upstream_url: None,
3076 stash_entries: Default::default(),
3077 path_style,
3078 }
3079 }
3080
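    /// Builds the full `UpdateRepository` message used when this repository is
    /// first shared with a downstream client.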
3081 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3082 proto::UpdateRepository {
3083 branch_summary: self.branch.as_ref().map(branch_to_proto),
3084 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3085 updated_statuses: self
3086 .statuses_by_path
3087 .iter()
3088 .map(|entry| entry.to_proto())
3089 .collect(),
3090 removed_statuses: Default::default(),
3091 current_merge_conflicts: self
3092 .merge
3093 .conflicted_paths
3094 .iter()
3095 .map(|repo_path| repo_path.to_proto())
3096 .collect(),
3097 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3098 project_id,
3099 id: self.id.to_proto(),
3100 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3101 entry_ids: vec![self.id.to_proto()],
3102 scan_id: self.scan_id,
3103 is_last_update: true,
3104 stash_entries: self
3105 .stash_entries
3106 .entries
3107 .iter()
3108 .map(stash_to_proto)
3109 .collect(),
3110 }
3111 }
3112
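    /// Builds a delta `UpdateRepository` message by merging this snapshot's sorted
    /// status entries with the previous snapshot's, emitting updated entries and
    /// removed paths.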
3113 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3114 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3115 let mut removed_statuses: Vec<String> = Vec::new();
3116
3117 let mut new_statuses = self.statuses_by_path.iter().peekable();
3118 let mut old_statuses = old.statuses_by_path.iter().peekable();
3119
3120 let mut current_new_entry = new_statuses.next();
3121 let mut current_old_entry = old_statuses.next();
3122 loop {
3123 match (current_new_entry, current_old_entry) {
3124 (Some(new_entry), Some(old_entry)) => {
3125 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3126 Ordering::Less => {
3127 updated_statuses.push(new_entry.to_proto());
3128 current_new_entry = new_statuses.next();
3129 }
3130 Ordering::Equal => {
3131 if new_entry.status != old_entry.status {
3132 updated_statuses.push(new_entry.to_proto());
3133 }
3134 current_old_entry = old_statuses.next();
3135 current_new_entry = new_statuses.next();
3136 }
3137 Ordering::Greater => {
3138 removed_statuses.push(old_entry.repo_path.to_proto());
3139 current_old_entry = old_statuses.next();
3140 }
3141 }
3142 }
3143 (None, Some(old_entry)) => {
3144 removed_statuses.push(old_entry.repo_path.to_proto());
3145 current_old_entry = old_statuses.next();
3146 }
3147 (Some(new_entry), None) => {
3148 updated_statuses.push(new_entry.to_proto());
3149 current_new_entry = new_statuses.next();
3150 }
3151 (None, None) => break,
3152 }
3153 }
3154
3155 proto::UpdateRepository {
3156 branch_summary: self.branch.as_ref().map(branch_to_proto),
3157 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3158 updated_statuses,
3159 removed_statuses,
3160 current_merge_conflicts: self
3161 .merge
3162 .conflicted_paths
3163 .iter()
3164 .map(|path| path.to_proto())
3165 .collect(),
3166 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3167 project_id,
3168 id: self.id.to_proto(),
3169 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3170 entry_ids: vec![],
3171 scan_id: self.scan_id,
3172 is_last_update: true,
3173 stash_entries: self
3174 .stash_entries
3175 .entries
3176 .iter()
3177 .map(stash_to_proto)
3178 .collect(),
3179 }
3180 }
3181
3182 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3183 self.statuses_by_path.iter().cloned()
3184 }
3185
3186 pub fn status_summary(&self) -> GitSummary {
3187 self.statuses_by_path.summary().item_summary
3188 }
3189
3190 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3191 self.statuses_by_path
3192 .get(&PathKey(path.as_ref().clone()), ())
3193 .cloned()
3194 }
3195
3196 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3197 self.pending_ops_by_path
3198 .get(&PathKey(path.as_ref().clone()), ())
3199 .cloned()
3200 }
3201
3202 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3203 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3204 }
3205
3206 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3207 self.path_style
3208 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3209 .unwrap()
3210 .into()
3211 }
3212
3213 #[inline]
3214 fn abs_path_to_repo_path_inner(
3215 work_directory_abs_path: &Path,
3216 abs_path: &Path,
3217 path_style: PathStyle,
3218 ) -> Option<RepoPath> {
3219 abs_path
3220 .strip_prefix(&work_directory_abs_path)
3221 .ok()
3222 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3223 }
3224
3225 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3226 self.merge.conflicted_paths.contains(repo_path)
3227 }
3228
3229 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3230 let had_conflict_on_last_merge_head_change =
3231 self.merge.conflicted_paths.contains(repo_path);
3232 let has_conflict_currently = self
3233 .status_for_path(repo_path)
3234 .is_some_and(|entry| entry.status.is_conflicted());
3235 had_conflict_on_last_merge_head_change || has_conflict_currently
3236 }
3237
3238 /// This is the name that will be displayed in the repository selector for this repository.
3239 pub fn display_name(&self) -> SharedString {
3240 self.work_directory_abs_path
3241 .file_name()
3242 .unwrap_or_default()
3243 .to_string_lossy()
3244 .to_string()
3245 .into()
3246 }
3247}
3248
3249pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3250 proto::StashEntry {
3251 oid: entry.oid.as_bytes().to_vec(),
3252 message: entry.message.clone(),
3253 branch: entry.branch.clone(),
3254 index: entry.index as u64,
3255 timestamp: entry.timestamp,
3256 }
3257}
3258
3259pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3260 Ok(StashEntry {
3261 oid: Oid::from_bytes(&entry.oid)?,
3262 message: entry.message.clone(),
3263 index: entry.index as usize,
3264 branch: entry.branch.clone(),
3265 timestamp: entry.timestamp,
3266 })
3267}
3268
3269impl MergeDetails {
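    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, etc.) and the
    /// merge message, recomputing the set of conflicted paths from the current
    /// status when the merge heads have changed. Returns the new details along
    /// with whether the recorded merge heads changed relative to the previous
    /// snapshot.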
3270 async fn load(
3271 backend: &Arc<dyn GitRepository>,
3272 status: &SumTree<StatusEntry>,
3273 prev_snapshot: &RepositorySnapshot,
3274 ) -> Result<(MergeDetails, bool)> {
3275 log::debug!("load merge details");
3276 let message = backend.merge_message().await;
3277 let heads = backend
3278 .revparse_batch(vec![
3279 "MERGE_HEAD".into(),
3280 "CHERRY_PICK_HEAD".into(),
3281 "REBASE_HEAD".into(),
3282 "REVERT_HEAD".into(),
3283 "APPLY_HEAD".into(),
3284 ])
3285 .await
3286 .log_err()
3287 .unwrap_or_default()
3288 .into_iter()
3289 .map(|opt| opt.map(SharedString::from))
3290 .collect::<Vec<_>>();
3291 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3292 let conflicted_paths = if merge_heads_changed {
3293 let current_conflicted_paths = TreeSet::from_ordered_entries(
3294 status
3295 .iter()
3296 .filter(|entry| entry.status.is_conflicted())
3297 .map(|entry| entry.repo_path.clone()),
3298 );
3299
            // A scan can run while a lengthy merge is still in progress: the merge heads
            // already exist, but `git status` does not yet report the conflicts that the
            // merge will eventually produce. Since, for now, we only track merge-head state
            // in order to track conflicts, don't update it until some conflicts appear.
3305 if heads.iter().any(Option::is_some)
3306 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3307 && current_conflicted_paths.is_empty()
3308 {
3309 log::debug!("not updating merge heads because no conflicts found");
3310 return Ok((
3311 MergeDetails {
3312 message: message.map(SharedString::from),
3313 ..prev_snapshot.merge.clone()
3314 },
3315 false,
3316 ));
3317 }
3318
3319 current_conflicted_paths
3320 } else {
3321 prev_snapshot.merge.conflicted_paths.clone()
3322 };
3323 let details = MergeDetails {
3324 conflicted_paths,
3325 message: message.map(SharedString::from),
3326 heads,
3327 };
3328 Ok((details, merge_heads_changed))
3329 }
3330}
3331
3332impl Repository {
3333 pub fn snapshot(&self) -> RepositorySnapshot {
3334 self.snapshot.clone()
3335 }
3336
3337 fn local(
3338 id: RepositoryId,
3339 work_directory_abs_path: Arc<Path>,
3340 dot_git_abs_path: Arc<Path>,
3341 repository_dir_abs_path: Arc<Path>,
3342 common_dir_abs_path: Arc<Path>,
3343 project_environment: WeakEntity<ProjectEnvironment>,
3344 fs: Arc<dyn Fs>,
3345 git_store: WeakEntity<GitStore>,
3346 cx: &mut Context<Self>,
3347 ) -> Self {
3348 let snapshot =
3349 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3350 Repository {
3351 this: cx.weak_entity(),
3352 git_store,
3353 snapshot,
3354 commit_message_buffer: None,
3355 askpass_delegates: Default::default(),
3356 paths_needing_status_update: Default::default(),
3357 latest_askpass_id: 0,
3358 job_sender: Repository::spawn_local_git_worker(
3359 work_directory_abs_path,
3360 dot_git_abs_path,
3361 repository_dir_abs_path,
3362 common_dir_abs_path,
3363 project_environment,
3364 fs,
3365 cx,
3366 ),
3367 job_id: 0,
3368 active_jobs: Default::default(),
3369 }
3370 }
3371
3372 fn remote(
3373 id: RepositoryId,
3374 work_directory_abs_path: Arc<Path>,
3375 path_style: PathStyle,
3376 project_id: ProjectId,
3377 client: AnyProtoClient,
3378 git_store: WeakEntity<GitStore>,
3379 cx: &mut Context<Self>,
3380 ) -> Self {
3381 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3382 Self {
3383 this: cx.weak_entity(),
3384 snapshot,
3385 commit_message_buffer: None,
3386 git_store,
3387 paths_needing_status_update: Default::default(),
3388 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3389 askpass_delegates: Default::default(),
3390 latest_askpass_id: 0,
3391 active_jobs: Default::default(),
3392 job_id: 0,
3393 }
3394 }
3395
3396 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3397 self.git_store.upgrade()
3398 }
3399
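    /// Reloads the index and HEAD base texts for every open buffer belonging to
    /// this repository, forwards any changes to the downstream client, and applies
    /// them to each buffer's git state.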
3400 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3401 let this = cx.weak_entity();
3402 let git_store = self.git_store.clone();
3403 let _ = self.send_keyed_job(
3404 Some(GitJobKey::ReloadBufferDiffBases),
3405 None,
3406 |state, mut cx| async move {
3407 let RepositoryState::Local { backend, .. } = state else {
3408 log::error!("tried to recompute diffs for a non-local repository");
3409 return Ok(());
3410 };
3411
3412 let Some(this) = this.upgrade() else {
3413 return Ok(());
3414 };
3415
3416 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3417 git_store.update(cx, |git_store, cx| {
3418 git_store
3419 .diffs
3420 .iter()
3421 .filter_map(|(buffer_id, diff_state)| {
3422 let buffer_store = git_store.buffer_store.read(cx);
3423 let buffer = buffer_store.get(*buffer_id)?;
3424 let file = File::from_dyn(buffer.read(cx).file())?;
3425 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3426 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3427 log::debug!(
3428 "start reload diff bases for repo path {}",
3429 repo_path.as_unix_str()
3430 );
3431 diff_state.update(cx, |diff_state, _| {
3432 let has_unstaged_diff = diff_state
3433 .unstaged_diff
3434 .as_ref()
3435 .is_some_and(|diff| diff.is_upgradable());
3436 let has_uncommitted_diff = diff_state
3437 .uncommitted_diff
3438 .as_ref()
3439 .is_some_and(|set| set.is_upgradable());
3440
3441 Some((
3442 buffer,
3443 repo_path,
3444 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3445 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3446 ))
3447 })
3448 })
3449 .collect::<Vec<_>>()
3450 })
3451 })??;
3452
3453 let buffer_diff_base_changes = cx
3454 .background_spawn(async move {
3455 let mut changes = Vec::new();
3456 for (buffer, repo_path, current_index_text, current_head_text) in
3457 &repo_diff_state_updates
3458 {
3459 let index_text = if current_index_text.is_some() {
3460 backend.load_index_text(repo_path.clone()).await
3461 } else {
3462 None
3463 };
3464 let head_text = if current_head_text.is_some() {
3465 backend.load_committed_text(repo_path.clone()).await
3466 } else {
3467 None
3468 };
3469
3470 let change =
3471 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3472 (Some(current_index), Some(current_head)) => {
3473 let index_changed =
3474 index_text.as_ref() != current_index.as_deref();
3475 let head_changed =
3476 head_text.as_ref() != current_head.as_deref();
3477 if index_changed && head_changed {
3478 if index_text == head_text {
3479 Some(DiffBasesChange::SetBoth(head_text))
3480 } else {
3481 Some(DiffBasesChange::SetEach {
3482 index: index_text,
3483 head: head_text,
3484 })
3485 }
3486 } else if index_changed {
3487 Some(DiffBasesChange::SetIndex(index_text))
3488 } else if head_changed {
3489 Some(DiffBasesChange::SetHead(head_text))
3490 } else {
3491 None
3492 }
3493 }
3494 (Some(current_index), None) => {
3495 let index_changed =
3496 index_text.as_ref() != current_index.as_deref();
3497 index_changed
3498 .then_some(DiffBasesChange::SetIndex(index_text))
3499 }
3500 (None, Some(current_head)) => {
3501 let head_changed =
3502 head_text.as_ref() != current_head.as_deref();
3503 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3504 }
3505 (None, None) => None,
3506 };
3507
3508 changes.push((buffer.clone(), change))
3509 }
3510 changes
3511 })
3512 .await;
3513
3514 git_store.update(&mut cx, |git_store, cx| {
3515 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3516 let buffer_snapshot = buffer.read(cx).text_snapshot();
3517 let buffer_id = buffer_snapshot.remote_id();
3518 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3519 continue;
3520 };
3521
3522 let downstream_client = git_store.downstream_client();
3523 diff_state.update(cx, |diff_state, cx| {
3524 use proto::update_diff_bases::Mode;
3525
3526 if let Some((diff_bases_change, (client, project_id))) =
3527 diff_bases_change.clone().zip(downstream_client)
3528 {
3529 let (staged_text, committed_text, mode) = match diff_bases_change {
3530 DiffBasesChange::SetIndex(index) => {
3531 (index, None, Mode::IndexOnly)
3532 }
3533 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3534 DiffBasesChange::SetEach { index, head } => {
3535 (index, head, Mode::IndexAndHead)
3536 }
3537 DiffBasesChange::SetBoth(text) => {
3538 (None, text, Mode::IndexMatchesHead)
3539 }
3540 };
3541 client
3542 .send(proto::UpdateDiffBases {
3543 project_id: project_id.to_proto(),
3544 buffer_id: buffer_id.to_proto(),
3545 staged_text,
3546 committed_text,
3547 mode: mode as i32,
3548 })
3549 .log_err();
3550 }
3551
3552 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3553 });
3554 }
3555 })
3556 },
3557 );
3558 }
3559
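    /// Enqueues a job on this repository's serialized git worker. If a status
    /// message is provided, it is recorded as an active job (with its start time)
    /// while the work runs. The returned receiver yields the job's result.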
3560 pub fn send_job<F, Fut, R>(
3561 &mut self,
3562 status: Option<SharedString>,
3563 job: F,
3564 ) -> oneshot::Receiver<R>
3565 where
3566 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3567 Fut: Future<Output = R> + 'static,
3568 R: Send + 'static,
3569 {
3570 self.send_keyed_job(None, status, job)
3571 }
3572
3573 fn send_keyed_job<F, Fut, R>(
3574 &mut self,
3575 key: Option<GitJobKey>,
3576 status: Option<SharedString>,
3577 job: F,
3578 ) -> oneshot::Receiver<R>
3579 where
3580 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3581 Fut: Future<Output = R> + 'static,
3582 R: Send + 'static,
3583 {
3584 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3585 let job_id = post_inc(&mut self.job_id);
3586 let this = self.this.clone();
3587 self.job_sender
3588 .unbounded_send(GitJob {
3589 key,
3590 job: Box::new(move |state, cx: &mut AsyncApp| {
3591 let job = job(state, cx.clone());
3592 cx.spawn(async move |cx| {
3593 if let Some(s) = status.clone() {
3594 this.update(cx, |this, cx| {
3595 this.active_jobs.insert(
3596 job_id,
3597 JobInfo {
3598 start: Instant::now(),
3599 message: s.clone(),
3600 },
3601 );
3602
3603 cx.notify();
3604 })
3605 .ok();
3606 }
3607 let result = job.await;
3608
3609 this.update(cx, |this, cx| {
3610 this.active_jobs.remove(&job_id);
3611 cx.notify();
3612 })
3613 .ok();
3614
3615 result_tx.send(result).ok();
3616 })
3617 }),
3618 })
3619 .ok();
3620 result_rx
3621 }
3622
3623 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3624 let Some(git_store) = self.git_store.upgrade() else {
3625 return;
3626 };
3627 let entity = cx.entity();
3628 git_store.update(cx, |git_store, cx| {
3629 let Some((&id, _)) = git_store
3630 .repositories
3631 .iter()
3632 .find(|(_, handle)| *handle == &entity)
3633 else {
3634 return;
3635 };
3636 git_store.active_repo_id = Some(id);
3637 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3638 });
3639 }
3640
3641 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3642 self.snapshot.status()
3643 }
3644
3645 pub fn cached_stash(&self) -> GitStash {
3646 self.snapshot.stash_entries.clone()
3647 }
3648
3649 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3650 let git_store = self.git_store.upgrade()?;
3651 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3652 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3653 let abs_path = SanitizedPath::new(&abs_path);
3654 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3655 Some(ProjectPath {
3656 worktree_id: worktree.read(cx).id(),
3657 path: relative_path,
3658 })
3659 }
3660
3661 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3662 let git_store = self.git_store.upgrade()?;
3663 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3664 let abs_path = worktree_store.absolutize(path, cx)?;
3665 self.snapshot.abs_path_to_repo_path(&abs_path)
3666 }
3667
3668 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3669 other
3670 .read(cx)
3671 .snapshot
3672 .work_directory_abs_path
3673 .starts_with(&self.snapshot.work_directory_abs_path)
3674 }
3675
3676 pub fn open_commit_buffer(
3677 &mut self,
3678 languages: Option<Arc<LanguageRegistry>>,
3679 buffer_store: Entity<BufferStore>,
3680 cx: &mut Context<Self>,
3681 ) -> Task<Result<Entity<Buffer>>> {
3682 let id = self.id;
3683 if let Some(buffer) = self.commit_message_buffer.clone() {
3684 return Task::ready(Ok(buffer));
3685 }
3686 let this = cx.weak_entity();
3687
3688 let rx = self.send_job(None, move |state, mut cx| async move {
3689 let Some(this) = this.upgrade() else {
3690 bail!("git store was dropped");
3691 };
3692 match state {
3693 RepositoryState::Local { .. } => {
3694 this.update(&mut cx, |_, cx| {
3695 Self::open_local_commit_buffer(languages, buffer_store, cx)
3696 })?
3697 .await
3698 }
3699 RepositoryState::Remote { project_id, client } => {
3700 let request = client.request(proto::OpenCommitMessageBuffer {
3701 project_id: project_id.0,
3702 repository_id: id.to_proto(),
3703 });
3704 let response = request.await.context("requesting to open commit buffer")?;
3705 let buffer_id = BufferId::new(response.buffer_id)?;
3706 let buffer = buffer_store
3707 .update(&mut cx, |buffer_store, cx| {
3708 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3709 })?
3710 .await?;
3711 if let Some(language_registry) = languages {
3712 let git_commit_language =
3713 language_registry.language_for_name("Git Commit").await?;
3714 buffer.update(&mut cx, |buffer, cx| {
3715 buffer.set_language(Some(git_commit_language), cx);
3716 })?;
3717 }
3718 this.update(&mut cx, |this, _| {
3719 this.commit_message_buffer = Some(buffer.clone());
3720 })?;
3721 Ok(buffer)
3722 }
3723 }
3724 });
3725
3726 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3727 }
3728
3729 fn open_local_commit_buffer(
3730 language_registry: Option<Arc<LanguageRegistry>>,
3731 buffer_store: Entity<BufferStore>,
3732 cx: &mut Context<Self>,
3733 ) -> Task<Result<Entity<Buffer>>> {
3734 cx.spawn(async move |repository, cx| {
3735 let buffer = buffer_store
3736 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3737 .await?;
3738
3739 if let Some(language_registry) = language_registry {
3740 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3741 buffer.update(cx, |buffer, cx| {
3742 buffer.set_language(Some(git_commit_language), cx);
3743 })?;
3744 }
3745
3746 repository.update(cx, |repository, _| {
3747 repository.commit_message_buffer = Some(buffer.clone());
3748 })?;
3749 Ok(buffer)
3750 })
3751 }
3752
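 /// Restores the given paths to their contents at `commit`, recording a pending
 /// `Reverted` operation for each path while the job runs.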
3753 pub fn checkout_files(
3754 &mut self,
3755 commit: &str,
3756 paths: Vec<RepoPath>,
3757 cx: &mut Context<Self>,
3758 ) -> Task<Result<()>> {
3759 let commit = commit.to_string();
3760 let id = self.id;
3761
3762 self.spawn_job_with_tracking(
3763 paths.clone(),
3764 pending_op::GitStatus::Reverted,
3765 cx,
3766 async move |this, cx| {
3767 this.update(cx, |this, _cx| {
3768 this.send_job(
3769 Some(format!("git checkout {}", commit).into()),
3770 move |git_repo, _| async move {
3771 match git_repo {
3772 RepositoryState::Local {
3773 backend,
3774 environment,
3775 ..
3776 } => {
3777 backend
3778 .checkout_files(commit, paths, environment.clone())
3779 .await
3780 }
3781 RepositoryState::Remote { project_id, client } => {
3782 client
3783 .request(proto::GitCheckoutFiles {
3784 project_id: project_id.0,
3785 repository_id: id.to_proto(),
3786 commit,
3787 paths: paths
3788 .into_iter()
3789 .map(|p| p.to_proto())
3790 .collect(),
3791 })
3792 .await?;
3793
3794 Ok(())
3795 }
3796 }
3797 },
3798 )
3799 })?
3800 .await?
3801 },
3802 )
3803 }
3804
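 /// Resets HEAD to `commit` using the given `ResetMode`.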
3805 pub fn reset(
3806 &mut self,
3807 commit: String,
3808 reset_mode: ResetMode,
3809 _cx: &mut App,
3810 ) -> oneshot::Receiver<Result<()>> {
3811 let id = self.id;
3812
3813 self.send_job(None, move |git_repo, _| async move {
3814 match git_repo {
3815 RepositoryState::Local {
3816 backend,
3817 environment,
3818 ..
3819 } => backend.reset(commit, reset_mode, environment).await,
3820 RepositoryState::Remote { project_id, client } => {
3821 client
3822 .request(proto::GitReset {
3823 project_id: project_id.0,
3824 repository_id: id.to_proto(),
3825 commit,
3826 mode: match reset_mode {
3827 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3828 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3829 },
3830 })
3831 .await?;
3832
3833 Ok(())
3834 }
3835 }
3836 })
3837 }
3838
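 /// Loads the details (sha, message, author, and timestamp) of the given commit.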
3839 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3840 let id = self.id;
3841 self.send_job(None, move |git_repo, _cx| async move {
3842 match git_repo {
3843 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3844 RepositoryState::Remote { project_id, client } => {
3845 let resp = client
3846 .request(proto::GitShow {
3847 project_id: project_id.0,
3848 repository_id: id.to_proto(),
3849 commit,
3850 })
3851 .await?;
3852
3853 Ok(CommitDetails {
3854 sha: resp.sha.into(),
3855 message: resp.message.into(),
3856 commit_timestamp: resp.commit_timestamp,
3857 author_email: resp.author_email.into(),
3858 author_name: resp.author_name.into(),
3859 })
3860 }
3861 }
3862 })
3863 }
3864
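 /// Loads the diff introduced by the given commit, including the old and new text
 /// of each changed file.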
3865 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3866 let id = self.id;
3867 self.send_job(None, move |git_repo, cx| async move {
3868 match git_repo {
3869 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3870 RepositoryState::Remote {
3871 client, project_id, ..
3872 } => {
3873 let response = client
3874 .request(proto::LoadCommitDiff {
3875 project_id: project_id.0,
3876 repository_id: id.to_proto(),
3877 commit,
3878 })
3879 .await?;
3880 Ok(CommitDiff {
3881 files: response
3882 .files
3883 .into_iter()
3884 .map(|file| {
3885 Ok(CommitFile {
3886 path: RepoPath::from_proto(&file.path)?,
3887 old_text: file.old_text,
3888 new_text: file.new_text,
3889 })
3890 })
3891 .collect::<Result<Vec<_>>>()?,
3892 })
3893 }
3894 }
3895 })
3896 }
3897
3898 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3899 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3900 }
3901
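 /// Returns save tasks for any open buffers at the given paths that still exist on
 /// disk and have unsaved edits.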
3902 fn save_buffers<'a>(
3903 &self,
3904 entries: impl IntoIterator<Item = &'a RepoPath>,
3905 cx: &mut Context<Self>,
3906 ) -> Vec<Task<anyhow::Result<()>>> {
3907 let mut save_futures = Vec::new();
3908 if let Some(buffer_store) = self.buffer_store(cx) {
3909 buffer_store.update(cx, |buffer_store, cx| {
3910 for path in entries {
3911 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3912 continue;
3913 };
3914 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3915 && buffer
3916 .read(cx)
3917 .file()
3918 .is_some_and(|file| file.disk_state().exists())
3919 && buffer.read(cx).has_unsaved_edits()
3920 {
3921 save_futures.push(buffer_store.save_buffer(buffer, cx));
3922 }
3923 }
3924 })
3925 }
3926 save_futures
3927 }
3928
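 /// Stages the given paths, saving any buffers with unsaved edits first. The work is
 /// tracked as a pending `Staged` operation and keyed so that index writes for the
 /// same paths are coalesced.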
3929 pub fn stage_entries(
3930 &mut self,
3931 entries: Vec<RepoPath>,
3932 cx: &mut Context<Self>,
3933 ) -> Task<anyhow::Result<()>> {
3934 if entries.is_empty() {
3935 return Task::ready(Ok(()));
3936 }
3937 let id = self.id;
3938 let save_tasks = self.save_buffers(&entries, cx);
3939 let paths = entries
3940 .iter()
3941 .map(|p| p.as_unix_str())
3942 .collect::<Vec<_>>()
3943 .join(" ");
3944 let status = format!("git add {paths}");
3945 let job_key = GitJobKey::WriteIndex(entries.clone());
3946
3947 self.spawn_job_with_tracking(
3948 entries.clone(),
3949 pending_op::GitStatus::Staged,
3950 cx,
3951 async move |this, cx| {
3952 for save_task in save_tasks {
3953 save_task.await?;
3954 }
3955
3956 this.update(cx, |this, _| {
3957 this.send_keyed_job(
3958 Some(job_key),
3959 Some(status.into()),
3960 move |git_repo, _cx| async move {
3961 match git_repo {
3962 RepositoryState::Local {
3963 backend,
3964 environment,
3965 ..
3966 } => backend.stage_paths(entries, environment.clone()).await,
3967 RepositoryState::Remote { project_id, client } => {
3968 client
3969 .request(proto::Stage {
3970 project_id: project_id.0,
3971 repository_id: id.to_proto(),
3972 paths: entries
3973 .into_iter()
3974 .map(|repo_path| repo_path.to_proto())
3975 .collect(),
3976 })
3977 .await
3978 .context("sending stage request")?;
3979
3980 Ok(())
3981 }
3982 }
3983 },
3984 )
3985 })?
3986 .await?
3987 },
3988 )
3989 }
3990
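 /// Unstages the given paths, saving any buffers with unsaved edits first. The work
 /// is tracked as a pending `Unstaged` operation.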
3991 pub fn unstage_entries(
3992 &mut self,
3993 entries: Vec<RepoPath>,
3994 cx: &mut Context<Self>,
3995 ) -> Task<anyhow::Result<()>> {
3996 if entries.is_empty() {
3997 return Task::ready(Ok(()));
3998 }
3999 let id = self.id;
4000 let save_tasks = self.save_buffers(&entries, cx);
4001 let paths = entries
4002 .iter()
4003 .map(|p| p.as_unix_str())
4004 .collect::<Vec<_>>()
4005 .join(" ");
4006 let status = format!("git reset {paths}");
4007 let job_key = GitJobKey::WriteIndex(entries.clone());
4008
4009 self.spawn_job_with_tracking(
4010 entries.clone(),
4011 pending_op::GitStatus::Unstaged,
4012 cx,
4013 async move |this, cx| {
4014 for save_task in save_tasks {
4015 save_task.await?;
4016 }
4017
4018 this.update(cx, |this, _| {
4019 this.send_keyed_job(
4020 Some(job_key),
4021 Some(status.into()),
4022 move |git_repo, _cx| async move {
4023 match git_repo {
4024 RepositoryState::Local {
4025 backend,
4026 environment,
4027 ..
4028 } => backend.unstage_paths(entries, environment).await,
4029 RepositoryState::Remote { project_id, client } => {
4030 client
4031 .request(proto::Unstage {
4032 project_id: project_id.0,
4033 repository_id: id.to_proto(),
4034 paths: entries
4035 .into_iter()
4036 .map(|repo_path| repo_path.to_proto())
4037 .collect(),
4038 })
4039 .await
4040 .context("sending unstage request")?;
4041
4042 Ok(())
4043 }
4044 }
4045 },
4046 )
4047 })?
4048 .await?
4049 },
4050 )
4051 }
4052
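 /// Stages every entry in the cached status that has nothing staged yet and no
 /// pending staging operation.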
4053 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4054 let to_stage = self
4055 .cached_status()
4056 .filter_map(|entry| {
4057 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4058 if ops.staging() || ops.staged() {
4059 None
4060 } else {
4061 Some(entry.repo_path)
4062 }
4063 } else if entry.status.staging().has_staged() {
4064 None
4065 } else {
4066 Some(entry.repo_path)
4067 }
4068 })
4069 .collect();
4070 self.stage_entries(to_stage, cx)
4071 }
4072
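 /// Unstages every entry in the cached status that is fully staged or has a pending
 /// staging operation.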
4073 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4074 let to_unstage = self
4075 .cached_status()
4076 .filter_map(|entry| {
4077 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4078 if !ops.staging() && !ops.staged() {
4079 None
4080 } else {
4081 Some(entry.repo_path)
4082 }
4083 } else if entry.status.staging().has_unstaged() {
4084 None
4085 } else {
4086 Some(entry.repo_path)
4087 }
4088 })
4089 .collect();
4090 self.unstage_entries(to_unstage, cx)
4091 }
4092
4093 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4094 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4095
4096 self.stash_entries(to_stash, cx)
4097 }
4098
4099 pub fn stash_entries(
4100 &mut self,
4101 entries: Vec<RepoPath>,
4102 cx: &mut Context<Self>,
4103 ) -> Task<anyhow::Result<()>> {
4104 let id = self.id;
4105
4106 cx.spawn(async move |this, cx| {
4107 this.update(cx, |this, _| {
4108 this.send_job(None, move |git_repo, _cx| async move {
4109 match git_repo {
4110 RepositoryState::Local {
4111 backend,
4112 environment,
4113 ..
4114 } => backend.stash_paths(entries, environment).await,
4115 RepositoryState::Remote { project_id, client } => {
4116 client
4117 .request(proto::Stash {
4118 project_id: project_id.0,
4119 repository_id: id.to_proto(),
4120 paths: entries
4121 .into_iter()
4122 .map(|repo_path| repo_path.to_proto())
4123 .collect(),
4124 })
4125 .await
4126 .context("sending stash request")?;
4127 Ok(())
4128 }
4129 }
4130 })
4131 })?
4132 .await??;
4133 Ok(())
4134 })
4135 }
4136
4137 pub fn stash_pop(
4138 &mut self,
4139 index: Option<usize>,
4140 cx: &mut Context<Self>,
4141 ) -> Task<anyhow::Result<()>> {
4142 let id = self.id;
4143 cx.spawn(async move |this, cx| {
4144 this.update(cx, |this, _| {
4145 this.send_job(None, move |git_repo, _cx| async move {
4146 match git_repo {
4147 RepositoryState::Local {
4148 backend,
4149 environment,
4150 ..
4151 } => backend.stash_pop(index, environment).await,
4152 RepositoryState::Remote { project_id, client } => {
4153 client
4154 .request(proto::StashPop {
4155 project_id: project_id.0,
4156 repository_id: id.to_proto(),
4157 stash_index: index.map(|i| i as u64),
4158 })
4159 .await
4160 .context("sending stash pop request")?;
4161 Ok(())
4162 }
4163 }
4164 })
4165 })?
4166 .await??;
4167 Ok(())
4168 })
4169 }
4170
4171 pub fn stash_apply(
4172 &mut self,
4173 index: Option<usize>,
4174 cx: &mut Context<Self>,
4175 ) -> Task<anyhow::Result<()>> {
4176 let id = self.id;
4177 cx.spawn(async move |this, cx| {
4178 this.update(cx, |this, _| {
4179 this.send_job(None, move |git_repo, _cx| async move {
4180 match git_repo {
4181 RepositoryState::Local {
4182 backend,
4183 environment,
4184 ..
4185 } => backend.stash_apply(index, environment).await,
4186 RepositoryState::Remote { project_id, client } => {
4187 client
4188 .request(proto::StashApply {
4189 project_id: project_id.0,
4190 repository_id: id.to_proto(),
4191 stash_index: index.map(|i| i as u64),
4192 })
4193 .await
4194 .context("sending stash apply request")?;
4195 Ok(())
4196 }
4197 }
4198 })
4199 })?
4200 .await??;
4201 Ok(())
4202 })
4203 }
4204
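 /// Drops the stash entry at `index` (the latest entry when `None`). For local
 /// repositories the cached stash list is refreshed afterwards and forwarded to any
 /// downstream collaborators.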
4205 pub fn stash_drop(
4206 &mut self,
4207 index: Option<usize>,
4208 cx: &mut Context<Self>,
4209 ) -> oneshot::Receiver<anyhow::Result<()>> {
4210 let id = self.id;
4211 let updates_tx = self
4212 .git_store()
4213 .and_then(|git_store| match &git_store.read(cx).state {
4214 GitStoreState::Local { downstream, .. } => downstream
4215 .as_ref()
4216 .map(|downstream| downstream.updates_tx.clone()),
4217 _ => None,
4218 });
4219 let this = cx.weak_entity();
4220 self.send_job(None, move |git_repo, mut cx| async move {
4221 match git_repo {
4222 RepositoryState::Local {
4223 backend,
4224 environment,
4225 ..
4226 } => {
4227 // TODO: it would be nice not to have to do this manually
4228 let result = backend.stash_drop(index, environment).await;
4229 if result.is_ok()
4230 && let Ok(stash_entries) = backend.stash_entries().await
4231 {
4232 let snapshot = this.update(&mut cx, |this, cx| {
4233 this.snapshot.stash_entries = stash_entries;
4234 cx.emit(RepositoryEvent::StashEntriesChanged);
4235 this.snapshot.clone()
4236 })?;
4237 if let Some(updates_tx) = updates_tx {
4238 updates_tx
4239 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4240 .ok();
4241 }
4242 }
4243
4244 result
4245 }
4246 RepositoryState::Remote { project_id, client } => {
4247 client
4248 .request(proto::StashDrop {
4249 project_id: project_id.0,
4250 repository_id: id.to_proto(),
4251 stash_index: index.map(|i| i as u64),
4252 })
4253 .await
4254 .context("sending stash pop request")?;
4255 Ok(())
4256 }
4257 }
4258 })
4259 }
4260
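 /// Creates a commit with the given message, optional author override, and options.
 /// For remote repositories the askpass delegate is registered under an id that is
 /// sent with the request so that prompts can be relayed back through it.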
4261 pub fn commit(
4262 &mut self,
4263 message: SharedString,
4264 name_and_email: Option<(SharedString, SharedString)>,
4265 options: CommitOptions,
4266 askpass: AskPassDelegate,
4267 _cx: &mut App,
4268 ) -> oneshot::Receiver<Result<()>> {
4269 let id = self.id;
4270 let askpass_delegates = self.askpass_delegates.clone();
4271 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4272
4273 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4274 match git_repo {
4275 RepositoryState::Local {
4276 backend,
4277 environment,
4278 ..
4279 } => {
4280 backend
4281 .commit(message, name_and_email, options, askpass, environment)
4282 .await
4283 }
4284 RepositoryState::Remote { project_id, client } => {
4285 askpass_delegates.lock().insert(askpass_id, askpass);
4286 let _defer = util::defer(|| {
4287 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4288 debug_assert!(askpass_delegate.is_some());
4289 });
4290 let (name, email) = name_and_email.unzip();
4291 client
4292 .request(proto::Commit {
4293 project_id: project_id.0,
4294 repository_id: id.to_proto(),
4295 message: String::from(message),
4296 name: name.map(String::from),
4297 email: email.map(String::from),
4298 options: Some(proto::commit::CommitOptions {
4299 amend: options.amend,
4300 signoff: options.signoff,
4301 }),
4302 askpass_id,
4303 })
4304 .await
4305 .context("sending commit request")?;
4306
4307 Ok(())
4308 }
4309 }
4310 })
4311 }
4312
4313 pub fn fetch(
4314 &mut self,
4315 fetch_options: FetchOptions,
4316 askpass: AskPassDelegate,
4317 _cx: &mut App,
4318 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4319 let askpass_delegates = self.askpass_delegates.clone();
4320 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4321 let id = self.id;
4322
4323 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4324 match git_repo {
4325 RepositoryState::Local {
4326 backend,
4327 environment,
4328 ..
4329 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4330 RepositoryState::Remote { project_id, client } => {
4331 askpass_delegates.lock().insert(askpass_id, askpass);
4332 let _defer = util::defer(|| {
4333 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4334 debug_assert!(askpass_delegate.is_some());
4335 });
4336
4337 let response = client
4338 .request(proto::Fetch {
4339 project_id: project_id.0,
4340 repository_id: id.to_proto(),
4341 askpass_id,
4342 remote: fetch_options.to_proto(),
4343 })
4344 .await
4345 .context("sending fetch request")?;
4346
4347 Ok(RemoteCommandOutput {
4348 stdout: response.stdout,
4349 stderr: response.stderr,
4350 })
4351 }
4352 }
4353 })
4354 }
4355
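 /// Pushes `branch` to `remote` with the given options. On success against a local
 /// repository, the cached branch information is refreshed and forwarded to any
 /// downstream collaborators.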
4356 pub fn push(
4357 &mut self,
4358 branch: SharedString,
4359 remote: SharedString,
4360 options: Option<PushOptions>,
4361 askpass: AskPassDelegate,
4362 cx: &mut Context<Self>,
4363 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4364 let askpass_delegates = self.askpass_delegates.clone();
4365 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4366 let id = self.id;
4367
4368 let args = options
4369 .map(|option| match option {
4370 PushOptions::SetUpstream => " --set-upstream",
4371 PushOptions::Force => " --force-with-lease",
4372 })
4373 .unwrap_or("");
4374
4375 let updates_tx = self
4376 .git_store()
4377 .and_then(|git_store| match &git_store.read(cx).state {
4378 GitStoreState::Local { downstream, .. } => downstream
4379 .as_ref()
4380 .map(|downstream| downstream.updates_tx.clone()),
4381 _ => None,
4382 });
4383
4384 let this = cx.weak_entity();
4385 self.send_job(
4386 Some(format!("git push {} {} {}", args, remote, branch).into()),
4387 move |git_repo, mut cx| async move {
4388 match git_repo {
4389 RepositoryState::Local {
4390 backend,
4391 environment,
4392 ..
4393 } => {
4394 let result = backend
4395 .push(
4396 branch.to_string(),
4397 remote.to_string(),
4398 options,
4399 askpass,
4400 environment.clone(),
4401 cx.clone(),
4402 )
4403 .await;
4404 // TODO: it would be nice not to have to do this manually
4405 if result.is_ok() {
4406 let branches = backend.branches().await?;
4407 let branch = branches.into_iter().find(|branch| branch.is_head);
4408 log::info!("head branch after scan is {branch:?}");
4409 let snapshot = this.update(&mut cx, |this, cx| {
4410 this.snapshot.branch = branch;
4411 cx.emit(RepositoryEvent::BranchChanged);
4412 this.snapshot.clone()
4413 })?;
4414 if let Some(updates_tx) = updates_tx {
4415 updates_tx
4416 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4417 .ok();
4418 }
4419 }
4420 result
4421 }
4422 RepositoryState::Remote { project_id, client } => {
4423 askpass_delegates.lock().insert(askpass_id, askpass);
4424 let _defer = util::defer(|| {
4425 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4426 debug_assert!(askpass_delegate.is_some());
4427 });
4428 let response = client
4429 .request(proto::Push {
4430 project_id: project_id.0,
4431 repository_id: id.to_proto(),
4432 askpass_id,
4433 branch_name: branch.to_string(),
4434 remote_name: remote.to_string(),
4435 options: options.map(|options| match options {
4436 PushOptions::Force => proto::push::PushOptions::Force,
4437 PushOptions::SetUpstream => {
4438 proto::push::PushOptions::SetUpstream
4439 }
4440 }
4441 as i32),
4442 })
4443 .await
4444 .context("sending push request")?;
4445
4446 Ok(RemoteCommandOutput {
4447 stdout: response.stdout,
4448 stderr: response.stderr,
4449 })
4450 }
4451 }
4452 },
4453 )
4454 }
4455
4456 pub fn pull(
4457 &mut self,
4458 branch: Option<SharedString>,
4459 remote: SharedString,
4460 rebase: bool,
4461 askpass: AskPassDelegate,
4462 _cx: &mut App,
4463 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4464 let askpass_delegates = self.askpass_delegates.clone();
4465 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4466 let id = self.id;
4467
4468 let mut status = "git pull".to_string();
4469 if rebase {
4470 status.push_str(" --rebase");
4471 }
4472 status.push_str(&format!(" {}", remote));
4473 if let Some(b) = &branch {
4474 status.push_str(&format!(" {}", b));
4475 }
4476
4477 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4478 match git_repo {
4479 RepositoryState::Local {
4480 backend,
4481 environment,
4482 ..
4483 } => {
4484 backend
4485 .pull(
4486 branch.as_ref().map(|b| b.to_string()),
4487 remote.to_string(),
4488 rebase,
4489 askpass,
4490 environment.clone(),
4491 cx,
4492 )
4493 .await
4494 }
4495 RepositoryState::Remote { project_id, client } => {
4496 askpass_delegates.lock().insert(askpass_id, askpass);
4497 let _defer = util::defer(|| {
4498 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4499 debug_assert!(askpass_delegate.is_some());
4500 });
4501 let response = client
4502 .request(proto::Pull {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 askpass_id,
4506 rebase,
4507 branch_name: branch.as_ref().map(|b| b.to_string()),
4508 remote_name: remote.to_string(),
4509 })
4510 .await
4511 .context("sending pull request")?;
4512
4513 Ok(RemoteCommandOutput {
4514 stdout: response.stdout,
4515 stderr: response.stderr,
4516 })
4517 }
4518 }
4519 })
4520 }
4521
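 /// Queues a job that writes `content` as the index text for `path`, keyed so that
 /// successive writes to the same path are coalesced. If a hunk-staging operation
 /// count is supplied, it is recorded on the buffer's diff state once the write lands.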
4522 fn spawn_set_index_text_job(
4523 &mut self,
4524 path: RepoPath,
4525 content: Option<String>,
4526 hunk_staging_operation_count: Option<usize>,
4527 cx: &mut Context<Self>,
4528 ) -> oneshot::Receiver<anyhow::Result<()>> {
4529 let id = self.id;
4530 let this = cx.weak_entity();
4531 let git_store = self.git_store.clone();
4532 self.send_keyed_job(
4533 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4534 None,
4535 move |git_repo, mut cx| async move {
4536 log::debug!(
4537 "start updating index text for buffer {}",
4538 path.as_unix_str()
4539 );
4540 match git_repo {
4541 RepositoryState::Local {
4542 backend,
4543 environment,
4544 ..
4545 } => {
4546 backend
4547 .set_index_text(path.clone(), content, environment.clone())
4548 .await?;
4549 }
4550 RepositoryState::Remote { project_id, client } => {
4551 client
4552 .request(proto::SetIndexText {
4553 project_id: project_id.0,
4554 repository_id: id.to_proto(),
4555 path: path.to_proto(),
4556 text: content,
4557 })
4558 .await?;
4559 }
4560 }
4561 log::debug!(
4562 "finish updating index text for buffer {}",
4563 path.as_unix_str()
4564 );
4565
4566 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4567 let project_path = this
4568 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4569 .ok()
4570 .flatten();
4571 git_store.update(&mut cx, |git_store, cx| {
4572 let buffer_id = git_store
4573 .buffer_store
4574 .read(cx)
4575 .get_by_path(&project_path?)?
4576 .read(cx)
4577 .remote_id();
4578 let diff_state = git_store.diffs.get(&buffer_id)?;
4579 diff_state.update(cx, |diff_state, _| {
4580 diff_state.hunk_staging_operation_count_as_of_write =
4581 hunk_staging_operation_count;
4582 });
4583 Some(())
4584 })?;
4585 }
4586 Ok(())
4587 },
4588 )
4589 }
4590
4591 pub fn get_remotes(
4592 &mut self,
4593 branch_name: Option<String>,
4594 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4595 let id = self.id;
4596 self.send_job(None, move |repo, _cx| async move {
4597 match repo {
4598 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4599 RepositoryState::Remote { project_id, client } => {
4600 let response = client
4601 .request(proto::GetRemotes {
4602 project_id: project_id.0,
4603 repository_id: id.to_proto(),
4604 branch_name,
4605 })
4606 .await?;
4607
4608 let remotes = response
4609 .remotes
4610 .into_iter()
4611 .map(|remote| git::repository::Remote {
4612 name: remote.name.into(),
4613 })
4614 .collect();
4615
4616 Ok(remotes)
4617 }
4618 }
4619 })
4620 }
4621
4622 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4623 let id = self.id;
4624 self.send_job(None, move |repo, _| async move {
4625 match repo {
4626 RepositoryState::Local { backend, .. } => backend.branches().await,
4627 RepositoryState::Remote { project_id, client } => {
4628 let response = client
4629 .request(proto::GitGetBranches {
4630 project_id: project_id.0,
4631 repository_id: id.to_proto(),
4632 })
4633 .await?;
4634
4635 let branches = response
4636 .branches
4637 .into_iter()
4638 .map(|branch| proto_to_branch(&branch))
4639 .collect();
4640
4641 Ok(branches)
4642 }
4643 }
4644 })
4645 }
4646
4647 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4648 let id = self.id;
4649 self.send_job(None, move |repo, _| async move {
4650 match repo {
4651 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4652 RepositoryState::Remote { project_id, client } => {
4653 let response = client
4654 .request(proto::GitGetWorktrees {
4655 project_id: project_id.0,
4656 repository_id: id.to_proto(),
4657 })
4658 .await?;
4659
4660 let worktrees = response
4661 .worktrees
4662 .into_iter()
4663 .map(|worktree| proto_to_worktree(&worktree))
4664 .collect();
4665
4666 Ok(worktrees)
4667 }
4668 }
4669 })
4670 }
4671
4672 pub fn create_worktree(
4673 &mut self,
4674 name: String,
4675 path: PathBuf,
4676 commit: Option<String>,
4677 ) -> oneshot::Receiver<Result<()>> {
4678 let id = self.id;
4679 self.send_job(
4680 Some("git worktree add".into()),
4681 move |repo, _cx| async move {
4682 match repo {
4683 RepositoryState::Local { backend, .. } => {
4684 backend.create_worktree(name, path, commit).await
4685 }
4686 RepositoryState::Remote { project_id, client } => {
4687 client
4688 .request(proto::GitCreateWorktree {
4689 project_id: project_id.0,
4690 repository_id: id.to_proto(),
4691 name,
4692 directory: path.to_string_lossy().to_string(),
4693 commit,
4694 })
4695 .await?;
4696
4697 Ok(())
4698 }
4699 }
4700 },
4701 )
4702 }
4703
4704 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4705 let id = self.id;
4706 self.send_job(None, move |repo, _| async move {
4707 match repo {
4708 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4709 RepositoryState::Remote { project_id, client } => {
4710 let response = client
4711 .request(proto::GetDefaultBranch {
4712 project_id: project_id.0,
4713 repository_id: id.to_proto(),
4714 })
4715 .await?;
4716
4717 anyhow::Ok(response.branch.map(SharedString::from))
4718 }
4719 }
4720 })
4721 }
4722
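 /// Computes the set of paths that differ between the trees described by `diff_type`,
 /// along with each path's added/modified/deleted status.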
4723 pub fn diff_tree(
4724 &mut self,
4725 diff_type: DiffTreeType,
4726 _cx: &App,
4727 ) -> oneshot::Receiver<Result<TreeDiff>> {
4728 let repository_id = self.snapshot.id;
4729 self.send_job(None, move |repo, _cx| async move {
4730 match repo {
4731 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4732 RepositoryState::Remote { client, project_id } => {
4733 let response = client
4734 .request(proto::GetTreeDiff {
4735 project_id: project_id.0,
4736 repository_id: repository_id.0,
4737 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4738 base: diff_type.base().to_string(),
4739 head: diff_type.head().to_string(),
4740 })
4741 .await?;
4742
4743 let entries = response
4744 .entries
4745 .into_iter()
4746 .filter_map(|entry| {
4747 let status = match entry.status() {
4748 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4749 proto::tree_diff_status::Status::Modified => {
4750 TreeDiffStatus::Modified {
4751 old: git::Oid::from_str(
4752 &entry.oid.context("missing oid").log_err()?,
4753 )
4754 .log_err()?,
4755 }
4756 }
4757 proto::tree_diff_status::Status::Deleted => {
4758 TreeDiffStatus::Deleted {
4759 old: git::Oid::from_str(
4760 &entry.oid.context("missing oid").log_err()?,
4761 )
4762 .log_err()?,
4763 }
4764 }
4765 };
4766 Some((
4767 RepoPath::from_rel_path(
4768 &RelPath::from_proto(&entry.path).log_err()?,
4769 ),
4770 status,
4771 ))
4772 })
4773 .collect();
4774
4775 Ok(TreeDiff { entries })
4776 }
4777 }
4778 })
4779 }
4780
4781 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4782 let id = self.id;
4783 self.send_job(None, move |repo, _cx| async move {
4784 match repo {
4785 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4786 RepositoryState::Remote { project_id, client } => {
4787 let response = client
4788 .request(proto::GitDiff {
4789 project_id: project_id.0,
4790 repository_id: id.to_proto(),
4791 diff_type: match diff_type {
4792 DiffType::HeadToIndex => {
4793 proto::git_diff::DiffType::HeadToIndex.into()
4794 }
4795 DiffType::HeadToWorktree => {
4796 proto::git_diff::DiffType::HeadToWorktree.into()
4797 }
4798 },
4799 })
4800 .await?;
4801
4802 Ok(response.diff)
4803 }
4804 }
4805 })
4806 }
4807
4808 pub fn create_branch(
4809 &mut self,
4810 branch_name: String,
4811 base_branch: Option<String>,
4812 ) -> oneshot::Receiver<Result<()>> {
4813 let id = self.id;
4814 let status_msg = if let Some(ref base) = base_branch {
4815 format!("git switch -c {branch_name} {base}").into()
4816 } else {
4817 format!("git switch -c {branch_name}").into()
4818 };
4819 self.send_job(Some(status_msg), move |repo, _cx| async move {
4820 match repo {
4821 RepositoryState::Local { backend, .. } => {
4822 backend.create_branch(branch_name, base_branch).await
4823 }
4824 RepositoryState::Remote { project_id, client } => {
4825 client
4826 .request(proto::GitCreateBranch {
4827 project_id: project_id.0,
4828 repository_id: id.to_proto(),
4829 branch_name,
4830 })
4831 .await?;
4832
4833 Ok(())
4834 }
4835 }
4836 })
4837 }
4838
4839 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4840 let id = self.id;
4841 self.send_job(
4842 Some(format!("git switch {branch_name}").into()),
4843 move |repo, _cx| async move {
4844 match repo {
4845 RepositoryState::Local { backend, .. } => {
4846 backend.change_branch(branch_name).await
4847 }
4848 RepositoryState::Remote { project_id, client } => {
4849 client
4850 .request(proto::GitChangeBranch {
4851 project_id: project_id.0,
4852 repository_id: id.to_proto(),
4853 branch_name,
4854 })
4855 .await?;
4856
4857 Ok(())
4858 }
4859 }
4860 },
4861 )
4862 }
4863
4864 pub fn rename_branch(
4865 &mut self,
4866 branch: String,
4867 new_name: String,
4868 ) -> oneshot::Receiver<Result<()>> {
4869 let id = self.id;
4870 self.send_job(
4871 Some(format!("git branch -m {branch} {new_name}").into()),
4872 move |repo, _cx| async move {
4873 match repo {
4874 RepositoryState::Local { backend, .. } => {
4875 backend.rename_branch(branch, new_name).await
4876 }
4877 RepositoryState::Remote { project_id, client } => {
4878 client
4879 .request(proto::GitRenameBranch {
4880 project_id: project_id.0,
4881 repository_id: id.to_proto(),
4882 branch,
4883 new_name,
4884 })
4885 .await?;
4886
4887 Ok(())
4888 }
4889 }
4890 },
4891 )
4892 }
4893
4894 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4895 let id = self.id;
4896 self.send_job(None, move |repo, _cx| async move {
4897 match repo {
4898 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4899 RepositoryState::Remote { project_id, client } => {
4900 let response = client
4901 .request(proto::CheckForPushedCommits {
4902 project_id: project_id.0,
4903 repository_id: id.to_proto(),
4904 })
4905 .await?;
4906
4907 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4908
4909 Ok(branches)
4910 }
4911 }
4912 })
4913 }
4914
4915 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4916 self.send_job(None, |repo, _cx| async move {
4917 match repo {
4918 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4919 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4920 }
4921 })
4922 }
4923
4924 pub fn restore_checkpoint(
4925 &mut self,
4926 checkpoint: GitRepositoryCheckpoint,
4927 ) -> oneshot::Receiver<Result<()>> {
4928 self.send_job(None, move |repo, _cx| async move {
4929 match repo {
4930 RepositoryState::Local { backend, .. } => {
4931 backend.restore_checkpoint(checkpoint).await
4932 }
4933 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4934 }
4935 })
4936 }
4937
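 /// Applies a repository update received from the host, replacing the cached branch,
 /// head commit, merge state, stash entries, and statuses, and emitting events for
 /// whatever changed.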
4938 pub(crate) fn apply_remote_update(
4939 &mut self,
4940 update: proto::UpdateRepository,
4941 cx: &mut Context<Self>,
4942 ) -> Result<()> {
4943 let conflicted_paths = TreeSet::from_ordered_entries(
4944 update
4945 .current_merge_conflicts
4946 .into_iter()
4947 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4948 );
4949 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4950 let new_head_commit = update
4951 .head_commit_details
4952 .as_ref()
4953 .map(proto_to_commit_details);
4954 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4955 cx.emit(RepositoryEvent::BranchChanged)
4956 }
4957 self.snapshot.branch = new_branch;
4958 self.snapshot.head_commit = new_head_commit;
4959
4960 self.snapshot.merge.conflicted_paths = conflicted_paths;
4961 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4962 let new_stash_entries = GitStash {
4963 entries: update
4964 .stash_entries
4965 .iter()
4966 .filter_map(|entry| proto_to_stash(entry).ok())
4967 .collect(),
4968 };
4969 if self.snapshot.stash_entries != new_stash_entries {
4970 cx.emit(RepositoryEvent::StashEntriesChanged)
4971 }
4972 self.snapshot.stash_entries = new_stash_entries;
4973
4974 let edits = update
4975 .removed_statuses
4976 .into_iter()
4977 .filter_map(|path| {
4978 Some(sum_tree::Edit::Remove(PathKey(
4979 RelPath::from_proto(&path).log_err()?,
4980 )))
4981 })
4982 .chain(
4983 update
4984 .updated_statuses
4985 .into_iter()
4986 .filter_map(|updated_status| {
4987 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4988 }),
4989 )
4990 .collect::<Vec<_>>();
4991 if !edits.is_empty() {
4992 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4993 }
4994 self.snapshot.statuses_by_path.edit(edits, ());
4995 if update.is_last_update {
4996 self.snapshot.scan_id = update.scan_id;
4997 }
4998 Ok(())
4999 }
5000
5001 pub fn compare_checkpoints(
5002 &mut self,
5003 left: GitRepositoryCheckpoint,
5004 right: GitRepositoryCheckpoint,
5005 ) -> oneshot::Receiver<Result<bool>> {
5006 self.send_job(None, move |repo, _cx| async move {
5007 match repo {
5008 RepositoryState::Local { backend, .. } => {
5009 backend.compare_checkpoints(left, right).await
5010 }
5011 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5012 }
5013 })
5014 }
5015
5016 pub fn diff_checkpoints(
5017 &mut self,
5018 base_checkpoint: GitRepositoryCheckpoint,
5019 target_checkpoint: GitRepositoryCheckpoint,
5020 ) -> oneshot::Receiver<Result<String>> {
5021 self.send_job(None, move |repo, _cx| async move {
5022 match repo {
5023 RepositoryState::Local { backend, .. } => {
5024 backend
5025 .diff_checkpoints(base_checkpoint, target_checkpoint)
5026 .await
5027 }
5028 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5029 }
5030 })
5031 }
5032
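 /// Schedules a full git status scan, replacing the cached snapshot with the result
 /// and forwarding it downstream. Repeated requests are coalesced via the
 /// `ReloadGitState` job key.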
5033 fn schedule_scan(
5034 &mut self,
5035 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5036 cx: &mut Context<Self>,
5037 ) {
5038 let this = cx.weak_entity();
5039 let _ = self.send_keyed_job(
5040 Some(GitJobKey::ReloadGitState),
5041 None,
5042 |state, mut cx| async move {
5043 log::debug!("run scheduled git status scan");
5044
5045 let Some(this) = this.upgrade() else {
5046 return Ok(());
5047 };
5048 let RepositoryState::Local { backend, .. } = state else {
5049 bail!("not a local repository")
5050 };
5051 let (snapshot, events) = this
5052 .update(&mut cx, |this, _| {
5053 this.paths_needing_status_update.clear();
5054 compute_snapshot(
5055 this.id,
5056 this.work_directory_abs_path.clone(),
5057 this.snapshot.clone(),
5058 backend.clone(),
5059 )
5060 })?
5061 .await?;
5062 this.update(&mut cx, |this, cx| {
5063 this.snapshot = snapshot.clone();
5064 for event in events {
5065 cx.emit(event);
5066 }
5067 })?;
5068 if let Some(updates_tx) = updates_tx {
5069 updates_tx
5070 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5071 .ok();
5072 }
5073 Ok(())
5074 },
5075 );
5076 }
5077
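 /// Spawns the background worker that runs git jobs against the local repository.
 /// The worker resolves the repository's shell environment and git binary once, then
 /// processes jobs in order, skipping a keyed job when a newer job with the same key
 /// is already queued.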
5078 fn spawn_local_git_worker(
5079 work_directory_abs_path: Arc<Path>,
5080 dot_git_abs_path: Arc<Path>,
5081 _repository_dir_abs_path: Arc<Path>,
5082 _common_dir_abs_path: Arc<Path>,
5083 project_environment: WeakEntity<ProjectEnvironment>,
5084 fs: Arc<dyn Fs>,
5085 cx: &mut Context<Self>,
5086 ) -> mpsc::UnboundedSender<GitJob> {
5087 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5088
5089 cx.spawn(async move |_, cx| {
5090 let environment = project_environment
5091 .upgrade()
5092 .context("missing project environment")?
5093 .update(cx, |project_environment, cx| {
5094 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5095 })?
5096 .await
5097 .unwrap_or_else(|| {
5098 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5099 HashMap::default()
5100 });
5101 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5102 let backend = cx
5103 .background_spawn(async move {
5104 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5105 .or_else(|| which::which("git").ok());
5106 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5107 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5108 })
5109 .await?;
5110
5111 if let Some(git_hosting_provider_registry) =
5112 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5113 {
5114 git_hosting_providers::register_additional_providers(
5115 git_hosting_provider_registry,
5116 backend.clone(),
5117 );
5118 }
5119
5120 let state = RepositoryState::Local {
5121 backend,
5122 environment: Arc::new(environment),
5123 };
5124 let mut jobs = VecDeque::new();
5125 loop {
5126 while let Ok(Some(next_job)) = job_rx.try_next() {
5127 jobs.push_back(next_job);
5128 }
5129
5130 if let Some(job) = jobs.pop_front() {
5131 if let Some(current_key) = &job.key
5132 && jobs
5133 .iter()
5134 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5135 {
5136 continue;
5137 }
5138 (job.job)(state.clone(), cx).await;
5139 } else if let Some(job) = job_rx.next().await {
5140 jobs.push_back(job);
5141 } else {
5142 break;
5143 }
5144 }
5145 anyhow::Ok(())
5146 })
5147 .detach_and_log_err(cx);
5148
5149 job_tx
5150 }
5151
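 /// Spawns the background worker that forwards git jobs to the remote host, using
 /// the same keyed-job coalescing as the local worker.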
5152 fn spawn_remote_git_worker(
5153 project_id: ProjectId,
5154 client: AnyProtoClient,
5155 cx: &mut Context<Self>,
5156 ) -> mpsc::UnboundedSender<GitJob> {
5157 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5158
5159 cx.spawn(async move |_, cx| {
5160 let state = RepositoryState::Remote { project_id, client };
5161 let mut jobs = VecDeque::new();
5162 loop {
5163 while let Ok(Some(next_job)) = job_rx.try_next() {
5164 jobs.push_back(next_job);
5165 }
5166
5167 if let Some(job) = jobs.pop_front() {
5168 if let Some(current_key) = &job.key
5169 && jobs
5170 .iter()
5171 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5172 {
5173 continue;
5174 }
5175 (job.job)(state.clone(), cx).await;
5176 } else if let Some(job) = job_rx.next().await {
5177 jobs.push_back(job);
5178 } else {
5179 break;
5180 }
5181 }
5182 anyhow::Ok(())
5183 })
5184 .detach_and_log_err(cx);
5185
5186 job_tx
5187 }
5188
5189 fn load_staged_text(
5190 &mut self,
5191 buffer_id: BufferId,
5192 repo_path: RepoPath,
5193 cx: &App,
5194 ) -> Task<Result<Option<String>>> {
5195 let rx = self.send_job(None, move |state, _| async move {
5196 match state {
5197 RepositoryState::Local { backend, .. } => {
5198 anyhow::Ok(backend.load_index_text(repo_path).await)
5199 }
5200 RepositoryState::Remote { project_id, client } => {
5201 let response = client
5202 .request(proto::OpenUnstagedDiff {
5203 project_id: project_id.to_proto(),
5204 buffer_id: buffer_id.to_proto(),
5205 })
5206 .await?;
5207 Ok(response.staged_text)
5208 }
5209 }
5210 });
5211 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5212 }
5213
5214 fn load_committed_text(
5215 &mut self,
5216 buffer_id: BufferId,
5217 repo_path: RepoPath,
5218 cx: &App,
5219 ) -> Task<Result<DiffBasesChange>> {
5220 let rx = self.send_job(None, move |state, _| async move {
5221 match state {
5222 RepositoryState::Local { backend, .. } => {
5223 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5224 let staged_text = backend.load_index_text(repo_path).await;
5225 let diff_bases_change = if committed_text == staged_text {
5226 DiffBasesChange::SetBoth(committed_text)
5227 } else {
5228 DiffBasesChange::SetEach {
5229 index: staged_text,
5230 head: committed_text,
5231 }
5232 };
5233 anyhow::Ok(diff_bases_change)
5234 }
5235 RepositoryState::Remote { project_id, client } => {
5236 use proto::open_uncommitted_diff_response::Mode;
5237
5238 let response = client
5239 .request(proto::OpenUncommittedDiff {
5240 project_id: project_id.to_proto(),
5241 buffer_id: buffer_id.to_proto(),
5242 })
5243 .await?;
5244 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5245 let bases = match mode {
5246 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5247 Mode::IndexAndHead => DiffBasesChange::SetEach {
5248 head: response.committed_text,
5249 index: response.staged_text,
5250 },
5251 };
5252 Ok(bases)
5253 }
5254 }
5255 });
5256
5257 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5258 }
5259 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5260 let repository_id = self.snapshot.id;
5261 let rx = self.send_job(None, move |state, _| async move {
5262 match state {
5263 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5264 RepositoryState::Remote { client, project_id } => {
5265 let response = client
5266 .request(proto::GetBlobContent {
5267 project_id: project_id.to_proto(),
5268 repository_id: repository_id.0,
5269 oid: oid.to_string(),
5270 })
5271 .await?;
5272 Ok(response.content)
5273 }
5274 }
5275 });
5276 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5277 }
5278
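 /// Records that the given paths may have changed and schedules a keyed job that
 /// re-queries their statuses, applying only the entries that differ from the cached
 /// snapshot. The stash list is refreshed at the same time.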
5279 fn paths_changed(
5280 &mut self,
5281 paths: Vec<RepoPath>,
5282 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5283 cx: &mut Context<Self>,
5284 ) {
5285 self.paths_needing_status_update.extend(paths);
5286
5287 let this = cx.weak_entity();
5288 let _ = self.send_keyed_job(
5289 Some(GitJobKey::RefreshStatuses),
5290 None,
5291 |state, mut cx| async move {
5292 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5293 (
5294 this.snapshot.clone(),
5295 mem::take(&mut this.paths_needing_status_update),
5296 )
5297 })?;
5298 let RepositoryState::Local { backend, .. } = state else {
5299 bail!("not a local repository")
5300 };
5301
5302 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5303 if paths.is_empty() {
5304 return Ok(());
5305 }
5306 let statuses = backend.status(&paths).await?;
5307 let stash_entries = backend.stash_entries().await?;
5308
5309 let changed_path_statuses = cx
5310 .background_spawn(async move {
5311 let mut changed_path_statuses = Vec::new();
5312 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5313 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5314
5315 for (repo_path, status) in &*statuses.entries {
5316 changed_paths.remove(repo_path);
5317 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5318 && cursor.item().is_some_and(|entry| entry.status == *status)
5319 {
5320 continue;
5321 }
5322
5323 changed_path_statuses.push(Edit::Insert(StatusEntry {
5324 repo_path: repo_path.clone(),
5325 status: *status,
5326 }));
5327 }
5328 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5329 for path in changed_paths.into_iter() {
5330 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5331 changed_path_statuses
5332 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5333 }
5334 }
5335 changed_path_statuses
5336 })
5337 .await;
5338
5339 this.update(&mut cx, |this, cx| {
5340 if this.snapshot.stash_entries != stash_entries {
5341 cx.emit(RepositoryEvent::StashEntriesChanged);
5342 this.snapshot.stash_entries = stash_entries;
5343 }
5344
5345 if !changed_path_statuses.is_empty() {
5346 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5347 this.snapshot
5348 .statuses_by_path
5349 .edit(changed_path_statuses, ());
5350 this.snapshot.scan_id += 1;
5351 }
5352
5353 if let Some(updates_tx) = updates_tx {
5354 updates_tx
5355 .unbounded_send(DownstreamUpdate::UpdateRepository(
5356 this.snapshot.clone(),
5357 ))
5358 .ok();
5359 }
5360 })
5361 },
5362 );
5363 }
5364
5365 /// Returns the currently running git command, if any, along with when it started.
5366 pub fn current_job(&self) -> Option<JobInfo> {
5367 self.active_jobs.values().next().cloned()
5368 }
5369
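 /// Enqueues a no-op job; the returned receiver resolves once all previously queued
 /// jobs have been processed.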
5370 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5371 self.send_job(None, |_, _| async {})
5372 }
5373
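 /// Runs `f` while a pending operation with the given status is recorded for each
 /// path, then marks those operations as finished, skipped (when the job was
 /// canceled), or errored.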
5374 fn spawn_job_with_tracking<AsyncFn>(
5375 &mut self,
5376 paths: Vec<RepoPath>,
5377 git_status: pending_op::GitStatus,
5378 cx: &mut Context<Self>,
5379 f: AsyncFn,
5380 ) -> Task<Result<()>>
5381 where
5382 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5383 {
5384 let ids = self.new_pending_ops_for_paths(paths, git_status);
5385
5386 cx.spawn(async move |this, cx| {
5387 let (job_status, result) = match f(this.clone(), cx).await {
5388 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5389 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5390 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5391 };
5392
5393 this.update(cx, |this, _| {
5394 let mut edits = Vec::with_capacity(ids.len());
5395 for (id, entry) in ids {
5396 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5397 if let Some(op) = ops.op_by_id_mut(id) {
5398 op.job_status = job_status;
5399 }
5400 edits.push(sum_tree::Edit::Insert(ops));
5401 }
5402 }
5403 this.snapshot.pending_ops_by_path.edit(edits, ());
5404 })?;
5405
5406 result
5407 })
5408 }
5409
5410 fn new_pending_ops_for_paths(
5411 &mut self,
5412 paths: Vec<RepoPath>,
5413 git_status: pending_op::GitStatus,
5414 ) -> Vec<(PendingOpId, RepoPath)> {
5415 let mut edits = Vec::with_capacity(paths.len());
5416 let mut ids = Vec::with_capacity(paths.len());
5417 for path in paths {
5418 let mut ops = self
5419 .snapshot
5420 .pending_ops_for_path(&path)
5421 .unwrap_or_else(|| PendingOps::new(&path));
5422 let id = ops.max_id() + 1;
5423 ops.ops.push(PendingOp {
5424 id,
5425 git_status,
5426 job_status: pending_op::JobStatus::Running,
5427 });
5428 edits.push(sum_tree::Edit::Insert(ops));
5429 ids.push((id, path));
5430 }
5431 self.snapshot.pending_ops_by_path.edit(edits, ());
5432 ids
5433 }
5434}
5435
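/// Builds a permalink to a file inside a crate that was unpacked from the Cargo
/// registry, by finding the `.cargo_vcs_info.json` and `Cargo.toml` in a parent
/// directory and resolving the upstream repository and commit. The VCS info file is
/// expected to look roughly like `{"git":{"sha1":"…"},"path_in_vcs":"…"}`.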
5436fn get_permalink_in_rust_registry_src(
5437 provider_registry: Arc<GitHostingProviderRegistry>,
5438 path: PathBuf,
5439 selection: Range<u32>,
5440) -> Result<url::Url> {
5441 #[derive(Deserialize)]
5442 struct CargoVcsGit {
5443 sha1: String,
5444 }
5445
5446 #[derive(Deserialize)]
5447 struct CargoVcsInfo {
5448 git: CargoVcsGit,
5449 path_in_vcs: String,
5450 }
5451
5452 #[derive(Deserialize)]
5453 struct CargoPackage {
5454 repository: String,
5455 }
5456
5457 #[derive(Deserialize)]
5458 struct CargoToml {
5459 package: CargoPackage,
5460 }
5461
5462 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5463 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5464 Some((dir, json))
5465 }) else {
5466 bail!("No .cargo_vcs_info.json found in parent directories")
5467 };
5468 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5469 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5470 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5471 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5472 .context("parsing package.repository field of manifest")?;
5473 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5474 let permalink = provider.build_permalink(
5475 remote,
5476 BuildPermalinkParams::new(
5477 &cargo_vcs_info.git.sha1,
5478 &RepoPath::from_rel_path(
5479 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5480 ),
5481 Some(selection),
5482 ),
5483 );
5484 Ok(permalink)
5485}
5486
5487fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5488 let Some(blame) = blame else {
5489 return proto::BlameBufferResponse {
5490 blame_response: None,
5491 };
5492 };
5493
5494 let entries = blame
5495 .entries
5496 .into_iter()
5497 .map(|entry| proto::BlameEntry {
5498 sha: entry.sha.as_bytes().into(),
5499 start_line: entry.range.start,
5500 end_line: entry.range.end,
5501 original_line_number: entry.original_line_number,
5502 author: entry.author,
5503 author_mail: entry.author_mail,
5504 author_time: entry.author_time,
5505 author_tz: entry.author_tz,
5506 committer: entry.committer_name,
5507 committer_mail: entry.committer_email,
5508 committer_time: entry.committer_time,
5509 committer_tz: entry.committer_tz,
5510 summary: entry.summary,
5511 previous: entry.previous,
5512 filename: entry.filename,
5513 })
5514 .collect::<Vec<_>>();
5515
5516 let messages = blame
5517 .messages
5518 .into_iter()
5519 .map(|(oid, message)| proto::CommitMessage {
5520 oid: oid.as_bytes().into(),
5521 message,
5522 })
5523 .collect::<Vec<_>>();
5524
5525 proto::BlameBufferResponse {
5526 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5527 entries,
5528 messages,
5529 remote_url: blame.remote_url,
5530 }),
5531 }
5532}
5533
5534fn deserialize_blame_buffer_response(
5535 response: proto::BlameBufferResponse,
5536) -> Option<git::blame::Blame> {
5537 let response = response.blame_response?;
5538 let entries = response
5539 .entries
5540 .into_iter()
5541 .filter_map(|entry| {
5542 Some(git::blame::BlameEntry {
5543 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5544 range: entry.start_line..entry.end_line,
5545 original_line_number: entry.original_line_number,
5546 committer_name: entry.committer,
5547 committer_time: entry.committer_time,
5548 committer_tz: entry.committer_tz,
5549 committer_email: entry.committer_mail,
5550 author: entry.author,
5551 author_mail: entry.author_mail,
5552 author_time: entry.author_time,
5553 author_tz: entry.author_tz,
5554 summary: entry.summary,
5555 previous: entry.previous,
5556 filename: entry.filename,
5557 })
5558 })
5559 .collect::<Vec<_>>();
5560
5561 let messages = response
5562 .messages
5563 .into_iter()
5564 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5565 .collect::<HashMap<_, _>>();
5566
5567 Some(Blame {
5568 entries,
5569 messages,
5570 remote_url: response.remote_url,
5571 })
5572}
5573
5574fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5575 proto::Branch {
5576 is_head: branch.is_head,
5577 ref_name: branch.ref_name.to_string(),
5578 unix_timestamp: branch
5579 .most_recent_commit
5580 .as_ref()
5581 .map(|commit| commit.commit_timestamp as u64),
5582 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5583 ref_name: upstream.ref_name.to_string(),
5584 tracking: upstream
5585 .tracking
5586 .status()
5587 .map(|upstream| proto::UpstreamTracking {
5588 ahead: upstream.ahead as u64,
5589 behind: upstream.behind as u64,
5590 }),
5591 }),
5592 most_recent_commit: branch
5593 .most_recent_commit
5594 .as_ref()
5595 .map(|commit| proto::CommitSummary {
5596 sha: commit.sha.to_string(),
5597 subject: commit.subject.to_string(),
5598 commit_timestamp: commit.commit_timestamp,
5599 author_name: commit.author_name.to_string(),
5600 }),
5601 }
5602}
5603
5604fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5605 proto::Worktree {
5606 path: worktree.path.to_string_lossy().to_string(),
5607 ref_name: worktree.ref_name.to_string(),
5608 sha: worktree.sha.to_string(),
5609 }
5610}
5611
5612fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5613 git::repository::Worktree {
5614 path: PathBuf::from(proto.path.clone()),
5615 ref_name: proto.ref_name.clone().into(),
5616 sha: proto.sha.clone().into(),
5617 }
5618}
5619
5620fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5621 git::repository::Branch {
5622 is_head: proto.is_head,
5623 ref_name: proto.ref_name.clone().into(),
5624 upstream: proto
5625 .upstream
5626 .as_ref()
5627 .map(|upstream| git::repository::Upstream {
5628 ref_name: upstream.ref_name.to_string().into(),
5629 tracking: upstream
5630 .tracking
5631 .as_ref()
5632 .map(|tracking| {
5633 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5634 ahead: tracking.ahead as u32,
5635 behind: tracking.behind as u32,
5636 })
5637 })
5638 .unwrap_or(git::repository::UpstreamTracking::Gone),
5639 }),
5640 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5641 git::repository::CommitSummary {
5642 sha: commit.sha.to_string().into(),
5643 subject: commit.subject.to_string().into(),
5644 commit_timestamp: commit.commit_timestamp,
5645 author_name: commit.author_name.to_string().into(),
5646 has_parent: true,
5647 }
5648 }),
5649 }
5650}
5651
5652fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5653 proto::GitCommitDetails {
5654 sha: commit.sha.to_string(),
5655 message: commit.message.to_string(),
5656 commit_timestamp: commit.commit_timestamp,
5657 author_email: commit.author_email.to_string(),
5658 author_name: commit.author_name.to_string(),
5659 }
5660}
5661
5662fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5663 CommitDetails {
5664 sha: proto.sha.clone().into(),
5665 message: proto.message.clone().into(),
5666 commit_timestamp: proto.commit_timestamp,
5667 author_email: proto.author_email.clone().into(),
5668 author_name: proto.author_name.clone().into(),
5669 }
5670}
5671
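/// Builds a fresh `RepositorySnapshot` by querying the backend for the current
/// branch, head commit, statuses, stash entries, and merge state, and returns it
/// along with the events describing how it differs from `prev_snapshot`.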
5672async fn compute_snapshot(
5673 id: RepositoryId,
5674 work_directory_abs_path: Arc<Path>,
5675 prev_snapshot: RepositorySnapshot,
5676 backend: Arc<dyn GitRepository>,
5677) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5678 let mut events = Vec::new();
5679 let branches = backend.branches().await?;
5680 let branch = branches.into_iter().find(|branch| branch.is_head);
5681 let statuses = backend
5682 .status(&[RepoPath::from_rel_path(
5683 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5684 )])
5685 .await?;
5686 let stash_entries = backend.stash_entries().await?;
5687 let statuses_by_path = SumTree::from_iter(
5688 statuses
5689 .entries
5690 .iter()
5691 .map(|(repo_path, status)| StatusEntry {
5692 repo_path: repo_path.clone(),
5693 status: *status,
5694 }),
5695 (),
5696 );
5697 let (merge_details, merge_heads_changed) =
5698 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5699 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5700
5701 let pending_ops_by_path = SumTree::from_iter(
5702 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5703 let inner_ops: Vec<PendingOp> =
5704 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5705 if inner_ops.is_empty() {
5706 None
5707 } else {
5708 Some(PendingOps {
5709 repo_path: ops.repo_path.clone(),
5710 ops: inner_ops,
5711 })
5712 }
5713 }),
5714 (),
5715 );
5716
5717 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5718 events.push(RepositoryEvent::PendingOpsChanged {
5719 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5720 })
5721 }
5722
5723 if merge_heads_changed {
5724 events.push(RepositoryEvent::MergeHeadsChanged);
5725 }
5726
5727 if statuses_by_path != prev_snapshot.statuses_by_path {
5728 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5729 }
5730
5731 // Load the HEAD commit directly; this is useful when `branch` is None, e.g. in a detached HEAD state
5732 let head_commit = match backend.head_sha().await {
5733 Some(head_sha) => backend.show(head_sha).await.log_err(),
5734 None => None,
5735 };
5736
5737 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5738 events.push(RepositoryEvent::BranchChanged);
5739 }
5740
5741 // Used by edit prediction data collection
5742 let remote_origin_url = backend.remote_url("origin");
5743 let remote_upstream_url = backend.remote_url("upstream");
5744
5745 let snapshot = RepositorySnapshot {
5746 id,
5747 statuses_by_path,
5748 pending_ops_by_path,
5749 work_directory_abs_path,
5750 path_style: prev_snapshot.path_style,
5751 scan_id: prev_snapshot.scan_id + 1,
5752 branch,
5753 head_commit,
5754 merge: merge_details,
5755 remote_origin_url,
5756 remote_upstream_url,
5757 stash_entries,
5758 };
5759
5760 Ok((snapshot, events))
5761}
5762
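/// Converts a protobuf file status into a `FileStatus`, falling back to the plain
/// `simple_status` code when no structured variant is present.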
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

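/// Encodes a `FileStatus` as its structured protobuf representation.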
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

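/// Maps an `UnmergedStatusCode` to the corresponding `proto::GitStatus` value.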
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

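/// Maps a `StatusCode` to the corresponding `proto::GitStatus` value.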
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}
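
// A minimal round-trip sketch (an illustrative addition, not part of the
// original module): `status_from_proto` ignores `simple_status` whenever a
// structured variant is present, so encoding with `status_to_proto` and
// decoding again should preserve the value. This assumes `FileStatus`
// implements `PartialEq` (it is already compared via `StatusEntry` above);
// the module and test names are hypothetical.
#[cfg(test)]
mod status_proto_roundtrip_sketch {
    use super::*;

    #[test]
    fn statuses_roundtrip_through_proto() {
        let tracked: FileStatus = TrackedStatus {
            index_status: StatusCode::Added,
            worktree_status: StatusCode::Modified,
        }
        .into();
        let unmerged: FileStatus = UnmergedStatus {
            first_head: UnmergedStatusCode::Updated,
            second_head: UnmergedStatusCode::Deleted,
        }
        .into();
        for status in [tracked, unmerged, FileStatus::Untracked, FileStatus::Ignored] {
            // The legacy `simple_status` argument is unused when a variant is
            // provided, so any valid value works here.
            let decoded =
                status_from_proto(proto::GitStatus::Added as i32, Some(status_to_proto(status)))
                    .unwrap();
            assert!(decoded == status, "status should round-trip through proto");
        }
    }
}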