1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
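/// Project-wide git state: the set of known repositories, per-buffer diff and
/// conflict-marker state, and the plumbing for syncing that state with remote
/// collaborators.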
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
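/// Strong handles to the diffs that have been shared with a remote peer for a
/// given buffer, kept alive for as long as the peer has the buffer open.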
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
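/// Git-related state tracked per open buffer: weak handles to its unstaged and
/// uncommitted diffs and its conflict set, plus the tasks and bookkeeping used
/// to keep them up to date as the buffer and repository change.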
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
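/// Describes which diff base texts changed for a buffer (the index, HEAD, or
/// both) and carries their new contents.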
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
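/// The base a buffer diff is computed against: the index (`Unstaged`) or the
/// HEAD commit (`Uncommitted`).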
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
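/// Whether this store backs a local project or mirrors one from an upstream
/// client, along with the connections used to forward updates downstream.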
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
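/// A checkpoint of every repository in the store, keyed by working-directory
/// path, used to capture, restore, and compare project-wide git state.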
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
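/// The git status of a single path within a repository.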
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
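/// State of an in-progress merge: the paths with conflicts, the prepared merge
/// message, and the heads being merged.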
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
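/// A point-in-time view of a repository: path statuses, pending operations,
/// branch and head commit, merge state, and remote metadata.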
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub pending_ops_by_path: SumTree<PendingOps>,
259 pub renamed_paths: HashMap<RepoPath, RepoPath>,
260 pub work_directory_abs_path: Arc<Path>,
261 pub path_style: PathStyle,
262 pub branch: Option<Branch>,
263 pub head_commit: Option<CommitDetails>,
264 pub scan_id: u64,
265 pub merge: MergeDetails,
266 pub remote_origin_url: Option<String>,
267 pub remote_upstream_url: Option<String>,
268 pub stash_entries: GitStash,
269}
270
271type JobId = u64;
272
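/// Metadata describing a currently running git job.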
273#[derive(Clone, Debug, PartialEq, Eq)]
274pub struct JobInfo {
275 pub start: Instant,
276 pub message: SharedString,
277}
278
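/// A handle to a single git repository. Operations are serialized through a job
/// queue and executed against either a local backend or the upstream client.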
279pub struct Repository {
280 this: WeakEntity<Self>,
281 snapshot: RepositorySnapshot,
282 commit_message_buffer: Option<Entity<Buffer>>,
283 git_store: WeakEntity<GitStore>,
284 // For a local repository, holds paths that have had worktree events since the last status scan completed,
285 // and that should be examined during the next status scan.
286 paths_needing_status_update: BTreeSet<RepoPath>,
287 job_sender: mpsc::UnboundedSender<GitJob>,
288 active_jobs: HashMap<JobId, JobInfo>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292}
293
294impl std::ops::Deref for Repository {
295 type Target = RepositorySnapshot;
296
297 fn deref(&self) -> &Self::Target {
298 &self.snapshot
299 }
300}
301
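/// How git commands for a repository are executed: directly against a local
/// backend, or forwarded over RPC to the upstream project.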
302#[derive(Clone)]
303pub enum RepositoryState {
304 Local {
305 backend: Arc<dyn GitRepository>,
306 environment: Arc<HashMap<String, String>>,
307 },
308 Remote {
309 project_id: ProjectId,
310 client: AnyProtoClient,
311 },
312}
313
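/// Events emitted by a `Repository` when parts of its snapshot change.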
314#[derive(Clone, Debug, PartialEq, Eq)]
315pub enum RepositoryEvent {
316 StatusesChanged,
317 MergeHeadsChanged,
318 BranchChanged,
319 StashEntriesChanged,
320 PendingOpsChanged {
321 pending_ops: SumTree<pending_op::PendingOps>,
322 },
323}
324
325#[derive(Clone, Debug)]
326pub struct JobsUpdated;
327
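/// Events emitted by the `GitStore` as repositories, jobs, and conflicts change.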
328#[derive(Debug)]
329pub enum GitStoreEvent {
330 ActiveRepositoryChanged(Option<RepositoryId>),
331 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
332 RepositoryAdded,
333 RepositoryRemoved(RepositoryId),
334 IndexWriteError(anyhow::Error),
335 JobsUpdated,
336 ConflictsUpdated,
337}
338
339impl EventEmitter<RepositoryEvent> for Repository {}
340impl EventEmitter<JobsUpdated> for Repository {}
341impl EventEmitter<GitStoreEvent> for GitStore {}
342
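/// A unit of work to run against the repository's state; the optional key
/// identifies the logical operation (e.g. writing the index for a set of paths)
/// so equivalent queued jobs can be recognized.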
343pub struct GitJob {
344 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
345 key: Option<GitJobKey>,
346}
347
348#[derive(PartialEq, Eq)]
349enum GitJobKey {
350 WriteIndex(Vec<RepoPath>),
351 ReloadBufferDiffBases,
352 RefreshStatuses,
353 ReloadGitState,
354}
355
356impl GitStore {
357 pub fn local(
358 worktree_store: &Entity<WorktreeStore>,
359 buffer_store: Entity<BufferStore>,
360 environment: Entity<ProjectEnvironment>,
361 fs: Arc<dyn Fs>,
362 cx: &mut Context<Self>,
363 ) -> Self {
364 Self::new(
365 worktree_store.clone(),
366 buffer_store,
367 GitStoreState::Local {
368 next_repository_id: Arc::new(AtomicU64::new(1)),
369 downstream: None,
370 project_environment: environment,
371 fs,
372 },
373 cx,
374 )
375 }
376
377 pub fn remote(
378 worktree_store: &Entity<WorktreeStore>,
379 buffer_store: Entity<BufferStore>,
380 upstream_client: AnyProtoClient,
381 project_id: u64,
382 cx: &mut Context<Self>,
383 ) -> Self {
384 Self::new(
385 worktree_store.clone(),
386 buffer_store,
387 GitStoreState::Remote {
388 upstream_client,
389 upstream_project_id: project_id,
390 downstream: None,
391 },
392 cx,
393 )
394 }
395
396 fn new(
397 worktree_store: Entity<WorktreeStore>,
398 buffer_store: Entity<BufferStore>,
399 state: GitStoreState,
400 cx: &mut Context<Self>,
401 ) -> Self {
402 let _subscriptions = vec![
403 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
404 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
405 ];
406
407 GitStore {
408 state,
409 buffer_store,
410 worktree_store,
411 repositories: HashMap::default(),
412 worktree_ids: HashMap::default(),
413 active_repo_id: None,
414 _subscriptions,
415 loading_diffs: HashMap::default(),
416 shared_diffs: HashMap::default(),
417 diffs: HashMap::default(),
418 }
419 }
420
421 pub fn init(client: &AnyProtoClient) {
422 client.add_entity_request_handler(Self::handle_get_remotes);
423 client.add_entity_request_handler(Self::handle_get_branches);
424 client.add_entity_request_handler(Self::handle_get_default_branch);
425 client.add_entity_request_handler(Self::handle_change_branch);
426 client.add_entity_request_handler(Self::handle_create_branch);
427 client.add_entity_request_handler(Self::handle_rename_branch);
428 client.add_entity_request_handler(Self::handle_git_init);
429 client.add_entity_request_handler(Self::handle_push);
430 client.add_entity_request_handler(Self::handle_pull);
431 client.add_entity_request_handler(Self::handle_fetch);
432 client.add_entity_request_handler(Self::handle_stage);
433 client.add_entity_request_handler(Self::handle_unstage);
434 client.add_entity_request_handler(Self::handle_stash);
435 client.add_entity_request_handler(Self::handle_stash_pop);
436 client.add_entity_request_handler(Self::handle_stash_apply);
437 client.add_entity_request_handler(Self::handle_stash_drop);
438 client.add_entity_request_handler(Self::handle_commit);
439 client.add_entity_request_handler(Self::handle_reset);
440 client.add_entity_request_handler(Self::handle_show);
441 client.add_entity_request_handler(Self::handle_load_commit_diff);
442 client.add_entity_request_handler(Self::handle_checkout_files);
443 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
444 client.add_entity_request_handler(Self::handle_set_index_text);
445 client.add_entity_request_handler(Self::handle_askpass);
446 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
447 client.add_entity_request_handler(Self::handle_git_diff);
448 client.add_entity_request_handler(Self::handle_tree_diff);
449 client.add_entity_request_handler(Self::handle_get_blob_content);
450 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
451 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
452 client.add_entity_message_handler(Self::handle_update_diff_bases);
453 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
454 client.add_entity_request_handler(Self::handle_blame_buffer);
455 client.add_entity_message_handler(Self::handle_update_repository);
456 client.add_entity_message_handler(Self::handle_remove_repository);
457 client.add_entity_request_handler(Self::handle_git_clone);
458 client.add_entity_request_handler(Self::handle_get_worktrees);
459 client.add_entity_request_handler(Self::handle_create_worktree);
460 }
461
462 pub fn is_local(&self) -> bool {
463 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
466 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
467 let id = repo.read(cx).id;
468 if self.active_repo_id != Some(id) {
469 self.active_repo_id = Some(id);
470 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
471 }
472 }
473 }
474
475 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
476 match &mut self.state {
477 GitStoreState::Remote {
478 downstream: downstream_client,
479 ..
480 } => {
481 for repo in self.repositories.values() {
482 let update = repo.read(cx).snapshot.initial_update(project_id);
483 for update in split_repository_update(update) {
484 client.send(update).log_err();
485 }
486 }
487 *downstream_client = Some((client, ProjectId(project_id)));
488 }
489 GitStoreState::Local {
490 downstream: downstream_client,
491 ..
492 } => {
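                // Forward repository snapshots to the downstream client from a
                // background task, starting with an initial update for every
                // repository we already know about.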
493 let mut snapshots = HashMap::default();
494 let (updates_tx, mut updates_rx) = mpsc::unbounded();
495 for repo in self.repositories.values() {
496 updates_tx
497 .unbounded_send(DownstreamUpdate::UpdateRepository(
498 repo.read(cx).snapshot.clone(),
499 ))
500 .ok();
501 }
502 *downstream_client = Some(LocalDownstreamState {
503 client: client.clone(),
504 project_id: ProjectId(project_id),
505 updates_tx,
506 _task: cx.spawn(async move |this, cx| {
507 cx.background_spawn(async move {
508 while let Some(update) = updates_rx.next().await {
509 match update {
510 DownstreamUpdate::UpdateRepository(snapshot) => {
511 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
512 {
513 let update =
514 snapshot.build_update(old_snapshot, project_id);
515 *old_snapshot = snapshot;
516 for update in split_repository_update(update) {
517 client.send(update)?;
518 }
519 } else {
520 let update = snapshot.initial_update(project_id);
521 for update in split_repository_update(update) {
522 client.send(update)?;
523 }
524 snapshots.insert(snapshot.id, snapshot);
525 }
526 }
527 DownstreamUpdate::RemoveRepository(id) => {
528 client.send(proto::RemoveRepository {
529 project_id,
530 id: id.to_proto(),
531 })?;
532 }
533 }
534 }
535 anyhow::Ok(())
536 })
537 .await
538 .ok();
539 this.update(cx, |this, _| {
540 if let GitStoreState::Local {
541 downstream: downstream_client,
542 ..
543 } = &mut this.state
544 {
545 downstream_client.take();
546 } else {
547 unreachable!("unshared called on remote store");
548 }
549 })
550 }),
551 });
552 }
553 }
554 }
555
556 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
557 match &mut self.state {
558 GitStoreState::Local {
559 downstream: downstream_client,
560 ..
561 } => {
562 downstream_client.take();
563 }
564 GitStoreState::Remote {
565 downstream: downstream_client,
566 ..
567 } => {
568 downstream_client.take();
569 }
570 }
571 self.shared_diffs.clear();
572 }
573
574 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
575 self.shared_diffs.remove(peer_id);
576 }
577
578 pub fn active_repository(&self) -> Option<Entity<Repository>> {
579 self.active_repo_id
580 .as_ref()
581 .map(|id| self.repositories[id].clone())
582 }
583
584 pub fn open_unstaged_diff(
585 &mut self,
586 buffer: Entity<Buffer>,
587 cx: &mut Context<Self>,
588 ) -> Task<Result<Entity<BufferDiff>>> {
589 let buffer_id = buffer.read(cx).remote_id();
590 if let Some(diff_state) = self.diffs.get(&buffer_id)
591 && let Some(unstaged_diff) = diff_state
592 .read(cx)
593 .unstaged_diff
594 .as_ref()
595 .and_then(|weak| weak.upgrade())
596 {
597 if let Some(task) =
598 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
599 {
600 return cx.background_executor().spawn(async move {
601 task.await;
602 Ok(unstaged_diff)
603 });
604 }
605 return Task::ready(Ok(unstaged_diff));
606 }
607
608 let Some((repo, repo_path)) =
609 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
610 else {
611 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
612 };
613
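        // Concurrent requests share a single loading task per (buffer, diff kind).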
614 let task = self
615 .loading_diffs
616 .entry((buffer_id, DiffKind::Unstaged))
617 .or_insert_with(|| {
618 let staged_text = repo.update(cx, |repo, cx| {
619 repo.load_staged_text(buffer_id, repo_path, cx)
620 });
621 cx.spawn(async move |this, cx| {
622 Self::open_diff_internal(
623 this,
624 DiffKind::Unstaged,
625 staged_text.await.map(DiffBasesChange::SetIndex),
626 buffer,
627 cx,
628 )
629 .await
630 .map_err(Arc::new)
631 })
632 .shared()
633 })
634 .clone();
635
636 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
637 }
638
639 pub fn open_diff_since(
640 &mut self,
641 oid: Option<git::Oid>,
642 buffer: Entity<Buffer>,
643 repo: Entity<Repository>,
644 languages: Arc<LanguageRegistry>,
645 cx: &mut Context<Self>,
646 ) -> Task<Result<Entity<BufferDiff>>> {
647 cx.spawn(async move |this, cx| {
648 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
649 let content = match oid {
650 None => None,
651 Some(oid) => Some(
652 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
653 .await?,
654 ),
655 };
656 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
657
658 buffer_diff
659 .update(cx, |buffer_diff, cx| {
660 buffer_diff.set_base_text(
661 content.map(Arc::new),
662 buffer_snapshot.language().cloned(),
663 Some(languages.clone()),
664 buffer_snapshot.text,
665 cx,
666 )
667 })?
668 .await?;
669 let unstaged_diff = this
670 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
671 .await?;
672 buffer_diff.update(cx, |buffer_diff, _| {
673 buffer_diff.set_secondary_diff(unstaged_diff);
674 })?;
675
676 this.update(cx, |_, cx| {
677 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
678 .detach();
679 })?;
680
681 Ok(buffer_diff)
682 })
683 }
684
685 pub fn open_uncommitted_diff(
686 &mut self,
687 buffer: Entity<Buffer>,
688 cx: &mut Context<Self>,
689 ) -> Task<Result<Entity<BufferDiff>>> {
690 let buffer_id = buffer.read(cx).remote_id();
691
692 if let Some(diff_state) = self.diffs.get(&buffer_id)
693 && let Some(uncommitted_diff) = diff_state
694 .read(cx)
695 .uncommitted_diff
696 .as_ref()
697 .and_then(|weak| weak.upgrade())
698 {
699 if let Some(task) =
700 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
701 {
702 return cx.background_executor().spawn(async move {
703 task.await;
704 Ok(uncommitted_diff)
705 });
706 }
707 return Task::ready(Ok(uncommitted_diff));
708 }
709
710 let Some((repo, repo_path)) =
711 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
712 else {
713 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
714 };
715
716 let task = self
717 .loading_diffs
718 .entry((buffer_id, DiffKind::Uncommitted))
719 .or_insert_with(|| {
720 let changes = repo.update(cx, |repo, cx| {
721 repo.load_committed_text(buffer_id, repo_path, cx)
722 });
723
724 // todo(lw): hot foreground spawn
725 cx.spawn(async move |this, cx| {
726 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
727 .await
728 .map_err(Arc::new)
729 })
730 .shared()
731 })
732 .clone();
733
734 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
735 }
736
737 async fn open_diff_internal(
738 this: WeakEntity<Self>,
739 kind: DiffKind,
740 texts: Result<DiffBasesChange>,
741 buffer_entity: Entity<Buffer>,
742 cx: &mut AsyncApp,
743 ) -> Result<Entity<BufferDiff>> {
744 let diff_bases_change = match texts {
745 Err(e) => {
746 this.update(cx, |this, cx| {
747 let buffer = buffer_entity.read(cx);
748 let buffer_id = buffer.remote_id();
749 this.loading_diffs.remove(&(buffer_id, kind));
750 })?;
751 return Err(e);
752 }
753 Ok(change) => change,
754 };
755
756 this.update(cx, |this, cx| {
757 let buffer = buffer_entity.read(cx);
758 let buffer_id = buffer.remote_id();
759 let language = buffer.language().cloned();
760 let language_registry = buffer.language_registry();
761 let text_snapshot = buffer.text_snapshot();
762 this.loading_diffs.remove(&(buffer_id, kind));
763
764 let git_store = cx.weak_entity();
765 let diff_state = this
766 .diffs
767 .entry(buffer_id)
768 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
769
770 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
771
772 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
773 diff_state.update(cx, |diff_state, cx| {
774 diff_state.language = language;
775 diff_state.language_registry = language_registry;
776
777 match kind {
778 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
779 DiffKind::Uncommitted => {
780 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
781 diff
782 } else {
783 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
784 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
785 unstaged_diff
786 };
787
788 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
789 diff_state.uncommitted_diff = Some(diff.downgrade())
790 }
791 }
792
793 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
794 let rx = diff_state.wait_for_recalculation();
795
796 anyhow::Ok(async move {
797 if let Some(rx) = rx {
798 rx.await;
799 }
800 Ok(diff)
801 })
802 })
803 })??
804 .await
805 }
806
807 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
808 let diff_state = self.diffs.get(&buffer_id)?;
809 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
810 }
811
812 pub fn get_uncommitted_diff(
813 &self,
814 buffer_id: BufferId,
815 cx: &App,
816 ) -> Option<Entity<BufferDiff>> {
817 let diff_state = self.diffs.get(&buffer_id)?;
818 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
819 }
820
821 pub fn open_conflict_set(
822 &mut self,
823 buffer: Entity<Buffer>,
824 cx: &mut Context<Self>,
825 ) -> Entity<ConflictSet> {
826 log::debug!("open conflict set");
827 let buffer_id = buffer.read(cx).remote_id();
828
829 if let Some(git_state) = self.diffs.get(&buffer_id)
830 && let Some(conflict_set) = git_state
831 .read(cx)
832 .conflict_set
833 .as_ref()
834 .and_then(|weak| weak.upgrade())
835 {
836 let conflict_set = conflict_set;
837 let buffer_snapshot = buffer.read(cx).text_snapshot();
838
839 git_state.update(cx, |state, cx| {
840 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
841 });
842
843 return conflict_set;
844 }
845
846 let is_unmerged = self
847 .repository_and_path_for_buffer_id(buffer_id, cx)
848 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
849 let git_store = cx.weak_entity();
850 let buffer_git_state = self
851 .diffs
852 .entry(buffer_id)
853 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
854 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
855
856 self._subscriptions
857 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
858 cx.emit(GitStoreEvent::ConflictsUpdated);
859 }));
860
861 buffer_git_state.update(cx, |state, cx| {
862 state.conflict_set = Some(conflict_set.downgrade());
863 let buffer_snapshot = buffer.read(cx).text_snapshot();
864 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
865 });
866
867 conflict_set
868 }
869
870 pub fn project_path_git_status(
871 &self,
872 project_path: &ProjectPath,
873 cx: &App,
874 ) -> Option<FileStatus> {
875 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
876 Some(repo.read(cx).status_for_path(&repo_path)?.status)
877 }
878
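    /// Captures a checkpoint of every repository in the store, so that the
    /// current git state can later be restored or compared against.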
879 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
880 let mut work_directory_abs_paths = Vec::new();
881 let mut checkpoints = Vec::new();
882 for repository in self.repositories.values() {
883 repository.update(cx, |repository, _| {
884 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
885 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
886 });
887 }
888
889 cx.background_executor().spawn(async move {
890 let checkpoints = future::try_join_all(checkpoints).await?;
891 Ok(GitStoreCheckpoint {
892 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
893 .into_iter()
894 .zip(checkpoints)
895 .collect(),
896 })
897 })
898 }
899
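    /// Restores each repository to the state captured in the given checkpoint,
    /// matching repositories by working-directory path.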
900 pub fn restore_checkpoint(
901 &self,
902 checkpoint: GitStoreCheckpoint,
903 cx: &mut App,
904 ) -> Task<Result<()>> {
905 let repositories_by_work_dir_abs_path = self
906 .repositories
907 .values()
908 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
909 .collect::<HashMap<_, _>>();
910
911 let mut tasks = Vec::new();
912 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
913 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
914 let restore = repository.update(cx, |repository, _| {
915 repository.restore_checkpoint(checkpoint)
916 });
917 tasks.push(async move { restore.await? });
918 }
919 }
920 cx.background_spawn(async move {
921 future::try_join_all(tasks).await?;
922 Ok(())
923 })
924 }
925
926 /// Compares two checkpoints, returning true if they are equal.
927 pub fn compare_checkpoints(
928 &self,
929 left: GitStoreCheckpoint,
930 mut right: GitStoreCheckpoint,
931 cx: &mut App,
932 ) -> Task<Result<bool>> {
933 let repositories_by_work_dir_abs_path = self
934 .repositories
935 .values()
936 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
937 .collect::<HashMap<_, _>>();
938
939 let mut tasks = Vec::new();
940 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
941 if let Some(right_checkpoint) = right
942 .checkpoints_by_work_dir_abs_path
943 .remove(&work_dir_abs_path)
944 {
945 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
946 {
947 let compare = repository.update(cx, |repository, _| {
948 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
949 });
950
951 tasks.push(async move { compare.await? });
952 }
953 } else {
954 return Task::ready(Ok(false));
955 }
956 }
957 cx.background_spawn(async move {
958 Ok(future::try_join_all(tasks)
959 .await?
960 .into_iter()
961 .all(|result| result))
962 })
963 }
964
965 /// Blames a buffer.
966 pub fn blame_buffer(
967 &self,
968 buffer: &Entity<Buffer>,
969 version: Option<clock::Global>,
970 cx: &mut App,
971 ) -> Task<Result<Option<Blame>>> {
972 let buffer = buffer.read(cx);
973 let Some((repo, repo_path)) =
974 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
975 else {
976 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
977 };
978 let content = match &version {
979 Some(version) => buffer.rope_for_version(version),
980 None => buffer.as_rope().clone(),
981 };
982 let version = version.unwrap_or(buffer.version());
983 let buffer_id = buffer.remote_id();
984
985 let rx = repo.update(cx, |repo, _| {
986 repo.send_job(None, move |state, _| async move {
987 match state {
988 RepositoryState::Local { backend, .. } => backend
989 .blame(repo_path.clone(), content)
990 .await
991 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
992 .map(Some),
993 RepositoryState::Remote { project_id, client } => {
994 let response = client
995 .request(proto::BlameBuffer {
996 project_id: project_id.to_proto(),
997 buffer_id: buffer_id.into(),
998 version: serialize_version(&version),
999 })
1000 .await?;
1001 Ok(deserialize_blame_buffer_response(response))
1002 }
1003 }
1004 })
1005 });
1006
1007 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1008 }
1009
1010 pub fn get_permalink_to_line(
1011 &self,
1012 buffer: &Entity<Buffer>,
1013 selection: Range<u32>,
1014 cx: &mut App,
1015 ) -> Task<Result<url::Url>> {
1016 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1017 return Task::ready(Err(anyhow!("buffer has no file")));
1018 };
1019
1020 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1021 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1022 cx,
1023 ) else {
1024 // If we're not in a Git repo, check whether this is a Rust source
1025 // file in the Cargo registry (presumably opened with go-to-definition
1026 // from a normal Rust file). If so, we can put together a permalink
1027 // using crate metadata.
1028 if buffer
1029 .read(cx)
1030 .language()
1031 .is_none_or(|lang| lang.name() != "Rust".into())
1032 {
1033 return Task::ready(Err(anyhow!("no permalink available")));
1034 }
1035 let file_path = file.worktree.read(cx).absolutize(&file.path);
1036 return cx.spawn(async move |cx| {
1037 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1038 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1039 .context("no permalink available")
1040 });
1041 };
1042
1043 let buffer_id = buffer.read(cx).remote_id();
1044 let branch = repo.read(cx).branch.clone();
1045 let remote = branch
1046 .as_ref()
1047 .and_then(|b| b.upstream.as_ref())
1048 .and_then(|b| b.remote_name())
1049 .unwrap_or("origin")
1050 .to_string();
1051
1052 let rx = repo.update(cx, |repo, _| {
1053 repo.send_job(None, move |state, cx| async move {
1054 match state {
1055 RepositoryState::Local { backend, .. } => {
1056 let origin_url = backend
1057 .remote_url(&remote)
1058 .with_context(|| format!("remote \"{remote}\" not found"))?;
1059
1060 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1061
1062 let provider_registry =
1063 cx.update(GitHostingProviderRegistry::default_global)?;
1064
1065 let (provider, remote) =
1066 parse_git_remote_url(provider_registry, &origin_url)
1067 .context("parsing Git remote URL")?;
1068
1069 Ok(provider.build_permalink(
1070 remote,
1071 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1072 ))
1073 }
1074 RepositoryState::Remote { project_id, client } => {
1075 let response = client
1076 .request(proto::GetPermalinkToLine {
1077 project_id: project_id.to_proto(),
1078 buffer_id: buffer_id.into(),
1079 selection: Some(proto::Range {
1080 start: selection.start as u64,
1081 end: selection.end as u64,
1082 }),
1083 })
1084 .await?;
1085
1086 url::Url::parse(&response.permalink).context("failed to parse permalink")
1087 }
1088 }
1089 })
1090 });
1091 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1092 }
1093
1094 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1095 match &self.state {
1096 GitStoreState::Local {
1097 downstream: downstream_client,
1098 ..
1099 } => downstream_client
1100 .as_ref()
1101 .map(|state| (state.client.clone(), state.project_id)),
1102 GitStoreState::Remote {
1103 downstream: downstream_client,
1104 ..
1105 } => downstream_client.clone(),
1106 }
1107 }
1108
1109 fn upstream_client(&self) -> Option<AnyProtoClient> {
1110 match &self.state {
1111 GitStoreState::Local { .. } => None,
1112 GitStoreState::Remote {
1113 upstream_client, ..
1114 } => Some(upstream_client.clone()),
1115 }
1116 }
1117
1118 fn on_worktree_store_event(
1119 &mut self,
1120 worktree_store: Entity<WorktreeStore>,
1121 event: &WorktreeStoreEvent,
1122 cx: &mut Context<Self>,
1123 ) {
1124 let GitStoreState::Local {
1125 project_environment,
1126 downstream,
1127 next_repository_id,
1128 fs,
1129 } = &self.state
1130 else {
1131 return;
1132 };
1133
1134 match event {
1135 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1136 if let Some(worktree) = self
1137 .worktree_store
1138 .read(cx)
1139 .worktree_for_id(*worktree_id, cx)
1140 {
1141 let paths_by_git_repo =
1142 self.process_updated_entries(&worktree, updated_entries, cx);
1143 let downstream = downstream
1144 .as_ref()
1145 .map(|downstream| downstream.updates_tx.clone());
1146 cx.spawn(async move |_, cx| {
1147 let paths_by_git_repo = paths_by_git_repo.await;
1148 for (repo, paths) in paths_by_git_repo {
1149 repo.update(cx, |repo, cx| {
1150 repo.paths_changed(paths, downstream.clone(), cx);
1151 })
1152 .ok();
1153 }
1154 })
1155 .detach();
1156 }
1157 }
1158 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1159 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1160 else {
1161 return;
1162 };
1163 if !worktree.read(cx).is_visible() {
1164 log::debug!(
1165 "not adding repositories for local worktree {:?} because it's not visible",
1166 worktree.read(cx).abs_path()
1167 );
1168 return;
1169 }
1170 self.update_repositories_from_worktree(
1171 *worktree_id,
1172 project_environment.clone(),
1173 next_repository_id.clone(),
1174 downstream
1175 .as_ref()
1176 .map(|downstream| downstream.updates_tx.clone()),
1177 changed_repos.clone(),
1178 fs.clone(),
1179 cx,
1180 );
1181 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1182 }
1183 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
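                // Drop any repository that is no longer referenced by a
                // remaining worktree, and pick a new active repository if the
                // dropped one was active.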
1184 let repos_without_worktree: Vec<RepositoryId> = self
1185 .worktree_ids
1186 .iter_mut()
1187 .filter_map(|(repo_id, worktree_ids)| {
1188 worktree_ids.remove(worktree_id);
1189 if worktree_ids.is_empty() {
1190 Some(*repo_id)
1191 } else {
1192 None
1193 }
1194 })
1195 .collect();
1196 let is_active_repo_removed = repos_without_worktree
1197 .iter()
1198 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1199
1200 for repo_id in repos_without_worktree {
1201 self.repositories.remove(&repo_id);
1202 self.worktree_ids.remove(&repo_id);
1203 if let Some(updates_tx) =
1204 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1205 {
1206 updates_tx
1207 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1208 .ok();
1209 }
1210 }
1211
1212 if is_active_repo_removed {
1213 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1214 self.active_repo_id = Some(repo_id);
1215 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1216 } else {
1217 self.active_repo_id = None;
1218 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1219 }
1220 }
1221 }
1222 _ => {}
1223 }
    }

    fn on_repository_event(
1226 &mut self,
1227 repo: Entity<Repository>,
1228 event: &RepositoryEvent,
1229 cx: &mut Context<Self>,
1230 ) {
1231 let id = repo.read(cx).id;
1232 let repo_snapshot = repo.read(cx).snapshot.clone();
1233 for (buffer_id, diff) in self.diffs.iter() {
1234 if let Some((buffer_repo, repo_path)) =
1235 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1236 && buffer_repo == repo
1237 {
1238 diff.update(cx, |diff, cx| {
1239 if let Some(conflict_set) = &diff.conflict_set {
1240 let conflict_status_changed =
1241 conflict_set.update(cx, |conflict_set, cx| {
1242 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1243 conflict_set.set_has_conflict(has_conflict, cx)
1244 })?;
1245 if conflict_status_changed {
1246 let buffer_store = self.buffer_store.read(cx);
1247 if let Some(buffer) = buffer_store.get(*buffer_id) {
1248 let _ = diff
1249 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1250 }
1251 }
1252 }
1253 anyhow::Ok(())
1254 })
1255 .ok();
1256 }
1257 }
1258 cx.emit(GitStoreEvent::RepositoryUpdated(
1259 id,
1260 event.clone(),
1261 self.active_repo_id == Some(id),
1262 ))
1263 }
1264
1265 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1266 cx.emit(GitStoreEvent::JobsUpdated)
1267 }
1268
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1270 fn update_repositories_from_worktree(
1271 &mut self,
1272 worktree_id: WorktreeId,
1273 project_environment: Entity<ProjectEnvironment>,
1274 next_repository_id: Arc<AtomicU64>,
1275 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1276 updated_git_repositories: UpdatedGitRepositoriesSet,
1277 fs: Arc<dyn Fs>,
1278 cx: &mut Context<Self>,
1279 ) {
1280 let mut removed_ids = Vec::new();
1281 for update in updated_git_repositories.iter() {
1282 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1283 let existing_work_directory_abs_path =
1284 repo.read(cx).work_directory_abs_path.clone();
1285 Some(&existing_work_directory_abs_path)
1286 == update.old_work_directory_abs_path.as_ref()
1287 || Some(&existing_work_directory_abs_path)
1288 == update.new_work_directory_abs_path.as_ref()
1289 }) {
1290 let repo_id = *id;
1291 if let Some(new_work_directory_abs_path) =
1292 update.new_work_directory_abs_path.clone()
1293 {
1294 self.worktree_ids
1295 .entry(repo_id)
1296 .or_insert_with(HashSet::new)
1297 .insert(worktree_id);
1298 existing.update(cx, |existing, cx| {
1299 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1300 existing.schedule_scan(updates_tx.clone(), cx);
1301 });
1302 } else {
1303 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1304 worktree_ids.remove(&worktree_id);
1305 if worktree_ids.is_empty() {
1306 removed_ids.push(repo_id);
1307 }
1308 }
1309 }
1310 } else if let UpdatedGitRepository {
1311 new_work_directory_abs_path: Some(work_directory_abs_path),
1312 dot_git_abs_path: Some(dot_git_abs_path),
1313 repository_dir_abs_path: Some(repository_dir_abs_path),
1314 common_dir_abs_path: Some(common_dir_abs_path),
1315 ..
1316 } = update
1317 {
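                // A repository we haven't seen before: assign it a fresh id,
                // schedule an initial scan, and announce it downstream.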
1318 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1319 let git_store = cx.weak_entity();
1320 let repo = cx.new(|cx| {
1321 let mut repo = Repository::local(
1322 id,
1323 work_directory_abs_path.clone(),
1324 dot_git_abs_path.clone(),
1325 repository_dir_abs_path.clone(),
1326 common_dir_abs_path.clone(),
1327 project_environment.downgrade(),
1328 fs.clone(),
1329 git_store,
1330 cx,
1331 );
1332 if let Some(updates_tx) = updates_tx.as_ref() {
1333 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1334 updates_tx
1335 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1336 .ok();
1337 }
1338 repo.schedule_scan(updates_tx.clone(), cx);
1339 repo
1340 });
1341 self._subscriptions
1342 .push(cx.subscribe(&repo, Self::on_repository_event));
1343 self._subscriptions
1344 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1345 self.repositories.insert(id, repo);
1346 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1347 cx.emit(GitStoreEvent::RepositoryAdded);
1348 self.active_repo_id.get_or_insert_with(|| {
1349 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1350 id
1351 });
1352 }
1353 }
1354
1355 for id in removed_ids {
1356 if self.active_repo_id == Some(id) {
1357 self.active_repo_id = None;
1358 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1359 }
1360 self.repositories.remove(&id);
1361 if let Some(updates_tx) = updates_tx.as_ref() {
1362 updates_tx
1363 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1364 .ok();
1365 }
1366 }
1367 }
1368
1369 fn on_buffer_store_event(
1370 &mut self,
1371 _: Entity<BufferStore>,
1372 event: &BufferStoreEvent,
1373 cx: &mut Context<Self>,
1374 ) {
1375 match event {
1376 BufferStoreEvent::BufferAdded(buffer) => {
1377 cx.subscribe(buffer, |this, buffer, event, cx| {
1378 if let BufferEvent::LanguageChanged = event {
1379 let buffer_id = buffer.read(cx).remote_id();
1380 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1381 diff_state.update(cx, |diff_state, cx| {
1382 diff_state.buffer_language_changed(buffer, cx);
1383 });
1384 }
1385 }
1386 })
1387 .detach();
1388 }
1389 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1390 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1391 diffs.remove(buffer_id);
1392 }
1393 }
1394 BufferStoreEvent::BufferDropped(buffer_id) => {
1395 self.diffs.remove(buffer_id);
1396 for diffs in self.shared_diffs.values_mut() {
1397 diffs.remove(buffer_id);
1398 }
1399 }
1400 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1405 let buffer_id = buffer.read(cx).remote_id();
1406 let diff_state = self.diffs.get(&buffer_id);
1407 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1408
1409 if let Some(diff_state) = diff_state
1410 && let Some((repo, repo_path)) = repo
1411 {
1412 let buffer = buffer.clone();
1413 let diff_state = diff_state.clone();
1414
1415 cx.spawn(async move |_git_store, cx| {
1416 async {
1417 let diff_bases_change = repo
1418 .update(cx, |repo, cx| {
1419 repo.load_committed_text(buffer_id, repo_path, cx)
1420 })?
1421 .await?;
1422
1423 diff_state.update(cx, |diff_state, cx| {
1424 let buffer_snapshot = buffer.read(cx).text_snapshot();
1425 diff_state.diff_bases_changed(
1426 buffer_snapshot,
1427 Some(diff_bases_change),
1428 cx,
1429 );
1430 })
1431 }
1432 .await
1433 .log_err();
1434 })
1435 .detach();
1436 }
1437 }
1438 _ => {}
1439 }
1440 }
1441
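    /// Kicks off diff recalculation and conflict-marker reparsing for the given
    /// buffers, returning a future that resolves once all of them have finished.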
1442 pub fn recalculate_buffer_diffs(
1443 &mut self,
1444 buffers: Vec<Entity<Buffer>>,
1445 cx: &mut Context<Self>,
1446 ) -> impl Future<Output = ()> + use<> {
1447 let mut futures = Vec::new();
1448 for buffer in buffers {
1449 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1450 let buffer = buffer.read(cx).text_snapshot();
1451 diff_state.update(cx, |diff_state, cx| {
1452 diff_state.recalculate_diffs(buffer.clone(), cx);
1453 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1454 });
1455 futures.push(diff_state.update(cx, |diff_state, cx| {
1456 diff_state
1457 .reparse_conflict_markers(buffer, cx)
1458 .map(|_| {})
1459 .boxed()
1460 }));
1461 }
1462 }
1463 async move {
1464 futures::future::join_all(futures).await;
1465 }
1466 }
1467
1468 fn on_buffer_diff_event(
1469 &mut self,
1470 diff: Entity<buffer_diff::BufferDiff>,
1471 event: &BufferDiffEvent,
1472 cx: &mut Context<Self>,
1473 ) {
1474 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
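            // Bump the staging operation count before writing the index (see
            // `BufferGitState`), then spawn the job that writes the new index
            // text; on failure, pending hunks are cleared and an error event is
            // emitted.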
1475 let buffer_id = diff.read(cx).buffer_id;
1476 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1477 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1478 diff_state.hunk_staging_operation_count += 1;
1479 diff_state.hunk_staging_operation_count
1480 });
1481 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1482 let recv = repo.update(cx, |repo, cx| {
1483 log::debug!("hunks changed for {}", path.as_unix_str());
1484 repo.spawn_set_index_text_job(
1485 path,
1486 new_index_text.as_ref().map(|rope| rope.to_string()),
1487 Some(hunk_staging_operation_count),
1488 cx,
1489 )
1490 });
1491 let diff = diff.downgrade();
1492 cx.spawn(async move |this, cx| {
1493 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1494 diff.update(cx, |diff, cx| {
1495 diff.clear_pending_hunks(cx);
1496 })
1497 .ok();
1498 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1499 .ok();
1500 }
1501 })
1502 .detach();
1503 }
1504 }
1505 }
1506 }
1507
1508 fn local_worktree_git_repos_changed(
1509 &mut self,
1510 worktree: Entity<Worktree>,
1511 changed_repos: &UpdatedGitRepositoriesSet,
1512 cx: &mut Context<Self>,
1513 ) {
1514 log::debug!("local worktree repos changed");
1515 debug_assert!(worktree.read(cx).is_local());
1516
1517 for repository in self.repositories.values() {
1518 repository.update(cx, |repository, cx| {
1519 let repo_abs_path = &repository.work_directory_abs_path;
1520 if changed_repos.iter().any(|update| {
1521 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1522 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1523 }) {
1524 repository.reload_buffer_diff_bases(cx);
1525 }
1526 });
1527 }
1528 }
1529
1530 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1531 &self.repositories
1532 }
1533
1534 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1535 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1536 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1537 Some(status.status)
1538 }
1539
1540 pub fn repository_and_path_for_buffer_id(
1541 &self,
1542 buffer_id: BufferId,
1543 cx: &App,
1544 ) -> Option<(Entity<Repository>, RepoPath)> {
1545 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1546 let project_path = buffer.read(cx).project_path(cx)?;
1547 self.repository_and_path_for_project_path(&project_path, cx)
1548 }
1549
1550 pub fn repository_and_path_for_project_path(
1551 &self,
1552 path: &ProjectPath,
1553 cx: &App,
1554 ) -> Option<(Entity<Repository>, RepoPath)> {
1555 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
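        // When repositories are nested, prefer the innermost one, i.e. the one
        // with the longest working-directory path containing this project path.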
1556 self.repositories
1557 .values()
1558 .filter_map(|repo| {
1559 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1560 Some((repo.clone(), repo_path))
1561 })
1562 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1563 }
1564
1565 pub fn git_init(
1566 &self,
1567 path: Arc<Path>,
1568 fallback_branch_name: String,
1569 cx: &App,
1570 ) -> Task<Result<()>> {
1571 match &self.state {
1572 GitStoreState::Local { fs, .. } => {
1573 let fs = fs.clone();
1574 cx.background_executor()
1575 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1576 }
1577 GitStoreState::Remote {
1578 upstream_client,
1579 upstream_project_id: project_id,
1580 ..
1581 } => {
1582 let client = upstream_client.clone();
1583 let project_id = *project_id;
1584 cx.background_executor().spawn(async move {
1585 client
1586 .request(proto::GitInit {
1587 project_id: project_id,
1588 abs_path: path.to_string_lossy().into_owned(),
1589 fallback_branch_name,
1590 })
1591 .await?;
1592 Ok(())
1593 })
1594 }
1595 }
1596 }
1597
1598 pub fn git_clone(
1599 &self,
1600 repo: String,
1601 path: impl Into<Arc<std::path::Path>>,
1602 cx: &App,
1603 ) -> Task<Result<()>> {
1604 let path = path.into();
1605 match &self.state {
1606 GitStoreState::Local { fs, .. } => {
1607 let fs = fs.clone();
1608 cx.background_executor()
1609 .spawn(async move { fs.git_clone(&repo, &path).await })
1610 }
1611 GitStoreState::Remote {
1612 upstream_client,
1613 upstream_project_id,
1614 ..
1615 } => {
1616 if upstream_client.is_via_collab() {
1617 return Task::ready(Err(anyhow!(
1618 "Git Clone isn't supported for project guests"
1619 )));
1620 }
1621 let request = upstream_client.request(proto::GitClone {
1622 project_id: *upstream_project_id,
1623 abs_path: path.to_string_lossy().into_owned(),
1624 remote_repo: repo,
1625 });
1626
1627 cx.background_spawn(async move {
1628 let result = request.await?;
1629
1630 match result.success {
1631 true => Ok(()),
1632 false => Err(anyhow!("Git Clone failed")),
1633 }
1634 })
1635 }
1636 }
1637 }
1638
1639 async fn handle_update_repository(
1640 this: Entity<Self>,
1641 envelope: TypedEnvelope<proto::UpdateRepository>,
1642 mut cx: AsyncApp,
1643 ) -> Result<()> {
1644 this.update(&mut cx, |this, cx| {
1645 let path_style = this.worktree_store.read(cx).path_style();
1646 let mut update = envelope.payload;
1647
1648 let id = RepositoryId::from_proto(update.id);
1649 let client = this.upstream_client().context("no upstream client")?;
1650
1651 let mut repo_subscription = None;
1652 let repo = this.repositories.entry(id).or_insert_with(|| {
1653 let git_store = cx.weak_entity();
1654 let repo = cx.new(|cx| {
1655 Repository::remote(
1656 id,
1657 Path::new(&update.abs_path).into(),
1658 path_style,
1659 ProjectId(update.project_id),
1660 client,
1661 git_store,
1662 cx,
1663 )
1664 });
1665 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1666 cx.emit(GitStoreEvent::RepositoryAdded);
1667 repo
1668 });
1669 this._subscriptions.extend(repo_subscription);
1670
1671 repo.update(cx, {
1672 let update = update.clone();
1673 |repo, cx| repo.apply_remote_update(update, cx)
1674 })?;
1675
1676 this.active_repo_id.get_or_insert_with(|| {
1677 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1678 id
1679 });
1680
1681 if let Some((client, project_id)) = this.downstream_client() {
1682 update.project_id = project_id.to_proto();
1683 client.send(update).log_err();
1684 }
1685 Ok(())
1686 })?
1687 }
1688
1689 async fn handle_remove_repository(
1690 this: Entity<Self>,
1691 envelope: TypedEnvelope<proto::RemoveRepository>,
1692 mut cx: AsyncApp,
1693 ) -> Result<()> {
1694 this.update(&mut cx, |this, cx| {
1695 let mut update = envelope.payload;
1696 let id = RepositoryId::from_proto(update.id);
1697 this.repositories.remove(&id);
1698 if let Some((client, project_id)) = this.downstream_client() {
1699 update.project_id = project_id.to_proto();
1700 client.send(update).log_err();
1701 }
1702 if this.active_repo_id == Some(id) {
1703 this.active_repo_id = None;
1704 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1705 }
1706 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1707 })
1708 }
1709
1710 async fn handle_git_init(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::GitInit>,
1713 cx: AsyncApp,
1714 ) -> Result<proto::Ack> {
1715 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1716 let name = envelope.payload.fallback_branch_name;
1717 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1718 .await?;
1719
1720 Ok(proto::Ack {})
1721 }
1722
1723 async fn handle_git_clone(
1724 this: Entity<Self>,
1725 envelope: TypedEnvelope<proto::GitClone>,
1726 cx: AsyncApp,
1727 ) -> Result<proto::GitCloneResponse> {
1728 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1729 let repo_name = envelope.payload.remote_repo;
1730 let result = cx
1731 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1732 .await;
1733
1734 Ok(proto::GitCloneResponse {
1735 success: result.is_ok(),
1736 })
1737 }
1738
1739 async fn handle_fetch(
1740 this: Entity<Self>,
1741 envelope: TypedEnvelope<proto::Fetch>,
1742 mut cx: AsyncApp,
1743 ) -> Result<proto::RemoteMessageResponse> {
1744 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1745 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1746 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1747 let askpass_id = envelope.payload.askpass_id;
1748
1749 let askpass = make_remote_delegate(
1750 this,
1751 envelope.payload.project_id,
1752 repository_id,
1753 askpass_id,
1754 &mut cx,
1755 );
1756
1757 let remote_output = repository_handle
1758 .update(&mut cx, |repository_handle, cx| {
1759 repository_handle.fetch(fetch_options, askpass, cx)
1760 })?
1761 .await??;
1762
1763 Ok(proto::RemoteMessageResponse {
1764 stdout: remote_output.stdout,
1765 stderr: remote_output.stderr,
1766 })
1767 }
1768
1769 async fn handle_push(
1770 this: Entity<Self>,
1771 envelope: TypedEnvelope<proto::Push>,
1772 mut cx: AsyncApp,
1773 ) -> Result<proto::RemoteMessageResponse> {
1774 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1775 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1776
1777 let askpass_id = envelope.payload.askpass_id;
1778 let askpass = make_remote_delegate(
1779 this,
1780 envelope.payload.project_id,
1781 repository_id,
1782 askpass_id,
1783 &mut cx,
1784 );
1785
1786 let options = envelope
1787 .payload
1788 .options
1789 .as_ref()
1790 .map(|_| match envelope.payload.options() {
1791 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1792 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1793 });
1794
1795 let branch_name = envelope.payload.branch_name.into();
1796 let remote_name = envelope.payload.remote_name.into();
1797
1798 let remote_output = repository_handle
1799 .update(&mut cx, |repository_handle, cx| {
1800 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1801 })?
1802 .await??;
1803 Ok(proto::RemoteMessageResponse {
1804 stdout: remote_output.stdout,
1805 stderr: remote_output.stderr,
1806 })
1807 }
1808
1809 async fn handle_pull(
1810 this: Entity<Self>,
1811 envelope: TypedEnvelope<proto::Pull>,
1812 mut cx: AsyncApp,
1813 ) -> Result<proto::RemoteMessageResponse> {
1814 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1815 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1816 let askpass_id = envelope.payload.askpass_id;
1817 let askpass = make_remote_delegate(
1818 this,
1819 envelope.payload.project_id,
1820 repository_id,
1821 askpass_id,
1822 &mut cx,
1823 );
1824
1825 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1826 let remote_name = envelope.payload.remote_name.into();
1827 let rebase = envelope.payload.rebase;
1828
1829 let remote_message = repository_handle
1830 .update(&mut cx, |repository_handle, cx| {
1831 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1832 })?
1833 .await??;
1834
1835 Ok(proto::RemoteMessageResponse {
1836 stdout: remote_message.stdout,
1837 stderr: remote_message.stderr,
1838 })
1839 }
1840
1841 async fn handle_stage(
1842 this: Entity<Self>,
1843 envelope: TypedEnvelope<proto::Stage>,
1844 mut cx: AsyncApp,
1845 ) -> Result<proto::Ack> {
1846 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1847 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1848
1849 let entries = envelope
1850 .payload
1851 .paths
1852 .into_iter()
1853 .map(|path| RepoPath::new(&path))
1854 .collect::<Result<Vec<_>>>()?;
1855
1856 repository_handle
1857 .update(&mut cx, |repository_handle, cx| {
1858 repository_handle.stage_entries(entries, cx)
1859 })?
1860 .await?;
1861 Ok(proto::Ack {})
1862 }
1863
1864 async fn handle_unstage(
1865 this: Entity<Self>,
1866 envelope: TypedEnvelope<proto::Unstage>,
1867 mut cx: AsyncApp,
1868 ) -> Result<proto::Ack> {
1869 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1870 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1871
1872 let entries = envelope
1873 .payload
1874 .paths
1875 .into_iter()
1876 .map(|path| RepoPath::new(&path))
1877 .collect::<Result<Vec<_>>>()?;
1878
1879 repository_handle
1880 .update(&mut cx, |repository_handle, cx| {
1881 repository_handle.unstage_entries(entries, cx)
1882 })?
1883 .await?;
1884
1885 Ok(proto::Ack {})
1886 }
1887
1888 async fn handle_stash(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::Stash>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let entries = envelope
1897 .payload
1898 .paths
1899 .into_iter()
1900 .map(|path| RepoPath::new(&path))
1901 .collect::<Result<Vec<_>>>()?;
1902
1903 repository_handle
1904 .update(&mut cx, |repository_handle, cx| {
1905 repository_handle.stash_entries(entries, cx)
1906 })?
1907 .await?;
1908
1909 Ok(proto::Ack {})
1910 }
1911
1912 async fn handle_stash_pop(
1913 this: Entity<Self>,
1914 envelope: TypedEnvelope<proto::StashPop>,
1915 mut cx: AsyncApp,
1916 ) -> Result<proto::Ack> {
1917 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1918 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1919 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1920
1921 repository_handle
1922 .update(&mut cx, |repository_handle, cx| {
1923 repository_handle.stash_pop(stash_index, cx)
1924 })?
1925 .await?;
1926
1927 Ok(proto::Ack {})
1928 }
1929
1930 async fn handle_stash_apply(
1931 this: Entity<Self>,
1932 envelope: TypedEnvelope<proto::StashApply>,
1933 mut cx: AsyncApp,
1934 ) -> Result<proto::Ack> {
1935 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1936 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1937 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1938
1939 repository_handle
1940 .update(&mut cx, |repository_handle, cx| {
1941 repository_handle.stash_apply(stash_index, cx)
1942 })?
1943 .await?;
1944
1945 Ok(proto::Ack {})
1946 }
1947
1948 async fn handle_stash_drop(
1949 this: Entity<Self>,
1950 envelope: TypedEnvelope<proto::StashDrop>,
1951 mut cx: AsyncApp,
1952 ) -> Result<proto::Ack> {
1953 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1954 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1955 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1956
1957 repository_handle
1958 .update(&mut cx, |repository_handle, cx| {
1959 repository_handle.stash_drop(stash_index, cx)
1960 })?
1961 .await??;
1962
1963 Ok(proto::Ack {})
1964 }
1965
1966 async fn handle_set_index_text(
1967 this: Entity<Self>,
1968 envelope: TypedEnvelope<proto::SetIndexText>,
1969 mut cx: AsyncApp,
1970 ) -> Result<proto::Ack> {
1971 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1972 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1973 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1974
1975 repository_handle
1976 .update(&mut cx, |repository_handle, cx| {
1977 repository_handle.spawn_set_index_text_job(
1978 repo_path,
1979 envelope.payload.text,
1980 None,
1981 cx,
1982 )
1983 })?
1984 .await??;
1985 Ok(proto::Ack {})
1986 }
1987
1988 async fn handle_commit(
1989 this: Entity<Self>,
1990 envelope: TypedEnvelope<proto::Commit>,
1991 mut cx: AsyncApp,
1992 ) -> Result<proto::Ack> {
1993 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1994 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1995 let askpass_id = envelope.payload.askpass_id;
1996
1997 let askpass = make_remote_delegate(
1998 this,
1999 envelope.payload.project_id,
2000 repository_id,
2001 askpass_id,
2002 &mut cx,
2003 );
2004
2005 let message = SharedString::from(envelope.payload.message);
2006 let name = envelope.payload.name.map(SharedString::from);
2007 let email = envelope.payload.email.map(SharedString::from);
2008 let options = envelope.payload.options.unwrap_or_default();
2009
2010 repository_handle
2011 .update(&mut cx, |repository_handle, cx| {
2012 repository_handle.commit(
2013 message,
2014 name.zip(email),
2015 CommitOptions {
2016 amend: options.amend,
2017 signoff: options.signoff,
2018 },
2019 askpass,
2020 cx,
2021 )
2022 })?
2023 .await??;
2024 Ok(proto::Ack {})
2025 }
2026
2027 async fn handle_get_remotes(
2028 this: Entity<Self>,
2029 envelope: TypedEnvelope<proto::GetRemotes>,
2030 mut cx: AsyncApp,
2031 ) -> Result<proto::GetRemotesResponse> {
2032 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2033 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2034
2035 let branch_name = envelope.payload.branch_name;
2036
2037 let remotes = repository_handle
2038 .update(&mut cx, |repository_handle, _| {
2039 repository_handle.get_remotes(branch_name)
2040 })?
2041 .await??;
2042
2043 Ok(proto::GetRemotesResponse {
2044 remotes: remotes
2045 .into_iter()
2046 .map(|remote| proto::get_remotes_response::Remote {
2047 name: remote.name.to_string(),
2048 })
2049 .collect::<Vec<_>>(),
2050 })
2051 }
2052
2053 async fn handle_get_worktrees(
2054 this: Entity<Self>,
2055 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2056 mut cx: AsyncApp,
2057 ) -> Result<proto::GitWorktreesResponse> {
2058 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2059 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2060
2061 let worktrees = repository_handle
2062 .update(&mut cx, |repository_handle, _| {
2063 repository_handle.worktrees()
2064 })?
2065 .await??;
2066
2067 Ok(proto::GitWorktreesResponse {
2068 worktrees: worktrees
2069 .into_iter()
2070 .map(|worktree| worktree_to_proto(&worktree))
2071 .collect::<Vec<_>>(),
2072 })
2073 }
2074
2075 async fn handle_create_worktree(
2076 this: Entity<Self>,
2077 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2078 mut cx: AsyncApp,
2079 ) -> Result<proto::Ack> {
2080 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2081 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2082 let directory = PathBuf::from(envelope.payload.directory);
2083 let name = envelope.payload.name;
2084 let commit = envelope.payload.commit;
2085
2086 repository_handle
2087 .update(&mut cx, |repository_handle, _| {
2088 repository_handle.create_worktree(name, directory, commit)
2089 })?
2090 .await??;
2091
2092 Ok(proto::Ack {})
2093 }
2094
2095 async fn handle_get_branches(
2096 this: Entity<Self>,
2097 envelope: TypedEnvelope<proto::GitGetBranches>,
2098 mut cx: AsyncApp,
2099 ) -> Result<proto::GitBranchesResponse> {
2100 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2101 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2102
2103 let branches = repository_handle
2104 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2105 .await??;
2106
2107 Ok(proto::GitBranchesResponse {
2108 branches: branches
2109 .into_iter()
2110 .map(|branch| branch_to_proto(&branch))
2111 .collect::<Vec<_>>(),
2112 })
2113 }
2114 async fn handle_get_default_branch(
2115 this: Entity<Self>,
2116 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2117 mut cx: AsyncApp,
2118 ) -> Result<proto::GetDefaultBranchResponse> {
2119 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2120 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2121
2122 let branch = repository_handle
2123 .update(&mut cx, |repository_handle, _| {
2124 repository_handle.default_branch()
2125 })?
2126 .await??
2127 .map(Into::into);
2128
2129 Ok(proto::GetDefaultBranchResponse { branch })
2130 }

2131 async fn handle_create_branch(
2132 this: Entity<Self>,
2133 envelope: TypedEnvelope<proto::GitCreateBranch>,
2134 mut cx: AsyncApp,
2135 ) -> Result<proto::Ack> {
2136 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2137 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2138 let branch_name = envelope.payload.branch_name;
2139
2140 repository_handle
2141 .update(&mut cx, |repository_handle, _| {
2142 repository_handle.create_branch(branch_name, None)
2143 })?
2144 .await??;
2145
2146 Ok(proto::Ack {})
2147 }
2148
2149 async fn handle_change_branch(
2150 this: Entity<Self>,
2151 envelope: TypedEnvelope<proto::GitChangeBranch>,
2152 mut cx: AsyncApp,
2153 ) -> Result<proto::Ack> {
2154 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2155 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2156 let branch_name = envelope.payload.branch_name;
2157
2158 repository_handle
2159 .update(&mut cx, |repository_handle, _| {
2160 repository_handle.change_branch(branch_name)
2161 })?
2162 .await??;
2163
2164 Ok(proto::Ack {})
2165 }
2166
2167 async fn handle_rename_branch(
2168 this: Entity<Self>,
2169 envelope: TypedEnvelope<proto::GitRenameBranch>,
2170 mut cx: AsyncApp,
2171 ) -> Result<proto::Ack> {
2172 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2173 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2174 let branch = envelope.payload.branch;
2175 let new_name = envelope.payload.new_name;
2176
2177 repository_handle
2178 .update(&mut cx, |repository_handle, _| {
2179 repository_handle.rename_branch(branch, new_name)
2180 })?
2181 .await??;
2182
2183 Ok(proto::Ack {})
2184 }
2185
2186 async fn handle_show(
2187 this: Entity<Self>,
2188 envelope: TypedEnvelope<proto::GitShow>,
2189 mut cx: AsyncApp,
2190 ) -> Result<proto::GitCommitDetails> {
2191 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2192 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2193
2194 let commit = repository_handle
2195 .update(&mut cx, |repository_handle, _| {
2196 repository_handle.show(envelope.payload.commit)
2197 })?
2198 .await??;
2199 Ok(proto::GitCommitDetails {
2200 sha: commit.sha.into(),
2201 message: commit.message.into(),
2202 commit_timestamp: commit.commit_timestamp,
2203 author_email: commit.author_email.into(),
2204 author_name: commit.author_name.into(),
2205 })
2206 }
2207
2208 async fn handle_load_commit_diff(
2209 this: Entity<Self>,
2210 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2211 mut cx: AsyncApp,
2212 ) -> Result<proto::LoadCommitDiffResponse> {
2213 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2214 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2215
2216 let commit_diff = repository_handle
2217 .update(&mut cx, |repository_handle, _| {
2218 repository_handle.load_commit_diff(envelope.payload.commit)
2219 })?
2220 .await??;
2221 Ok(proto::LoadCommitDiffResponse {
2222 files: commit_diff
2223 .files
2224 .into_iter()
2225 .map(|file| proto::CommitFile {
2226 path: file.path.to_proto(),
2227 old_text: file.old_text,
2228 new_text: file.new_text,
2229 })
2230 .collect(),
2231 })
2232 }
2233
2234 async fn handle_reset(
2235 this: Entity<Self>,
2236 envelope: TypedEnvelope<proto::GitReset>,
2237 mut cx: AsyncApp,
2238 ) -> Result<proto::Ack> {
2239 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2240 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2241
2242 let mode = match envelope.payload.mode() {
2243 git_reset::ResetMode::Soft => ResetMode::Soft,
2244 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2245 };
2246
2247 repository_handle
2248 .update(&mut cx, |repository_handle, cx| {
2249 repository_handle.reset(envelope.payload.commit, mode, cx)
2250 })?
2251 .await??;
2252 Ok(proto::Ack {})
2253 }
2254
2255 async fn handle_checkout_files(
2256 this: Entity<Self>,
2257 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2258 mut cx: AsyncApp,
2259 ) -> Result<proto::Ack> {
2260 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2261 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2262 let paths = envelope
2263 .payload
2264 .paths
2265 .iter()
2266 .map(|s| RepoPath::from_proto(s))
2267 .collect::<Result<Vec<_>>>()?;
2268
2269 repository_handle
2270 .update(&mut cx, |repository_handle, cx| {
2271 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2272 })?
2273 .await?;
2274 Ok(proto::Ack {})
2275 }
2276
2277 async fn handle_open_commit_message_buffer(
2278 this: Entity<Self>,
2279 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2280 mut cx: AsyncApp,
2281 ) -> Result<proto::OpenBufferResponse> {
2282 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2283 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2284 let buffer = repository
2285 .update(&mut cx, |repository, cx| {
2286 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2287 })?
2288 .await?;
2289
2290 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2291 this.update(&mut cx, |this, cx| {
2292 this.buffer_store.update(cx, |buffer_store, cx| {
2293 buffer_store
2294 .create_buffer_for_peer(
2295 &buffer,
2296 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2297 cx,
2298 )
2299 .detach_and_log_err(cx);
2300 })
2301 })?;
2302
2303 Ok(proto::OpenBufferResponse {
2304 buffer_id: buffer_id.to_proto(),
2305 })
2306 }
2307
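/// Handles a forwarded askpass prompt by temporarily taking the registered delegate for this askpass session, prompting for the credential, and returning the decrypted response.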
2308 async fn handle_askpass(
2309 this: Entity<Self>,
2310 envelope: TypedEnvelope<proto::AskPassRequest>,
2311 mut cx: AsyncApp,
2312 ) -> Result<proto::AskPassResponse> {
2313 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2314 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2315
2316 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2317 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2318 debug_panic!("no askpass found");
2319 anyhow::bail!("no askpass found");
2320 };
2321
2322 let response = askpass
2323 .ask_password(envelope.payload.prompt)
2324 .await
2325 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2326
2327 delegates
2328 .lock()
2329 .insert(envelope.payload.askpass_id, askpass);
2330
2331 // Note: the askpass response is decrypted here and sent back to the requesting peer unencrypted over the RPC connection.
2332 Ok(proto::AskPassResponse {
2333 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2334 })
2335 }
2336
2337 async fn handle_check_for_pushed_commits(
2338 this: Entity<Self>,
2339 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2340 mut cx: AsyncApp,
2341 ) -> Result<proto::CheckForPushedCommitsResponse> {
2342 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2343 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2344
2345 let branches = repository_handle
2346 .update(&mut cx, |repository_handle, _| {
2347 repository_handle.check_for_pushed_commits()
2348 })?
2349 .await??;
2350 Ok(proto::CheckForPushedCommitsResponse {
2351 pushed_to: branches
2352 .into_iter()
2353 .map(|commit| commit.to_string())
2354 .collect(),
2355 })
2356 }
2357
2358 async fn handle_git_diff(
2359 this: Entity<Self>,
2360 envelope: TypedEnvelope<proto::GitDiff>,
2361 mut cx: AsyncApp,
2362 ) -> Result<proto::GitDiffResponse> {
2363 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2364 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2365 let diff_type = match envelope.payload.diff_type() {
2366 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2367 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2368 };
2369
2370 let mut diff = repository_handle
2371 .update(&mut cx, |repository_handle, cx| {
2372 repository_handle.diff(diff_type, cx)
2373 })?
2374 .await??;
2375 const ONE_MB: usize = 1_000_000;
2376 if diff.len() > ONE_MB {
2377 diff = diff.chars().take(ONE_MB).collect();
2378 }
2379
2380 Ok(proto::GitDiffResponse { diff })
2381 }
2382
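/// Computes a tree diff for the requested repository, either between `base` and `head` directly or relative to their merge base when `is_merge` is set.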
2383 async fn handle_tree_diff(
2384 this: Entity<Self>,
2385 request: TypedEnvelope<proto::GetTreeDiff>,
2386 mut cx: AsyncApp,
2387 ) -> Result<proto::GetTreeDiffResponse> {
2388 let repository_id = RepositoryId(request.payload.repository_id);
2389 let diff_type = if request.payload.is_merge {
2390 DiffTreeType::MergeBase {
2391 base: request.payload.base.into(),
2392 head: request.payload.head.into(),
2393 }
2394 } else {
2395 DiffTreeType::Since {
2396 base: request.payload.base.into(),
2397 head: request.payload.head.into(),
2398 }
2399 };
2400
2401 let diff = this
2402 .update(&mut cx, |this, cx| {
2403 let repository = this.repositories().get(&repository_id)?;
2404 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2405 })?
2406 .context("missing repository")?
2407 .await??;
2408
2409 Ok(proto::GetTreeDiffResponse {
2410 entries: diff
2411 .entries
2412 .into_iter()
2413 .map(|(path, status)| proto::TreeDiffStatus {
2414 path: path.as_ref().to_proto(),
2415 status: match status {
2416 TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2417 TreeDiffStatus::Modified { .. } => {
2418 proto::tree_diff_status::Status::Modified.into()
2419 }
2420 TreeDiffStatus::Deleted { .. } => {
2421 proto::tree_diff_status::Status::Deleted.into()
2422 }
2423 },
2424 oid: match status {
2425 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2426 Some(old.to_string())
2427 }
2428 TreeDiffStatus::Added => None,
2429 },
2430 })
2431 .collect(),
2432 })
2433 }
2434
2435 async fn handle_get_blob_content(
2436 this: Entity<Self>,
2437 request: TypedEnvelope<proto::GetBlobContent>,
2438 mut cx: AsyncApp,
2439 ) -> Result<proto::GetBlobContentResponse> {
2440 let oid = git::Oid::from_str(&request.payload.oid)?;
2441 let repository_id = RepositoryId(request.payload.repository_id);
2442 let content = this
2443 .update(&mut cx, |this, cx| {
2444 let repository = this.repositories().get(&repository_id)?;
2445 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2446 })?
2447 .context("missing repository")?
2448 .await?;
2449 Ok(proto::GetBlobContentResponse { content })
2450 }
2451
2452 async fn handle_open_unstaged_diff(
2453 this: Entity<Self>,
2454 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2455 mut cx: AsyncApp,
2456 ) -> Result<proto::OpenUnstagedDiffResponse> {
2457 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2458 let diff = this
2459 .update(&mut cx, |this, cx| {
2460 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2461 Some(this.open_unstaged_diff(buffer, cx))
2462 })?
2463 .context("missing buffer")?
2464 .await?;
2465 this.update(&mut cx, |this, _| {
2466 let shared_diffs = this
2467 .shared_diffs
2468 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2469 .or_default();
2470 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2471 })?;
2472 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2473 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2474 }
2475
2476 async fn handle_open_uncommitted_diff(
2477 this: Entity<Self>,
2478 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2479 mut cx: AsyncApp,
2480 ) -> Result<proto::OpenUncommittedDiffResponse> {
2481 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2482 let diff = this
2483 .update(&mut cx, |this, cx| {
2484 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2485 Some(this.open_uncommitted_diff(buffer, cx))
2486 })?
2487 .context("missing buffer")?
2488 .await?;
2489 this.update(&mut cx, |this, _| {
2490 let shared_diffs = this
2491 .shared_diffs
2492 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2493 .or_default();
2494 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2495 })?;
2496 diff.read_with(&cx, |diff, cx| {
2497 use proto::open_uncommitted_diff_response::Mode;
2498
2499 let unstaged_diff = diff.secondary_diff();
2500 let index_snapshot = unstaged_diff.and_then(|diff| {
2501 let diff = diff.read(cx);
2502 diff.base_text_exists().then(|| diff.base_text())
2503 });
2504
2505 let mode;
2506 let staged_text;
2507 let committed_text;
2508 if diff.base_text_exists() {
2509 let committed_snapshot = diff.base_text();
2510 committed_text = Some(committed_snapshot.text());
2511 if let Some(index_text) = index_snapshot {
2512 if index_text.remote_id() == committed_snapshot.remote_id() {
2513 mode = Mode::IndexMatchesHead;
2514 staged_text = None;
2515 } else {
2516 mode = Mode::IndexAndHead;
2517 staged_text = Some(index_text.text());
2518 }
2519 } else {
2520 mode = Mode::IndexAndHead;
2521 staged_text = None;
2522 }
2523 } else {
2524 mode = Mode::IndexAndHead;
2525 committed_text = None;
2526 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2527 }
2528
2529 proto::OpenUncommittedDiffResponse {
2530 committed_text,
2531 staged_text,
2532 mode: mode.into(),
2533 }
2534 })
2535 }
2536
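/// Applies updated index/HEAD base texts received over RPC to the corresponding buffer's local diff state.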
2537 async fn handle_update_diff_bases(
2538 this: Entity<Self>,
2539 request: TypedEnvelope<proto::UpdateDiffBases>,
2540 mut cx: AsyncApp,
2541 ) -> Result<()> {
2542 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2543 this.update(&mut cx, |this, cx| {
2544 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2545 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2546 {
2547 let buffer = buffer.read(cx).text_snapshot();
2548 diff_state.update(cx, |diff_state, cx| {
2549 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2550 })
2551 }
2552 })
2553 }
2554
2555 async fn handle_blame_buffer(
2556 this: Entity<Self>,
2557 envelope: TypedEnvelope<proto::BlameBuffer>,
2558 mut cx: AsyncApp,
2559 ) -> Result<proto::BlameBufferResponse> {
2560 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2561 let version = deserialize_version(&envelope.payload.version);
2562 let buffer = this.read_with(&cx, |this, cx| {
2563 this.buffer_store.read(cx).get_existing(buffer_id)
2564 })??;
2565 buffer
2566 .update(&mut cx, |buffer, _| {
2567 buffer.wait_for_version(version.clone())
2568 })?
2569 .await?;
2570 let blame = this
2571 .update(&mut cx, |this, cx| {
2572 this.blame_buffer(&buffer, Some(version), cx)
2573 })?
2574 .await?;
2575 Ok(serialize_blame_buffer_response(blame))
2576 }
2577
2578 async fn handle_get_permalink_to_line(
2579 this: Entity<Self>,
2580 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2581 mut cx: AsyncApp,
2582 ) -> Result<proto::GetPermalinkToLineResponse> {
2583 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2584 // The payload's `version` field is currently unused here.
2585 let selection = {
2586 let proto_selection = envelope
2587 .payload
2588 .selection
2589 .context("no selection to get permalink for defined")?;
2590 proto_selection.start as u32..proto_selection.end as u32
2591 };
2592 let buffer = this.read_with(&cx, |this, cx| {
2593 this.buffer_store.read(cx).get_existing(buffer_id)
2594 })??;
2595 let permalink = this
2596 .update(&mut cx, |this, cx| {
2597 this.get_permalink_to_line(&buffer, selection, cx)
2598 })?
2599 .await?;
2600 Ok(proto::GetPermalinkToLineResponse {
2601 permalink: permalink.to_string(),
2602 })
2603 }
2604
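/// Resolves the repository referenced by an incoming RPC request, erroring if it is no longer tracked by this store.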
2605 fn repository_for_request(
2606 this: &Entity<Self>,
2607 id: RepositoryId,
2608 cx: &mut AsyncApp,
2609 ) -> Result<Entity<Repository>> {
2610 this.read_with(cx, |this, _| {
2611 this.repositories
2612 .get(&id)
2613 .context("missing repository handle")
2614 .cloned()
2615 })?
2616 }
2617
2618 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2619 self.repositories
2620 .iter()
2621 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2622 .collect()
2623 }
2624
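/// Groups the updated worktree entries by the repository that contains them, assigning each path to its innermost enclosing repository on a background task.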
2625 fn process_updated_entries(
2626 &self,
2627 worktree: &Entity<Worktree>,
2628 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2629 cx: &mut App,
2630 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2631 let path_style = worktree.read(cx).path_style();
2632 let mut repo_paths = self
2633 .repositories
2634 .values()
2635 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2636 .collect::<Vec<_>>();
2637 let mut entries: Vec<_> = updated_entries
2638 .iter()
2639 .map(|(path, _, _)| path.clone())
2640 .collect();
2641 entries.sort();
2642 let worktree = worktree.read(cx);
2643
2644 let entries = entries
2645 .into_iter()
2646 .map(|path| worktree.absolutize(&path))
2647 .collect::<Arc<[_]>>();
2648
2649 let executor = cx.background_executor().clone();
2650 cx.background_executor().spawn(async move {
2651 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2652 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2653 let mut tasks = FuturesOrdered::new();
2654 for (repo_path, repo) in repo_paths.into_iter().rev() {
2655 let entries = entries.clone();
2656 let task = executor.spawn(async move {
2657 // Find all repository paths that belong to this repo
2658 let mut ix = entries.partition_point(|path| path < &*repo_path);
2659 if ix == entries.len() {
2660 return None;
2661 };
2662
2663 let mut paths = Vec::new();
2664 // All paths prefixed by a given repo's work directory form a contiguous range.
2665 while let Some(path) = entries.get(ix)
2666 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2667 &repo_path, path, path_style,
2668 )
2669 {
2670 paths.push((repo_path, ix));
2671 ix += 1;
2672 }
2673 if paths.is_empty() {
2674 None
2675 } else {
2676 Some((repo, paths))
2677 }
2678 });
2679 tasks.push_back(task);
2680 }
2681
2682 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2683 let mut path_was_used = vec![false; entries.len()];
2684 let tasks = tasks.collect::<Vec<_>>().await;
2685 // Tasks were queued for repositories sorted by path and then reversed, so more deeply nested
2686 // repositories are processed first. We always want to assign a path to its innermost repository.
2687 for t in tasks {
2688 let Some((repo, paths)) = t else {
2689 continue;
2690 };
2691 let entry = paths_by_git_repo.entry(repo).or_default();
2692 for (repo_path, ix) in paths {
2693 if path_was_used[ix] {
2694 continue;
2695 }
2696 path_was_used[ix] = true;
2697 entry.push(repo_path);
2698 }
2699 }
2700
2701 paths_by_git_repo
2702 })
2703 }
2704}
2705
2706impl BufferGitState {
2707 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2708 Self {
2709 unstaged_diff: Default::default(),
2710 uncommitted_diff: Default::default(),
2711 recalculate_diff_task: Default::default(),
2712 language: Default::default(),
2713 language_registry: Default::default(),
2714 recalculating_tx: postage::watch::channel_with(false).0,
2715 hunk_staging_operation_count: 0,
2716 hunk_staging_operation_count_as_of_write: 0,
2717 head_text: Default::default(),
2718 index_text: Default::default(),
2719 head_changed: Default::default(),
2720 index_changed: Default::default(),
2721 language_changed: Default::default(),
2722 conflict_updated_futures: Default::default(),
2723 conflict_set: Default::default(),
2724 reparse_conflict_markers_task: Default::default(),
2725 }
2726 }
2727
2728 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2729 self.language = buffer.read(cx).language().cloned();
2730 self.language_changed = true;
2731 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2732 }
2733
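/// Re-parses conflict markers for the buffer on a background task and updates the associated `ConflictSet`; the returned receiver fires once the new snapshot has been applied (and is dropped if there is nothing to reparse).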
2734 fn reparse_conflict_markers(
2735 &mut self,
2736 buffer: text::BufferSnapshot,
2737 cx: &mut Context<Self>,
2738 ) -> oneshot::Receiver<()> {
2739 let (tx, rx) = oneshot::channel();
2740
2741 let Some(conflict_set) = self
2742 .conflict_set
2743 .as_ref()
2744 .and_then(|conflict_set| conflict_set.upgrade())
2745 else {
2746 return rx;
2747 };
2748
2749 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2750 if conflict_set.has_conflict {
2751 Some(conflict_set.snapshot())
2752 } else {
2753 None
2754 }
2755 });
2756
2757 if let Some(old_snapshot) = old_snapshot {
2758 self.conflict_updated_futures.push(tx);
2759 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2760 let (snapshot, changed_range) = cx
2761 .background_spawn(async move {
2762 let new_snapshot = ConflictSet::parse(&buffer);
2763 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2764 (new_snapshot, changed_range)
2765 })
2766 .await;
2767 this.update(cx, |this, cx| {
2768 if let Some(conflict_set) = &this.conflict_set {
2769 conflict_set
2770 .update(cx, |conflict_set, cx| {
2771 conflict_set.set_snapshot(snapshot, changed_range, cx);
2772 })
2773 .ok();
2774 }
2775 let futures = std::mem::take(&mut this.conflict_updated_futures);
2776 for tx in futures {
2777 tx.send(()).ok();
2778 }
2779 })
2780 }))
2781 }
2782
2783 rx
2784 }
2785
2786 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2787 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2788 }
2789
2790 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2791 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2792 }
2793
2794 fn handle_base_texts_updated(
2795 &mut self,
2796 buffer: text::BufferSnapshot,
2797 message: proto::UpdateDiffBases,
2798 cx: &mut Context<Self>,
2799 ) {
2800 use proto::update_diff_bases::Mode;
2801
2802 let Some(mode) = Mode::from_i32(message.mode) else {
2803 return;
2804 };
2805
2806 let diff_bases_change = match mode {
2807 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2808 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2809 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2810 Mode::IndexAndHead => DiffBasesChange::SetEach {
2811 index: message.staged_text,
2812 head: message.committed_text,
2813 },
2814 };
2815
2816 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2817 }
2818
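/// If a diff recalculation is currently in flight, returns a future that resolves once it finishes; otherwise returns `None`.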
2819 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2820 if *self.recalculating_tx.borrow() {
2821 let mut rx = self.recalculating_tx.subscribe();
2822 Some(async move {
2823 loop {
2824 let is_recalculating = rx.recv().await;
2825 if is_recalculating != Some(true) {
2826 break;
2827 }
2828 }
2829 })
2830 } else {
2831 None
2832 }
2833 }
2834
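/// Records new index and/or HEAD base texts, normalizing their line endings, and then kicks off a diff recalculation.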
2835 fn diff_bases_changed(
2836 &mut self,
2837 buffer: text::BufferSnapshot,
2838 diff_bases_change: Option<DiffBasesChange>,
2839 cx: &mut Context<Self>,
2840 ) {
2841 match diff_bases_change {
2842 Some(DiffBasesChange::SetIndex(index)) => {
2843 self.index_text = index.map(|mut index| {
2844 text::LineEnding::normalize(&mut index);
2845 Arc::new(index)
2846 });
2847 self.index_changed = true;
2848 }
2849 Some(DiffBasesChange::SetHead(head)) => {
2850 self.head_text = head.map(|mut head| {
2851 text::LineEnding::normalize(&mut head);
2852 Arc::new(head)
2853 });
2854 self.head_changed = true;
2855 }
2856 Some(DiffBasesChange::SetBoth(text)) => {
2857 let text = text.map(|mut text| {
2858 text::LineEnding::normalize(&mut text);
2859 Arc::new(text)
2860 });
2861 self.head_text = text.clone();
2862 self.index_text = text;
2863 self.head_changed = true;
2864 self.index_changed = true;
2865 }
2866 Some(DiffBasesChange::SetEach { index, head }) => {
2867 self.index_text = index.map(|mut index| {
2868 text::LineEnding::normalize(&mut index);
2869 Arc::new(index)
2870 });
2871 self.index_changed = true;
2872 self.head_text = head.map(|mut head| {
2873 text::LineEnding::normalize(&mut head);
2874 Arc::new(head)
2875 });
2876 self.head_changed = true;
2877 }
2878 None => {}
2879 }
2880
2881 self.recalculate_diffs(buffer, cx)
2882 }
2883
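/// Recomputes the unstaged and uncommitted diffs for `buffer` on a background task, reusing the unstaged diff for the uncommitted one when the index matches HEAD.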
2884 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2885 *self.recalculating_tx.borrow_mut() = true;
2886
2887 let language = self.language.clone();
2888 let language_registry = self.language_registry.clone();
2889 let unstaged_diff = self.unstaged_diff();
2890 let uncommitted_diff = self.uncommitted_diff();
2891 let head = self.head_text.clone();
2892 let index = self.index_text.clone();
2893 let index_changed = self.index_changed;
2894 let head_changed = self.head_changed;
2895 let language_changed = self.language_changed;
2896 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2897 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2898 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2899 (None, None) => true,
2900 _ => false,
2901 };
2902 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2903 log::debug!(
2904 "start recalculating diffs for buffer {}",
2905 buffer.remote_id()
2906 );
2907
2908 let mut new_unstaged_diff = None;
2909 if let Some(unstaged_diff) = &unstaged_diff {
2910 new_unstaged_diff = Some(
2911 BufferDiff::update_diff(
2912 unstaged_diff.clone(),
2913 buffer.clone(),
2914 index,
2915 index_changed,
2916 language_changed,
2917 language.clone(),
2918 language_registry.clone(),
2919 cx,
2920 )
2921 .await?,
2922 );
2923 }
2924
2925 let mut new_uncommitted_diff = None;
2926 if let Some(uncommitted_diff) = &uncommitted_diff {
2927 new_uncommitted_diff = if index_matches_head {
2928 new_unstaged_diff.clone()
2929 } else {
2930 Some(
2931 BufferDiff::update_diff(
2932 uncommitted_diff.clone(),
2933 buffer.clone(),
2934 head,
2935 head_changed,
2936 language_changed,
2937 language.clone(),
2938 language_registry.clone(),
2939 cx,
2940 )
2941 .await?,
2942 )
2943 }
2944 }
2945
2946 let cancel = this.update(cx, |this, _| {
2947 // This checks whether all pending stage/unstage operations
2948 // have quiesced (i.e. both the corresponding write and the
2949 // read of that write have completed). If not, then we cancel
2950 // this recalculation attempt to avoid invalidating pending
2951 // state too quickly; another recalculation will come along
2952 // later and clear the pending state once the state of the index has settled.
2953 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2954 *this.recalculating_tx.borrow_mut() = false;
2955 true
2956 } else {
2957 false
2958 }
2959 })?;
2960 if cancel {
2961 log::debug!(
2962 concat!(
2963 "aborting recalculating diffs for buffer {} ",
2964 "due to subsequent hunk operations",
2965 ),
2966 buffer.remote_id()
2967 );
2968 return Ok(());
2969 }
2970
2971 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2972 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2973 {
2974 unstaged_diff.update(cx, |diff, cx| {
2975 if language_changed {
2976 diff.language_changed(cx);
2977 }
2978 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2979 })?
2980 } else {
2981 None
2982 };
2983
2984 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2985 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2986 {
2987 uncommitted_diff.update(cx, |diff, cx| {
2988 if language_changed {
2989 diff.language_changed(cx);
2990 }
2991 diff.set_snapshot_with_secondary(
2992 new_uncommitted_diff,
2993 &buffer,
2994 unstaged_changed_range,
2995 true,
2996 cx,
2997 );
2998 })?;
2999 }
3000
3001 log::debug!(
3002 "finished recalculating diffs for buffer {}",
3003 buffer.remote_id()
3004 );
3005
3006 if let Some(this) = this.upgrade() {
3007 this.update(cx, |this, _| {
3008 this.index_changed = false;
3009 this.head_changed = false;
3010 this.language_changed = false;
3011 *this.recalculating_tx.borrow_mut() = false;
3012 })?;
3013 }
3014
3015 Ok(())
3016 }));
3017 }
3018}
3019
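/// Builds an askpass delegate that forwards credential prompts to the downstream client over RPC and relays the response back to the local git operation.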
3020fn make_remote_delegate(
3021 this: Entity<GitStore>,
3022 project_id: u64,
3023 repository_id: RepositoryId,
3024 askpass_id: u64,
3025 cx: &mut AsyncApp,
3026) -> AskPassDelegate {
3027 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3028 this.update(cx, |this, cx| {
3029 let Some((client, _)) = this.downstream_client() else {
3030 return;
3031 };
3032 let response = client.request(proto::AskPassRequest {
3033 project_id,
3034 repository_id: repository_id.to_proto(),
3035 askpass_id,
3036 prompt,
3037 });
3038 cx.spawn(async move |_, _| {
3039 let mut response = response.await?.response;
3040 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3041 .ok();
3042 response.zeroize();
3043 anyhow::Ok(())
3044 })
3045 .detach_and_log_err(cx);
3046 })
3047 .log_err();
3048 })
3049}
3050
3051impl RepositoryId {
3052 pub fn to_proto(self) -> u64 {
3053 self.0
3054 }
3055
3056 pub fn from_proto(id: u64) -> Self {
3057 RepositoryId(id)
3058 }
3059}
3060
3061impl RepositorySnapshot {
3062 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3063 Self {
3064 id,
3065 statuses_by_path: Default::default(),
3066 pending_ops_by_path: Default::default(),
3067 renamed_paths: HashMap::default(),
3068 work_directory_abs_path,
3069 branch: None,
3070 head_commit: None,
3071 scan_id: 0,
3072 merge: Default::default(),
3073 remote_origin_url: None,
3074 remote_upstream_url: None,
3075 stash_entries: Default::default(),
3076 path_style,
3077 }
3078 }
3079
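/// Builds the full `UpdateRepository` message describing this snapshot, used when first sharing the repository.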
3080 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3081 proto::UpdateRepository {
3082 branch_summary: self.branch.as_ref().map(branch_to_proto),
3083 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3084 updated_statuses: self
3085 .statuses_by_path
3086 .iter()
3087 .map(|entry| entry.to_proto())
3088 .collect(),
3089 removed_statuses: Default::default(),
3090 current_merge_conflicts: self
3091 .merge
3092 .conflicted_paths
3093 .iter()
3094 .map(|repo_path| repo_path.to_proto())
3095 .collect(),
3096 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3097 project_id,
3098 id: self.id.to_proto(),
3099 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3100 entry_ids: vec![self.id.to_proto()],
3101 scan_id: self.scan_id,
3102 is_last_update: true,
3103 stash_entries: self
3104 .stash_entries
3105 .entries
3106 .iter()
3107 .map(stash_to_proto)
3108 .collect(),
3109 renamed_paths: self
3110 .renamed_paths
3111 .iter()
3112 .map(|(new_path, old_path)| (new_path.to_proto(), old_path.to_proto()))
3113 .collect(),
3114 }
3115 }
3116
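/// Builds an incremental `UpdateRepository` message by comparing this snapshot's statuses against `old`, emitting only added, changed, and removed entries.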
3117 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3118 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3119 let mut removed_statuses: Vec<String> = Vec::new();
3120
3121 let mut new_statuses = self.statuses_by_path.iter().peekable();
3122 let mut old_statuses = old.statuses_by_path.iter().peekable();
3123
3124 let mut current_new_entry = new_statuses.next();
3125 let mut current_old_entry = old_statuses.next();
3126 loop {
3127 match (current_new_entry, current_old_entry) {
3128 (Some(new_entry), Some(old_entry)) => {
3129 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3130 Ordering::Less => {
3131 updated_statuses.push(new_entry.to_proto());
3132 current_new_entry = new_statuses.next();
3133 }
3134 Ordering::Equal => {
3135 if new_entry.status != old_entry.status {
3136 updated_statuses.push(new_entry.to_proto());
3137 }
3138 current_old_entry = old_statuses.next();
3139 current_new_entry = new_statuses.next();
3140 }
3141 Ordering::Greater => {
3142 removed_statuses.push(old_entry.repo_path.to_proto());
3143 current_old_entry = old_statuses.next();
3144 }
3145 }
3146 }
3147 (None, Some(old_entry)) => {
3148 removed_statuses.push(old_entry.repo_path.to_proto());
3149 current_old_entry = old_statuses.next();
3150 }
3151 (Some(new_entry), None) => {
3152 updated_statuses.push(new_entry.to_proto());
3153 current_new_entry = new_statuses.next();
3154 }
3155 (None, None) => break,
3156 }
3157 }
3158
3159 proto::UpdateRepository {
3160 branch_summary: self.branch.as_ref().map(branch_to_proto),
3161 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3162 updated_statuses,
3163 removed_statuses,
3164 current_merge_conflicts: self
3165 .merge
3166 .conflicted_paths
3167 .iter()
3168 .map(|path| path.to_proto())
3169 .collect(),
3170 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3171 project_id,
3172 id: self.id.to_proto(),
3173 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3174 entry_ids: vec![],
3175 scan_id: self.scan_id,
3176 is_last_update: true,
3177 stash_entries: self
3178 .stash_entries
3179 .entries
3180 .iter()
3181 .map(stash_to_proto)
3182 .collect(),
3183 renamed_paths: self
3184 .renamed_paths
3185 .iter()
3186 .map(|(new_path, old_path)| (new_path.to_proto(), old_path.to_proto()))
3187 .collect(),
3188 }
3189 }
3190
3191 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3192 self.statuses_by_path.iter().cloned()
3193 }
3194
3195 pub fn status_summary(&self) -> GitSummary {
3196 self.statuses_by_path.summary().item_summary
3197 }
3198
3199 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3200 self.statuses_by_path
3201 .get(&PathKey(path.as_ref().clone()), ())
3202 .cloned()
3203 }
3204
3205 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3206 self.pending_ops_by_path
3207 .get(&PathKey(path.as_ref().clone()), ())
3208 .cloned()
3209 }
3210
3211 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3212 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3213 }
3214
3215 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3216 self.path_style
3217 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3218 .unwrap()
3219 .into()
3220 }
3221
3222 #[inline]
3223 fn abs_path_to_repo_path_inner(
3224 work_directory_abs_path: &Path,
3225 abs_path: &Path,
3226 path_style: PathStyle,
3227 ) -> Option<RepoPath> {
3228 abs_path
3229 .strip_prefix(&work_directory_abs_path)
3230 .ok()
3231 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3232 }
3233
3234 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3235 self.merge.conflicted_paths.contains(repo_path)
3236 }
3237
3238 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3239 let had_conflict_on_last_merge_head_change =
3240 self.merge.conflicted_paths.contains(repo_path);
3241 let has_conflict_currently = self
3242 .status_for_path(repo_path)
3243 .is_some_and(|entry| entry.status.is_conflicted());
3244 had_conflict_on_last_merge_head_change || has_conflict_currently
3245 }
3246
3247 /// This is the name that will be displayed in the repository selector for this repository.
3248 pub fn display_name(&self) -> SharedString {
3249 self.work_directory_abs_path
3250 .file_name()
3251 .unwrap_or_default()
3252 .to_string_lossy()
3253 .to_string()
3254 .into()
3255 }
3256}
3257
3258pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3259 proto::StashEntry {
3260 oid: entry.oid.as_bytes().to_vec(),
3261 message: entry.message.clone(),
3262 branch: entry.branch.clone(),
3263 index: entry.index as u64,
3264 timestamp: entry.timestamp,
3265 }
3266}
3267
3268pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3269 Ok(StashEntry {
3270 oid: Oid::from_bytes(&entry.oid)?,
3271 message: entry.message.clone(),
3272 index: entry.index as usize,
3273 branch: entry.branch.clone(),
3274 timestamp: entry.timestamp,
3275 })
3276}
3277
3278impl MergeDetails {
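/// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, etc.) and the conflicted paths from the repository, returning the new details along with whether the merge heads changed since `prev_snapshot`.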
3279 async fn load(
3280 backend: &Arc<dyn GitRepository>,
3281 status: &SumTree<StatusEntry>,
3282 prev_snapshot: &RepositorySnapshot,
3283 ) -> Result<(MergeDetails, bool)> {
3284 log::debug!("load merge details");
3285 let message = backend.merge_message().await;
3286 let heads = backend
3287 .revparse_batch(vec![
3288 "MERGE_HEAD".into(),
3289 "CHERRY_PICK_HEAD".into(),
3290 "REBASE_HEAD".into(),
3291 "REVERT_HEAD".into(),
3292 "APPLY_HEAD".into(),
3293 ])
3294 .await
3295 .log_err()
3296 .unwrap_or_default()
3297 .into_iter()
3298 .map(|opt| opt.map(SharedString::from))
3299 .collect::<Vec<_>>();
3300 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3301 let conflicted_paths = if merge_heads_changed {
3302 let current_conflicted_paths = TreeSet::from_ordered_entries(
3303 status
3304 .iter()
3305 .filter(|entry| entry.status.is_conflicted())
3306 .map(|entry| entry.repo_path.clone()),
3307 );
3308
3309 // It can happen that we run a scan while a lengthy merge is in progress
3310 // that will eventually result in conflicts, but before those conflicts are
3311 // reported by `git status`. Since we currently only track the merge heads
3312 // state in order to detect conflicts, don't update it until some conflicts
3313 // actually appear.
3314 if heads.iter().any(Option::is_some)
3315 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3316 && current_conflicted_paths.is_empty()
3317 {
3318 log::debug!("not updating merge heads because no conflicts found");
3319 return Ok((
3320 MergeDetails {
3321 message: message.map(SharedString::from),
3322 ..prev_snapshot.merge.clone()
3323 },
3324 false,
3325 ));
3326 }
3327
3328 current_conflicted_paths
3329 } else {
3330 prev_snapshot.merge.conflicted_paths.clone()
3331 };
3332 let details = MergeDetails {
3333 conflicted_paths,
3334 message: message.map(SharedString::from),
3335 heads,
3336 };
3337 Ok((details, merge_heads_changed))
3338 }
3339}
3340
3341impl Repository {
3342 pub fn snapshot(&self) -> RepositorySnapshot {
3343 self.snapshot.clone()
3344 }
3345
3346 fn local(
3347 id: RepositoryId,
3348 work_directory_abs_path: Arc<Path>,
3349 dot_git_abs_path: Arc<Path>,
3350 repository_dir_abs_path: Arc<Path>,
3351 common_dir_abs_path: Arc<Path>,
3352 project_environment: WeakEntity<ProjectEnvironment>,
3353 fs: Arc<dyn Fs>,
3354 git_store: WeakEntity<GitStore>,
3355 cx: &mut Context<Self>,
3356 ) -> Self {
3357 let snapshot =
3358 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3359 Repository {
3360 this: cx.weak_entity(),
3361 git_store,
3362 snapshot,
3363 commit_message_buffer: None,
3364 askpass_delegates: Default::default(),
3365 paths_needing_status_update: Default::default(),
3366 latest_askpass_id: 0,
3367 job_sender: Repository::spawn_local_git_worker(
3368 work_directory_abs_path,
3369 dot_git_abs_path,
3370 repository_dir_abs_path,
3371 common_dir_abs_path,
3372 project_environment,
3373 fs,
3374 cx,
3375 ),
3376 job_id: 0,
3377 active_jobs: Default::default(),
3378 }
3379 }
3380
3381 fn remote(
3382 id: RepositoryId,
3383 work_directory_abs_path: Arc<Path>,
3384 path_style: PathStyle,
3385 project_id: ProjectId,
3386 client: AnyProtoClient,
3387 git_store: WeakEntity<GitStore>,
3388 cx: &mut Context<Self>,
3389 ) -> Self {
3390 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3391 Self {
3392 this: cx.weak_entity(),
3393 snapshot,
3394 commit_message_buffer: None,
3395 git_store,
3396 paths_needing_status_update: Default::default(),
3397 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3398 askpass_delegates: Default::default(),
3399 latest_askpass_id: 0,
3400 active_jobs: Default::default(),
3401 job_id: 0,
3402 }
3403 }
3404
3405 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3406 self.git_store.upgrade()
3407 }
3408
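/// Reloads the index and HEAD base texts for every open buffer that belongs to this repository, forwarding changes to downstream clients and to each buffer's diff state.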
3409 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3410 let this = cx.weak_entity();
3411 let git_store = self.git_store.clone();
3412 let _ = self.send_keyed_job(
3413 Some(GitJobKey::ReloadBufferDiffBases),
3414 None,
3415 |state, mut cx| async move {
3416 let RepositoryState::Local { backend, .. } = state else {
3417 log::error!("tried to recompute diffs for a non-local repository");
3418 return Ok(());
3419 };
3420
3421 let Some(this) = this.upgrade() else {
3422 return Ok(());
3423 };
3424
3425 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3426 git_store.update(cx, |git_store, cx| {
3427 git_store
3428 .diffs
3429 .iter()
3430 .filter_map(|(buffer_id, diff_state)| {
3431 let buffer_store = git_store.buffer_store.read(cx);
3432 let buffer = buffer_store.get(*buffer_id)?;
3433 let file = File::from_dyn(buffer.read(cx).file())?;
3434 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3435 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3436 log::debug!(
3437 "start reload diff bases for repo path {}",
3438 repo_path.as_unix_str()
3439 );
3440 diff_state.update(cx, |diff_state, _| {
3441 let has_unstaged_diff = diff_state
3442 .unstaged_diff
3443 .as_ref()
3444 .is_some_and(|diff| diff.is_upgradable());
3445 let has_uncommitted_diff = diff_state
3446 .uncommitted_diff
3447 .as_ref()
3448 .is_some_and(|set| set.is_upgradable());
3449
3450 Some((
3451 buffer,
3452 repo_path,
3453 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3454 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3455 ))
3456 })
3457 })
3458 .collect::<Vec<_>>()
3459 })
3460 })??;
3461
3462 let buffer_diff_base_changes = cx
3463 .background_spawn(async move {
3464 let mut changes = Vec::new();
3465 for (buffer, repo_path, current_index_text, current_head_text) in
3466 &repo_diff_state_updates
3467 {
3468 let index_text = if current_index_text.is_some() {
3469 backend.load_index_text(repo_path.clone()).await
3470 } else {
3471 None
3472 };
3473 let head_text = if current_head_text.is_some() {
3474 backend.load_committed_text(repo_path.clone()).await
3475 } else {
3476 None
3477 };
3478
3479 let change =
3480 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3481 (Some(current_index), Some(current_head)) => {
3482 let index_changed =
3483 index_text.as_ref() != current_index.as_deref();
3484 let head_changed =
3485 head_text.as_ref() != current_head.as_deref();
3486 if index_changed && head_changed {
3487 if index_text == head_text {
3488 Some(DiffBasesChange::SetBoth(head_text))
3489 } else {
3490 Some(DiffBasesChange::SetEach {
3491 index: index_text,
3492 head: head_text,
3493 })
3494 }
3495 } else if index_changed {
3496 Some(DiffBasesChange::SetIndex(index_text))
3497 } else if head_changed {
3498 Some(DiffBasesChange::SetHead(head_text))
3499 } else {
3500 None
3501 }
3502 }
3503 (Some(current_index), None) => {
3504 let index_changed =
3505 index_text.as_ref() != current_index.as_deref();
3506 index_changed
3507 .then_some(DiffBasesChange::SetIndex(index_text))
3508 }
3509 (None, Some(current_head)) => {
3510 let head_changed =
3511 head_text.as_ref() != current_head.as_deref();
3512 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3513 }
3514 (None, None) => None,
3515 };
3516
3517 changes.push((buffer.clone(), change))
3518 }
3519 changes
3520 })
3521 .await;
3522
3523 git_store.update(&mut cx, |git_store, cx| {
3524 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3525 let buffer_snapshot = buffer.read(cx).text_snapshot();
3526 let buffer_id = buffer_snapshot.remote_id();
3527 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3528 continue;
3529 };
3530
3531 let downstream_client = git_store.downstream_client();
3532 diff_state.update(cx, |diff_state, cx| {
3533 use proto::update_diff_bases::Mode;
3534
3535 if let Some((diff_bases_change, (client, project_id))) =
3536 diff_bases_change.clone().zip(downstream_client)
3537 {
3538 let (staged_text, committed_text, mode) = match diff_bases_change {
3539 DiffBasesChange::SetIndex(index) => {
3540 (index, None, Mode::IndexOnly)
3541 }
3542 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3543 DiffBasesChange::SetEach { index, head } => {
3544 (index, head, Mode::IndexAndHead)
3545 }
3546 DiffBasesChange::SetBoth(text) => {
3547 (None, text, Mode::IndexMatchesHead)
3548 }
3549 };
3550 client
3551 .send(proto::UpdateDiffBases {
3552 project_id: project_id.to_proto(),
3553 buffer_id: buffer_id.to_proto(),
3554 staged_text,
3555 committed_text,
3556 mode: mode as i32,
3557 })
3558 .log_err();
3559 }
3560
3561 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3562 });
3563 }
3564 })
3565 },
3566 );
3567 }
3568
3569 pub fn send_job<F, Fut, R>(
3570 &mut self,
3571 status: Option<SharedString>,
3572 job: F,
3573 ) -> oneshot::Receiver<R>
3574 where
3575 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3576 Fut: Future<Output = R> + 'static,
3577 R: Send + 'static,
3578 {
3579 self.send_keyed_job(None, status, job)
3580 }
3581
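/// Enqueues a job on the repository's worker queue, optionally tagged with a `GitJobKey` and a status message shown while the job runs; returns a receiver for the job's result.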
3582 fn send_keyed_job<F, Fut, R>(
3583 &mut self,
3584 key: Option<GitJobKey>,
3585 status: Option<SharedString>,
3586 job: F,
3587 ) -> oneshot::Receiver<R>
3588 where
3589 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3590 Fut: Future<Output = R> + 'static,
3591 R: Send + 'static,
3592 {
3593 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3594 let job_id = post_inc(&mut self.job_id);
3595 let this = self.this.clone();
3596 self.job_sender
3597 .unbounded_send(GitJob {
3598 key,
3599 job: Box::new(move |state, cx: &mut AsyncApp| {
3600 let job = job(state, cx.clone());
3601 cx.spawn(async move |cx| {
3602 if let Some(s) = status.clone() {
3603 this.update(cx, |this, cx| {
3604 this.active_jobs.insert(
3605 job_id,
3606 JobInfo {
3607 start: Instant::now(),
3608 message: s.clone(),
3609 },
3610 );
3611
3612 cx.notify();
3613 })
3614 .ok();
3615 }
3616 let result = job.await;
3617
3618 this.update(cx, |this, cx| {
3619 this.active_jobs.remove(&job_id);
3620 cx.notify();
3621 })
3622 .ok();
3623
3624 result_tx.send(result).ok();
3625 })
3626 }),
3627 })
3628 .ok();
3629 result_rx
3630 }
3631
3632 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3633 let Some(git_store) = self.git_store.upgrade() else {
3634 return;
3635 };
3636 let entity = cx.entity();
3637 git_store.update(cx, |git_store, cx| {
3638 let Some((&id, _)) = git_store
3639 .repositories
3640 .iter()
3641 .find(|(_, handle)| *handle == &entity)
3642 else {
3643 return;
3644 };
3645 git_store.active_repo_id = Some(id);
3646 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3647 });
3648 }
3649
3650 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3651 self.snapshot.status()
3652 }
3653
3654 pub fn cached_stash(&self) -> GitStash {
3655 self.snapshot.stash_entries.clone()
3656 }
3657
3658 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3659 let git_store = self.git_store.upgrade()?;
3660 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3661 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3662 let abs_path = SanitizedPath::new(&abs_path);
3663 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3664 Some(ProjectPath {
3665 worktree_id: worktree.read(cx).id(),
3666 path: relative_path,
3667 })
3668 }
3669
3670 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3671 let git_store = self.git_store.upgrade()?;
3672 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3673 let abs_path = worktree_store.absolutize(path, cx)?;
3674 self.snapshot.abs_path_to_repo_path(&abs_path)
3675 }
3676
3677 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3678 other
3679 .read(cx)
3680 .snapshot
3681 .work_directory_abs_path
3682 .starts_with(&self.snapshot.work_directory_abs_path)
3683 }
3684
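/// Returns the commit-message buffer for this repository, creating it locally or requesting it from the remote host the first time it is opened.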
3685 pub fn open_commit_buffer(
3686 &mut self,
3687 languages: Option<Arc<LanguageRegistry>>,
3688 buffer_store: Entity<BufferStore>,
3689 cx: &mut Context<Self>,
3690 ) -> Task<Result<Entity<Buffer>>> {
3691 let id = self.id;
3692 if let Some(buffer) = self.commit_message_buffer.clone() {
3693 return Task::ready(Ok(buffer));
3694 }
3695 let this = cx.weak_entity();
3696
3697 let rx = self.send_job(None, move |state, mut cx| async move {
3698 let Some(this) = this.upgrade() else {
3699 bail!("git store was dropped");
3700 };
3701 match state {
3702 RepositoryState::Local { .. } => {
3703 this.update(&mut cx, |_, cx| {
3704 Self::open_local_commit_buffer(languages, buffer_store, cx)
3705 })?
3706 .await
3707 }
3708 RepositoryState::Remote { project_id, client } => {
3709 let request = client.request(proto::OpenCommitMessageBuffer {
3710 project_id: project_id.0,
3711 repository_id: id.to_proto(),
3712 });
3713 let response = request.await.context("requesting to open commit buffer")?;
3714 let buffer_id = BufferId::new(response.buffer_id)?;
3715 let buffer = buffer_store
3716 .update(&mut cx, |buffer_store, cx| {
3717 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3718 })?
3719 .await?;
3720 if let Some(language_registry) = languages {
3721 let git_commit_language =
3722 language_registry.language_for_name("Git Commit").await?;
3723 buffer.update(&mut cx, |buffer, cx| {
3724 buffer.set_language(Some(git_commit_language), cx);
3725 })?;
3726 }
3727 this.update(&mut cx, |this, _| {
3728 this.commit_message_buffer = Some(buffer.clone());
3729 })?;
3730 Ok(buffer)
3731 }
3732 }
3733 });
3734
3735 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3736 }
3737
3738 fn open_local_commit_buffer(
3739 language_registry: Option<Arc<LanguageRegistry>>,
3740 buffer_store: Entity<BufferStore>,
3741 cx: &mut Context<Self>,
3742 ) -> Task<Result<Entity<Buffer>>> {
3743 cx.spawn(async move |repository, cx| {
3744 let buffer = buffer_store
3745 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3746 .await?;
3747
3748 if let Some(language_registry) = language_registry {
3749 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3750 buffer.update(cx, |buffer, cx| {
3751 buffer.set_language(Some(git_commit_language), cx);
3752 })?;
3753 }
3754
3755 repository.update(cx, |repository, _| {
3756 repository.commit_message_buffer = Some(buffer.clone());
3757 })?;
3758 Ok(buffer)
3759 })
3760 }
3761
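/// Restores the given paths to their contents at `commit`, tracking the operation
/// as a pending revert for those paths.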
3762 pub fn checkout_files(
3763 &mut self,
3764 commit: &str,
3765 paths: Vec<RepoPath>,
3766 cx: &mut Context<Self>,
3767 ) -> Task<Result<()>> {
3768 let commit = commit.to_string();
3769 let id = self.id;
3770
3771 self.spawn_job_with_tracking(
3772 paths.clone(),
3773 pending_op::GitStatus::Reverted,
3774 cx,
3775 async move |this, cx| {
3776 this.update(cx, |this, _cx| {
3777 this.send_job(
3778 Some(format!("git checkout {}", commit).into()),
3779 move |git_repo, _| async move {
3780 match git_repo {
3781 RepositoryState::Local {
3782 backend,
3783 environment,
3784 ..
3785 } => {
3786 backend
3787 .checkout_files(commit, paths, environment.clone())
3788 .await
3789 }
3790 RepositoryState::Remote { project_id, client } => {
3791 client
3792 .request(proto::GitCheckoutFiles {
3793 project_id: project_id.0,
3794 repository_id: id.to_proto(),
3795 commit,
3796 paths: paths
3797 .into_iter()
3798 .map(|p| p.to_proto())
3799 .collect(),
3800 })
3801 .await?;
3802
3803 Ok(())
3804 }
3805 }
3806 },
3807 )
3808 })?
3809 .await?
3810 },
3811 )
3812 }
3813
3814 pub fn reset(
3815 &mut self,
3816 commit: String,
3817 reset_mode: ResetMode,
3818 _cx: &mut App,
3819 ) -> oneshot::Receiver<Result<()>> {
3820 let id = self.id;
3821
3822 self.send_job(None, move |git_repo, _| async move {
3823 match git_repo {
3824 RepositoryState::Local {
3825 backend,
3826 environment,
3827 ..
3828 } => backend.reset(commit, reset_mode, environment).await,
3829 RepositoryState::Remote { project_id, client } => {
3830 client
3831 .request(proto::GitReset {
3832 project_id: project_id.0,
3833 repository_id: id.to_proto(),
3834 commit,
3835 mode: match reset_mode {
3836 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3837 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3838 },
3839 })
3840 .await?;
3841
3842 Ok(())
3843 }
3844 }
3845 })
3846 }
3847
3848 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3849 let id = self.id;
3850 self.send_job(None, move |git_repo, _cx| async move {
3851 match git_repo {
3852 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3853 RepositoryState::Remote { project_id, client } => {
3854 let resp = client
3855 .request(proto::GitShow {
3856 project_id: project_id.0,
3857 repository_id: id.to_proto(),
3858 commit,
3859 })
3860 .await?;
3861
3862 Ok(CommitDetails {
3863 sha: resp.sha.into(),
3864 message: resp.message.into(),
3865 commit_timestamp: resp.commit_timestamp,
3866 author_email: resp.author_email.into(),
3867 author_name: resp.author_name.into(),
3868 })
3869 }
3870 }
3871 })
3872 }
3873
3874 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3875 let id = self.id;
3876 self.send_job(None, move |git_repo, cx| async move {
3877 match git_repo {
3878 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3879 RepositoryState::Remote {
3880 client, project_id, ..
3881 } => {
3882 let response = client
3883 .request(proto::LoadCommitDiff {
3884 project_id: project_id.0,
3885 repository_id: id.to_proto(),
3886 commit,
3887 })
3888 .await?;
3889 Ok(CommitDiff {
3890 files: response
3891 .files
3892 .into_iter()
3893 .map(|file| {
3894 Ok(CommitFile {
3895 path: RepoPath::from_proto(&file.path)?,
3896 old_text: file.old_text,
3897 new_text: file.new_text,
3898 })
3899 })
3900 .collect::<Result<Vec<_>>>()?,
3901 })
3902 }
3903 }
3904 })
3905 }
3906
3907 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3908 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3909 }
3910
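/// Collects save tasks for any open buffers at the given repository paths that
/// still exist on disk and have unsaved edits.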
3911 fn save_buffers<'a>(
3912 &self,
3913 entries: impl IntoIterator<Item = &'a RepoPath>,
3914 cx: &mut Context<Self>,
3915 ) -> Vec<Task<anyhow::Result<()>>> {
3916 let mut save_futures = Vec::new();
3917 if let Some(buffer_store) = self.buffer_store(cx) {
3918 buffer_store.update(cx, |buffer_store, cx| {
3919 for path in entries {
3920 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3921 continue;
3922 };
3923 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3924 && buffer
3925 .read(cx)
3926 .file()
3927 .is_some_and(|file| file.disk_state().exists())
3928 && buffer.read(cx).has_unsaved_edits()
3929 {
3930 save_futures.push(buffer_store.save_buffer(buffer, cx));
3931 }
3932 }
3933 })
3934 }
3935 save_futures
3936 }
3937
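/// Stages the given paths (`git add`), first saving any dirty open buffers for them,
/// and records a pending staging operation for each path.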
3938 pub fn stage_entries(
3939 &mut self,
3940 entries: Vec<RepoPath>,
3941 cx: &mut Context<Self>,
3942 ) -> Task<anyhow::Result<()>> {
3943 if entries.is_empty() {
3944 return Task::ready(Ok(()));
3945 }
3946 let id = self.id;
3947 let save_tasks = self.save_buffers(&entries, cx);
3948 let paths = entries
3949 .iter()
3950 .map(|p| p.as_unix_str())
3951 .collect::<Vec<_>>()
3952 .join(" ");
3953 let status = format!("git add {paths}");
3954 let job_key = GitJobKey::WriteIndex(entries.clone());
3955
3956 self.spawn_job_with_tracking(
3957 entries.clone(),
3958 pending_op::GitStatus::Staged,
3959 cx,
3960 async move |this, cx| {
3961 for save_task in save_tasks {
3962 save_task.await?;
3963 }
3964
3965 this.update(cx, |this, _| {
3966 this.send_keyed_job(
3967 Some(job_key),
3968 Some(status.into()),
3969 move |git_repo, _cx| async move {
3970 match git_repo {
3971 RepositoryState::Local {
3972 backend,
3973 environment,
3974 ..
3975 } => backend.stage_paths(entries, environment.clone()).await,
3976 RepositoryState::Remote { project_id, client } => {
3977 client
3978 .request(proto::Stage {
3979 project_id: project_id.0,
3980 repository_id: id.to_proto(),
3981 paths: entries
3982 .into_iter()
3983 .map(|repo_path| repo_path.to_proto())
3984 .collect(),
3985 })
3986 .await
3987 .context("sending stage request")?;
3988
3989 Ok(())
3990 }
3991 }
3992 },
3993 )
3994 })?
3995 .await?
3996 },
3997 )
3998 }
3999
4000 pub fn unstage_entries(
4001 &mut self,
4002 entries: Vec<RepoPath>,
4003 cx: &mut Context<Self>,
4004 ) -> Task<anyhow::Result<()>> {
4005 if entries.is_empty() {
4006 return Task::ready(Ok(()));
4007 }
4008 let id = self.id;
4009 let save_tasks = self.save_buffers(&entries, cx);
4010 let paths = entries
4011 .iter()
4012 .map(|p| p.as_unix_str())
4013 .collect::<Vec<_>>()
4014 .join(" ");
4015 let status = format!("git reset {paths}");
4016 let job_key = GitJobKey::WriteIndex(entries.clone());
4017
4018 self.spawn_job_with_tracking(
4019 entries.clone(),
4020 pending_op::GitStatus::Unstaged,
4021 cx,
4022 async move |this, cx| {
4023 for save_task in save_tasks {
4024 save_task.await?;
4025 }
4026
4027 this.update(cx, |this, _| {
4028 this.send_keyed_job(
4029 Some(job_key),
4030 Some(status.into()),
4031 move |git_repo, _cx| async move {
4032 match git_repo {
4033 RepositoryState::Local {
4034 backend,
4035 environment,
4036 ..
4037 } => backend.unstage_paths(entries, environment).await,
4038 RepositoryState::Remote { project_id, client } => {
4039 client
4040 .request(proto::Unstage {
4041 project_id: project_id.0,
4042 repository_id: id.to_proto(),
4043 paths: entries
4044 .into_iter()
4045 .map(|repo_path| repo_path.to_proto())
4046 .collect(),
4047 })
4048 .await
4049 .context("sending unstage request")?;
4050
4051 Ok(())
4052 }
4053 }
4054 },
4055 )
4056 })?
4057 .await?
4058 },
4059 )
4060 }
4061
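/// Stages every entry in the cached status that is not already staged or being staged.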
4062 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4063 let to_stage = self
4064 .cached_status()
4065 .filter_map(|entry| {
4066 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4067 if ops.staging() || ops.staged() {
4068 None
4069 } else {
4070 Some(entry.repo_path)
4071 }
4072 } else if entry.status.staging().is_fully_staged() {
4073 None
4074 } else {
4075 Some(entry.repo_path)
4076 }
4077 })
4078 .collect();
4079 self.stage_entries(to_stage, cx)
4080 }
4081
4082 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4083 let to_unstage = self
4084 .cached_status()
4085 .filter_map(|entry| {
4086 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4087 if !ops.staging() && !ops.staged() {
4088 None
4089 } else {
4090 Some(entry.repo_path)
4091 }
4092 } else if entry.status.staging().is_fully_unstaged() {
4093 None
4094 } else {
4095 Some(entry.repo_path)
4096 }
4097 })
4098 .collect();
4099 self.unstage_entries(to_unstage, cx)
4100 }
4101
4102 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4103 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4104
4105 self.stash_entries(to_stash, cx)
4106 }
4107
4108 pub fn stash_entries(
4109 &mut self,
4110 entries: Vec<RepoPath>,
4111 cx: &mut Context<Self>,
4112 ) -> Task<anyhow::Result<()>> {
4113 let id = self.id;
4114
4115 cx.spawn(async move |this, cx| {
4116 this.update(cx, |this, _| {
4117 this.send_job(None, move |git_repo, _cx| async move {
4118 match git_repo {
4119 RepositoryState::Local {
4120 backend,
4121 environment,
4122 ..
4123 } => backend.stash_paths(entries, environment).await,
4124 RepositoryState::Remote { project_id, client } => {
4125 client
4126 .request(proto::Stash {
4127 project_id: project_id.0,
4128 repository_id: id.to_proto(),
4129 paths: entries
4130 .into_iter()
4131 .map(|repo_path| repo_path.to_proto())
4132 .collect(),
4133 })
4134 .await
4135 .context("sending stash request")?;
4136 Ok(())
4137 }
4138 }
4139 })
4140 })?
4141 .await??;
4142 Ok(())
4143 })
4144 }
4145
4146 pub fn stash_pop(
4147 &mut self,
4148 index: Option<usize>,
4149 cx: &mut Context<Self>,
4150 ) -> Task<anyhow::Result<()>> {
4151 let id = self.id;
4152 cx.spawn(async move |this, cx| {
4153 this.update(cx, |this, _| {
4154 this.send_job(None, move |git_repo, _cx| async move {
4155 match git_repo {
4156 RepositoryState::Local {
4157 backend,
4158 environment,
4159 ..
4160 } => backend.stash_pop(index, environment).await,
4161 RepositoryState::Remote { project_id, client } => {
4162 client
4163 .request(proto::StashPop {
4164 project_id: project_id.0,
4165 repository_id: id.to_proto(),
4166 stash_index: index.map(|i| i as u64),
4167 })
4168 .await
4169 .context("sending stash pop request")?;
4170 Ok(())
4171 }
4172 }
4173 })
4174 })?
4175 .await??;
4176 Ok(())
4177 })
4178 }
4179
4180 pub fn stash_apply(
4181 &mut self,
4182 index: Option<usize>,
4183 cx: &mut Context<Self>,
4184 ) -> Task<anyhow::Result<()>> {
4185 let id = self.id;
4186 cx.spawn(async move |this, cx| {
4187 this.update(cx, |this, _| {
4188 this.send_job(None, move |git_repo, _cx| async move {
4189 match git_repo {
4190 RepositoryState::Local {
4191 backend,
4192 environment,
4193 ..
4194 } => backend.stash_apply(index, environment).await,
4195 RepositoryState::Remote { project_id, client } => {
4196 client
4197 .request(proto::StashApply {
4198 project_id: project_id.0,
4199 repository_id: id.to_proto(),
4200 stash_index: index.map(|i| i as u64),
4201 })
4202 .await
4203 .context("sending stash apply request")?;
4204 Ok(())
4205 }
4206 }
4207 })
4208 })?
4209 .await??;
4210 Ok(())
4211 })
4212 }
4213
4214 pub fn stash_drop(
4215 &mut self,
4216 index: Option<usize>,
4217 cx: &mut Context<Self>,
4218 ) -> oneshot::Receiver<anyhow::Result<()>> {
4219 let id = self.id;
4220 let updates_tx = self
4221 .git_store()
4222 .and_then(|git_store| match &git_store.read(cx).state {
4223 GitStoreState::Local { downstream, .. } => downstream
4224 .as_ref()
4225 .map(|downstream| downstream.updates_tx.clone()),
4226 _ => None,
4227 });
4228 let this = cx.weak_entity();
4229 self.send_job(None, move |git_repo, mut cx| async move {
4230 match git_repo {
4231 RepositoryState::Local {
4232 backend,
4233 environment,
4234 ..
4235 } => {
4236 // TODO would be nice to not have to do this manually
4237 let result = backend.stash_drop(index, environment).await;
4238 if result.is_ok()
4239 && let Ok(stash_entries) = backend.stash_entries().await
4240 {
4241 let snapshot = this.update(&mut cx, |this, cx| {
4242 this.snapshot.stash_entries = stash_entries;
4243 cx.emit(RepositoryEvent::StashEntriesChanged);
4244 this.snapshot.clone()
4245 })?;
4246 if let Some(updates_tx) = updates_tx {
4247 updates_tx
4248 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4249 .ok();
4250 }
4251 }
4252
4253 result
4254 }
4255 RepositoryState::Remote { project_id, client } => {
4256 client
4257 .request(proto::StashDrop {
4258 project_id: project_id.0,
4259 repository_id: id.to_proto(),
4260 stash_index: index.map(|i| i as u64),
4261 })
4262 .await
4263 .context("sending stash drop request")?;
4264 Ok(())
4265 }
4266 }
4267 })
4268 }
4269
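/// Creates a commit with the given message and options, either via the local backend
/// or by forwarding a `Commit` request to the remote project host.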
4270 pub fn commit(
4271 &mut self,
4272 message: SharedString,
4273 name_and_email: Option<(SharedString, SharedString)>,
4274 options: CommitOptions,
4275 askpass: AskPassDelegate,
4276 _cx: &mut App,
4277 ) -> oneshot::Receiver<Result<()>> {
4278 let id = self.id;
4279 let askpass_delegates = self.askpass_delegates.clone();
4280 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4281
4282 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4283 match git_repo {
4284 RepositoryState::Local {
4285 backend,
4286 environment,
4287 ..
4288 } => {
4289 backend
4290 .commit(message, name_and_email, options, askpass, environment)
4291 .await
4292 }
4293 RepositoryState::Remote { project_id, client } => {
4294 askpass_delegates.lock().insert(askpass_id, askpass);
4295 let _defer = util::defer(|| {
4296 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4297 debug_assert!(askpass_delegate.is_some());
4298 });
4299 let (name, email) = name_and_email.unzip();
4300 client
4301 .request(proto::Commit {
4302 project_id: project_id.0,
4303 repository_id: id.to_proto(),
4304 message: String::from(message),
4305 name: name.map(String::from),
4306 email: email.map(String::from),
4307 options: Some(proto::commit::CommitOptions {
4308 amend: options.amend,
4309 signoff: options.signoff,
4310 }),
4311 askpass_id,
4312 })
4313 .await
4314 .context("sending commit request")?;
4315
4316 Ok(())
4317 }
4318 }
4319 })
4320 }
4321
4322 pub fn fetch(
4323 &mut self,
4324 fetch_options: FetchOptions,
4325 askpass: AskPassDelegate,
4326 _cx: &mut App,
4327 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4328 let askpass_delegates = self.askpass_delegates.clone();
4329 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4330 let id = self.id;
4331
4332 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4333 match git_repo {
4334 RepositoryState::Local {
4335 backend,
4336 environment,
4337 ..
4338 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4339 RepositoryState::Remote { project_id, client } => {
4340 askpass_delegates.lock().insert(askpass_id, askpass);
4341 let _defer = util::defer(|| {
4342 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4343 debug_assert!(askpass_delegate.is_some());
4344 });
4345
4346 let response = client
4347 .request(proto::Fetch {
4348 project_id: project_id.0,
4349 repository_id: id.to_proto(),
4350 askpass_id,
4351 remote: fetch_options.to_proto(),
4352 })
4353 .await
4354 .context("sending fetch request")?;
4355
4356 Ok(RemoteCommandOutput {
4357 stdout: response.stdout,
4358 stderr: response.stderr,
4359 })
4360 }
4361 }
4362 })
4363 }
4364
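/// Pushes `branch` to `remote`; on success, refreshes the cached head branch and
/// forwards the updated snapshot to any downstream clients.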
4365 pub fn push(
4366 &mut self,
4367 branch: SharedString,
4368 remote: SharedString,
4369 options: Option<PushOptions>,
4370 askpass: AskPassDelegate,
4371 cx: &mut Context<Self>,
4372 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4373 let askpass_delegates = self.askpass_delegates.clone();
4374 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4375 let id = self.id;
4376
4377 let args = options
4378 .map(|option| match option {
4379 PushOptions::SetUpstream => " --set-upstream",
4380 PushOptions::Force => " --force-with-lease",
4381 })
4382 .unwrap_or("");
4383
4384 let updates_tx = self
4385 .git_store()
4386 .and_then(|git_store| match &git_store.read(cx).state {
4387 GitStoreState::Local { downstream, .. } => downstream
4388 .as_ref()
4389 .map(|downstream| downstream.updates_tx.clone()),
4390 _ => None,
4391 });
4392
4393 let this = cx.weak_entity();
4394 self.send_job(
4395 Some(format!("git push{} {} {}", args, remote, branch).into()),
4396 move |git_repo, mut cx| async move {
4397 match git_repo {
4398 RepositoryState::Local {
4399 backend,
4400 environment,
4401 ..
4402 } => {
4403 let result = backend
4404 .push(
4405 branch.to_string(),
4406 remote.to_string(),
4407 options,
4408 askpass,
4409 environment.clone(),
4410 cx.clone(),
4411 )
4412 .await;
4413 // TODO would be nice to not have to do this manually
4414 if result.is_ok() {
4415 let branches = backend.branches().await?;
4416 let branch = branches.into_iter().find(|branch| branch.is_head);
4417 log::info!("head branch after push is {branch:?}");
4418 let snapshot = this.update(&mut cx, |this, cx| {
4419 this.snapshot.branch = branch;
4420 cx.emit(RepositoryEvent::BranchChanged);
4421 this.snapshot.clone()
4422 })?;
4423 if let Some(updates_tx) = updates_tx {
4424 updates_tx
4425 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4426 .ok();
4427 }
4428 }
4429 result
4430 }
4431 RepositoryState::Remote { project_id, client } => {
4432 askpass_delegates.lock().insert(askpass_id, askpass);
4433 let _defer = util::defer(|| {
4434 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4435 debug_assert!(askpass_delegate.is_some());
4436 });
4437 let response = client
4438 .request(proto::Push {
4439 project_id: project_id.0,
4440 repository_id: id.to_proto(),
4441 askpass_id,
4442 branch_name: branch.to_string(),
4443 remote_name: remote.to_string(),
4444 options: options.map(|options| match options {
4445 PushOptions::Force => proto::push::PushOptions::Force,
4446 PushOptions::SetUpstream => {
4447 proto::push::PushOptions::SetUpstream
4448 }
4449 }
4450 as i32),
4451 })
4452 .await
4453 .context("sending push request")?;
4454
4455 Ok(RemoteCommandOutput {
4456 stdout: response.stdout,
4457 stderr: response.stderr,
4458 })
4459 }
4460 }
4461 },
4462 )
4463 }
4464
4465 pub fn pull(
4466 &mut self,
4467 branch: Option<SharedString>,
4468 remote: SharedString,
4469 rebase: bool,
4470 askpass: AskPassDelegate,
4471 _cx: &mut App,
4472 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4473 let askpass_delegates = self.askpass_delegates.clone();
4474 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4475 let id = self.id;
4476
4477 let mut status = "git pull".to_string();
4478 if rebase {
4479 status.push_str(" --rebase");
4480 }
4481 status.push_str(&format!(" {}", remote));
4482 if let Some(b) = &branch {
4483 status.push_str(&format!(" {}", b));
4484 }
4485
4486 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4487 match git_repo {
4488 RepositoryState::Local {
4489 backend,
4490 environment,
4491 ..
4492 } => {
4493 backend
4494 .pull(
4495 branch.as_ref().map(|b| b.to_string()),
4496 remote.to_string(),
4497 rebase,
4498 askpass,
4499 environment.clone(),
4500 cx,
4501 )
4502 .await
4503 }
4504 RepositoryState::Remote { project_id, client } => {
4505 askpass_delegates.lock().insert(askpass_id, askpass);
4506 let _defer = util::defer(|| {
4507 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4508 debug_assert!(askpass_delegate.is_some());
4509 });
4510 let response = client
4511 .request(proto::Pull {
4512 project_id: project_id.0,
4513 repository_id: id.to_proto(),
4514 askpass_id,
4515 rebase,
4516 branch_name: branch.as_ref().map(|b| b.to_string()),
4517 remote_name: remote.to_string(),
4518 })
4519 .await
4520 .context("sending pull request")?;
4521
4522 Ok(RemoteCommandOutput {
4523 stdout: response.stdout,
4524 stderr: response.stderr,
4525 })
4526 }
4527 }
4528 })
4529 }
4530
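/// Queues a keyed job that writes `content` into the index entry for `path` and, when
/// provided, records `hunk_staging_operation_count` on the buffer's diff state after the write.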
4531 fn spawn_set_index_text_job(
4532 &mut self,
4533 path: RepoPath,
4534 content: Option<String>,
4535 hunk_staging_operation_count: Option<usize>,
4536 cx: &mut Context<Self>,
4537 ) -> oneshot::Receiver<anyhow::Result<()>> {
4538 let id = self.id;
4539 let this = cx.weak_entity();
4540 let git_store = self.git_store.clone();
4541 self.send_keyed_job(
4542 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4543 None,
4544 move |git_repo, mut cx| async move {
4545 log::debug!(
4546 "start updating index text for buffer {}",
4547 path.as_unix_str()
4548 );
4549 match git_repo {
4550 RepositoryState::Local {
4551 backend,
4552 environment,
4553 ..
4554 } => {
4555 backend
4556 .set_index_text(path.clone(), content, environment.clone())
4557 .await?;
4558 }
4559 RepositoryState::Remote { project_id, client } => {
4560 client
4561 .request(proto::SetIndexText {
4562 project_id: project_id.0,
4563 repository_id: id.to_proto(),
4564 path: path.to_proto(),
4565 text: content,
4566 })
4567 .await?;
4568 }
4569 }
4570 log::debug!(
4571 "finish updating index text for buffer {}",
4572 path.as_unix_str()
4573 );
4574
4575 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4576 let project_path = this
4577 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4578 .ok()
4579 .flatten();
4580 git_store.update(&mut cx, |git_store, cx| {
4581 let buffer_id = git_store
4582 .buffer_store
4583 .read(cx)
4584 .get_by_path(&project_path?)?
4585 .read(cx)
4586 .remote_id();
4587 let diff_state = git_store.diffs.get(&buffer_id)?;
4588 diff_state.update(cx, |diff_state, _| {
4589 diff_state.hunk_staging_operation_count_as_of_write =
4590 hunk_staging_operation_count;
4591 });
4592 Some(())
4593 })?;
4594 }
4595 Ok(())
4596 },
4597 )
4598 }
4599
4600 pub fn get_remotes(
4601 &mut self,
4602 branch_name: Option<String>,
4603 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4604 let id = self.id;
4605 self.send_job(None, move |repo, _cx| async move {
4606 match repo {
4607 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4608 RepositoryState::Remote { project_id, client } => {
4609 let response = client
4610 .request(proto::GetRemotes {
4611 project_id: project_id.0,
4612 repository_id: id.to_proto(),
4613 branch_name,
4614 })
4615 .await?;
4616
4617 let remotes = response
4618 .remotes
4619 .into_iter()
4620 .map(|remote| git::repository::Remote {
4621 name: remote.name.into(),
4622 })
4623 .collect();
4624
4625 Ok(remotes)
4626 }
4627 }
4628 })
4629 }
4630
4631 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4632 let id = self.id;
4633 self.send_job(None, move |repo, _| async move {
4634 match repo {
4635 RepositoryState::Local { backend, .. } => backend.branches().await,
4636 RepositoryState::Remote { project_id, client } => {
4637 let response = client
4638 .request(proto::GitGetBranches {
4639 project_id: project_id.0,
4640 repository_id: id.to_proto(),
4641 })
4642 .await?;
4643
4644 let branches = response
4645 .branches
4646 .into_iter()
4647 .map(|branch| proto_to_branch(&branch))
4648 .collect();
4649
4650 Ok(branches)
4651 }
4652 }
4653 })
4654 }
4655
4656 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4657 let id = self.id;
4658 self.send_job(None, move |repo, _| async move {
4659 match repo {
4660 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4661 RepositoryState::Remote { project_id, client } => {
4662 let response = client
4663 .request(proto::GitGetWorktrees {
4664 project_id: project_id.0,
4665 repository_id: id.to_proto(),
4666 })
4667 .await?;
4668
4669 let worktrees = response
4670 .worktrees
4671 .into_iter()
4672 .map(|worktree| proto_to_worktree(&worktree))
4673 .collect();
4674
4675 Ok(worktrees)
4676 }
4677 }
4678 })
4679 }
4680
4681 pub fn create_worktree(
4682 &mut self,
4683 name: String,
4684 path: PathBuf,
4685 commit: Option<String>,
4686 ) -> oneshot::Receiver<Result<()>> {
4687 let id = self.id;
4688 self.send_job(
4689 Some("git worktree add".into()),
4690 move |repo, _cx| async move {
4691 match repo {
4692 RepositoryState::Local { backend, .. } => {
4693 backend.create_worktree(name, path, commit).await
4694 }
4695 RepositoryState::Remote { project_id, client } => {
4696 client
4697 .request(proto::GitCreateWorktree {
4698 project_id: project_id.0,
4699 repository_id: id.to_proto(),
4700 name,
4701 directory: path.to_string_lossy().to_string(),
4702 commit,
4703 })
4704 .await?;
4705
4706 Ok(())
4707 }
4708 }
4709 },
4710 )
4711 }
4712
4713 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4714 let id = self.id;
4715 self.send_job(None, move |repo, _| async move {
4716 match repo {
4717 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4718 RepositoryState::Remote { project_id, client } => {
4719 let response = client
4720 .request(proto::GetDefaultBranch {
4721 project_id: project_id.0,
4722 repository_id: id.to_proto(),
4723 })
4724 .await?;
4725
4726 anyhow::Ok(response.branch.map(SharedString::from))
4727 }
4728 }
4729 })
4730 }
4731
4732 pub fn diff_tree(
4733 &mut self,
4734 diff_type: DiffTreeType,
4735 _cx: &App,
4736 ) -> oneshot::Receiver<Result<TreeDiff>> {
4737 let repository_id = self.snapshot.id;
4738 self.send_job(None, move |repo, _cx| async move {
4739 match repo {
4740 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4741 RepositoryState::Remote { client, project_id } => {
4742 let response = client
4743 .request(proto::GetTreeDiff {
4744 project_id: project_id.0,
4745 repository_id: repository_id.0,
4746 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4747 base: diff_type.base().to_string(),
4748 head: diff_type.head().to_string(),
4749 })
4750 .await?;
4751
4752 let entries = response
4753 .entries
4754 .into_iter()
4755 .filter_map(|entry| {
4756 let status = match entry.status() {
4757 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4758 proto::tree_diff_status::Status::Modified => {
4759 TreeDiffStatus::Modified {
4760 old: git::Oid::from_str(
4761 &entry.oid.context("missing oid").log_err()?,
4762 )
4763 .log_err()?,
4764 }
4765 }
4766 proto::tree_diff_status::Status::Deleted => {
4767 TreeDiffStatus::Deleted {
4768 old: git::Oid::from_str(
4769 &entry.oid.context("missing oid").log_err()?,
4770 )
4771 .log_err()?,
4772 }
4773 }
4774 };
4775 Some((
4776 RepoPath::from_rel_path(
4777 &RelPath::from_proto(&entry.path).log_err()?,
4778 ),
4779 status,
4780 ))
4781 })
4782 .collect();
4783
4784 Ok(TreeDiff { entries })
4785 }
4786 }
4787 })
4788 }
4789
4790 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4791 let id = self.id;
4792 self.send_job(None, move |repo, _cx| async move {
4793 match repo {
4794 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4795 RepositoryState::Remote { project_id, client } => {
4796 let response = client
4797 .request(proto::GitDiff {
4798 project_id: project_id.0,
4799 repository_id: id.to_proto(),
4800 diff_type: match diff_type {
4801 DiffType::HeadToIndex => {
4802 proto::git_diff::DiffType::HeadToIndex.into()
4803 }
4804 DiffType::HeadToWorktree => {
4805 proto::git_diff::DiffType::HeadToWorktree.into()
4806 }
4807 },
4808 })
4809 .await?;
4810
4811 Ok(response.diff)
4812 }
4813 }
4814 })
4815 }
4816
4817 pub fn create_branch(
4818 &mut self,
4819 branch_name: String,
4820 base_branch: Option<String>,
4821 ) -> oneshot::Receiver<Result<()>> {
4822 let id = self.id;
4823 let status_msg = if let Some(ref base) = base_branch {
4824 format!("git switch -c {branch_name} {base}").into()
4825 } else {
4826 format!("git switch -c {branch_name}").into()
4827 };
4828 self.send_job(Some(status_msg), move |repo, _cx| async move {
4829 match repo {
4830 RepositoryState::Local { backend, .. } => {
4831 backend.create_branch(branch_name, base_branch).await
4832 }
4833 RepositoryState::Remote { project_id, client } => {
4834 client
4835 .request(proto::GitCreateBranch {
4836 project_id: project_id.0,
4837 repository_id: id.to_proto(),
4838 branch_name,
4839 })
4840 .await?;
4841
4842 Ok(())
4843 }
4844 }
4845 })
4846 }
4847
4848 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4849 let id = self.id;
4850 self.send_job(
4851 Some(format!("git switch {branch_name}").into()),
4852 move |repo, _cx| async move {
4853 match repo {
4854 RepositoryState::Local { backend, .. } => {
4855 backend.change_branch(branch_name).await
4856 }
4857 RepositoryState::Remote { project_id, client } => {
4858 client
4859 .request(proto::GitChangeBranch {
4860 project_id: project_id.0,
4861 repository_id: id.to_proto(),
4862 branch_name,
4863 })
4864 .await?;
4865
4866 Ok(())
4867 }
4868 }
4869 },
4870 )
4871 }
4872
4873 pub fn rename_branch(
4874 &mut self,
4875 branch: String,
4876 new_name: String,
4877 ) -> oneshot::Receiver<Result<()>> {
4878 let id = self.id;
4879 self.send_job(
4880 Some(format!("git branch -m {branch} {new_name}").into()),
4881 move |repo, _cx| async move {
4882 match repo {
4883 RepositoryState::Local { backend, .. } => {
4884 backend.rename_branch(branch, new_name).await
4885 }
4886 RepositoryState::Remote { project_id, client } => {
4887 client
4888 .request(proto::GitRenameBranch {
4889 project_id: project_id.0,
4890 repository_id: id.to_proto(),
4891 branch,
4892 new_name,
4893 })
4894 .await?;
4895
4896 Ok(())
4897 }
4898 }
4899 },
4900 )
4901 }
4902
4903 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4904 let id = self.id;
4905 self.send_job(None, move |repo, _cx| async move {
4906 match repo {
4907 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4908 RepositoryState::Remote { project_id, client } => {
4909 let response = client
4910 .request(proto::CheckForPushedCommits {
4911 project_id: project_id.0,
4912 repository_id: id.to_proto(),
4913 })
4914 .await?;
4915
4916 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4917
4918 Ok(branches)
4919 }
4920 }
4921 })
4922 }
4923
4924 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4925 self.send_job(None, |repo, _cx| async move {
4926 match repo {
4927 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4928 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4929 }
4930 })
4931 }
4932
4933 pub fn restore_checkpoint(
4934 &mut self,
4935 checkpoint: GitRepositoryCheckpoint,
4936 ) -> oneshot::Receiver<Result<()>> {
4937 self.send_job(None, move |repo, _cx| async move {
4938 match repo {
4939 RepositoryState::Local { backend, .. } => {
4940 backend.restore_checkpoint(checkpoint).await
4941 }
4942 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4943 }
4944 })
4945 }
4946
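/// Applies a `proto::UpdateRepository` message received from the host, updating the cached
/// branch, head commit, merge state, stash entries, and statuses, and emitting the
/// corresponding repository events for anything that changed.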
4947 pub(crate) fn apply_remote_update(
4948 &mut self,
4949 update: proto::UpdateRepository,
4950 cx: &mut Context<Self>,
4951 ) -> Result<()> {
4952 let conflicted_paths = TreeSet::from_ordered_entries(
4953 update
4954 .current_merge_conflicts
4955 .into_iter()
4956 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4957 );
4958 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4959 let new_head_commit = update
4960 .head_commit_details
4961 .as_ref()
4962 .map(proto_to_commit_details);
4963 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4964 cx.emit(RepositoryEvent::BranchChanged)
4965 }
4966 self.snapshot.branch = new_branch;
4967 self.snapshot.head_commit = new_head_commit;
4968
4969 self.snapshot.merge.conflicted_paths = conflicted_paths;
4970 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4971 let new_stash_entries = GitStash {
4972 entries: update
4973 .stash_entries
4974 .iter()
4975 .filter_map(|entry| proto_to_stash(entry).ok())
4976 .collect(),
4977 };
4978 if self.snapshot.stash_entries != new_stash_entries {
4979 cx.emit(RepositoryEvent::StashEntriesChanged)
4980 }
4981 self.snapshot.stash_entries = new_stash_entries;
4982
4983 self.snapshot.renamed_paths = update
4984 .renamed_paths
4985 .into_iter()
4986 .filter_map(|(new_path_str, old_path_str)| {
4987 Some((
4988 RepoPath::from_proto(&new_path_str).log_err()?,
4989 RepoPath::from_proto(&old_path_str).log_err()?,
4990 ))
4991 })
4992 .collect();
4993
4994 let edits = update
4995 .removed_statuses
4996 .into_iter()
4997 .filter_map(|path| {
4998 Some(sum_tree::Edit::Remove(PathKey(
4999 RelPath::from_proto(&path).log_err()?,
5000 )))
5001 })
5002 .chain(
5003 update
5004 .updated_statuses
5005 .into_iter()
5006 .filter_map(|updated_status| {
5007 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5008 }),
5009 )
5010 .collect::<Vec<_>>();
5011 if !edits.is_empty() {
5012 cx.emit(RepositoryEvent::StatusesChanged);
5013 }
5014 self.snapshot.statuses_by_path.edit(edits, ());
5015 if update.is_last_update {
5016 self.snapshot.scan_id = update.scan_id;
5017 }
5018 Ok(())
5019 }
5020
5021 pub fn compare_checkpoints(
5022 &mut self,
5023 left: GitRepositoryCheckpoint,
5024 right: GitRepositoryCheckpoint,
5025 ) -> oneshot::Receiver<Result<bool>> {
5026 self.send_job(None, move |repo, _cx| async move {
5027 match repo {
5028 RepositoryState::Local { backend, .. } => {
5029 backend.compare_checkpoints(left, right).await
5030 }
5031 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5032 }
5033 })
5034 }
5035
5036 pub fn diff_checkpoints(
5037 &mut self,
5038 base_checkpoint: GitRepositoryCheckpoint,
5039 target_checkpoint: GitRepositoryCheckpoint,
5040 ) -> oneshot::Receiver<Result<String>> {
5041 self.send_job(None, move |repo, _cx| async move {
5042 match repo {
5043 RepositoryState::Local { backend, .. } => {
5044 backend
5045 .diff_checkpoints(base_checkpoint, target_checkpoint)
5046 .await
5047 }
5048 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5049 }
5050 })
5051 }
5052
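/// Queues a full git status scan as a keyed job, replacing the snapshot with the freshly
/// computed one and forwarding it downstream when the project is shared.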
5053 fn schedule_scan(
5054 &mut self,
5055 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5056 cx: &mut Context<Self>,
5057 ) {
5058 let this = cx.weak_entity();
5059 let _ = self.send_keyed_job(
5060 Some(GitJobKey::ReloadGitState),
5061 None,
5062 |state, mut cx| async move {
5063 log::debug!("run scheduled git status scan");
5064
5065 let Some(this) = this.upgrade() else {
5066 return Ok(());
5067 };
5068 let RepositoryState::Local { backend, .. } = state else {
5069 bail!("not a local repository")
5070 };
5071 let (snapshot, events) = this
5072 .update(&mut cx, |this, _| {
5073 this.paths_needing_status_update.clear();
5074 compute_snapshot(
5075 this.id,
5076 this.work_directory_abs_path.clone(),
5077 this.snapshot.clone(),
5078 backend.clone(),
5079 )
5080 })?
5081 .await?;
5082 this.update(&mut cx, |this, cx| {
5083 this.snapshot = snapshot.clone();
5084 for event in events {
5085 cx.emit(event);
5086 }
5087 })?;
5088 if let Some(updates_tx) = updates_tx {
5089 updates_tx
5090 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5091 .ok();
5092 }
5093 Ok(())
5094 },
5095 );
5096 }
5097
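/// Spawns the background worker that executes git jobs for a local repository: it resolves
/// the working directory environment, opens the repository backend, and then drains queued
/// jobs, skipping a keyed job when a newer job with the same key is already queued.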
5098 fn spawn_local_git_worker(
5099 work_directory_abs_path: Arc<Path>,
5100 dot_git_abs_path: Arc<Path>,
5101 _repository_dir_abs_path: Arc<Path>,
5102 _common_dir_abs_path: Arc<Path>,
5103 project_environment: WeakEntity<ProjectEnvironment>,
5104 fs: Arc<dyn Fs>,
5105 cx: &mut Context<Self>,
5106 ) -> mpsc::UnboundedSender<GitJob> {
5107 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5108
5109 cx.spawn(async move |_, cx| {
5110 let environment = project_environment
5111 .upgrade()
5112 .context("missing project environment")?
5113 .update(cx, |project_environment, cx| {
5114 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5115 })?
5116 .await
5117 .unwrap_or_else(|| {
5118 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5119 HashMap::default()
5120 });
5121 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5122 let backend = cx
5123 .background_spawn(async move {
5124 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5125 .or_else(|| which::which("git").ok());
5126 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5127 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5128 })
5129 .await?;
5130
5131 if let Some(git_hosting_provider_registry) =
5132 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5133 {
5134 git_hosting_providers::register_additional_providers(
5135 git_hosting_provider_registry,
5136 backend.clone(),
5137 );
5138 }
5139
5140 let state = RepositoryState::Local {
5141 backend,
5142 environment: Arc::new(environment),
5143 };
5144 let mut jobs = VecDeque::new();
5145 loop {
5146 while let Ok(Some(next_job)) = job_rx.try_next() {
5147 jobs.push_back(next_job);
5148 }
5149
5150 if let Some(job) = jobs.pop_front() {
5151 if let Some(current_key) = &job.key
5152 && jobs
5153 .iter()
5154 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5155 {
5156 continue;
5157 }
5158 (job.job)(state.clone(), cx).await;
5159 } else if let Some(job) = job_rx.next().await {
5160 jobs.push_back(job);
5161 } else {
5162 break;
5163 }
5164 }
5165 anyhow::Ok(())
5166 })
5167 .detach_and_log_err(cx);
5168
5169 job_tx
5170 }
5171
5172 fn spawn_remote_git_worker(
5173 project_id: ProjectId,
5174 client: AnyProtoClient,
5175 cx: &mut Context<Self>,
5176 ) -> mpsc::UnboundedSender<GitJob> {
5177 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5178
5179 cx.spawn(async move |_, cx| {
5180 let state = RepositoryState::Remote { project_id, client };
5181 let mut jobs = VecDeque::new();
5182 loop {
5183 while let Ok(Some(next_job)) = job_rx.try_next() {
5184 jobs.push_back(next_job);
5185 }
5186
5187 if let Some(job) = jobs.pop_front() {
5188 if let Some(current_key) = &job.key
5189 && jobs
5190 .iter()
5191 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5192 {
5193 continue;
5194 }
5195 (job.job)(state.clone(), cx).await;
5196 } else if let Some(job) = job_rx.next().await {
5197 jobs.push_back(job);
5198 } else {
5199 break;
5200 }
5201 }
5202 anyhow::Ok(())
5203 })
5204 .detach_and_log_err(cx);
5205
5206 job_tx
5207 }
5208
5209 fn load_staged_text(
5210 &mut self,
5211 buffer_id: BufferId,
5212 repo_path: RepoPath,
5213 cx: &App,
5214 ) -> Task<Result<Option<String>>> {
5215 let rx = self.send_job(None, move |state, _| async move {
5216 match state {
5217 RepositoryState::Local { backend, .. } => {
5218 anyhow::Ok(backend.load_index_text(repo_path).await)
5219 }
5220 RepositoryState::Remote { project_id, client } => {
5221 let response = client
5222 .request(proto::OpenUnstagedDiff {
5223 project_id: project_id.to_proto(),
5224 buffer_id: buffer_id.to_proto(),
5225 })
5226 .await?;
5227 Ok(response.staged_text)
5228 }
5229 }
5230 });
5231 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5232 }
5233
5234 fn load_committed_text(
5235 &mut self,
5236 buffer_id: BufferId,
5237 repo_path: RepoPath,
5238 cx: &App,
5239 ) -> Task<Result<DiffBasesChange>> {
5240 let rx = self.send_job(None, move |state, _| async move {
5241 match state {
5242 RepositoryState::Local { backend, .. } => {
5243 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5244 let staged_text = backend.load_index_text(repo_path).await;
5245 let diff_bases_change = if committed_text == staged_text {
5246 DiffBasesChange::SetBoth(committed_text)
5247 } else {
5248 DiffBasesChange::SetEach {
5249 index: staged_text,
5250 head: committed_text,
5251 }
5252 };
5253 anyhow::Ok(diff_bases_change)
5254 }
5255 RepositoryState::Remote { project_id, client } => {
5256 use proto::open_uncommitted_diff_response::Mode;
5257
5258 let response = client
5259 .request(proto::OpenUncommittedDiff {
5260 project_id: project_id.to_proto(),
5261 buffer_id: buffer_id.to_proto(),
5262 })
5263 .await?;
5264 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5265 let bases = match mode {
5266 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5267 Mode::IndexAndHead => DiffBasesChange::SetEach {
5268 head: response.committed_text,
5269 index: response.staged_text,
5270 },
5271 };
5272 Ok(bases)
5273 }
5274 }
5275 });
5276
5277 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5278 }
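
/// Loads the contents of the blob with the given object id, either from the local backend
/// or via a `GetBlobContent` request to the remote host.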
5279 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5280 let repository_id = self.snapshot.id;
5281 let rx = self.send_job(None, move |state, _| async move {
5282 match state {
5283 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5284 RepositoryState::Remote { client, project_id } => {
5285 let response = client
5286 .request(proto::GetBlobContent {
5287 project_id: project_id.to_proto(),
5288 repository_id: repository_id.0,
5289 oid: oid.to_string(),
5290 })
5291 .await?;
5292 Ok(response.content)
5293 }
5294 }
5295 });
5296 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5297 }
5298
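/// Records that the given paths may have changed and queues a keyed job that re-runs
/// `git status` for them, applying only the entries whose status actually changed.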
5299 fn paths_changed(
5300 &mut self,
5301 paths: Vec<RepoPath>,
5302 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5303 cx: &mut Context<Self>,
5304 ) {
5305 self.paths_needing_status_update.extend(paths);
5306
5307 let this = cx.weak_entity();
5308 let _ = self.send_keyed_job(
5309 Some(GitJobKey::RefreshStatuses),
5310 None,
5311 |state, mut cx| async move {
5312 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5313 (
5314 this.snapshot.clone(),
5315 mem::take(&mut this.paths_needing_status_update),
5316 )
5317 })?;
5318 let RepositoryState::Local { backend, .. } = state else {
5319 bail!("not a local repository")
5320 };
5321
5322 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5323 if paths.is_empty() {
5324 return Ok(());
5325 }
5326 let statuses = backend.status(&paths).await?;
5327 let stash_entries = backend.stash_entries().await?;
5328
5329 let changed_path_statuses = cx
5330 .background_spawn(async move {
5331 let mut changed_path_statuses = Vec::new();
5332 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5333 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5334
5335 for (repo_path, status) in &*statuses.entries {
5336 changed_paths.remove(repo_path);
5337 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5338 && cursor.item().is_some_and(|entry| entry.status == *status)
5339 {
5340 continue;
5341 }
5342
5343 changed_path_statuses.push(Edit::Insert(StatusEntry {
5344 repo_path: repo_path.clone(),
5345 status: *status,
5346 }));
5347 }
5348 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5349 for path in changed_paths.into_iter() {
5350 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5351 changed_path_statuses
5352 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5353 }
5354 }
5355 changed_path_statuses
5356 })
5357 .await;
5358
5359 this.update(&mut cx, |this, cx| {
5360 if this.snapshot.stash_entries != stash_entries {
5361 cx.emit(RepositoryEvent::StashEntriesChanged);
5362 this.snapshot.stash_entries = stash_entries;
5363 }
5364
5365 if !changed_path_statuses.is_empty() {
5366 cx.emit(RepositoryEvent::StatusesChanged);
5367 this.snapshot
5368 .statuses_by_path
5369 .edit(changed_path_statuses, ());
5370 this.snapshot.scan_id += 1;
5371 }
5372
5373 if let Some(updates_tx) = updates_tx {
5374 updates_tx
5375 .unbounded_send(DownstreamUpdate::UpdateRepository(
5376 this.snapshot.clone(),
5377 ))
5378 .ok();
5379 }
5380 })
5381 },
5382 );
5383 }
5384
5385 /// Returns the currently running git command, if any, along with when it started.
5386 pub fn current_job(&self) -> Option<JobInfo> {
5387 self.active_jobs.values().next().cloned()
5388 }
5389
5390 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5391 self.send_job(None, |_, _| async {})
5392 }
5393
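/// Runs `f` while tracking a pending operation with the given git status for each path,
/// marking the operations finished, skipped (on cancellation), or errored once the job completes.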
5394 fn spawn_job_with_tracking<AsyncFn>(
5395 &mut self,
5396 paths: Vec<RepoPath>,
5397 git_status: pending_op::GitStatus,
5398 cx: &mut Context<Self>,
5399 f: AsyncFn,
5400 ) -> Task<Result<()>>
5401 where
5402 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5403 {
5404 let ids = self.new_pending_ops_for_paths(paths, git_status);
5405
5406 cx.spawn(async move |this, cx| {
5407 let (job_status, result) = match f(this.clone(), cx).await {
5408 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5409 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5410 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5411 };
5412
5413 this.update(cx, |this, _| {
5414 let mut edits = Vec::with_capacity(ids.len());
5415 for (id, entry) in ids {
5416 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5417 if let Some(op) = ops.op_by_id_mut(id) {
5418 op.job_status = job_status;
5419 }
5420 edits.push(sum_tree::Edit::Insert(ops));
5421 }
5422 }
5423 this.snapshot.pending_ops_by_path.edit(edits, ());
5424 })?;
5425
5426 result
5427 })
5428 }
5429
5430 fn new_pending_ops_for_paths(
5431 &mut self,
5432 paths: Vec<RepoPath>,
5433 git_status: pending_op::GitStatus,
5434 ) -> Vec<(PendingOpId, RepoPath)> {
5435 let mut edits = Vec::with_capacity(paths.len());
5436 let mut ids = Vec::with_capacity(paths.len());
5437 for path in paths {
5438 let mut ops = self
5439 .snapshot
5440 .pending_ops_for_path(&path)
5441 .unwrap_or_else(|| PendingOps::new(&path));
5442 let id = ops.max_id() + 1;
5443 ops.ops.push(PendingOp {
5444 id,
5445 git_status,
5446 job_status: pending_op::JobStatus::Running,
5447 });
5448 edits.push(sum_tree::Edit::Insert(ops));
5449 ids.push((id, path));
5450 }
5451 self.snapshot.pending_ops_by_path.edit(edits, ());
5452 ids
5453 }
5454}
5455
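/// Builds a permalink for a file inside a Cargo registry checkout by reading the crate's
/// `.cargo_vcs_info.json` and `Cargo.toml` to recover the upstream repository and commit.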
5456fn get_permalink_in_rust_registry_src(
5457 provider_registry: Arc<GitHostingProviderRegistry>,
5458 path: PathBuf,
5459 selection: Range<u32>,
5460) -> Result<url::Url> {
5461 #[derive(Deserialize)]
5462 struct CargoVcsGit {
5463 sha1: String,
5464 }
5465
5466 #[derive(Deserialize)]
5467 struct CargoVcsInfo {
5468 git: CargoVcsGit,
5469 path_in_vcs: String,
5470 }
5471
5472 #[derive(Deserialize)]
5473 struct CargoPackage {
5474 repository: String,
5475 }
5476
5477 #[derive(Deserialize)]
5478 struct CargoToml {
5479 package: CargoPackage,
5480 }
5481
5482 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5483 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5484 Some((dir, json))
5485 }) else {
5486 bail!("No .cargo_vcs_info.json found in parent directories")
5487 };
5488 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5489 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5490 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5491 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5492 .context("parsing package.repository field of manifest")?;
5493 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5494 let permalink = provider.build_permalink(
5495 remote,
5496 BuildPermalinkParams::new(
5497 &cargo_vcs_info.git.sha1,
5498 &RepoPath::from_rel_path(
5499 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5500 ),
5501 Some(selection),
5502 ),
5503 );
5504 Ok(permalink)
5505}
5506
5507fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5508 let Some(blame) = blame else {
5509 return proto::BlameBufferResponse {
5510 blame_response: None,
5511 };
5512 };
5513
5514 let entries = blame
5515 .entries
5516 .into_iter()
5517 .map(|entry| proto::BlameEntry {
5518 sha: entry.sha.as_bytes().into(),
5519 start_line: entry.range.start,
5520 end_line: entry.range.end,
5521 original_line_number: entry.original_line_number,
5522 author: entry.author,
5523 author_mail: entry.author_mail,
5524 author_time: entry.author_time,
5525 author_tz: entry.author_tz,
5526 committer: entry.committer_name,
5527 committer_mail: entry.committer_email,
5528 committer_time: entry.committer_time,
5529 committer_tz: entry.committer_tz,
5530 summary: entry.summary,
5531 previous: entry.previous,
5532 filename: entry.filename,
5533 })
5534 .collect::<Vec<_>>();
5535
5536 let messages = blame
5537 .messages
5538 .into_iter()
5539 .map(|(oid, message)| proto::CommitMessage {
5540 oid: oid.as_bytes().into(),
5541 message,
5542 })
5543 .collect::<Vec<_>>();
5544
5545 proto::BlameBufferResponse {
5546 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5547 entries,
5548 messages,
5549 remote_url: blame.remote_url,
5550 }),
5551 }
5552}
5553
5554fn deserialize_blame_buffer_response(
5555 response: proto::BlameBufferResponse,
5556) -> Option<git::blame::Blame> {
5557 let response = response.blame_response?;
5558 let entries = response
5559 .entries
5560 .into_iter()
5561 .filter_map(|entry| {
5562 Some(git::blame::BlameEntry {
5563 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5564 range: entry.start_line..entry.end_line,
5565 original_line_number: entry.original_line_number,
5566 committer_name: entry.committer,
5567 committer_time: entry.committer_time,
5568 committer_tz: entry.committer_tz,
5569 committer_email: entry.committer_mail,
5570 author: entry.author,
5571 author_mail: entry.author_mail,
5572 author_time: entry.author_time,
5573 author_tz: entry.author_tz,
5574 summary: entry.summary,
5575 previous: entry.previous,
5576 filename: entry.filename,
5577 })
5578 })
5579 .collect::<Vec<_>>();
5580
5581 let messages = response
5582 .messages
5583 .into_iter()
5584 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5585 .collect::<HashMap<_, _>>();
5586
5587 Some(Blame {
5588 entries,
5589 messages,
5590 remote_url: response.remote_url,
5591 })
5592}
5593
5594fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5595 proto::Branch {
5596 is_head: branch.is_head,
5597 ref_name: branch.ref_name.to_string(),
5598 unix_timestamp: branch
5599 .most_recent_commit
5600 .as_ref()
5601 .map(|commit| commit.commit_timestamp as u64),
5602 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5603 ref_name: upstream.ref_name.to_string(),
5604 tracking: upstream
5605 .tracking
5606 .status()
5607 .map(|upstream| proto::UpstreamTracking {
5608 ahead: upstream.ahead as u64,
5609 behind: upstream.behind as u64,
5610 }),
5611 }),
5612 most_recent_commit: branch
5613 .most_recent_commit
5614 .as_ref()
5615 .map(|commit| proto::CommitSummary {
5616 sha: commit.sha.to_string(),
5617 subject: commit.subject.to_string(),
5618 commit_timestamp: commit.commit_timestamp,
5619 author_name: commit.author_name.to_string(),
5620 }),
5621 }
5622}
5623
5624fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5625 proto::Worktree {
5626 path: worktree.path.to_string_lossy().to_string(),
5627 ref_name: worktree.ref_name.to_string(),
5628 sha: worktree.sha.to_string(),
5629 }
5630}
5631
5632fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5633 git::repository::Worktree {
5634 path: PathBuf::from(proto.path.clone()),
5635 ref_name: proto.ref_name.clone().into(),
5636 sha: proto.sha.clone().into(),
5637 }
5638}
5639
5640fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5641 git::repository::Branch {
5642 is_head: proto.is_head,
5643 ref_name: proto.ref_name.clone().into(),
5644 upstream: proto
5645 .upstream
5646 .as_ref()
5647 .map(|upstream| git::repository::Upstream {
5648 ref_name: upstream.ref_name.to_string().into(),
5649 tracking: upstream
5650 .tracking
5651 .as_ref()
5652 .map(|tracking| {
5653 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5654 ahead: tracking.ahead as u32,
5655 behind: tracking.behind as u32,
5656 })
5657 })
5658 .unwrap_or(git::repository::UpstreamTracking::Gone),
5659 }),
5660 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5661 git::repository::CommitSummary {
5662 sha: commit.sha.to_string().into(),
5663 subject: commit.subject.to_string().into(),
5664 commit_timestamp: commit.commit_timestamp,
5665 author_name: commit.author_name.to_string().into(),
5666 has_parent: true,
5667 }
5668 }),
5669 }
5670}
5671
5672fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5673 proto::GitCommitDetails {
5674 sha: commit.sha.to_string(),
5675 message: commit.message.to_string(),
5676 commit_timestamp: commit.commit_timestamp,
5677 author_email: commit.author_email.to_string(),
5678 author_name: commit.author_name.to_string(),
5679 }
5680}
5681
5682fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5683 CommitDetails {
5684 sha: proto.sha.clone().into(),
5685 message: proto.message.clone().into(),
5686 commit_timestamp: proto.commit_timestamp,
5687 author_email: proto.author_email.clone().into(),
5688 author_name: proto.author_name.clone().into(),
5689 }
5690}
5691
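/// Computes a fresh `RepositorySnapshot` from the backend (branch, statuses, stash, merge
/// details, and still-running pending ops), returning it along with the repository events
/// describing what changed relative to `prev_snapshot`.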
5692async fn compute_snapshot(
5693 id: RepositoryId,
5694 work_directory_abs_path: Arc<Path>,
5695 prev_snapshot: RepositorySnapshot,
5696 backend: Arc<dyn GitRepository>,
5697) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5698 let mut events = Vec::new();
5699 let branches = backend.branches().await?;
5700 let branch = branches.into_iter().find(|branch| branch.is_head);
5701 let statuses = backend
5702 .status(&[RepoPath::from_rel_path(
5703 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5704 )])
5705 .await?;
5706 let stash_entries = backend.stash_entries().await?;
5707 let statuses_by_path = SumTree::from_iter(
5708 statuses
5709 .entries
5710 .iter()
5711 .map(|(repo_path, status)| StatusEntry {
5712 repo_path: repo_path.clone(),
5713 status: *status,
5714 }),
5715 (),
5716 );
5717 let (merge_details, merge_heads_changed) =
5718 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5719 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5720
5721 let pending_ops_by_path = SumTree::from_iter(
5722 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5723 let inner_ops: Vec<PendingOp> =
5724 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5725 if inner_ops.is_empty() {
5726 None
5727 } else {
5728 Some(PendingOps {
5729 repo_path: ops.repo_path.clone(),
5730 ops: inner_ops,
5731 })
5732 }
5733 }),
5734 (),
5735 );
5736
5737 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5738 events.push(RepositoryEvent::PendingOpsChanged {
5739 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5740 })
5741 }
5742
5743 if merge_heads_changed {
5744 events.push(RepositoryEvent::MergeHeadsChanged);
5745 }
5746
5747 if statuses_by_path != prev_snapshot.statuses_by_path {
5748 events.push(RepositoryEvent::StatusesChanged)
5749 }
5750
5751 // Resolve the head commit directly; useful when the branch is None, e.g. in a detached HEAD state.
5752 let head_commit = match backend.head_sha().await {
5753 Some(head_sha) => backend.show(head_sha).await.log_err(),
5754 None => None,
5755 };
5756
5757 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5758 events.push(RepositoryEvent::BranchChanged);
5759 }
5760
5761 // Used by edit prediction data collection
5762 let remote_origin_url = backend.remote_url("origin");
5763 let remote_upstream_url = backend.remote_url("upstream");
5764
5765 let snapshot = RepositorySnapshot {
5766 id,
5767 statuses_by_path,
5768 pending_ops_by_path,
5769 renamed_paths: statuses.renamed_paths,
5770 work_directory_abs_path,
5771 path_style: prev_snapshot.path_style,
5772 scan_id: prev_snapshot.scan_id + 1,
5773 branch,
5774 head_commit,
5775 merge: merge_details,
5776 remote_origin_url,
5777 remote_upstream_url,
5778 stash_entries,
5779 };
5780
5781 Ok((snapshot, events))
5782}
5783
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

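/// Serializes a [`FileStatus`] into its structured protobuf variant.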
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

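/// Maps an [`UnmergedStatusCode`] to the corresponding `proto::GitStatus` discriminant.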
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

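/// Maps a tracked-file [`StatusCode`] to the corresponding `proto::GitStatus` discriminant.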
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}