1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
126 /// text do disk, the `operation count as of write` is updated to reflect
127 /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub work_directory_abs_path: Arc<Path>,
259 pub path_style: PathStyle,
260 pub branch: Option<Branch>,
261 pub head_commit: Option<CommitDetails>,
262 pub scan_id: u64,
263 pub merge: MergeDetails,
264 pub remote_origin_url: Option<String>,
265 pub remote_upstream_url: Option<String>,
266 pub stash_entries: GitStash,
267}
268
269type JobId = u64;
270
271#[derive(Clone, Debug, PartialEq, Eq)]
272pub struct JobInfo {
273 pub start: Instant,
274 pub message: SharedString,
275}
276
277pub struct Repository {
278 this: WeakEntity<Self>,
279 snapshot: RepositorySnapshot,
280 commit_message_buffer: Option<Entity<Buffer>>,
281 git_store: WeakEntity<GitStore>,
282 // For a local repository, holds paths that have had worktree events since the last status scan completed,
283 // and that should be examined during the next status scan.
284 paths_needing_status_update: BTreeSet<RepoPath>,
285 job_sender: mpsc::UnboundedSender<GitJob>,
286 active_jobs: HashMap<JobId, JobInfo>,
287 pending_ops: SumTree<PendingOps>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 fs: Arc<dyn Fs>,
305 backend: Arc<dyn GitRepository>,
306 environment: Arc<HashMap<String, String>>,
307 },
308 Remote {
309 project_id: ProjectId,
310 client: AnyProtoClient,
311 },
312}
313
314#[derive(Clone, Debug, PartialEq, Eq)]
315pub enum RepositoryEvent {
316 StatusesChanged,
317 MergeHeadsChanged,
318 BranchChanged,
319 StashEntriesChanged,
320 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
321}
322
323#[derive(Clone, Debug)]
324pub struct JobsUpdated;
325
326#[derive(Debug)]
327pub enum GitStoreEvent {
328 ActiveRepositoryChanged(Option<RepositoryId>),
329 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
330 RepositoryAdded,
331 RepositoryRemoved(RepositoryId),
332 IndexWriteError(anyhow::Error),
333 JobsUpdated,
334 ConflictsUpdated,
335}
336
337impl EventEmitter<RepositoryEvent> for Repository {}
338impl EventEmitter<JobsUpdated> for Repository {}
339impl EventEmitter<GitStoreEvent> for GitStore {}
340
341pub struct GitJob {
342 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
343 key: Option<GitJobKey>,
344}
345
346#[derive(PartialEq, Eq)]
347enum GitJobKey {
348 WriteIndex(Vec<RepoPath>),
349 ReloadBufferDiffBases,
350 RefreshStatuses,
351 ReloadGitState,
352}
353
354impl GitStore {
355 pub fn local(
356 worktree_store: &Entity<WorktreeStore>,
357 buffer_store: Entity<BufferStore>,
358 environment: Entity<ProjectEnvironment>,
359 fs: Arc<dyn Fs>,
360 cx: &mut Context<Self>,
361 ) -> Self {
362 Self::new(
363 worktree_store.clone(),
364 buffer_store,
365 GitStoreState::Local {
366 next_repository_id: Arc::new(AtomicU64::new(1)),
367 downstream: None,
368 project_environment: environment,
369 fs,
370 },
371 cx,
372 )
373 }
374
375 pub fn remote(
376 worktree_store: &Entity<WorktreeStore>,
377 buffer_store: Entity<BufferStore>,
378 upstream_client: AnyProtoClient,
379 project_id: u64,
380 cx: &mut Context<Self>,
381 ) -> Self {
382 Self::new(
383 worktree_store.clone(),
384 buffer_store,
385 GitStoreState::Remote {
386 upstream_client,
387 upstream_project_id: project_id,
388 downstream: None,
389 },
390 cx,
391 )
392 }
393
394 fn new(
395 worktree_store: Entity<WorktreeStore>,
396 buffer_store: Entity<BufferStore>,
397 state: GitStoreState,
398 cx: &mut Context<Self>,
399 ) -> Self {
400 let _subscriptions = vec![
401 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
402 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
403 ];
404
405 GitStore {
406 state,
407 buffer_store,
408 worktree_store,
409 repositories: HashMap::default(),
410 worktree_ids: HashMap::default(),
411 active_repo_id: None,
412 _subscriptions,
413 loading_diffs: HashMap::default(),
414 shared_diffs: HashMap::default(),
415 diffs: HashMap::default(),
416 }
417 }
418
419 pub fn init(client: &AnyProtoClient) {
420 client.add_entity_request_handler(Self::handle_get_remotes);
421 client.add_entity_request_handler(Self::handle_get_branches);
422 client.add_entity_request_handler(Self::handle_get_default_branch);
423 client.add_entity_request_handler(Self::handle_change_branch);
424 client.add_entity_request_handler(Self::handle_create_branch);
425 client.add_entity_request_handler(Self::handle_rename_branch);
426 client.add_entity_request_handler(Self::handle_git_init);
427 client.add_entity_request_handler(Self::handle_push);
428 client.add_entity_request_handler(Self::handle_pull);
429 client.add_entity_request_handler(Self::handle_fetch);
430 client.add_entity_request_handler(Self::handle_stage);
431 client.add_entity_request_handler(Self::handle_unstage);
432 client.add_entity_request_handler(Self::handle_stash);
433 client.add_entity_request_handler(Self::handle_stash_pop);
434 client.add_entity_request_handler(Self::handle_stash_apply);
435 client.add_entity_request_handler(Self::handle_stash_drop);
436 client.add_entity_request_handler(Self::handle_commit);
437 client.add_entity_request_handler(Self::handle_run_hook);
438 client.add_entity_request_handler(Self::handle_reset);
439 client.add_entity_request_handler(Self::handle_show);
440 client.add_entity_request_handler(Self::handle_load_commit_diff);
441 client.add_entity_request_handler(Self::handle_checkout_files);
442 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
443 client.add_entity_request_handler(Self::handle_set_index_text);
444 client.add_entity_request_handler(Self::handle_askpass);
445 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
446 client.add_entity_request_handler(Self::handle_git_diff);
447 client.add_entity_request_handler(Self::handle_tree_diff);
448 client.add_entity_request_handler(Self::handle_get_blob_content);
449 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
450 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
451 client.add_entity_message_handler(Self::handle_update_diff_bases);
452 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
453 client.add_entity_request_handler(Self::handle_blame_buffer);
454 client.add_entity_message_handler(Self::handle_update_repository);
455 client.add_entity_message_handler(Self::handle_remove_repository);
456 client.add_entity_request_handler(Self::handle_git_clone);
457 client.add_entity_request_handler(Self::handle_get_worktrees);
458 client.add_entity_request_handler(Self::handle_create_worktree);
459 }
460
461 pub fn is_local(&self) -> bool {
462 matches!(self.state, GitStoreState::Local { .. })
463 }
464 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
465 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
466 let id = repo.read(cx).id;
467 if self.active_repo_id != Some(id) {
468 self.active_repo_id = Some(id);
469 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
470 }
471 }
472 }
473
474 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
475 match &mut self.state {
476 GitStoreState::Remote {
477 downstream: downstream_client,
478 ..
479 } => {
480 for repo in self.repositories.values() {
481 let update = repo.read(cx).snapshot.initial_update(project_id);
482 for update in split_repository_update(update) {
483 client.send(update).log_err();
484 }
485 }
486 *downstream_client = Some((client, ProjectId(project_id)));
487 }
488 GitStoreState::Local {
489 downstream: downstream_client,
490 ..
491 } => {
492 let mut snapshots = HashMap::default();
493 let (updates_tx, mut updates_rx) = mpsc::unbounded();
494 for repo in self.repositories.values() {
495 updates_tx
496 .unbounded_send(DownstreamUpdate::UpdateRepository(
497 repo.read(cx).snapshot.clone(),
498 ))
499 .ok();
500 }
501 *downstream_client = Some(LocalDownstreamState {
502 client: client.clone(),
503 project_id: ProjectId(project_id),
504 updates_tx,
505 _task: cx.spawn(async move |this, cx| {
506 cx.background_spawn(async move {
507 while let Some(update) = updates_rx.next().await {
508 match update {
509 DownstreamUpdate::UpdateRepository(snapshot) => {
510 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
511 {
512 let update =
513 snapshot.build_update(old_snapshot, project_id);
514 *old_snapshot = snapshot;
515 for update in split_repository_update(update) {
516 client.send(update)?;
517 }
518 } else {
519 let update = snapshot.initial_update(project_id);
520 for update in split_repository_update(update) {
521 client.send(update)?;
522 }
523 snapshots.insert(snapshot.id, snapshot);
524 }
525 }
526 DownstreamUpdate::RemoveRepository(id) => {
527 client.send(proto::RemoveRepository {
528 project_id,
529 id: id.to_proto(),
530 })?;
531 }
532 }
533 }
534 anyhow::Ok(())
535 })
536 .await
537 .ok();
538 this.update(cx, |this, _| {
539 if let GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } = &mut this.state
543 {
544 downstream_client.take();
545 } else {
546 unreachable!("unshared called on remote store");
547 }
548 })
549 }),
550 });
551 }
552 }
553 }
554
555 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
556 match &mut self.state {
557 GitStoreState::Local {
558 downstream: downstream_client,
559 ..
560 } => {
561 downstream_client.take();
562 }
563 GitStoreState::Remote {
564 downstream: downstream_client,
565 ..
566 } => {
567 downstream_client.take();
568 }
569 }
570 self.shared_diffs.clear();
571 }
572
573 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
574 self.shared_diffs.remove(peer_id);
575 }
576
577 pub fn active_repository(&self) -> Option<Entity<Repository>> {
578 self.active_repo_id
579 .as_ref()
580 .map(|id| self.repositories[id].clone())
581 }
582
583 pub fn open_unstaged_diff(
584 &mut self,
585 buffer: Entity<Buffer>,
586 cx: &mut Context<Self>,
587 ) -> Task<Result<Entity<BufferDiff>>> {
588 let buffer_id = buffer.read(cx).remote_id();
589 if let Some(diff_state) = self.diffs.get(&buffer_id)
590 && let Some(unstaged_diff) = diff_state
591 .read(cx)
592 .unstaged_diff
593 .as_ref()
594 .and_then(|weak| weak.upgrade())
595 {
596 if let Some(task) =
597 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
598 {
599 return cx.background_executor().spawn(async move {
600 task.await;
601 Ok(unstaged_diff)
602 });
603 }
604 return Task::ready(Ok(unstaged_diff));
605 }
606
607 let Some((repo, repo_path)) =
608 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
609 else {
610 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
611 };
612
613 let task = self
614 .loading_diffs
615 .entry((buffer_id, DiffKind::Unstaged))
616 .or_insert_with(|| {
617 let staged_text = repo.update(cx, |repo, cx| {
618 repo.load_staged_text(buffer_id, repo_path, cx)
619 });
620 cx.spawn(async move |this, cx| {
621 Self::open_diff_internal(
622 this,
623 DiffKind::Unstaged,
624 staged_text.await.map(DiffBasesChange::SetIndex),
625 buffer,
626 cx,
627 )
628 .await
629 .map_err(Arc::new)
630 })
631 .shared()
632 })
633 .clone();
634
635 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
636 }
637
638 pub fn open_diff_since(
639 &mut self,
640 oid: Option<git::Oid>,
641 buffer: Entity<Buffer>,
642 repo: Entity<Repository>,
643 languages: Arc<LanguageRegistry>,
644 cx: &mut Context<Self>,
645 ) -> Task<Result<Entity<BufferDiff>>> {
646 cx.spawn(async move |this, cx| {
647 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
648 let content = match oid {
649 None => None,
650 Some(oid) => Some(
651 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
652 .await?,
653 ),
654 };
655 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
656
657 buffer_diff
658 .update(cx, |buffer_diff, cx| {
659 buffer_diff.set_base_text(
660 content.map(Arc::new),
661 buffer_snapshot.language().cloned(),
662 Some(languages.clone()),
663 buffer_snapshot.text,
664 cx,
665 )
666 })?
667 .await?;
668 let unstaged_diff = this
669 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
670 .await?;
671 buffer_diff.update(cx, |buffer_diff, _| {
672 buffer_diff.set_secondary_diff(unstaged_diff);
673 })?;
674
675 this.update(cx, |_, cx| {
676 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
677 .detach();
678 })?;
679
680 Ok(buffer_diff)
681 })
682 }
683
684 pub fn open_uncommitted_diff(
685 &mut self,
686 buffer: Entity<Buffer>,
687 cx: &mut Context<Self>,
688 ) -> Task<Result<Entity<BufferDiff>>> {
689 let buffer_id = buffer.read(cx).remote_id();
690
691 if let Some(diff_state) = self.diffs.get(&buffer_id)
692 && let Some(uncommitted_diff) = diff_state
693 .read(cx)
694 .uncommitted_diff
695 .as_ref()
696 .and_then(|weak| weak.upgrade())
697 {
698 if let Some(task) =
699 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
700 {
701 return cx.background_executor().spawn(async move {
702 task.await;
703 Ok(uncommitted_diff)
704 });
705 }
706 return Task::ready(Ok(uncommitted_diff));
707 }
708
709 let Some((repo, repo_path)) =
710 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
711 else {
712 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
713 };
714
715 let task = self
716 .loading_diffs
717 .entry((buffer_id, DiffKind::Uncommitted))
718 .or_insert_with(|| {
719 let changes = repo.update(cx, |repo, cx| {
720 repo.load_committed_text(buffer_id, repo_path, cx)
721 });
722
723 // todo(lw): hot foreground spawn
724 cx.spawn(async move |this, cx| {
725 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
726 .await
727 .map_err(Arc::new)
728 })
729 .shared()
730 })
731 .clone();
732
733 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
734 }
735
736 async fn open_diff_internal(
737 this: WeakEntity<Self>,
738 kind: DiffKind,
739 texts: Result<DiffBasesChange>,
740 buffer_entity: Entity<Buffer>,
741 cx: &mut AsyncApp,
742 ) -> Result<Entity<BufferDiff>> {
743 let diff_bases_change = match texts {
744 Err(e) => {
745 this.update(cx, |this, cx| {
746 let buffer = buffer_entity.read(cx);
747 let buffer_id = buffer.remote_id();
748 this.loading_diffs.remove(&(buffer_id, kind));
749 })?;
750 return Err(e);
751 }
752 Ok(change) => change,
753 };
754
755 this.update(cx, |this, cx| {
756 let buffer = buffer_entity.read(cx);
757 let buffer_id = buffer.remote_id();
758 let language = buffer.language().cloned();
759 let language_registry = buffer.language_registry();
760 let text_snapshot = buffer.text_snapshot();
761 this.loading_diffs.remove(&(buffer_id, kind));
762
763 let git_store = cx.weak_entity();
764 let diff_state = this
765 .diffs
766 .entry(buffer_id)
767 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
768
769 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
770
771 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
772 diff_state.update(cx, |diff_state, cx| {
773 diff_state.language = language;
774 diff_state.language_registry = language_registry;
775
776 match kind {
777 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
778 DiffKind::Uncommitted => {
779 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
780 diff
781 } else {
782 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
783 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
784 unstaged_diff
785 };
786
787 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
788 diff_state.uncommitted_diff = Some(diff.downgrade())
789 }
790 }
791
792 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
793 let rx = diff_state.wait_for_recalculation();
794
795 anyhow::Ok(async move {
796 if let Some(rx) = rx {
797 rx.await;
798 }
799 Ok(diff)
800 })
801 })
802 })??
803 .await
804 }
805
806 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
807 let diff_state = self.diffs.get(&buffer_id)?;
808 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
809 }
810
811 pub fn get_uncommitted_diff(
812 &self,
813 buffer_id: BufferId,
814 cx: &App,
815 ) -> Option<Entity<BufferDiff>> {
816 let diff_state = self.diffs.get(&buffer_id)?;
817 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
818 }
819
820 pub fn open_conflict_set(
821 &mut self,
822 buffer: Entity<Buffer>,
823 cx: &mut Context<Self>,
824 ) -> Entity<ConflictSet> {
825 log::debug!("open conflict set");
826 let buffer_id = buffer.read(cx).remote_id();
827
828 if let Some(git_state) = self.diffs.get(&buffer_id)
829 && let Some(conflict_set) = git_state
830 .read(cx)
831 .conflict_set
832 .as_ref()
833 .and_then(|weak| weak.upgrade())
834 {
835 let conflict_set = conflict_set;
836 let buffer_snapshot = buffer.read(cx).text_snapshot();
837
838 git_state.update(cx, |state, cx| {
839 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
840 });
841
842 return conflict_set;
843 }
844
845 let is_unmerged = self
846 .repository_and_path_for_buffer_id(buffer_id, cx)
847 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
848 let git_store = cx.weak_entity();
849 let buffer_git_state = self
850 .diffs
851 .entry(buffer_id)
852 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
853 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
854
855 self._subscriptions
856 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
857 cx.emit(GitStoreEvent::ConflictsUpdated);
858 }));
859
860 buffer_git_state.update(cx, |state, cx| {
861 state.conflict_set = Some(conflict_set.downgrade());
862 let buffer_snapshot = buffer.read(cx).text_snapshot();
863 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
864 });
865
866 conflict_set
867 }
868
869 pub fn project_path_git_status(
870 &self,
871 project_path: &ProjectPath,
872 cx: &App,
873 ) -> Option<FileStatus> {
874 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
875 Some(repo.read(cx).status_for_path(&repo_path)?.status)
876 }
877
878 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
879 let mut work_directory_abs_paths = Vec::new();
880 let mut checkpoints = Vec::new();
881 for repository in self.repositories.values() {
882 repository.update(cx, |repository, _| {
883 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
884 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
885 });
886 }
887
888 cx.background_executor().spawn(async move {
889 let checkpoints = future::try_join_all(checkpoints).await?;
890 Ok(GitStoreCheckpoint {
891 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
892 .into_iter()
893 .zip(checkpoints)
894 .collect(),
895 })
896 })
897 }
898
899 pub fn restore_checkpoint(
900 &self,
901 checkpoint: GitStoreCheckpoint,
902 cx: &mut App,
903 ) -> Task<Result<()>> {
904 let repositories_by_work_dir_abs_path = self
905 .repositories
906 .values()
907 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
908 .collect::<HashMap<_, _>>();
909
910 let mut tasks = Vec::new();
911 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
912 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
913 let restore = repository.update(cx, |repository, _| {
914 repository.restore_checkpoint(checkpoint)
915 });
916 tasks.push(async move { restore.await? });
917 }
918 }
919 cx.background_spawn(async move {
920 future::try_join_all(tasks).await?;
921 Ok(())
922 })
923 }
924
925 /// Compares two checkpoints, returning true if they are equal.
926 pub fn compare_checkpoints(
927 &self,
928 left: GitStoreCheckpoint,
929 mut right: GitStoreCheckpoint,
930 cx: &mut App,
931 ) -> Task<Result<bool>> {
932 let repositories_by_work_dir_abs_path = self
933 .repositories
934 .values()
935 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
936 .collect::<HashMap<_, _>>();
937
938 let mut tasks = Vec::new();
939 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
940 if let Some(right_checkpoint) = right
941 .checkpoints_by_work_dir_abs_path
942 .remove(&work_dir_abs_path)
943 {
944 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
945 {
946 let compare = repository.update(cx, |repository, _| {
947 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
948 });
949
950 tasks.push(async move { compare.await? });
951 }
952 } else {
953 return Task::ready(Ok(false));
954 }
955 }
956 cx.background_spawn(async move {
957 Ok(future::try_join_all(tasks)
958 .await?
959 .into_iter()
960 .all(|result| result))
961 })
962 }
963
964 /// Blames a buffer.
965 pub fn blame_buffer(
966 &self,
967 buffer: &Entity<Buffer>,
968 version: Option<clock::Global>,
969 cx: &mut App,
970 ) -> Task<Result<Option<Blame>>> {
971 let buffer = buffer.read(cx);
972 let Some((repo, repo_path)) =
973 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
974 else {
975 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
976 };
977 let content = match &version {
978 Some(version) => buffer.rope_for_version(version),
979 None => buffer.as_rope().clone(),
980 };
981 let version = version.unwrap_or(buffer.version());
982 let buffer_id = buffer.remote_id();
983
984 let rx = repo.update(cx, |repo, _| {
985 repo.send_job(None, move |state, _| async move {
986 match state {
987 RepositoryState::Local { backend, .. } => backend
988 .blame(repo_path.clone(), content)
989 .await
990 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
991 .map(Some),
992 RepositoryState::Remote { project_id, client } => {
993 let response = client
994 .request(proto::BlameBuffer {
995 project_id: project_id.to_proto(),
996 buffer_id: buffer_id.into(),
997 version: serialize_version(&version),
998 })
999 .await?;
1000 Ok(deserialize_blame_buffer_response(response))
1001 }
1002 }
1003 })
1004 });
1005
1006 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1007 }
1008
1009 pub fn get_permalink_to_line(
1010 &self,
1011 buffer: &Entity<Buffer>,
1012 selection: Range<u32>,
1013 cx: &mut App,
1014 ) -> Task<Result<url::Url>> {
1015 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1016 return Task::ready(Err(anyhow!("buffer has no file")));
1017 };
1018
1019 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1020 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1021 cx,
1022 ) else {
1023 // If we're not in a Git repo, check whether this is a Rust source
1024 // file in the Cargo registry (presumably opened with go-to-definition
1025 // from a normal Rust file). If so, we can put together a permalink
1026 // using crate metadata.
1027 if buffer
1028 .read(cx)
1029 .language()
1030 .is_none_or(|lang| lang.name() != "Rust".into())
1031 {
1032 return Task::ready(Err(anyhow!("no permalink available")));
1033 }
1034 let file_path = file.worktree.read(cx).absolutize(&file.path);
1035 return cx.spawn(async move |cx| {
1036 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1037 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1038 .context("no permalink available")
1039 });
1040 };
1041
1042 let buffer_id = buffer.read(cx).remote_id();
1043 let branch = repo.read(cx).branch.clone();
1044 let remote = branch
1045 .as_ref()
1046 .and_then(|b| b.upstream.as_ref())
1047 .and_then(|b| b.remote_name())
1048 .unwrap_or("origin")
1049 .to_string();
1050
1051 let rx = repo.update(cx, |repo, _| {
1052 repo.send_job(None, move |state, cx| async move {
1053 match state {
1054 RepositoryState::Local { backend, .. } => {
1055 let origin_url = backend
1056 .remote_url(&remote)
1057 .with_context(|| format!("remote \"{remote}\" not found"))?;
1058
1059 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1060
1061 let provider_registry =
1062 cx.update(GitHostingProviderRegistry::default_global)?;
1063
1064 let (provider, remote) =
1065 parse_git_remote_url(provider_registry, &origin_url)
1066 .context("parsing Git remote URL")?;
1067
1068 Ok(provider.build_permalink(
1069 remote,
1070 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1071 ))
1072 }
1073 RepositoryState::Remote { project_id, client } => {
1074 let response = client
1075 .request(proto::GetPermalinkToLine {
1076 project_id: project_id.to_proto(),
1077 buffer_id: buffer_id.into(),
1078 selection: Some(proto::Range {
1079 start: selection.start as u64,
1080 end: selection.end as u64,
1081 }),
1082 })
1083 .await?;
1084
1085 url::Url::parse(&response.permalink).context("failed to parse permalink")
1086 }
1087 }
1088 })
1089 });
1090 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1091 }
1092
1093 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1094 match &self.state {
1095 GitStoreState::Local {
1096 downstream: downstream_client,
1097 ..
1098 } => downstream_client
1099 .as_ref()
1100 .map(|state| (state.client.clone(), state.project_id)),
1101 GitStoreState::Remote {
1102 downstream: downstream_client,
1103 ..
1104 } => downstream_client.clone(),
1105 }
1106 }
1107
1108 fn upstream_client(&self) -> Option<AnyProtoClient> {
1109 match &self.state {
1110 GitStoreState::Local { .. } => None,
1111 GitStoreState::Remote {
1112 upstream_client, ..
1113 } => Some(upstream_client.clone()),
1114 }
1115 }
1116
1117 fn on_worktree_store_event(
1118 &mut self,
1119 worktree_store: Entity<WorktreeStore>,
1120 event: &WorktreeStoreEvent,
1121 cx: &mut Context<Self>,
1122 ) {
1123 let GitStoreState::Local {
1124 project_environment,
1125 downstream,
1126 next_repository_id,
1127 fs,
1128 } = &self.state
1129 else {
1130 return;
1131 };
1132
1133 match event {
1134 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1135 if let Some(worktree) = self
1136 .worktree_store
1137 .read(cx)
1138 .worktree_for_id(*worktree_id, cx)
1139 {
1140 let paths_by_git_repo =
1141 self.process_updated_entries(&worktree, updated_entries, cx);
1142 let downstream = downstream
1143 .as_ref()
1144 .map(|downstream| downstream.updates_tx.clone());
1145 cx.spawn(async move |_, cx| {
1146 let paths_by_git_repo = paths_by_git_repo.await;
1147 for (repo, paths) in paths_by_git_repo {
1148 repo.update(cx, |repo, cx| {
1149 repo.paths_changed(paths, downstream.clone(), cx);
1150 })
1151 .ok();
1152 }
1153 })
1154 .detach();
1155 }
1156 }
1157 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1158 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1159 else {
1160 return;
1161 };
1162 if !worktree.read(cx).is_visible() {
1163 log::debug!(
1164 "not adding repositories for local worktree {:?} because it's not visible",
1165 worktree.read(cx).abs_path()
1166 );
1167 return;
1168 }
1169 self.update_repositories_from_worktree(
1170 *worktree_id,
1171 project_environment.clone(),
1172 next_repository_id.clone(),
1173 downstream
1174 .as_ref()
1175 .map(|downstream| downstream.updates_tx.clone()),
1176 changed_repos.clone(),
1177 fs.clone(),
1178 cx,
1179 );
1180 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1181 }
1182 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1183 let repos_without_worktree: Vec<RepositoryId> = self
1184 .worktree_ids
1185 .iter_mut()
1186 .filter_map(|(repo_id, worktree_ids)| {
1187 worktree_ids.remove(worktree_id);
1188 if worktree_ids.is_empty() {
1189 Some(*repo_id)
1190 } else {
1191 None
1192 }
1193 })
1194 .collect();
1195 let is_active_repo_removed = repos_without_worktree
1196 .iter()
1197 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1198
1199 for repo_id in repos_without_worktree {
1200 self.repositories.remove(&repo_id);
1201 self.worktree_ids.remove(&repo_id);
1202 if let Some(updates_tx) =
1203 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1204 {
1205 updates_tx
1206 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1207 .ok();
1208 }
1209 }
1210
1211 if is_active_repo_removed {
1212 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1213 self.active_repo_id = Some(repo_id);
1214 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1215 } else {
1216 self.active_repo_id = None;
1217 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1218 }
1219 }
1220 }
1221 _ => {}
1222 }
1223 }
1224 fn on_repository_event(
1225 &mut self,
1226 repo: Entity<Repository>,
1227 event: &RepositoryEvent,
1228 cx: &mut Context<Self>,
1229 ) {
1230 let id = repo.read(cx).id;
1231 let repo_snapshot = repo.read(cx).snapshot.clone();
1232 for (buffer_id, diff) in self.diffs.iter() {
1233 if let Some((buffer_repo, repo_path)) =
1234 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1235 && buffer_repo == repo
1236 {
1237 diff.update(cx, |diff, cx| {
1238 if let Some(conflict_set) = &diff.conflict_set {
1239 let conflict_status_changed =
1240 conflict_set.update(cx, |conflict_set, cx| {
1241 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1242 conflict_set.set_has_conflict(has_conflict, cx)
1243 })?;
1244 if conflict_status_changed {
1245 let buffer_store = self.buffer_store.read(cx);
1246 if let Some(buffer) = buffer_store.get(*buffer_id) {
1247 let _ = diff
1248 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1249 }
1250 }
1251 }
1252 anyhow::Ok(())
1253 })
1254 .ok();
1255 }
1256 }
1257 cx.emit(GitStoreEvent::RepositoryUpdated(
1258 id,
1259 event.clone(),
1260 self.active_repo_id == Some(id),
1261 ))
1262 }
1263
1264 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1265 cx.emit(GitStoreEvent::JobsUpdated)
1266 }
1267
1268 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1269 fn update_repositories_from_worktree(
1270 &mut self,
1271 worktree_id: WorktreeId,
1272 project_environment: Entity<ProjectEnvironment>,
1273 next_repository_id: Arc<AtomicU64>,
1274 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1275 updated_git_repositories: UpdatedGitRepositoriesSet,
1276 fs: Arc<dyn Fs>,
1277 cx: &mut Context<Self>,
1278 ) {
1279 let mut removed_ids = Vec::new();
1280 for update in updated_git_repositories.iter() {
1281 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1282 let existing_work_directory_abs_path =
1283 repo.read(cx).work_directory_abs_path.clone();
1284 Some(&existing_work_directory_abs_path)
1285 == update.old_work_directory_abs_path.as_ref()
1286 || Some(&existing_work_directory_abs_path)
1287 == update.new_work_directory_abs_path.as_ref()
1288 }) {
1289 let repo_id = *id;
1290 if let Some(new_work_directory_abs_path) =
1291 update.new_work_directory_abs_path.clone()
1292 {
1293 self.worktree_ids
1294 .entry(repo_id)
1295 .or_insert_with(HashSet::new)
1296 .insert(worktree_id);
1297 existing.update(cx, |existing, cx| {
1298 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1299 existing.schedule_scan(updates_tx.clone(), cx);
1300 });
1301 } else {
1302 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1303 worktree_ids.remove(&worktree_id);
1304 if worktree_ids.is_empty() {
1305 removed_ids.push(repo_id);
1306 }
1307 }
1308 }
1309 } else if let UpdatedGitRepository {
1310 new_work_directory_abs_path: Some(work_directory_abs_path),
1311 dot_git_abs_path: Some(dot_git_abs_path),
1312 repository_dir_abs_path: Some(repository_dir_abs_path),
1313 common_dir_abs_path: Some(common_dir_abs_path),
1314 ..
1315 } = update
1316 {
1317 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1318 let git_store = cx.weak_entity();
1319 let repo = cx.new(|cx| {
1320 let mut repo = Repository::local(
1321 id,
1322 work_directory_abs_path.clone(),
1323 dot_git_abs_path.clone(),
1324 repository_dir_abs_path.clone(),
1325 common_dir_abs_path.clone(),
1326 project_environment.downgrade(),
1327 fs.clone(),
1328 git_store,
1329 cx,
1330 );
1331 if let Some(updates_tx) = updates_tx.as_ref() {
1332 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1333 updates_tx
1334 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1335 .ok();
1336 }
1337 repo.schedule_scan(updates_tx.clone(), cx);
1338 repo
1339 });
1340 self._subscriptions
1341 .push(cx.subscribe(&repo, Self::on_repository_event));
1342 self._subscriptions
1343 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1344 self.repositories.insert(id, repo);
1345 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1346 cx.emit(GitStoreEvent::RepositoryAdded);
1347 self.active_repo_id.get_or_insert_with(|| {
1348 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1349 id
1350 });
1351 }
1352 }
1353
1354 for id in removed_ids {
1355 if self.active_repo_id == Some(id) {
1356 self.active_repo_id = None;
1357 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1358 }
1359 self.repositories.remove(&id);
1360 if let Some(updates_tx) = updates_tx.as_ref() {
1361 updates_tx
1362 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1363 .ok();
1364 }
1365 }
1366 }
1367
1368 fn on_buffer_store_event(
1369 &mut self,
1370 _: Entity<BufferStore>,
1371 event: &BufferStoreEvent,
1372 cx: &mut Context<Self>,
1373 ) {
1374 match event {
1375 BufferStoreEvent::BufferAdded(buffer) => {
1376 cx.subscribe(buffer, |this, buffer, event, cx| {
1377 if let BufferEvent::LanguageChanged = event {
1378 let buffer_id = buffer.read(cx).remote_id();
1379 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1380 diff_state.update(cx, |diff_state, cx| {
1381 diff_state.buffer_language_changed(buffer, cx);
1382 });
1383 }
1384 }
1385 })
1386 .detach();
1387 }
1388 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1389 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1390 diffs.remove(buffer_id);
1391 }
1392 }
1393 BufferStoreEvent::BufferDropped(buffer_id) => {
1394 self.diffs.remove(buffer_id);
1395 for diffs in self.shared_diffs.values_mut() {
1396 diffs.remove(buffer_id);
1397 }
1398 }
1399 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1400 // Whenever a buffer's file path changes, it's possible that the
1401 // new path is actually a path that is being tracked by a git
1402 // repository. In that case, we'll want to update the buffer's
1403 // `BufferDiffState`, in case it already has one.
1404 let buffer_id = buffer.read(cx).remote_id();
1405 let diff_state = self.diffs.get(&buffer_id);
1406 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1407
1408 if let Some(diff_state) = diff_state
1409 && let Some((repo, repo_path)) = repo
1410 {
1411 let buffer = buffer.clone();
1412 let diff_state = diff_state.clone();
1413
1414 cx.spawn(async move |_git_store, cx| {
1415 async {
1416 let diff_bases_change = repo
1417 .update(cx, |repo, cx| {
1418 repo.load_committed_text(buffer_id, repo_path, cx)
1419 })?
1420 .await?;
1421
1422 diff_state.update(cx, |diff_state, cx| {
1423 let buffer_snapshot = buffer.read(cx).text_snapshot();
1424 diff_state.diff_bases_changed(
1425 buffer_snapshot,
1426 Some(diff_bases_change),
1427 cx,
1428 );
1429 })
1430 }
1431 .await
1432 .log_err();
1433 })
1434 .detach();
1435 }
1436 }
1437 _ => {}
1438 }
1439 }
1440
1441 pub fn recalculate_buffer_diffs(
1442 &mut self,
1443 buffers: Vec<Entity<Buffer>>,
1444 cx: &mut Context<Self>,
1445 ) -> impl Future<Output = ()> + use<> {
1446 let mut futures = Vec::new();
1447 for buffer in buffers {
1448 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1449 let buffer = buffer.read(cx).text_snapshot();
1450 diff_state.update(cx, |diff_state, cx| {
1451 diff_state.recalculate_diffs(buffer.clone(), cx);
1452 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1453 });
1454 futures.push(diff_state.update(cx, |diff_state, cx| {
1455 diff_state
1456 .reparse_conflict_markers(buffer, cx)
1457 .map(|_| {})
1458 .boxed()
1459 }));
1460 }
1461 }
1462 async move {
1463 futures::future::join_all(futures).await;
1464 }
1465 }
1466
1467 fn on_buffer_diff_event(
1468 &mut self,
1469 diff: Entity<buffer_diff::BufferDiff>,
1470 event: &BufferDiffEvent,
1471 cx: &mut Context<Self>,
1472 ) {
1473 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1474 let buffer_id = diff.read(cx).buffer_id;
1475 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1476 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1477 diff_state.hunk_staging_operation_count += 1;
1478 diff_state.hunk_staging_operation_count
1479 });
1480 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1481 let recv = repo.update(cx, |repo, cx| {
1482 log::debug!("hunks changed for {}", path.as_unix_str());
1483 repo.spawn_set_index_text_job(
1484 path,
1485 new_index_text.as_ref().map(|rope| rope.to_string()),
1486 Some(hunk_staging_operation_count),
1487 cx,
1488 )
1489 });
1490 let diff = diff.downgrade();
1491 cx.spawn(async move |this, cx| {
1492 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1493 diff.update(cx, |diff, cx| {
1494 diff.clear_pending_hunks(cx);
1495 })
1496 .ok();
1497 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1498 .ok();
1499 }
1500 })
1501 .detach();
1502 }
1503 }
1504 }
1505 }
1506
1507 fn local_worktree_git_repos_changed(
1508 &mut self,
1509 worktree: Entity<Worktree>,
1510 changed_repos: &UpdatedGitRepositoriesSet,
1511 cx: &mut Context<Self>,
1512 ) {
1513 log::debug!("local worktree repos changed");
1514 debug_assert!(worktree.read(cx).is_local());
1515
1516 for repository in self.repositories.values() {
1517 repository.update(cx, |repository, cx| {
1518 let repo_abs_path = &repository.work_directory_abs_path;
1519 if changed_repos.iter().any(|update| {
1520 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1521 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1522 }) {
1523 repository.reload_buffer_diff_bases(cx);
1524 }
1525 });
1526 }
1527 }
1528
1529 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1530 &self.repositories
1531 }
1532
1533 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1534 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1535 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1536 Some(status.status)
1537 }
1538
1539 pub fn repository_and_path_for_buffer_id(
1540 &self,
1541 buffer_id: BufferId,
1542 cx: &App,
1543 ) -> Option<(Entity<Repository>, RepoPath)> {
1544 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1545 let project_path = buffer.read(cx).project_path(cx)?;
1546 self.repository_and_path_for_project_path(&project_path, cx)
1547 }
1548
1549 pub fn repository_and_path_for_project_path(
1550 &self,
1551 path: &ProjectPath,
1552 cx: &App,
1553 ) -> Option<(Entity<Repository>, RepoPath)> {
1554 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1555 self.repositories
1556 .values()
1557 .filter_map(|repo| {
1558 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1559 Some((repo.clone(), repo_path))
1560 })
1561 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1562 }
1563
1564 pub fn git_init(
1565 &self,
1566 path: Arc<Path>,
1567 fallback_branch_name: String,
1568 cx: &App,
1569 ) -> Task<Result<()>> {
1570 match &self.state {
1571 GitStoreState::Local { fs, .. } => {
1572 let fs = fs.clone();
1573 cx.background_executor()
1574 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1575 }
1576 GitStoreState::Remote {
1577 upstream_client,
1578 upstream_project_id: project_id,
1579 ..
1580 } => {
1581 let client = upstream_client.clone();
1582 let project_id = *project_id;
1583 cx.background_executor().spawn(async move {
1584 client
1585 .request(proto::GitInit {
1586 project_id: project_id,
1587 abs_path: path.to_string_lossy().into_owned(),
1588 fallback_branch_name,
1589 })
1590 .await?;
1591 Ok(())
1592 })
1593 }
1594 }
1595 }
1596
1597 pub fn git_clone(
1598 &self,
1599 repo: String,
1600 path: impl Into<Arc<std::path::Path>>,
1601 cx: &App,
1602 ) -> Task<Result<()>> {
1603 let path = path.into();
1604 match &self.state {
1605 GitStoreState::Local { fs, .. } => {
1606 let fs = fs.clone();
1607 cx.background_executor()
1608 .spawn(async move { fs.git_clone(&repo, &path).await })
1609 }
1610 GitStoreState::Remote {
1611 upstream_client,
1612 upstream_project_id,
1613 ..
1614 } => {
1615 if upstream_client.is_via_collab() {
1616 return Task::ready(Err(anyhow!(
1617 "Git Clone isn't supported for project guests"
1618 )));
1619 }
1620 let request = upstream_client.request(proto::GitClone {
1621 project_id: *upstream_project_id,
1622 abs_path: path.to_string_lossy().into_owned(),
1623 remote_repo: repo,
1624 });
1625
1626 cx.background_spawn(async move {
1627 let result = request.await?;
1628
1629 match result.success {
1630 true => Ok(()),
1631 false => Err(anyhow!("Git Clone failed")),
1632 }
1633 })
1634 }
1635 }
1636 }
1637
1638 async fn handle_update_repository(
1639 this: Entity<Self>,
1640 envelope: TypedEnvelope<proto::UpdateRepository>,
1641 mut cx: AsyncApp,
1642 ) -> Result<()> {
1643 this.update(&mut cx, |this, cx| {
1644 let path_style = this.worktree_store.read(cx).path_style();
1645 let mut update = envelope.payload;
1646
1647 let id = RepositoryId::from_proto(update.id);
1648 let client = this.upstream_client().context("no upstream client")?;
1649
1650 let mut repo_subscription = None;
1651 let repo = this.repositories.entry(id).or_insert_with(|| {
1652 let git_store = cx.weak_entity();
1653 let repo = cx.new(|cx| {
1654 Repository::remote(
1655 id,
1656 Path::new(&update.abs_path).into(),
1657 path_style,
1658 ProjectId(update.project_id),
1659 client,
1660 git_store,
1661 cx,
1662 )
1663 });
1664 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1665 cx.emit(GitStoreEvent::RepositoryAdded);
1666 repo
1667 });
1668 this._subscriptions.extend(repo_subscription);
1669
1670 repo.update(cx, {
1671 let update = update.clone();
1672 |repo, cx| repo.apply_remote_update(update, cx)
1673 })?;
1674
1675 this.active_repo_id.get_or_insert_with(|| {
1676 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1677 id
1678 });
1679
1680 if let Some((client, project_id)) = this.downstream_client() {
1681 update.project_id = project_id.to_proto();
1682 client.send(update).log_err();
1683 }
1684 Ok(())
1685 })?
1686 }
1687
1688 async fn handle_remove_repository(
1689 this: Entity<Self>,
1690 envelope: TypedEnvelope<proto::RemoveRepository>,
1691 mut cx: AsyncApp,
1692 ) -> Result<()> {
1693 this.update(&mut cx, |this, cx| {
1694 let mut update = envelope.payload;
1695 let id = RepositoryId::from_proto(update.id);
1696 this.repositories.remove(&id);
1697 if let Some((client, project_id)) = this.downstream_client() {
1698 update.project_id = project_id.to_proto();
1699 client.send(update).log_err();
1700 }
1701 if this.active_repo_id == Some(id) {
1702 this.active_repo_id = None;
1703 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1704 }
1705 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1706 })
1707 }
1708
1709 async fn handle_git_init(
1710 this: Entity<Self>,
1711 envelope: TypedEnvelope<proto::GitInit>,
1712 cx: AsyncApp,
1713 ) -> Result<proto::Ack> {
1714 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1715 let name = envelope.payload.fallback_branch_name;
1716 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1717 .await?;
1718
1719 Ok(proto::Ack {})
1720 }
1721
1722 async fn handle_git_clone(
1723 this: Entity<Self>,
1724 envelope: TypedEnvelope<proto::GitClone>,
1725 cx: AsyncApp,
1726 ) -> Result<proto::GitCloneResponse> {
1727 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1728 let repo_name = envelope.payload.remote_repo;
1729 let result = cx
1730 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1731 .await;
1732
1733 Ok(proto::GitCloneResponse {
1734 success: result.is_ok(),
1735 })
1736 }
1737
1738 async fn handle_fetch(
1739 this: Entity<Self>,
1740 envelope: TypedEnvelope<proto::Fetch>,
1741 mut cx: AsyncApp,
1742 ) -> Result<proto::RemoteMessageResponse> {
1743 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1744 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1745 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1746 let askpass_id = envelope.payload.askpass_id;
1747
1748 let askpass = make_remote_delegate(
1749 this,
1750 envelope.payload.project_id,
1751 repository_id,
1752 askpass_id,
1753 &mut cx,
1754 );
1755
1756 let remote_output = repository_handle
1757 .update(&mut cx, |repository_handle, cx| {
1758 repository_handle.fetch(fetch_options, askpass, cx)
1759 })?
1760 .await??;
1761
1762 Ok(proto::RemoteMessageResponse {
1763 stdout: remote_output.stdout,
1764 stderr: remote_output.stderr,
1765 })
1766 }
1767
1768 async fn handle_push(
1769 this: Entity<Self>,
1770 envelope: TypedEnvelope<proto::Push>,
1771 mut cx: AsyncApp,
1772 ) -> Result<proto::RemoteMessageResponse> {
1773 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1774 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1775
1776 let askpass_id = envelope.payload.askpass_id;
1777 let askpass = make_remote_delegate(
1778 this,
1779 envelope.payload.project_id,
1780 repository_id,
1781 askpass_id,
1782 &mut cx,
1783 );
1784
1785 let options = envelope
1786 .payload
1787 .options
1788 .as_ref()
1789 .map(|_| match envelope.payload.options() {
1790 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1791 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1792 });
1793
1794 let branch_name = envelope.payload.branch_name.into();
1795 let remote_name = envelope.payload.remote_name.into();
1796
1797 let remote_output = repository_handle
1798 .update(&mut cx, |repository_handle, cx| {
1799 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1800 })?
1801 .await??;
1802 Ok(proto::RemoteMessageResponse {
1803 stdout: remote_output.stdout,
1804 stderr: remote_output.stderr,
1805 })
1806 }
1807
1808 async fn handle_pull(
1809 this: Entity<Self>,
1810 envelope: TypedEnvelope<proto::Pull>,
1811 mut cx: AsyncApp,
1812 ) -> Result<proto::RemoteMessageResponse> {
1813 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1814 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1815 let askpass_id = envelope.payload.askpass_id;
1816 let askpass = make_remote_delegate(
1817 this,
1818 envelope.payload.project_id,
1819 repository_id,
1820 askpass_id,
1821 &mut cx,
1822 );
1823
1824 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1825 let remote_name = envelope.payload.remote_name.into();
1826 let rebase = envelope.payload.rebase;
1827
1828 let remote_message = repository_handle
1829 .update(&mut cx, |repository_handle, cx| {
1830 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1831 })?
1832 .await??;
1833
1834 Ok(proto::RemoteMessageResponse {
1835 stdout: remote_message.stdout,
1836 stderr: remote_message.stderr,
1837 })
1838 }
1839
1840 async fn handle_stage(
1841 this: Entity<Self>,
1842 envelope: TypedEnvelope<proto::Stage>,
1843 mut cx: AsyncApp,
1844 ) -> Result<proto::Ack> {
1845 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1846 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1847
1848 let entries = envelope
1849 .payload
1850 .paths
1851 .into_iter()
1852 .map(|path| RepoPath::new(&path))
1853 .collect::<Result<Vec<_>>>()?;
1854
1855 repository_handle
1856 .update(&mut cx, |repository_handle, cx| {
1857 repository_handle.stage_entries(entries, cx)
1858 })?
1859 .await?;
1860 Ok(proto::Ack {})
1861 }
1862
1863 async fn handle_unstage(
1864 this: Entity<Self>,
1865 envelope: TypedEnvelope<proto::Unstage>,
1866 mut cx: AsyncApp,
1867 ) -> Result<proto::Ack> {
1868 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1869 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1870
1871 let entries = envelope
1872 .payload
1873 .paths
1874 .into_iter()
1875 .map(|path| RepoPath::new(&path))
1876 .collect::<Result<Vec<_>>>()?;
1877
1878 repository_handle
1879 .update(&mut cx, |repository_handle, cx| {
1880 repository_handle.unstage_entries(entries, cx)
1881 })?
1882 .await?;
1883
1884 Ok(proto::Ack {})
1885 }
1886
1887 async fn handle_stash(
1888 this: Entity<Self>,
1889 envelope: TypedEnvelope<proto::Stash>,
1890 mut cx: AsyncApp,
1891 ) -> Result<proto::Ack> {
1892 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1893 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1894
1895 let entries = envelope
1896 .payload
1897 .paths
1898 .into_iter()
1899 .map(|path| RepoPath::new(&path))
1900 .collect::<Result<Vec<_>>>()?;
1901
1902 repository_handle
1903 .update(&mut cx, |repository_handle, cx| {
1904 repository_handle.stash_entries(entries, cx)
1905 })?
1906 .await?;
1907
1908 Ok(proto::Ack {})
1909 }
1910
1911 async fn handle_stash_pop(
1912 this: Entity<Self>,
1913 envelope: TypedEnvelope<proto::StashPop>,
1914 mut cx: AsyncApp,
1915 ) -> Result<proto::Ack> {
1916 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1917 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1918 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1919
1920 repository_handle
1921 .update(&mut cx, |repository_handle, cx| {
1922 repository_handle.stash_pop(stash_index, cx)
1923 })?
1924 .await?;
1925
1926 Ok(proto::Ack {})
1927 }
1928
1929 async fn handle_stash_apply(
1930 this: Entity<Self>,
1931 envelope: TypedEnvelope<proto::StashApply>,
1932 mut cx: AsyncApp,
1933 ) -> Result<proto::Ack> {
1934 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1935 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1936 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1937
1938 repository_handle
1939 .update(&mut cx, |repository_handle, cx| {
1940 repository_handle.stash_apply(stash_index, cx)
1941 })?
1942 .await?;
1943
1944 Ok(proto::Ack {})
1945 }
1946
1947 async fn handle_stash_drop(
1948 this: Entity<Self>,
1949 envelope: TypedEnvelope<proto::StashDrop>,
1950 mut cx: AsyncApp,
1951 ) -> Result<proto::Ack> {
1952 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1953 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1954 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1955
1956 repository_handle
1957 .update(&mut cx, |repository_handle, cx| {
1958 repository_handle.stash_drop(stash_index, cx)
1959 })?
1960 .await??;
1961
1962 Ok(proto::Ack {})
1963 }
1964
1965 async fn handle_set_index_text(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::SetIndexText>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::Ack> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1973
1974 repository_handle
1975 .update(&mut cx, |repository_handle, cx| {
1976 repository_handle.spawn_set_index_text_job(
1977 repo_path,
1978 envelope.payload.text,
1979 None,
1980 cx,
1981 )
1982 })?
1983 .await??;
1984 Ok(proto::Ack {})
1985 }
1986
1987 async fn handle_run_hook(
1988 this: Entity<Self>,
1989 envelope: TypedEnvelope<proto::RunGitHook>,
1990 mut cx: AsyncApp,
1991 ) -> Result<proto::Ack> {
1992 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1993 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1994 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
1995 repository_handle
1996 .update(&mut cx, |repository_handle, cx| {
1997 repository_handle.run_hook(hook, cx)
1998 })?
1999 .await??;
2000 Ok(proto::Ack {})
2001 }
2002
2003 async fn handle_commit(
2004 this: Entity<Self>,
2005 envelope: TypedEnvelope<proto::Commit>,
2006 mut cx: AsyncApp,
2007 ) -> Result<proto::Ack> {
2008 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2009 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2010 let askpass_id = envelope.payload.askpass_id;
2011
2012 let askpass = make_remote_delegate(
2013 this,
2014 envelope.payload.project_id,
2015 repository_id,
2016 askpass_id,
2017 &mut cx,
2018 );
2019
2020 let message = SharedString::from(envelope.payload.message);
2021 let name = envelope.payload.name.map(SharedString::from);
2022 let email = envelope.payload.email.map(SharedString::from);
2023 let options = envelope.payload.options.unwrap_or_default();
2024
2025 repository_handle
2026 .update(&mut cx, |repository_handle, cx| {
2027 repository_handle.commit(
2028 message,
2029 name.zip(email),
2030 CommitOptions {
2031 amend: options.amend,
2032 signoff: options.signoff,
2033 },
2034 askpass,
2035 cx,
2036 )
2037 })?
2038 .await??;
2039 Ok(proto::Ack {})
2040 }
2041
2042 async fn handle_get_remotes(
2043 this: Entity<Self>,
2044 envelope: TypedEnvelope<proto::GetRemotes>,
2045 mut cx: AsyncApp,
2046 ) -> Result<proto::GetRemotesResponse> {
2047 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2048 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2049
2050 let branch_name = envelope.payload.branch_name;
2051
2052 let remotes = repository_handle
2053 .update(&mut cx, |repository_handle, _| {
2054 repository_handle.get_remotes(branch_name)
2055 })?
2056 .await??;
2057
2058 Ok(proto::GetRemotesResponse {
2059 remotes: remotes
2060 .into_iter()
2061 .map(|remotes| proto::get_remotes_response::Remote {
2062 name: remotes.name.to_string(),
2063 })
2064 .collect::<Vec<_>>(),
2065 })
2066 }
2067
2068 async fn handle_get_worktrees(
2069 this: Entity<Self>,
2070 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2071 mut cx: AsyncApp,
2072 ) -> Result<proto::GitWorktreesResponse> {
2073 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2074 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2075
2076 let worktrees = repository_handle
2077 .update(&mut cx, |repository_handle, _| {
2078 repository_handle.worktrees()
2079 })?
2080 .await??;
2081
2082 Ok(proto::GitWorktreesResponse {
2083 worktrees: worktrees
2084 .into_iter()
2085 .map(|worktree| worktree_to_proto(&worktree))
2086 .collect::<Vec<_>>(),
2087 })
2088 }
2089
2090 async fn handle_create_worktree(
2091 this: Entity<Self>,
2092 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::Ack> {
2095 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2096 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2097 let directory = PathBuf::from(envelope.payload.directory);
2098 let name = envelope.payload.name;
2099 let commit = envelope.payload.commit;
2100
2101 repository_handle
2102 .update(&mut cx, |repository_handle, _| {
2103 repository_handle.create_worktree(name, directory, commit)
2104 })?
2105 .await??;
2106
2107 Ok(proto::Ack {})
2108 }
2109
2110 async fn handle_get_branches(
2111 this: Entity<Self>,
2112 envelope: TypedEnvelope<proto::GitGetBranches>,
2113 mut cx: AsyncApp,
2114 ) -> Result<proto::GitBranchesResponse> {
2115 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2116 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2117
2118 let branches = repository_handle
2119 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2120 .await??;
2121
2122 Ok(proto::GitBranchesResponse {
2123 branches: branches
2124 .into_iter()
2125 .map(|branch| branch_to_proto(&branch))
2126 .collect::<Vec<_>>(),
2127 })
2128 }
2129 async fn handle_get_default_branch(
2130 this: Entity<Self>,
2131 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2132 mut cx: AsyncApp,
2133 ) -> Result<proto::GetDefaultBranchResponse> {
2134 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2135 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2136
2137 let branch = repository_handle
2138 .update(&mut cx, |repository_handle, _| {
2139 repository_handle.default_branch()
2140 })?
2141 .await??
2142 .map(Into::into);
2143
2144 Ok(proto::GetDefaultBranchResponse { branch })
2145 }
2146 async fn handle_create_branch(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitCreateBranch>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::Ack> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153 let branch_name = envelope.payload.branch_name;
2154
2155 repository_handle
2156 .update(&mut cx, |repository_handle, _| {
2157 repository_handle.create_branch(branch_name, None)
2158 })?
2159 .await??;
2160
2161 Ok(proto::Ack {})
2162 }
2163
2164 async fn handle_change_branch(
2165 this: Entity<Self>,
2166 envelope: TypedEnvelope<proto::GitChangeBranch>,
2167 mut cx: AsyncApp,
2168 ) -> Result<proto::Ack> {
2169 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2170 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2171 let branch_name = envelope.payload.branch_name;
2172
2173 repository_handle
2174 .update(&mut cx, |repository_handle, _| {
2175 repository_handle.change_branch(branch_name)
2176 })?
2177 .await??;
2178
2179 Ok(proto::Ack {})
2180 }
2181
2182 async fn handle_rename_branch(
2183 this: Entity<Self>,
2184 envelope: TypedEnvelope<proto::GitRenameBranch>,
2185 mut cx: AsyncApp,
2186 ) -> Result<proto::Ack> {
2187 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2188 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2189 let branch = envelope.payload.branch;
2190 let new_name = envelope.payload.new_name;
2191
2192 repository_handle
2193 .update(&mut cx, |repository_handle, _| {
2194 repository_handle.rename_branch(branch, new_name)
2195 })?
2196 .await??;
2197
2198 Ok(proto::Ack {})
2199 }
2200
2201 async fn handle_show(
2202 this: Entity<Self>,
2203 envelope: TypedEnvelope<proto::GitShow>,
2204 mut cx: AsyncApp,
2205 ) -> Result<proto::GitCommitDetails> {
2206 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2207 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2208
2209 let commit = repository_handle
2210 .update(&mut cx, |repository_handle, _| {
2211 repository_handle.show(envelope.payload.commit)
2212 })?
2213 .await??;
2214 Ok(proto::GitCommitDetails {
2215 sha: commit.sha.into(),
2216 message: commit.message.into(),
2217 commit_timestamp: commit.commit_timestamp,
2218 author_email: commit.author_email.into(),
2219 author_name: commit.author_name.into(),
2220 })
2221 }
2222
2223 async fn handle_load_commit_diff(
2224 this: Entity<Self>,
2225 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2226 mut cx: AsyncApp,
2227 ) -> Result<proto::LoadCommitDiffResponse> {
2228 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2229 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2230
2231 let commit_diff = repository_handle
2232 .update(&mut cx, |repository_handle, _| {
2233 repository_handle.load_commit_diff(envelope.payload.commit)
2234 })?
2235 .await??;
2236 Ok(proto::LoadCommitDiffResponse {
2237 files: commit_diff
2238 .files
2239 .into_iter()
2240 .map(|file| proto::CommitFile {
2241 path: file.path.to_proto(),
2242 old_text: file.old_text,
2243 new_text: file.new_text,
2244 })
2245 .collect(),
2246 })
2247 }
2248
2249 async fn handle_reset(
2250 this: Entity<Self>,
2251 envelope: TypedEnvelope<proto::GitReset>,
2252 mut cx: AsyncApp,
2253 ) -> Result<proto::Ack> {
2254 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2255 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2256
2257 let mode = match envelope.payload.mode() {
2258 git_reset::ResetMode::Soft => ResetMode::Soft,
2259 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2260 };
2261
2262 repository_handle
2263 .update(&mut cx, |repository_handle, cx| {
2264 repository_handle.reset(envelope.payload.commit, mode, cx)
2265 })?
2266 .await??;
2267 Ok(proto::Ack {})
2268 }
2269
2270 async fn handle_checkout_files(
2271 this: Entity<Self>,
2272 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2273 mut cx: AsyncApp,
2274 ) -> Result<proto::Ack> {
2275 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2276 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2277 let paths = envelope
2278 .payload
2279 .paths
2280 .iter()
2281 .map(|s| RepoPath::from_proto(s))
2282 .collect::<Result<Vec<_>>>()?;
2283
2284 repository_handle
2285 .update(&mut cx, |repository_handle, cx| {
2286 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2287 })?
2288 .await?;
2289 Ok(proto::Ack {})
2290 }
2291
2292 async fn handle_open_commit_message_buffer(
2293 this: Entity<Self>,
2294 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2295 mut cx: AsyncApp,
2296 ) -> Result<proto::OpenBufferResponse> {
2297 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2298 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2299 let buffer = repository
2300 .update(&mut cx, |repository, cx| {
2301 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2302 })?
2303 .await?;
2304
2305 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2306 this.update(&mut cx, |this, cx| {
2307 this.buffer_store.update(cx, |buffer_store, cx| {
2308 buffer_store
2309 .create_buffer_for_peer(
2310 &buffer,
2311 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2312 cx,
2313 )
2314 .detach_and_log_err(cx);
2315 })
2316 })?;
2317
2318 Ok(proto::OpenBufferResponse {
2319 buffer_id: buffer_id.to_proto(),
2320 })
2321 }
2322
2323 async fn handle_askpass(
2324 this: Entity<Self>,
2325 envelope: TypedEnvelope<proto::AskPassRequest>,
2326 mut cx: AsyncApp,
2327 ) -> Result<proto::AskPassResponse> {
2328 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2329 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2330
2331 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2332 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2333 debug_panic!("no askpass found");
2334 anyhow::bail!("no askpass found");
2335 };
2336
2337 let response = askpass
2338 .ask_password(envelope.payload.prompt)
2339 .await
2340 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2341
2342 delegates
2343 .lock()
2344 .insert(envelope.payload.askpass_id, askpass);
2345
2346 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2347 Ok(proto::AskPassResponse {
2348 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2349 })
2350 }
2351
2352 async fn handle_check_for_pushed_commits(
2353 this: Entity<Self>,
2354 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2355 mut cx: AsyncApp,
2356 ) -> Result<proto::CheckForPushedCommitsResponse> {
2357 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2358 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2359
2360 let branches = repository_handle
2361 .update(&mut cx, |repository_handle, _| {
2362 repository_handle.check_for_pushed_commits()
2363 })?
2364 .await??;
2365 Ok(proto::CheckForPushedCommitsResponse {
2366 pushed_to: branches
2367 .into_iter()
2368 .map(|commit| commit.to_string())
2369 .collect(),
2370 })
2371 }
2372
2373 async fn handle_git_diff(
2374 this: Entity<Self>,
2375 envelope: TypedEnvelope<proto::GitDiff>,
2376 mut cx: AsyncApp,
2377 ) -> Result<proto::GitDiffResponse> {
2378 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2379 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2380 let diff_type = match envelope.payload.diff_type() {
2381 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2382 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2383 };
2384
2385 let mut diff = repository_handle
2386 .update(&mut cx, |repository_handle, cx| {
2387 repository_handle.diff(diff_type, cx)
2388 })?
2389 .await??;
2390 const ONE_MB: usize = 1_000_000;
2391 if diff.len() > ONE_MB {
2392 diff = diff.chars().take(ONE_MB).collect()
2393 }
2394
2395 Ok(proto::GitDiffResponse { diff })
2396 }
2397
2398 async fn handle_tree_diff(
2399 this: Entity<Self>,
2400 request: TypedEnvelope<proto::GetTreeDiff>,
2401 mut cx: AsyncApp,
2402 ) -> Result<proto::GetTreeDiffResponse> {
2403 let repository_id = RepositoryId(request.payload.repository_id);
2404 let diff_type = if request.payload.is_merge {
2405 DiffTreeType::MergeBase {
2406 base: request.payload.base.into(),
2407 head: request.payload.head.into(),
2408 }
2409 } else {
2410 DiffTreeType::Since {
2411 base: request.payload.base.into(),
2412 head: request.payload.head.into(),
2413 }
2414 };
2415
2416 let diff = this
2417 .update(&mut cx, |this, cx| {
2418 let repository = this.repositories().get(&repository_id)?;
2419 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2420 })?
2421 .context("missing repository")?
2422 .await??;
2423
2424 Ok(proto::GetTreeDiffResponse {
2425 entries: diff
2426 .entries
2427 .into_iter()
2428 .map(|(path, status)| proto::TreeDiffStatus {
2429 path: path.as_ref().to_proto(),
2430 status: match status {
2431 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2432 TreeDiffStatus::Modified { .. } => {
2433 proto::tree_diff_status::Status::Modified.into()
2434 }
2435 TreeDiffStatus::Deleted { .. } => {
2436 proto::tree_diff_status::Status::Deleted.into()
2437 }
2438 },
2439 oid: match status {
2440 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2441 Some(old.to_string())
2442 }
2443 TreeDiffStatus::Added => None,
2444 },
2445 })
2446 .collect(),
2447 })
2448 }
2449
2450 async fn handle_get_blob_content(
2451 this: Entity<Self>,
2452 request: TypedEnvelope<proto::GetBlobContent>,
2453 mut cx: AsyncApp,
2454 ) -> Result<proto::GetBlobContentResponse> {
2455 let oid = git::Oid::from_str(&request.payload.oid)?;
2456 let repository_id = RepositoryId(request.payload.repository_id);
2457 let content = this
2458 .update(&mut cx, |this, cx| {
2459 let repository = this.repositories().get(&repository_id)?;
2460 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2461 })?
2462 .context("missing repository")?
2463 .await?;
2464 Ok(proto::GetBlobContentResponse { content })
2465 }
2466
2467 async fn handle_open_unstaged_diff(
2468 this: Entity<Self>,
2469 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2470 mut cx: AsyncApp,
2471 ) -> Result<proto::OpenUnstagedDiffResponse> {
2472 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2473 let diff = this
2474 .update(&mut cx, |this, cx| {
2475 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2476 Some(this.open_unstaged_diff(buffer, cx))
2477 })?
2478 .context("missing buffer")?
2479 .await?;
2480 this.update(&mut cx, |this, _| {
2481 let shared_diffs = this
2482 .shared_diffs
2483 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2484 .or_default();
2485 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2486 })?;
2487 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2488 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2489 }
2490
2491 async fn handle_open_uncommitted_diff(
2492 this: Entity<Self>,
2493 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2494 mut cx: AsyncApp,
2495 ) -> Result<proto::OpenUncommittedDiffResponse> {
2496 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2497 let diff = this
2498 .update(&mut cx, |this, cx| {
2499 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2500 Some(this.open_uncommitted_diff(buffer, cx))
2501 })?
2502 .context("missing buffer")?
2503 .await?;
2504 this.update(&mut cx, |this, _| {
2505 let shared_diffs = this
2506 .shared_diffs
2507 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2508 .or_default();
2509 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2510 })?;
2511 diff.read_with(&cx, |diff, cx| {
2512 use proto::open_uncommitted_diff_response::Mode;
2513
2514 let unstaged_diff = diff.secondary_diff();
2515 let index_snapshot = unstaged_diff.and_then(|diff| {
2516 let diff = diff.read(cx);
2517 diff.base_text_exists().then(|| diff.base_text())
2518 });
2519
2520 let mode;
2521 let staged_text;
2522 let committed_text;
2523 if diff.base_text_exists() {
2524 let committed_snapshot = diff.base_text();
2525 committed_text = Some(committed_snapshot.text());
2526 if let Some(index_text) = index_snapshot {
2527 if index_text.remote_id() == committed_snapshot.remote_id() {
2528 mode = Mode::IndexMatchesHead;
2529 staged_text = None;
2530 } else {
2531 mode = Mode::IndexAndHead;
2532 staged_text = Some(index_text.text());
2533 }
2534 } else {
2535 mode = Mode::IndexAndHead;
2536 staged_text = None;
2537 }
2538 } else {
2539 mode = Mode::IndexAndHead;
2540 committed_text = None;
2541 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2542 }
2543
2544 proto::OpenUncommittedDiffResponse {
2545 committed_text,
2546 staged_text,
2547 mode: mode.into(),
2548 }
2549 })
2550 }
2551
2552 async fn handle_update_diff_bases(
2553 this: Entity<Self>,
2554 request: TypedEnvelope<proto::UpdateDiffBases>,
2555 mut cx: AsyncApp,
2556 ) -> Result<()> {
2557 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2558 this.update(&mut cx, |this, cx| {
2559 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2560 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2561 {
2562 let buffer = buffer.read(cx).text_snapshot();
2563 diff_state.update(cx, |diff_state, cx| {
2564 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2565 })
2566 }
2567 })
2568 }
2569
2570 async fn handle_blame_buffer(
2571 this: Entity<Self>,
2572 envelope: TypedEnvelope<proto::BlameBuffer>,
2573 mut cx: AsyncApp,
2574 ) -> Result<proto::BlameBufferResponse> {
2575 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2576 let version = deserialize_version(&envelope.payload.version);
2577 let buffer = this.read_with(&cx, |this, cx| {
2578 this.buffer_store.read(cx).get_existing(buffer_id)
2579 })??;
2580 buffer
2581 .update(&mut cx, |buffer, _| {
2582 buffer.wait_for_version(version.clone())
2583 })?
2584 .await?;
2585 let blame = this
2586 .update(&mut cx, |this, cx| {
2587 this.blame_buffer(&buffer, Some(version), cx)
2588 })?
2589 .await?;
2590 Ok(serialize_blame_buffer_response(blame))
2591 }
2592
2593 async fn handle_get_permalink_to_line(
2594 this: Entity<Self>,
2595 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2596 mut cx: AsyncApp,
2597 ) -> Result<proto::GetPermalinkToLineResponse> {
2598 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2599 // let version = deserialize_version(&envelope.payload.version);
2600 let selection = {
2601 let proto_selection = envelope
2602 .payload
2603 .selection
2604 .context("no selection to get permalink for defined")?;
2605 proto_selection.start as u32..proto_selection.end as u32
2606 };
2607 let buffer = this.read_with(&cx, |this, cx| {
2608 this.buffer_store.read(cx).get_existing(buffer_id)
2609 })??;
2610 let permalink = this
2611 .update(&mut cx, |this, cx| {
2612 this.get_permalink_to_line(&buffer, selection, cx)
2613 })?
2614 .await?;
2615 Ok(proto::GetPermalinkToLineResponse {
2616 permalink: permalink.to_string(),
2617 })
2618 }
2619
2620 fn repository_for_request(
2621 this: &Entity<Self>,
2622 id: RepositoryId,
2623 cx: &mut AsyncApp,
2624 ) -> Result<Entity<Repository>> {
2625 this.read_with(cx, |this, _| {
2626 this.repositories
2627 .get(&id)
2628 .context("missing repository handle")
2629 .cloned()
2630 })?
2631 }
2632
2633 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2634 self.repositories
2635 .iter()
2636 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2637 .collect()
2638 }
2639
2640 fn process_updated_entries(
2641 &self,
2642 worktree: &Entity<Worktree>,
2643 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2644 cx: &mut App,
2645 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2646 let path_style = worktree.read(cx).path_style();
2647 let mut repo_paths = self
2648 .repositories
2649 .values()
2650 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2651 .collect::<Vec<_>>();
2652 let mut entries: Vec<_> = updated_entries
2653 .iter()
2654 .map(|(path, _, _)| path.clone())
2655 .collect();
2656 entries.sort();
2657 let worktree = worktree.read(cx);
2658
2659 let entries = entries
2660 .into_iter()
2661 .map(|path| worktree.absolutize(&path))
2662 .collect::<Arc<[_]>>();
2663
2664 let executor = cx.background_executor().clone();
2665 cx.background_executor().spawn(async move {
2666 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2667 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2668 let mut tasks = FuturesOrdered::new();
2669 for (repo_path, repo) in repo_paths.into_iter().rev() {
2670 let entries = entries.clone();
2671 let task = executor.spawn(async move {
2672 // Find all repository paths that belong to this repo
2673 let mut ix = entries.partition_point(|path| path < &*repo_path);
2674 if ix == entries.len() {
2675 return None;
2676 };
2677
2678 let mut paths = Vec::new();
2679 // All paths prefixed by a given repo will constitute a continuous range.
2680 while let Some(path) = entries.get(ix)
2681 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2682 &repo_path, path, path_style,
2683 )
2684 {
2685 paths.push((repo_path, ix));
2686 ix += 1;
2687 }
2688 if paths.is_empty() {
2689 None
2690 } else {
2691 Some((repo, paths))
2692 }
2693 });
2694 tasks.push_back(task);
2695 }
2696
2697 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2698 let mut path_was_used = vec![false; entries.len()];
2699 let tasks = tasks.collect::<Vec<_>>().await;
2700 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2701 // We always want to assign a path to it's innermost repository.
2702 for t in tasks {
2703 let Some((repo, paths)) = t else {
2704 continue;
2705 };
2706 let entry = paths_by_git_repo.entry(repo).or_default();
2707 for (repo_path, ix) in paths {
2708 if path_was_used[ix] {
2709 continue;
2710 }
2711 path_was_used[ix] = true;
2712 entry.push(repo_path);
2713 }
2714 }
2715
2716 paths_by_git_repo
2717 })
2718 }
2719}
2720
2721impl BufferGitState {
2722 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2723 Self {
2724 unstaged_diff: Default::default(),
2725 uncommitted_diff: Default::default(),
2726 recalculate_diff_task: Default::default(),
2727 language: Default::default(),
2728 language_registry: Default::default(),
2729 recalculating_tx: postage::watch::channel_with(false).0,
2730 hunk_staging_operation_count: 0,
2731 hunk_staging_operation_count_as_of_write: 0,
2732 head_text: Default::default(),
2733 index_text: Default::default(),
2734 head_changed: Default::default(),
2735 index_changed: Default::default(),
2736 language_changed: Default::default(),
2737 conflict_updated_futures: Default::default(),
2738 conflict_set: Default::default(),
2739 reparse_conflict_markers_task: Default::default(),
2740 }
2741 }
2742
2743 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2744 self.language = buffer.read(cx).language().cloned();
2745 self.language_changed = true;
2746 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2747 }
2748
2749 fn reparse_conflict_markers(
2750 &mut self,
2751 buffer: text::BufferSnapshot,
2752 cx: &mut Context<Self>,
2753 ) -> oneshot::Receiver<()> {
2754 let (tx, rx) = oneshot::channel();
2755
2756 let Some(conflict_set) = self
2757 .conflict_set
2758 .as_ref()
2759 .and_then(|conflict_set| conflict_set.upgrade())
2760 else {
2761 return rx;
2762 };
2763
2764 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2765 if conflict_set.has_conflict {
2766 Some(conflict_set.snapshot())
2767 } else {
2768 None
2769 }
2770 });
2771
2772 if let Some(old_snapshot) = old_snapshot {
2773 self.conflict_updated_futures.push(tx);
2774 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2775 let (snapshot, changed_range) = cx
2776 .background_spawn(async move {
2777 let new_snapshot = ConflictSet::parse(&buffer);
2778 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2779 (new_snapshot, changed_range)
2780 })
2781 .await;
2782 this.update(cx, |this, cx| {
2783 if let Some(conflict_set) = &this.conflict_set {
2784 conflict_set
2785 .update(cx, |conflict_set, cx| {
2786 conflict_set.set_snapshot(snapshot, changed_range, cx);
2787 })
2788 .ok();
2789 }
2790 let futures = std::mem::take(&mut this.conflict_updated_futures);
2791 for tx in futures {
2792 tx.send(()).ok();
2793 }
2794 })
2795 }))
2796 }
2797
2798 rx
2799 }
2800
2801 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2802 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2803 }
2804
2805 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2806 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2807 }
2808
2809 fn handle_base_texts_updated(
2810 &mut self,
2811 buffer: text::BufferSnapshot,
2812 message: proto::UpdateDiffBases,
2813 cx: &mut Context<Self>,
2814 ) {
2815 use proto::update_diff_bases::Mode;
2816
2817 let Some(mode) = Mode::from_i32(message.mode) else {
2818 return;
2819 };
2820
2821 let diff_bases_change = match mode {
2822 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2823 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2824 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2825 Mode::IndexAndHead => DiffBasesChange::SetEach {
2826 index: message.staged_text,
2827 head: message.committed_text,
2828 },
2829 };
2830
2831 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2832 }
2833
2834 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2835 if *self.recalculating_tx.borrow() {
2836 let mut rx = self.recalculating_tx.subscribe();
2837 Some(async move {
2838 loop {
2839 let is_recalculating = rx.recv().await;
2840 if is_recalculating != Some(true) {
2841 break;
2842 }
2843 }
2844 })
2845 } else {
2846 None
2847 }
2848 }
2849
2850 fn diff_bases_changed(
2851 &mut self,
2852 buffer: text::BufferSnapshot,
2853 diff_bases_change: Option<DiffBasesChange>,
2854 cx: &mut Context<Self>,
2855 ) {
2856 match diff_bases_change {
2857 Some(DiffBasesChange::SetIndex(index)) => {
2858 self.index_text = index.map(|mut index| {
2859 text::LineEnding::normalize(&mut index);
2860 Arc::new(index)
2861 });
2862 self.index_changed = true;
2863 }
2864 Some(DiffBasesChange::SetHead(head)) => {
2865 self.head_text = head.map(|mut head| {
2866 text::LineEnding::normalize(&mut head);
2867 Arc::new(head)
2868 });
2869 self.head_changed = true;
2870 }
2871 Some(DiffBasesChange::SetBoth(text)) => {
2872 let text = text.map(|mut text| {
2873 text::LineEnding::normalize(&mut text);
2874 Arc::new(text)
2875 });
2876 self.head_text = text.clone();
2877 self.index_text = text;
2878 self.head_changed = true;
2879 self.index_changed = true;
2880 }
2881 Some(DiffBasesChange::SetEach { index, head }) => {
2882 self.index_text = index.map(|mut index| {
2883 text::LineEnding::normalize(&mut index);
2884 Arc::new(index)
2885 });
2886 self.index_changed = true;
2887 self.head_text = head.map(|mut head| {
2888 text::LineEnding::normalize(&mut head);
2889 Arc::new(head)
2890 });
2891 self.head_changed = true;
2892 }
2893 None => {}
2894 }
2895
2896 self.recalculate_diffs(buffer, cx)
2897 }
2898
2899 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2900 *self.recalculating_tx.borrow_mut() = true;
2901
2902 let language = self.language.clone();
2903 let language_registry = self.language_registry.clone();
2904 let unstaged_diff = self.unstaged_diff();
2905 let uncommitted_diff = self.uncommitted_diff();
2906 let head = self.head_text.clone();
2907 let index = self.index_text.clone();
2908 let index_changed = self.index_changed;
2909 let head_changed = self.head_changed;
2910 let language_changed = self.language_changed;
2911 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2912 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2913 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2914 (None, None) => true,
2915 _ => false,
2916 };
2917 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2918 log::debug!(
2919 "start recalculating diffs for buffer {}",
2920 buffer.remote_id()
2921 );
2922
2923 let mut new_unstaged_diff = None;
2924 if let Some(unstaged_diff) = &unstaged_diff {
2925 new_unstaged_diff = Some(
2926 BufferDiff::update_diff(
2927 unstaged_diff.clone(),
2928 buffer.clone(),
2929 index,
2930 index_changed,
2931 language_changed,
2932 language.clone(),
2933 language_registry.clone(),
2934 cx,
2935 )
2936 .await?,
2937 );
2938 }
2939
2940 let mut new_uncommitted_diff = None;
2941 if let Some(uncommitted_diff) = &uncommitted_diff {
2942 new_uncommitted_diff = if index_matches_head {
2943 new_unstaged_diff.clone()
2944 } else {
2945 Some(
2946 BufferDiff::update_diff(
2947 uncommitted_diff.clone(),
2948 buffer.clone(),
2949 head,
2950 head_changed,
2951 language_changed,
2952 language.clone(),
2953 language_registry.clone(),
2954 cx,
2955 )
2956 .await?,
2957 )
2958 }
2959 }
2960
2961 let cancel = this.update(cx, |this, _| {
2962 // This checks whether all pending stage/unstage operations
2963 // have quiesced (i.e. both the corresponding write and the
2964 // read of that write have completed). If not, then we cancel
2965 // this recalculation attempt to avoid invalidating pending
2966 // state too quickly; another recalculation will come along
2967 // later and clear the pending state once the state of the index has settled.
2968 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2969 *this.recalculating_tx.borrow_mut() = false;
2970 true
2971 } else {
2972 false
2973 }
2974 })?;
2975 if cancel {
2976 log::debug!(
2977 concat!(
2978 "aborting recalculating diffs for buffer {}",
2979 "due to subsequent hunk operations",
2980 ),
2981 buffer.remote_id()
2982 );
2983 return Ok(());
2984 }
2985
2986 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2987 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2988 {
2989 unstaged_diff.update(cx, |diff, cx| {
2990 if language_changed {
2991 diff.language_changed(cx);
2992 }
2993 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2994 })?
2995 } else {
2996 None
2997 };
2998
2999 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3000 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3001 {
3002 uncommitted_diff.update(cx, |diff, cx| {
3003 if language_changed {
3004 diff.language_changed(cx);
3005 }
3006 diff.set_snapshot_with_secondary(
3007 new_uncommitted_diff,
3008 &buffer,
3009 unstaged_changed_range,
3010 true,
3011 cx,
3012 );
3013 })?;
3014 }
3015
3016 log::debug!(
3017 "finished recalculating diffs for buffer {}",
3018 buffer.remote_id()
3019 );
3020
3021 if let Some(this) = this.upgrade() {
3022 this.update(cx, |this, _| {
3023 this.index_changed = false;
3024 this.head_changed = false;
3025 this.language_changed = false;
3026 *this.recalculating_tx.borrow_mut() = false;
3027 })?;
3028 }
3029
3030 Ok(())
3031 }));
3032 }
3033}
3034
3035fn make_remote_delegate(
3036 this: Entity<GitStore>,
3037 project_id: u64,
3038 repository_id: RepositoryId,
3039 askpass_id: u64,
3040 cx: &mut AsyncApp,
3041) -> AskPassDelegate {
3042 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3043 this.update(cx, |this, cx| {
3044 let Some((client, _)) = this.downstream_client() else {
3045 return;
3046 };
3047 let response = client.request(proto::AskPassRequest {
3048 project_id,
3049 repository_id: repository_id.to_proto(),
3050 askpass_id,
3051 prompt,
3052 });
3053 cx.spawn(async move |_, _| {
3054 let mut response = response.await?.response;
3055 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3056 .ok();
3057 response.zeroize();
3058 anyhow::Ok(())
3059 })
3060 .detach_and_log_err(cx);
3061 })
3062 .log_err();
3063 })
3064}
3065
3066impl RepositoryId {
3067 pub fn to_proto(self) -> u64 {
3068 self.0
3069 }
3070
3071 pub fn from_proto(id: u64) -> Self {
3072 RepositoryId(id)
3073 }
3074}
3075
3076impl RepositorySnapshot {
3077 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3078 Self {
3079 id,
3080 statuses_by_path: Default::default(),
3081 work_directory_abs_path,
3082 branch: None,
3083 head_commit: None,
3084 scan_id: 0,
3085 merge: Default::default(),
3086 remote_origin_url: None,
3087 remote_upstream_url: None,
3088 stash_entries: Default::default(),
3089 path_style,
3090 }
3091 }
3092
3093 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3094 proto::UpdateRepository {
3095 branch_summary: self.branch.as_ref().map(branch_to_proto),
3096 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3097 updated_statuses: self
3098 .statuses_by_path
3099 .iter()
3100 .map(|entry| entry.to_proto())
3101 .collect(),
3102 removed_statuses: Default::default(),
3103 current_merge_conflicts: self
3104 .merge
3105 .conflicted_paths
3106 .iter()
3107 .map(|repo_path| repo_path.to_proto())
3108 .collect(),
3109 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3110 project_id,
3111 id: self.id.to_proto(),
3112 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3113 entry_ids: vec![self.id.to_proto()],
3114 scan_id: self.scan_id,
3115 is_last_update: true,
3116 stash_entries: self
3117 .stash_entries
3118 .entries
3119 .iter()
3120 .map(stash_to_proto)
3121 .collect(),
3122 }
3123 }
3124
3125 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3126 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3127 let mut removed_statuses: Vec<String> = Vec::new();
3128
3129 let mut new_statuses = self.statuses_by_path.iter().peekable();
3130 let mut old_statuses = old.statuses_by_path.iter().peekable();
3131
3132 let mut current_new_entry = new_statuses.next();
3133 let mut current_old_entry = old_statuses.next();
3134 loop {
3135 match (current_new_entry, current_old_entry) {
3136 (Some(new_entry), Some(old_entry)) => {
3137 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3138 Ordering::Less => {
3139 updated_statuses.push(new_entry.to_proto());
3140 current_new_entry = new_statuses.next();
3141 }
3142 Ordering::Equal => {
3143 if new_entry.status != old_entry.status {
3144 updated_statuses.push(new_entry.to_proto());
3145 }
3146 current_old_entry = old_statuses.next();
3147 current_new_entry = new_statuses.next();
3148 }
3149 Ordering::Greater => {
3150 removed_statuses.push(old_entry.repo_path.to_proto());
3151 current_old_entry = old_statuses.next();
3152 }
3153 }
3154 }
3155 (None, Some(old_entry)) => {
3156 removed_statuses.push(old_entry.repo_path.to_proto());
3157 current_old_entry = old_statuses.next();
3158 }
3159 (Some(new_entry), None) => {
3160 updated_statuses.push(new_entry.to_proto());
3161 current_new_entry = new_statuses.next();
3162 }
3163 (None, None) => break,
3164 }
3165 }
3166
3167 proto::UpdateRepository {
3168 branch_summary: self.branch.as_ref().map(branch_to_proto),
3169 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3170 updated_statuses,
3171 removed_statuses,
3172 current_merge_conflicts: self
3173 .merge
3174 .conflicted_paths
3175 .iter()
3176 .map(|path| path.to_proto())
3177 .collect(),
3178 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3179 project_id,
3180 id: self.id.to_proto(),
3181 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3182 entry_ids: vec![],
3183 scan_id: self.scan_id,
3184 is_last_update: true,
3185 stash_entries: self
3186 .stash_entries
3187 .entries
3188 .iter()
3189 .map(stash_to_proto)
3190 .collect(),
3191 }
3192 }
3193
3194 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3195 self.statuses_by_path.iter().cloned()
3196 }
3197
3198 pub fn status_summary(&self) -> GitSummary {
3199 self.statuses_by_path.summary().item_summary
3200 }
3201
3202 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3203 self.statuses_by_path
3204 .get(&PathKey(path.as_ref().clone()), ())
3205 .cloned()
3206 }
3207
3208 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3209 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3210 }
3211
3212 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3213 self.path_style
3214 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3215 .unwrap()
3216 .into()
3217 }
3218
3219 #[inline]
3220 fn abs_path_to_repo_path_inner(
3221 work_directory_abs_path: &Path,
3222 abs_path: &Path,
3223 path_style: PathStyle,
3224 ) -> Option<RepoPath> {
3225 abs_path
3226 .strip_prefix(&work_directory_abs_path)
3227 .ok()
3228 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3229 }
3230
3231 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3232 self.merge.conflicted_paths.contains(repo_path)
3233 }
3234
3235 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3236 let had_conflict_on_last_merge_head_change =
3237 self.merge.conflicted_paths.contains(repo_path);
3238 let has_conflict_currently = self
3239 .status_for_path(repo_path)
3240 .is_some_and(|entry| entry.status.is_conflicted());
3241 had_conflict_on_last_merge_head_change || has_conflict_currently
3242 }
3243
3244 /// This is the name that will be displayed in the repository selector for this repository.
3245 pub fn display_name(&self) -> SharedString {
3246 self.work_directory_abs_path
3247 .file_name()
3248 .unwrap_or_default()
3249 .to_string_lossy()
3250 .to_string()
3251 .into()
3252 }
3253}
3254
3255pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3256 proto::StashEntry {
3257 oid: entry.oid.as_bytes().to_vec(),
3258 message: entry.message.clone(),
3259 branch: entry.branch.clone(),
3260 index: entry.index as u64,
3261 timestamp: entry.timestamp,
3262 }
3263}
3264
3265pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3266 Ok(StashEntry {
3267 oid: Oid::from_bytes(&entry.oid)?,
3268 message: entry.message.clone(),
3269 index: entry.index as usize,
3270 branch: entry.branch.clone(),
3271 timestamp: entry.timestamp,
3272 })
3273}
3274
3275impl MergeDetails {
3276 async fn load(
3277 backend: &Arc<dyn GitRepository>,
3278 status: &SumTree<StatusEntry>,
3279 prev_snapshot: &RepositorySnapshot,
3280 ) -> Result<(MergeDetails, bool)> {
3281 log::debug!("load merge details");
3282 let message = backend.merge_message().await;
3283 let heads = backend
3284 .revparse_batch(vec![
3285 "MERGE_HEAD".into(),
3286 "CHERRY_PICK_HEAD".into(),
3287 "REBASE_HEAD".into(),
3288 "REVERT_HEAD".into(),
3289 "APPLY_HEAD".into(),
3290 ])
3291 .await
3292 .log_err()
3293 .unwrap_or_default()
3294 .into_iter()
3295 .map(|opt| opt.map(SharedString::from))
3296 .collect::<Vec<_>>();
3297 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3298 let conflicted_paths = if merge_heads_changed {
3299 let current_conflicted_paths = TreeSet::from_ordered_entries(
3300 status
3301 .iter()
3302 .filter(|entry| entry.status.is_conflicted())
3303 .map(|entry| entry.repo_path.clone()),
3304 );
3305
3306 // It can happen that we run a scan while a lengthy merge is in progress
3307 // that will eventually result in conflicts, but before those conflicts
3308 // are reported by `git status`. Since for the moment we only care about
3309 // the merge heads state for the purposes of tracking conflicts, don't update
3310 // this state until we see some conflicts.
3311 if heads.iter().any(Option::is_some)
3312 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3313 && current_conflicted_paths.is_empty()
3314 {
3315 log::debug!("not updating merge heads because no conflicts found");
3316 return Ok((
3317 MergeDetails {
3318 message: message.map(SharedString::from),
3319 ..prev_snapshot.merge.clone()
3320 },
3321 false,
3322 ));
3323 }
3324
3325 current_conflicted_paths
3326 } else {
3327 prev_snapshot.merge.conflicted_paths.clone()
3328 };
3329 let details = MergeDetails {
3330 conflicted_paths,
3331 message: message.map(SharedString::from),
3332 heads,
3333 };
3334 Ok((details, merge_heads_changed))
3335 }
3336}
3337
3338impl Repository {
3339 pub fn snapshot(&self) -> RepositorySnapshot {
3340 self.snapshot.clone()
3341 }
3342
3343 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3344 self.pending_ops.iter().cloned()
3345 }
3346
3347 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3348 self.pending_ops.summary().clone()
3349 }
3350
3351 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3352 self.pending_ops
3353 .get(&PathKey(path.as_ref().clone()), ())
3354 .cloned()
3355 }
3356
3357 fn local(
3358 id: RepositoryId,
3359 work_directory_abs_path: Arc<Path>,
3360 dot_git_abs_path: Arc<Path>,
3361 repository_dir_abs_path: Arc<Path>,
3362 common_dir_abs_path: Arc<Path>,
3363 project_environment: WeakEntity<ProjectEnvironment>,
3364 fs: Arc<dyn Fs>,
3365 git_store: WeakEntity<GitStore>,
3366 cx: &mut Context<Self>,
3367 ) -> Self {
3368 let snapshot =
3369 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3370 Repository {
3371 this: cx.weak_entity(),
3372 git_store,
3373 snapshot,
3374 pending_ops: Default::default(),
3375 commit_message_buffer: None,
3376 askpass_delegates: Default::default(),
3377 paths_needing_status_update: Default::default(),
3378 latest_askpass_id: 0,
3379 job_sender: Repository::spawn_local_git_worker(
3380 work_directory_abs_path,
3381 dot_git_abs_path,
3382 repository_dir_abs_path,
3383 common_dir_abs_path,
3384 project_environment,
3385 fs,
3386 cx,
3387 ),
3388 job_id: 0,
3389 active_jobs: Default::default(),
3390 }
3391 }
3392
3393 fn remote(
3394 id: RepositoryId,
3395 work_directory_abs_path: Arc<Path>,
3396 path_style: PathStyle,
3397 project_id: ProjectId,
3398 client: AnyProtoClient,
3399 git_store: WeakEntity<GitStore>,
3400 cx: &mut Context<Self>,
3401 ) -> Self {
3402 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3403 Self {
3404 this: cx.weak_entity(),
3405 snapshot,
3406 commit_message_buffer: None,
3407 git_store,
3408 pending_ops: Default::default(),
3409 paths_needing_status_update: Default::default(),
3410 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3411 askpass_delegates: Default::default(),
3412 latest_askpass_id: 0,
3413 active_jobs: Default::default(),
3414 job_id: 0,
3415 }
3416 }
3417
3418 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3419 self.git_store.upgrade()
3420 }
3421
3422 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3423 let this = cx.weak_entity();
3424 let git_store = self.git_store.clone();
3425 let _ = self.send_keyed_job(
3426 Some(GitJobKey::ReloadBufferDiffBases),
3427 None,
3428 |state, mut cx| async move {
3429 let RepositoryState::Local { backend, .. } = state else {
3430 log::error!("tried to recompute diffs for a non-local repository");
3431 return Ok(());
3432 };
3433
3434 let Some(this) = this.upgrade() else {
3435 return Ok(());
3436 };
3437
3438 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3439 git_store.update(cx, |git_store, cx| {
3440 git_store
3441 .diffs
3442 .iter()
3443 .filter_map(|(buffer_id, diff_state)| {
3444 let buffer_store = git_store.buffer_store.read(cx);
3445 let buffer = buffer_store.get(*buffer_id)?;
3446 let file = File::from_dyn(buffer.read(cx).file())?;
3447 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3448 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3449 log::debug!(
3450 "start reload diff bases for repo path {}",
3451 repo_path.as_unix_str()
3452 );
3453 diff_state.update(cx, |diff_state, _| {
3454 let has_unstaged_diff = diff_state
3455 .unstaged_diff
3456 .as_ref()
3457 .is_some_and(|diff| diff.is_upgradable());
3458 let has_uncommitted_diff = diff_state
3459 .uncommitted_diff
3460 .as_ref()
3461 .is_some_and(|set| set.is_upgradable());
3462
3463 Some((
3464 buffer,
3465 repo_path,
3466 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3467 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3468 ))
3469 })
3470 })
3471 .collect::<Vec<_>>()
3472 })
3473 })??;
3474
3475 let buffer_diff_base_changes = cx
3476 .background_spawn(async move {
3477 let mut changes = Vec::new();
3478 for (buffer, repo_path, current_index_text, current_head_text) in
3479 &repo_diff_state_updates
3480 {
3481 let index_text = if current_index_text.is_some() {
3482 backend.load_index_text(repo_path.clone()).await
3483 } else {
3484 None
3485 };
3486 let head_text = if current_head_text.is_some() {
3487 backend.load_committed_text(repo_path.clone()).await
3488 } else {
3489 None
3490 };
3491
3492 let change =
3493 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3494 (Some(current_index), Some(current_head)) => {
3495 let index_changed =
3496 index_text.as_ref() != current_index.as_deref();
3497 let head_changed =
3498 head_text.as_ref() != current_head.as_deref();
3499 if index_changed && head_changed {
3500 if index_text == head_text {
3501 Some(DiffBasesChange::SetBoth(head_text))
3502 } else {
3503 Some(DiffBasesChange::SetEach {
3504 index: index_text,
3505 head: head_text,
3506 })
3507 }
3508 } else if index_changed {
3509 Some(DiffBasesChange::SetIndex(index_text))
3510 } else if head_changed {
3511 Some(DiffBasesChange::SetHead(head_text))
3512 } else {
3513 None
3514 }
3515 }
3516 (Some(current_index), None) => {
3517 let index_changed =
3518 index_text.as_ref() != current_index.as_deref();
3519 index_changed
3520 .then_some(DiffBasesChange::SetIndex(index_text))
3521 }
3522 (None, Some(current_head)) => {
3523 let head_changed =
3524 head_text.as_ref() != current_head.as_deref();
3525 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3526 }
3527 (None, None) => None,
3528 };
3529
3530 changes.push((buffer.clone(), change))
3531 }
3532 changes
3533 })
3534 .await;
3535
3536 git_store.update(&mut cx, |git_store, cx| {
3537 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3538 let buffer_snapshot = buffer.read(cx).text_snapshot();
3539 let buffer_id = buffer_snapshot.remote_id();
3540 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3541 continue;
3542 };
3543
3544 let downstream_client = git_store.downstream_client();
3545 diff_state.update(cx, |diff_state, cx| {
3546 use proto::update_diff_bases::Mode;
3547
3548 if let Some((diff_bases_change, (client, project_id))) =
3549 diff_bases_change.clone().zip(downstream_client)
3550 {
3551 let (staged_text, committed_text, mode) = match diff_bases_change {
3552 DiffBasesChange::SetIndex(index) => {
3553 (index, None, Mode::IndexOnly)
3554 }
3555 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3556 DiffBasesChange::SetEach { index, head } => {
3557 (index, head, Mode::IndexAndHead)
3558 }
3559 DiffBasesChange::SetBoth(text) => {
3560 (None, text, Mode::IndexMatchesHead)
3561 }
3562 };
3563 client
3564 .send(proto::UpdateDiffBases {
3565 project_id: project_id.to_proto(),
3566 buffer_id: buffer_id.to_proto(),
3567 staged_text,
3568 committed_text,
3569 mode: mode as i32,
3570 })
3571 .log_err();
3572 }
3573
3574 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3575 });
3576 }
3577 })
3578 },
3579 );
3580 }
3581
3582 pub fn send_job<F, Fut, R>(
3583 &mut self,
3584 status: Option<SharedString>,
3585 job: F,
3586 ) -> oneshot::Receiver<R>
3587 where
3588 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3589 Fut: Future<Output = R> + 'static,
3590 R: Send + 'static,
3591 {
3592 self.send_keyed_job(None, status, job)
3593 }
3594
3595 fn send_keyed_job<F, Fut, R>(
3596 &mut self,
3597 key: Option<GitJobKey>,
3598 status: Option<SharedString>,
3599 job: F,
3600 ) -> oneshot::Receiver<R>
3601 where
3602 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3603 Fut: Future<Output = R> + 'static,
3604 R: Send + 'static,
3605 {
3606 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3607 let job_id = post_inc(&mut self.job_id);
3608 let this = self.this.clone();
3609 self.job_sender
3610 .unbounded_send(GitJob {
3611 key,
3612 job: Box::new(move |state, cx: &mut AsyncApp| {
3613 let job = job(state, cx.clone());
3614 cx.spawn(async move |cx| {
3615 if let Some(s) = status.clone() {
3616 this.update(cx, |this, cx| {
3617 this.active_jobs.insert(
3618 job_id,
3619 JobInfo {
3620 start: Instant::now(),
3621 message: s.clone(),
3622 },
3623 );
3624
3625 cx.notify();
3626 })
3627 .ok();
3628 }
3629 let result = job.await;
3630
3631 this.update(cx, |this, cx| {
3632 this.active_jobs.remove(&job_id);
3633 cx.notify();
3634 })
3635 .ok();
3636
3637 result_tx.send(result).ok();
3638 })
3639 }),
3640 })
3641 .ok();
3642 result_rx
3643 }
3644
3645 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3646 let Some(git_store) = self.git_store.upgrade() else {
3647 return;
3648 };
3649 let entity = cx.entity();
3650 git_store.update(cx, |git_store, cx| {
3651 let Some((&id, _)) = git_store
3652 .repositories
3653 .iter()
3654 .find(|(_, handle)| *handle == &entity)
3655 else {
3656 return;
3657 };
3658 git_store.active_repo_id = Some(id);
3659 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3660 });
3661 }
3662
3663 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3664 self.snapshot.status()
3665 }
3666
3667 pub fn cached_stash(&self) -> GitStash {
3668 self.snapshot.stash_entries.clone()
3669 }
3670
3671 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3672 let git_store = self.git_store.upgrade()?;
3673 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3674 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3675 let abs_path = SanitizedPath::new(&abs_path);
3676 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3677 Some(ProjectPath {
3678 worktree_id: worktree.read(cx).id(),
3679 path: relative_path,
3680 })
3681 }
3682
3683 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3684 let git_store = self.git_store.upgrade()?;
3685 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3686 let abs_path = worktree_store.absolutize(path, cx)?;
3687 self.snapshot.abs_path_to_repo_path(&abs_path)
3688 }
3689
3690 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3691 other
3692 .read(cx)
3693 .snapshot
3694 .work_directory_abs_path
3695 .starts_with(&self.snapshot.work_directory_abs_path)
3696 }
3697
3698 pub fn open_commit_buffer(
3699 &mut self,
3700 languages: Option<Arc<LanguageRegistry>>,
3701 buffer_store: Entity<BufferStore>,
3702 cx: &mut Context<Self>,
3703 ) -> Task<Result<Entity<Buffer>>> {
3704 let id = self.id;
3705 if let Some(buffer) = self.commit_message_buffer.clone() {
3706 return Task::ready(Ok(buffer));
3707 }
3708 let this = cx.weak_entity();
3709
3710 let rx = self.send_job(None, move |state, mut cx| async move {
3711 let Some(this) = this.upgrade() else {
3712 bail!("git store was dropped");
3713 };
3714 match state {
3715 RepositoryState::Local { .. } => {
3716 this.update(&mut cx, |_, cx| {
3717 Self::open_local_commit_buffer(languages, buffer_store, cx)
3718 })?
3719 .await
3720 }
3721 RepositoryState::Remote { project_id, client } => {
3722 let request = client.request(proto::OpenCommitMessageBuffer {
3723 project_id: project_id.0,
3724 repository_id: id.to_proto(),
3725 });
3726 let response = request.await.context("requesting to open commit buffer")?;
3727 let buffer_id = BufferId::new(response.buffer_id)?;
3728 let buffer = buffer_store
3729 .update(&mut cx, |buffer_store, cx| {
3730 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3731 })?
3732 .await?;
3733 if let Some(language_registry) = languages {
3734 let git_commit_language =
3735 language_registry.language_for_name("Git Commit").await?;
3736 buffer.update(&mut cx, |buffer, cx| {
3737 buffer.set_language(Some(git_commit_language), cx);
3738 })?;
3739 }
3740 this.update(&mut cx, |this, _| {
3741 this.commit_message_buffer = Some(buffer.clone());
3742 })?;
3743 Ok(buffer)
3744 }
3745 }
3746 });
3747
3748 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3749 }
3750
3751 fn open_local_commit_buffer(
3752 language_registry: Option<Arc<LanguageRegistry>>,
3753 buffer_store: Entity<BufferStore>,
3754 cx: &mut Context<Self>,
3755 ) -> Task<Result<Entity<Buffer>>> {
3756 cx.spawn(async move |repository, cx| {
3757 let buffer = buffer_store
3758 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3759 .await?;
3760
3761 if let Some(language_registry) = language_registry {
3762 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3763 buffer.update(cx, |buffer, cx| {
3764 buffer.set_language(Some(git_commit_language), cx);
3765 })?;
3766 }
3767
3768 repository.update(cx, |repository, _| {
3769 repository.commit_message_buffer = Some(buffer.clone());
3770 })?;
3771 Ok(buffer)
3772 })
3773 }
3774
3775 pub fn checkout_files(
3776 &mut self,
3777 commit: &str,
3778 paths: Vec<RepoPath>,
3779 cx: &mut Context<Self>,
3780 ) -> Task<Result<()>> {
3781 let commit = commit.to_string();
3782 let id = self.id;
3783
3784 self.spawn_job_with_tracking(
3785 paths.clone(),
3786 pending_op::GitStatus::Reverted,
3787 cx,
3788 async move |this, cx| {
3789 this.update(cx, |this, _cx| {
3790 this.send_job(
3791 Some(format!("git checkout {}", commit).into()),
3792 move |git_repo, _| async move {
3793 match git_repo {
3794 RepositoryState::Local {
3795 backend,
3796 environment,
3797 ..
3798 } => {
3799 backend
3800 .checkout_files(commit, paths, environment.clone())
3801 .await
3802 }
3803 RepositoryState::Remote { project_id, client } => {
3804 client
3805 .request(proto::GitCheckoutFiles {
3806 project_id: project_id.0,
3807 repository_id: id.to_proto(),
3808 commit,
3809 paths: paths
3810 .into_iter()
3811 .map(|p| p.to_proto())
3812 .collect(),
3813 })
3814 .await?;
3815
3816 Ok(())
3817 }
3818 }
3819 },
3820 )
3821 })?
3822 .await?
3823 },
3824 )
3825 }
3826
3827 pub fn reset(
3828 &mut self,
3829 commit: String,
3830 reset_mode: ResetMode,
3831 _cx: &mut App,
3832 ) -> oneshot::Receiver<Result<()>> {
3833 let id = self.id;
3834
3835 self.send_job(None, move |git_repo, _| async move {
3836 match git_repo {
3837 RepositoryState::Local {
3838 backend,
3839 environment,
3840 ..
3841 } => backend.reset(commit, reset_mode, environment).await,
3842 RepositoryState::Remote { project_id, client } => {
3843 client
3844 .request(proto::GitReset {
3845 project_id: project_id.0,
3846 repository_id: id.to_proto(),
3847 commit,
3848 mode: match reset_mode {
3849 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3850 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3851 },
3852 })
3853 .await?;
3854
3855 Ok(())
3856 }
3857 }
3858 })
3859 }
3860
3861 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3862 let id = self.id;
3863 self.send_job(None, move |git_repo, _cx| async move {
3864 match git_repo {
3865 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3866 RepositoryState::Remote { project_id, client } => {
3867 let resp = client
3868 .request(proto::GitShow {
3869 project_id: project_id.0,
3870 repository_id: id.to_proto(),
3871 commit,
3872 })
3873 .await?;
3874
3875 Ok(CommitDetails {
3876 sha: resp.sha.into(),
3877 message: resp.message.into(),
3878 commit_timestamp: resp.commit_timestamp,
3879 author_email: resp.author_email.into(),
3880 author_name: resp.author_name.into(),
3881 })
3882 }
3883 }
3884 })
3885 }
3886
3887 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3888 let id = self.id;
3889 self.send_job(None, move |git_repo, cx| async move {
3890 match git_repo {
3891 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3892 RepositoryState::Remote {
3893 client, project_id, ..
3894 } => {
3895 let response = client
3896 .request(proto::LoadCommitDiff {
3897 project_id: project_id.0,
3898 repository_id: id.to_proto(),
3899 commit,
3900 })
3901 .await?;
3902 Ok(CommitDiff {
3903 files: response
3904 .files
3905 .into_iter()
3906 .map(|file| {
3907 Ok(CommitFile {
3908 path: RepoPath::from_proto(&file.path)?,
3909 old_text: file.old_text,
3910 new_text: file.new_text,
3911 })
3912 })
3913 .collect::<Result<Vec<_>>>()?,
3914 })
3915 }
3916 }
3917 })
3918 }
3919
3920 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3921 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3922 }
3923
3924 fn save_buffers<'a>(
3925 &self,
3926 entries: impl IntoIterator<Item = &'a RepoPath>,
3927 cx: &mut Context<Self>,
3928 ) -> Vec<Task<anyhow::Result<()>>> {
3929 let mut save_futures = Vec::new();
3930 if let Some(buffer_store) = self.buffer_store(cx) {
3931 buffer_store.update(cx, |buffer_store, cx| {
3932 for path in entries {
3933 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3934 continue;
3935 };
3936 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3937 && buffer
3938 .read(cx)
3939 .file()
3940 .is_some_and(|file| file.disk_state().exists())
3941 && buffer.read(cx).has_unsaved_edits()
3942 {
3943 save_futures.push(buffer_store.save_buffer(buffer, cx));
3944 }
3945 }
3946 })
3947 }
3948 save_futures
3949 }
3950
3951 pub fn stage_entries(
3952 &mut self,
3953 entries: Vec<RepoPath>,
3954 cx: &mut Context<Self>,
3955 ) -> Task<anyhow::Result<()>> {
3956 if entries.is_empty() {
3957 return Task::ready(Ok(()));
3958 }
3959 let id = self.id;
3960 let save_tasks = self.save_buffers(&entries, cx);
3961 let paths = entries
3962 .iter()
3963 .map(|p| p.as_unix_str())
3964 .collect::<Vec<_>>()
3965 .join(" ");
3966 let status = format!("git add {paths}");
3967 let job_key = GitJobKey::WriteIndex(entries.clone());
3968
3969 self.spawn_job_with_tracking(
3970 entries.clone(),
3971 pending_op::GitStatus::Staged,
3972 cx,
3973 async move |this, cx| {
3974 for save_task in save_tasks {
3975 save_task.await?;
3976 }
3977
3978 this.update(cx, |this, _| {
3979 this.send_keyed_job(
3980 Some(job_key),
3981 Some(status.into()),
3982 move |git_repo, _cx| async move {
3983 match git_repo {
3984 RepositoryState::Local {
3985 backend,
3986 environment,
3987 ..
3988 } => backend.stage_paths(entries, environment.clone()).await,
3989 RepositoryState::Remote { project_id, client } => {
3990 client
3991 .request(proto::Stage {
3992 project_id: project_id.0,
3993 repository_id: id.to_proto(),
3994 paths: entries
3995 .into_iter()
3996 .map(|repo_path| repo_path.to_proto())
3997 .collect(),
3998 })
3999 .await
4000 .context("sending stage request")?;
4001
4002 Ok(())
4003 }
4004 }
4005 },
4006 )
4007 })?
4008 .await?
4009 },
4010 )
4011 }
4012
4013 pub fn unstage_entries(
4014 &mut self,
4015 entries: Vec<RepoPath>,
4016 cx: &mut Context<Self>,
4017 ) -> Task<anyhow::Result<()>> {
4018 if entries.is_empty() {
4019 return Task::ready(Ok(()));
4020 }
4021 let id = self.id;
4022 let save_tasks = self.save_buffers(&entries, cx);
4023 let paths = entries
4024 .iter()
4025 .map(|p| p.as_unix_str())
4026 .collect::<Vec<_>>()
4027 .join(" ");
4028 let status = format!("git reset {paths}");
4029 let job_key = GitJobKey::WriteIndex(entries.clone());
4030
4031 self.spawn_job_with_tracking(
4032 entries.clone(),
4033 pending_op::GitStatus::Unstaged,
4034 cx,
4035 async move |this, cx| {
4036 for save_task in save_tasks {
4037 save_task.await?;
4038 }
4039
4040 this.update(cx, |this, _| {
4041 this.send_keyed_job(
4042 Some(job_key),
4043 Some(status.into()),
4044 move |git_repo, _cx| async move {
4045 match git_repo {
4046 RepositoryState::Local {
4047 backend,
4048 environment,
4049 ..
4050 } => backend.unstage_paths(entries, environment).await,
4051 RepositoryState::Remote { project_id, client } => {
4052 client
4053 .request(proto::Unstage {
4054 project_id: project_id.0,
4055 repository_id: id.to_proto(),
4056 paths: entries
4057 .into_iter()
4058 .map(|repo_path| repo_path.to_proto())
4059 .collect(),
4060 })
4061 .await
4062 .context("sending unstage request")?;
4063
4064 Ok(())
4065 }
4066 }
4067 },
4068 )
4069 })?
4070 .await?
4071 },
4072 )
4073 }
4074
4075 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4076 let to_stage = self
4077 .cached_status()
4078 .filter_map(|entry| {
4079 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4080 if ops.staging() || ops.staged() {
4081 None
4082 } else {
4083 Some(entry.repo_path)
4084 }
4085 } else if entry.status.staging().is_fully_staged() {
4086 None
4087 } else {
4088 Some(entry.repo_path)
4089 }
4090 })
4091 .collect();
4092 self.stage_entries(to_stage, cx)
4093 }
4094
4095 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4096 let to_unstage = self
4097 .cached_status()
4098 .filter_map(|entry| {
4099 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4100 if !ops.staging() && !ops.staged() {
4101 None
4102 } else {
4103 Some(entry.repo_path)
4104 }
4105 } else if entry.status.staging().is_fully_unstaged() {
4106 None
4107 } else {
4108 Some(entry.repo_path)
4109 }
4110 })
4111 .collect();
4112 self.unstage_entries(to_unstage, cx)
4113 }
4114
4115 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4116 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4117
4118 self.stash_entries(to_stash, cx)
4119 }
4120
4121 pub fn stash_entries(
4122 &mut self,
4123 entries: Vec<RepoPath>,
4124 cx: &mut Context<Self>,
4125 ) -> Task<anyhow::Result<()>> {
4126 let id = self.id;
4127
4128 cx.spawn(async move |this, cx| {
4129 this.update(cx, |this, _| {
4130 this.send_job(None, move |git_repo, _cx| async move {
4131 match git_repo {
4132 RepositoryState::Local {
4133 backend,
4134 environment,
4135 ..
4136 } => backend.stash_paths(entries, environment).await,
4137 RepositoryState::Remote { project_id, client } => {
4138 client
4139 .request(proto::Stash {
4140 project_id: project_id.0,
4141 repository_id: id.to_proto(),
4142 paths: entries
4143 .into_iter()
4144 .map(|repo_path| repo_path.to_proto())
4145 .collect(),
4146 })
4147 .await
4148 .context("sending stash request")?;
4149 Ok(())
4150 }
4151 }
4152 })
4153 })?
4154 .await??;
4155 Ok(())
4156 })
4157 }
4158
4159 pub fn stash_pop(
4160 &mut self,
4161 index: Option<usize>,
4162 cx: &mut Context<Self>,
4163 ) -> Task<anyhow::Result<()>> {
4164 let id = self.id;
4165 cx.spawn(async move |this, cx| {
4166 this.update(cx, |this, _| {
4167 this.send_job(None, move |git_repo, _cx| async move {
4168 match git_repo {
4169 RepositoryState::Local {
4170 backend,
4171 environment,
4172 ..
4173 } => backend.stash_pop(index, environment).await,
4174 RepositoryState::Remote { project_id, client } => {
4175 client
4176 .request(proto::StashPop {
4177 project_id: project_id.0,
4178 repository_id: id.to_proto(),
4179 stash_index: index.map(|i| i as u64),
4180 })
4181 .await
4182 .context("sending stash pop request")?;
4183 Ok(())
4184 }
4185 }
4186 })
4187 })?
4188 .await??;
4189 Ok(())
4190 })
4191 }
4192
4193 pub fn stash_apply(
4194 &mut self,
4195 index: Option<usize>,
4196 cx: &mut Context<Self>,
4197 ) -> Task<anyhow::Result<()>> {
4198 let id = self.id;
4199 cx.spawn(async move |this, cx| {
4200 this.update(cx, |this, _| {
4201 this.send_job(None, move |git_repo, _cx| async move {
4202 match git_repo {
4203 RepositoryState::Local {
4204 backend,
4205 environment,
4206 ..
4207 } => backend.stash_apply(index, environment).await,
4208 RepositoryState::Remote { project_id, client } => {
4209 client
4210 .request(proto::StashApply {
4211 project_id: project_id.0,
4212 repository_id: id.to_proto(),
4213 stash_index: index.map(|i| i as u64),
4214 })
4215 .await
4216 .context("sending stash apply request")?;
4217 Ok(())
4218 }
4219 }
4220 })
4221 })?
4222 .await??;
4223 Ok(())
4224 })
4225 }
4226
4227 pub fn stash_drop(
4228 &mut self,
4229 index: Option<usize>,
4230 cx: &mut Context<Self>,
4231 ) -> oneshot::Receiver<anyhow::Result<()>> {
4232 let id = self.id;
4233 let updates_tx = self
4234 .git_store()
4235 .and_then(|git_store| match &git_store.read(cx).state {
4236 GitStoreState::Local { downstream, .. } => downstream
4237 .as_ref()
4238 .map(|downstream| downstream.updates_tx.clone()),
4239 _ => None,
4240 });
4241 let this = cx.weak_entity();
4242 self.send_job(None, move |git_repo, mut cx| async move {
4243 match git_repo {
4244 RepositoryState::Local {
4245 backend,
4246 environment,
4247 ..
4248 } => {
4249 // TODO would be nice to not have to do this manually
4250 let result = backend.stash_drop(index, environment).await;
4251 if result.is_ok()
4252 && let Ok(stash_entries) = backend.stash_entries().await
4253 {
4254 let snapshot = this.update(&mut cx, |this, cx| {
4255 this.snapshot.stash_entries = stash_entries;
4256 cx.emit(RepositoryEvent::StashEntriesChanged);
4257 this.snapshot.clone()
4258 })?;
4259 if let Some(updates_tx) = updates_tx {
4260 updates_tx
4261 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4262 .ok();
4263 }
4264 }
4265
4266 result
4267 }
4268 RepositoryState::Remote { project_id, client } => {
4269 client
4270 .request(proto::StashDrop {
4271 project_id: project_id.0,
4272 repository_id: id.to_proto(),
4273 stash_index: index.map(|i| i as u64),
4274 })
4275 .await
4276 .context("sending stash pop request")?;
4277 Ok(())
4278 }
4279 }
4280 })
4281 }
4282
4283 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4284 let id = self.id;
4285 self.send_job(
4286 Some(format!("git hook {}", hook.as_str()).into()),
4287 move |git_repo, _cx| async move {
4288 match git_repo {
4289 RepositoryState::Local {
4290 backend,
4291 environment,
4292 ..
4293 } => backend.run_hook(hook, environment.clone()).await,
4294 RepositoryState::Remote { project_id, client } => {
4295 client
4296 .request(proto::RunGitHook {
4297 project_id: project_id.0,
4298 repository_id: id.to_proto(),
4299 hook: hook.to_proto(),
4300 })
4301 .await?;
4302
4303 Ok(())
4304 }
4305 }
4306 },
4307 )
4308 }
4309
4310 pub fn commit(
4311 &mut self,
4312 message: SharedString,
4313 name_and_email: Option<(SharedString, SharedString)>,
4314 options: CommitOptions,
4315 askpass: AskPassDelegate,
4316 cx: &mut App,
4317 ) -> oneshot::Receiver<Result<()>> {
4318 let id = self.id;
4319 let askpass_delegates = self.askpass_delegates.clone();
4320 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4321
4322 let rx = self.run_hook(RunHook::PreCommit, cx);
4323
4324 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4325 rx.await??;
4326
4327 match git_repo {
4328 RepositoryState::Local {
4329 backend,
4330 environment,
4331 ..
4332 } => {
4333 backend
4334 .commit(message, name_and_email, options, askpass, environment)
4335 .await
4336 }
4337 RepositoryState::Remote { project_id, client } => {
4338 askpass_delegates.lock().insert(askpass_id, askpass);
4339 let _defer = util::defer(|| {
4340 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4341 debug_assert!(askpass_delegate.is_some());
4342 });
4343 let (name, email) = name_and_email.unzip();
4344 client
4345 .request(proto::Commit {
4346 project_id: project_id.0,
4347 repository_id: id.to_proto(),
4348 message: String::from(message),
4349 name: name.map(String::from),
4350 email: email.map(String::from),
4351 options: Some(proto::commit::CommitOptions {
4352 amend: options.amend,
4353 signoff: options.signoff,
4354 }),
4355 askpass_id,
4356 })
4357 .await
4358 .context("sending commit request")?;
4359
4360 Ok(())
4361 }
4362 }
4363 })
4364 }
4365
4366 pub fn fetch(
4367 &mut self,
4368 fetch_options: FetchOptions,
4369 askpass: AskPassDelegate,
4370 _cx: &mut App,
4371 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4372 let askpass_delegates = self.askpass_delegates.clone();
4373 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4374 let id = self.id;
4375
4376 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4377 match git_repo {
4378 RepositoryState::Local {
4379 backend,
4380 environment,
4381 ..
4382 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4383 RepositoryState::Remote { project_id, client } => {
4384 askpass_delegates.lock().insert(askpass_id, askpass);
4385 let _defer = util::defer(|| {
4386 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4387 debug_assert!(askpass_delegate.is_some());
4388 });
4389
4390 let response = client
4391 .request(proto::Fetch {
4392 project_id: project_id.0,
4393 repository_id: id.to_proto(),
4394 askpass_id,
4395 remote: fetch_options.to_proto(),
4396 })
4397 .await
4398 .context("sending fetch request")?;
4399
4400 Ok(RemoteCommandOutput {
4401 stdout: response.stdout,
4402 stderr: response.stderr,
4403 })
4404 }
4405 }
4406 })
4407 }
4408
4409 pub fn push(
4410 &mut self,
4411 branch: SharedString,
4412 remote: SharedString,
4413 options: Option<PushOptions>,
4414 askpass: AskPassDelegate,
4415 cx: &mut Context<Self>,
4416 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4417 let askpass_delegates = self.askpass_delegates.clone();
4418 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4419 let id = self.id;
4420
4421 let args = options
4422 .map(|option| match option {
4423 PushOptions::SetUpstream => " --set-upstream",
4424 PushOptions::Force => " --force-with-lease",
4425 })
4426 .unwrap_or("");
4427
4428 let updates_tx = self
4429 .git_store()
4430 .and_then(|git_store| match &git_store.read(cx).state {
4431 GitStoreState::Local { downstream, .. } => downstream
4432 .as_ref()
4433 .map(|downstream| downstream.updates_tx.clone()),
4434 _ => None,
4435 });
4436
4437 let this = cx.weak_entity();
4438 self.send_job(
4439 Some(format!("git push {} {} {}", args, remote, branch).into()),
4440 move |git_repo, mut cx| async move {
4441 match git_repo {
4442 RepositoryState::Local {
4443 backend,
4444 environment,
4445 ..
4446 } => {
4447 let result = backend
4448 .push(
4449 branch.to_string(),
4450 remote.to_string(),
4451 options,
4452 askpass,
4453 environment.clone(),
4454 cx.clone(),
4455 )
4456 .await;
4457 // TODO would be nice to not have to do this manually
4458 if result.is_ok() {
4459 let branches = backend.branches().await?;
4460 let branch = branches.into_iter().find(|branch| branch.is_head);
4461 log::info!("head branch after scan is {branch:?}");
4462 let snapshot = this.update(&mut cx, |this, cx| {
4463 this.snapshot.branch = branch;
4464 cx.emit(RepositoryEvent::BranchChanged);
4465 this.snapshot.clone()
4466 })?;
4467 if let Some(updates_tx) = updates_tx {
4468 updates_tx
4469 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4470 .ok();
4471 }
4472 }
4473 result
4474 }
4475 RepositoryState::Remote { project_id, client } => {
4476 askpass_delegates.lock().insert(askpass_id, askpass);
4477 let _defer = util::defer(|| {
4478 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4479 debug_assert!(askpass_delegate.is_some());
4480 });
4481 let response = client
4482 .request(proto::Push {
4483 project_id: project_id.0,
4484 repository_id: id.to_proto(),
4485 askpass_id,
4486 branch_name: branch.to_string(),
4487 remote_name: remote.to_string(),
4488 options: options.map(|options| match options {
4489 PushOptions::Force => proto::push::PushOptions::Force,
4490 PushOptions::SetUpstream => {
4491 proto::push::PushOptions::SetUpstream
4492 }
4493 }
4494 as i32),
4495 })
4496 .await
4497 .context("sending push request")?;
4498
4499 Ok(RemoteCommandOutput {
4500 stdout: response.stdout,
4501 stderr: response.stderr,
4502 })
4503 }
4504 }
4505 },
4506 )
4507 }
4508
4509 pub fn pull(
4510 &mut self,
4511 branch: Option<SharedString>,
4512 remote: SharedString,
4513 rebase: bool,
4514 askpass: AskPassDelegate,
4515 _cx: &mut App,
4516 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4517 let askpass_delegates = self.askpass_delegates.clone();
4518 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4519 let id = self.id;
4520
4521 let mut status = "git pull".to_string();
4522 if rebase {
4523 status.push_str(" --rebase");
4524 }
4525 status.push_str(&format!(" {}", remote));
4526 if let Some(b) = &branch {
4527 status.push_str(&format!(" {}", b));
4528 }
4529
4530 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4531 match git_repo {
4532 RepositoryState::Local {
4533 backend,
4534 environment,
4535 ..
4536 } => {
4537 backend
4538 .pull(
4539 branch.as_ref().map(|b| b.to_string()),
4540 remote.to_string(),
4541 rebase,
4542 askpass,
4543 environment.clone(),
4544 cx,
4545 )
4546 .await
4547 }
4548 RepositoryState::Remote { project_id, client } => {
4549 askpass_delegates.lock().insert(askpass_id, askpass);
4550 let _defer = util::defer(|| {
4551 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4552 debug_assert!(askpass_delegate.is_some());
4553 });
4554 let response = client
4555 .request(proto::Pull {
4556 project_id: project_id.0,
4557 repository_id: id.to_proto(),
4558 askpass_id,
4559 rebase,
4560 branch_name: branch.as_ref().map(|b| b.to_string()),
4561 remote_name: remote.to_string(),
4562 })
4563 .await
4564 .context("sending pull request")?;
4565
4566 Ok(RemoteCommandOutput {
4567 stdout: response.stdout,
4568 stderr: response.stderr,
4569 })
4570 }
4571 }
4572 })
4573 }
4574
4575 fn spawn_set_index_text_job(
4576 &mut self,
4577 path: RepoPath,
4578 content: Option<String>,
4579 hunk_staging_operation_count: Option<usize>,
4580 cx: &mut Context<Self>,
4581 ) -> oneshot::Receiver<anyhow::Result<()>> {
4582 let id = self.id;
4583 let this = cx.weak_entity();
4584 let git_store = self.git_store.clone();
4585 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4586 self.send_keyed_job(
4587 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4588 None,
4589 move |git_repo, mut cx| async move {
4590 log::debug!(
4591 "start updating index text for buffer {}",
4592 path.as_unix_str()
4593 );
4594
4595 match git_repo {
4596 RepositoryState::Local {
4597 fs,
4598 backend,
4599 environment,
4600 ..
4601 } => {
4602 let executable = match fs.metadata(&abs_path).await {
4603 Ok(Some(meta)) => meta.is_executable,
4604 Ok(None) => false,
4605 Err(_err) => false,
4606 };
4607 backend
4608 .set_index_text(path.clone(), content, environment.clone(), executable)
4609 .await?;
4610 }
4611 RepositoryState::Remote { project_id, client } => {
4612 client
4613 .request(proto::SetIndexText {
4614 project_id: project_id.0,
4615 repository_id: id.to_proto(),
4616 path: path.to_proto(),
4617 text: content,
4618 })
4619 .await?;
4620 }
4621 }
4622 log::debug!(
4623 "finish updating index text for buffer {}",
4624 path.as_unix_str()
4625 );
4626
4627 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4628 let project_path = this
4629 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4630 .ok()
4631 .flatten();
4632 git_store.update(&mut cx, |git_store, cx| {
4633 let buffer_id = git_store
4634 .buffer_store
4635 .read(cx)
4636 .get_by_path(&project_path?)?
4637 .read(cx)
4638 .remote_id();
4639 let diff_state = git_store.diffs.get(&buffer_id)?;
4640 diff_state.update(cx, |diff_state, _| {
4641 diff_state.hunk_staging_operation_count_as_of_write =
4642 hunk_staging_operation_count;
4643 });
4644 Some(())
4645 })?;
4646 }
4647 Ok(())
4648 },
4649 )
4650 }
4651
4652 pub fn get_remotes(
4653 &mut self,
4654 branch_name: Option<String>,
4655 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4656 let id = self.id;
4657 self.send_job(None, move |repo, _cx| async move {
4658 match repo {
4659 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4660 RepositoryState::Remote { project_id, client } => {
4661 let response = client
4662 .request(proto::GetRemotes {
4663 project_id: project_id.0,
4664 repository_id: id.to_proto(),
4665 branch_name,
4666 })
4667 .await?;
4668
4669 let remotes = response
4670 .remotes
4671 .into_iter()
4672 .map(|remotes| git::repository::Remote {
4673 name: remotes.name.into(),
4674 })
4675 .collect();
4676
4677 Ok(remotes)
4678 }
4679 }
4680 })
4681 }
4682
4683 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4684 let id = self.id;
4685 self.send_job(None, move |repo, _| async move {
4686 match repo {
4687 RepositoryState::Local { backend, .. } => backend.branches().await,
4688 RepositoryState::Remote { project_id, client } => {
4689 let response = client
4690 .request(proto::GitGetBranches {
4691 project_id: project_id.0,
4692 repository_id: id.to_proto(),
4693 })
4694 .await?;
4695
4696 let branches = response
4697 .branches
4698 .into_iter()
4699 .map(|branch| proto_to_branch(&branch))
4700 .collect();
4701
4702 Ok(branches)
4703 }
4704 }
4705 })
4706 }
4707
4708 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4709 let id = self.id;
4710 self.send_job(None, move |repo, _| async move {
4711 match repo {
4712 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4713 RepositoryState::Remote { project_id, client } => {
4714 let response = client
4715 .request(proto::GitGetWorktrees {
4716 project_id: project_id.0,
4717 repository_id: id.to_proto(),
4718 })
4719 .await?;
4720
4721 let worktrees = response
4722 .worktrees
4723 .into_iter()
4724 .map(|worktree| proto_to_worktree(&worktree))
4725 .collect();
4726
4727 Ok(worktrees)
4728 }
4729 }
4730 })
4731 }
4732
4733 pub fn create_worktree(
4734 &mut self,
4735 name: String,
4736 path: PathBuf,
4737 commit: Option<String>,
4738 ) -> oneshot::Receiver<Result<()>> {
4739 let id = self.id;
4740 self.send_job(
4741 Some("git worktree add".into()),
4742 move |repo, _cx| async move {
4743 match repo {
4744 RepositoryState::Local { backend, .. } => {
4745 backend.create_worktree(name, path, commit).await
4746 }
4747 RepositoryState::Remote { project_id, client } => {
4748 client
4749 .request(proto::GitCreateWorktree {
4750 project_id: project_id.0,
4751 repository_id: id.to_proto(),
4752 name,
4753 directory: path.to_string_lossy().to_string(),
4754 commit,
4755 })
4756 .await?;
4757
4758 Ok(())
4759 }
4760 }
4761 },
4762 )
4763 }
4764
4765 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4766 let id = self.id;
4767 self.send_job(None, move |repo, _| async move {
4768 match repo {
4769 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4770 RepositoryState::Remote { project_id, client } => {
4771 let response = client
4772 .request(proto::GetDefaultBranch {
4773 project_id: project_id.0,
4774 repository_id: id.to_proto(),
4775 })
4776 .await?;
4777
4778 anyhow::Ok(response.branch.map(SharedString::from))
4779 }
4780 }
4781 })
4782 }
4783
4784 pub fn diff_tree(
4785 &mut self,
4786 diff_type: DiffTreeType,
4787 _cx: &App,
4788 ) -> oneshot::Receiver<Result<TreeDiff>> {
4789 let repository_id = self.snapshot.id;
4790 self.send_job(None, move |repo, _cx| async move {
4791 match repo {
4792 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4793 RepositoryState::Remote { client, project_id } => {
4794 let response = client
4795 .request(proto::GetTreeDiff {
4796 project_id: project_id.0,
4797 repository_id: repository_id.0,
4798 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4799 base: diff_type.base().to_string(),
4800 head: diff_type.head().to_string(),
4801 })
4802 .await?;
4803
4804 let entries = response
4805 .entries
4806 .into_iter()
4807 .filter_map(|entry| {
4808 let status = match entry.status() {
4809 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4810 proto::tree_diff_status::Status::Modified => {
4811 TreeDiffStatus::Modified {
4812 old: git::Oid::from_str(
4813 &entry.oid.context("missing oid").log_err()?,
4814 )
4815 .log_err()?,
4816 }
4817 }
4818 proto::tree_diff_status::Status::Deleted => {
4819 TreeDiffStatus::Deleted {
4820 old: git::Oid::from_str(
4821 &entry.oid.context("missing oid").log_err()?,
4822 )
4823 .log_err()?,
4824 }
4825 }
4826 };
4827 Some((
4828 RepoPath::from_rel_path(
4829 &RelPath::from_proto(&entry.path).log_err()?,
4830 ),
4831 status,
4832 ))
4833 })
4834 .collect();
4835
4836 Ok(TreeDiff { entries })
4837 }
4838 }
4839 })
4840 }
4841
4842 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4843 let id = self.id;
4844 self.send_job(None, move |repo, _cx| async move {
4845 match repo {
4846 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4847 RepositoryState::Remote { project_id, client } => {
4848 let response = client
4849 .request(proto::GitDiff {
4850 project_id: project_id.0,
4851 repository_id: id.to_proto(),
4852 diff_type: match diff_type {
4853 DiffType::HeadToIndex => {
4854 proto::git_diff::DiffType::HeadToIndex.into()
4855 }
4856 DiffType::HeadToWorktree => {
4857 proto::git_diff::DiffType::HeadToWorktree.into()
4858 }
4859 },
4860 })
4861 .await?;
4862
4863 Ok(response.diff)
4864 }
4865 }
4866 })
4867 }
4868
4869 pub fn create_branch(
4870 &mut self,
4871 branch_name: String,
4872 base_branch: Option<String>,
4873 ) -> oneshot::Receiver<Result<()>> {
4874 let id = self.id;
4875 let status_msg = if let Some(ref base) = base_branch {
4876 format!("git switch -c {branch_name} {base}").into()
4877 } else {
4878 format!("git switch -c {branch_name}").into()
4879 };
4880 self.send_job(Some(status_msg), move |repo, _cx| async move {
4881 match repo {
4882 RepositoryState::Local { backend, .. } => {
4883 backend.create_branch(branch_name, base_branch).await
4884 }
4885 RepositoryState::Remote { project_id, client } => {
4886 client
4887 .request(proto::GitCreateBranch {
4888 project_id: project_id.0,
4889 repository_id: id.to_proto(),
4890 branch_name,
4891 })
4892 .await?;
4893
4894 Ok(())
4895 }
4896 }
4897 })
4898 }
4899
4900 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4901 let id = self.id;
4902 self.send_job(
4903 Some(format!("git switch {branch_name}").into()),
4904 move |repo, _cx| async move {
4905 match repo {
4906 RepositoryState::Local { backend, .. } => {
4907 backend.change_branch(branch_name).await
4908 }
4909 RepositoryState::Remote { project_id, client } => {
4910 client
4911 .request(proto::GitChangeBranch {
4912 project_id: project_id.0,
4913 repository_id: id.to_proto(),
4914 branch_name,
4915 })
4916 .await?;
4917
4918 Ok(())
4919 }
4920 }
4921 },
4922 )
4923 }
4924
4925 pub fn rename_branch(
4926 &mut self,
4927 branch: String,
4928 new_name: String,
4929 ) -> oneshot::Receiver<Result<()>> {
4930 let id = self.id;
4931 self.send_job(
4932 Some(format!("git branch -m {branch} {new_name}").into()),
4933 move |repo, _cx| async move {
4934 match repo {
4935 RepositoryState::Local { backend, .. } => {
4936 backend.rename_branch(branch, new_name).await
4937 }
4938 RepositoryState::Remote { project_id, client } => {
4939 client
4940 .request(proto::GitRenameBranch {
4941 project_id: project_id.0,
4942 repository_id: id.to_proto(),
4943 branch,
4944 new_name,
4945 })
4946 .await?;
4947
4948 Ok(())
4949 }
4950 }
4951 },
4952 )
4953 }
4954
4955 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4956 let id = self.id;
4957 self.send_job(None, move |repo, _cx| async move {
4958 match repo {
4959 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4960 RepositoryState::Remote { project_id, client } => {
4961 let response = client
4962 .request(proto::CheckForPushedCommits {
4963 project_id: project_id.0,
4964 repository_id: id.to_proto(),
4965 })
4966 .await?;
4967
4968 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4969
4970 Ok(branches)
4971 }
4972 }
4973 })
4974 }
4975
4976 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4977 self.send_job(None, |repo, _cx| async move {
4978 match repo {
4979 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4980 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4981 }
4982 })
4983 }
4984
4985 pub fn restore_checkpoint(
4986 &mut self,
4987 checkpoint: GitRepositoryCheckpoint,
4988 ) -> oneshot::Receiver<Result<()>> {
4989 self.send_job(None, move |repo, _cx| async move {
4990 match repo {
4991 RepositoryState::Local { backend, .. } => {
4992 backend.restore_checkpoint(checkpoint).await
4993 }
4994 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4995 }
4996 })
4997 }
4998
4999 pub(crate) fn apply_remote_update(
5000 &mut self,
5001 update: proto::UpdateRepository,
5002 cx: &mut Context<Self>,
5003 ) -> Result<()> {
5004 let conflicted_paths = TreeSet::from_ordered_entries(
5005 update
5006 .current_merge_conflicts
5007 .into_iter()
5008 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5009 );
5010 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5011 let new_head_commit = update
5012 .head_commit_details
5013 .as_ref()
5014 .map(proto_to_commit_details);
5015 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5016 cx.emit(RepositoryEvent::BranchChanged)
5017 }
5018 self.snapshot.branch = new_branch;
5019 self.snapshot.head_commit = new_head_commit;
5020
5021 self.snapshot.merge.conflicted_paths = conflicted_paths;
5022 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5023 let new_stash_entries = GitStash {
5024 entries: update
5025 .stash_entries
5026 .iter()
5027 .filter_map(|entry| proto_to_stash(entry).ok())
5028 .collect(),
5029 };
5030 if self.snapshot.stash_entries != new_stash_entries {
5031 cx.emit(RepositoryEvent::StashEntriesChanged)
5032 }
5033 self.snapshot.stash_entries = new_stash_entries;
5034
5035 let edits = update
5036 .removed_statuses
5037 .into_iter()
5038 .filter_map(|path| {
5039 Some(sum_tree::Edit::Remove(PathKey(
5040 RelPath::from_proto(&path).log_err()?,
5041 )))
5042 })
5043 .chain(
5044 update
5045 .updated_statuses
5046 .into_iter()
5047 .filter_map(|updated_status| {
5048 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5049 }),
5050 )
5051 .collect::<Vec<_>>();
5052 if !edits.is_empty() {
5053 cx.emit(RepositoryEvent::StatusesChanged);
5054 }
5055 self.snapshot.statuses_by_path.edit(edits, ());
5056 if update.is_last_update {
5057 self.snapshot.scan_id = update.scan_id;
5058 }
5059 self.clear_pending_ops(cx);
5060 Ok(())
5061 }
5062
5063 pub fn compare_checkpoints(
5064 &mut self,
5065 left: GitRepositoryCheckpoint,
5066 right: GitRepositoryCheckpoint,
5067 ) -> oneshot::Receiver<Result<bool>> {
5068 self.send_job(None, move |repo, _cx| async move {
5069 match repo {
5070 RepositoryState::Local { backend, .. } => {
5071 backend.compare_checkpoints(left, right).await
5072 }
5073 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5074 }
5075 })
5076 }
5077
5078 pub fn diff_checkpoints(
5079 &mut self,
5080 base_checkpoint: GitRepositoryCheckpoint,
5081 target_checkpoint: GitRepositoryCheckpoint,
5082 ) -> oneshot::Receiver<Result<String>> {
5083 self.send_job(None, move |repo, _cx| async move {
5084 match repo {
5085 RepositoryState::Local { backend, .. } => {
5086 backend
5087 .diff_checkpoints(base_checkpoint, target_checkpoint)
5088 .await
5089 }
5090 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5091 }
5092 })
5093 }
5094
5095 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5096 let updated = SumTree::from_iter(
5097 self.pending_ops.iter().filter_map(|ops| {
5098 let inner_ops: Vec<PendingOp> =
5099 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5100 if inner_ops.is_empty() {
5101 None
5102 } else {
5103 Some(PendingOps {
5104 repo_path: ops.repo_path.clone(),
5105 ops: inner_ops,
5106 })
5107 }
5108 }),
5109 (),
5110 );
5111
5112 if updated != self.pending_ops {
5113 cx.emit(RepositoryEvent::PendingOpsChanged {
5114 pending_ops: self.pending_ops.clone(),
5115 })
5116 }
5117
5118 self.pending_ops = updated;
5119 }
5120
5121 fn schedule_scan(
5122 &mut self,
5123 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5124 cx: &mut Context<Self>,
5125 ) {
5126 let this = cx.weak_entity();
5127 let _ = self.send_keyed_job(
5128 Some(GitJobKey::ReloadGitState),
5129 None,
5130 |state, mut cx| async move {
5131 log::debug!("run scheduled git status scan");
5132
5133 let Some(this) = this.upgrade() else {
5134 return Ok(());
5135 };
5136 let RepositoryState::Local { backend, .. } = state else {
5137 bail!("not a local repository")
5138 };
5139 let (snapshot, events) = this
5140 .update(&mut cx, |this, _| {
5141 this.paths_needing_status_update.clear();
5142 compute_snapshot(
5143 this.id,
5144 this.work_directory_abs_path.clone(),
5145 this.snapshot.clone(),
5146 backend.clone(),
5147 )
5148 })?
5149 .await?;
5150 this.update(&mut cx, |this, cx| {
5151 this.snapshot = snapshot.clone();
5152 this.clear_pending_ops(cx);
5153 for event in events {
5154 cx.emit(event);
5155 }
5156 })?;
5157 if let Some(updates_tx) = updates_tx {
5158 updates_tx
5159 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5160 .ok();
5161 }
5162 Ok(())
5163 },
5164 );
5165 }
5166
5167 fn spawn_local_git_worker(
5168 work_directory_abs_path: Arc<Path>,
5169 dot_git_abs_path: Arc<Path>,
5170 _repository_dir_abs_path: Arc<Path>,
5171 _common_dir_abs_path: Arc<Path>,
5172 project_environment: WeakEntity<ProjectEnvironment>,
5173 fs: Arc<dyn Fs>,
5174 cx: &mut Context<Self>,
5175 ) -> mpsc::UnboundedSender<GitJob> {
5176 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5177 let fs_cloned = fs.clone();
5178
5179 cx.spawn(async move |_, cx| {
5180 let environment = project_environment
5181 .upgrade()
5182 .context("missing project environment")?
5183 .update(cx, |project_environment, cx| {
5184 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5185 })?
5186 .await
5187 .unwrap_or_else(|| {
5188 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5189 HashMap::default()
5190 });
5191 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5192 let backend = cx
5193 .background_spawn(async move {
5194 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5195 .or_else(|| which::which("git").ok());
5196 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5197 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5198 })
5199 .await?;
5200
5201 if let Some(git_hosting_provider_registry) =
5202 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5203 {
5204 git_hosting_providers::register_additional_providers(
5205 git_hosting_provider_registry,
5206 backend.clone(),
5207 );
5208 }
5209 let state = RepositoryState::Local {
5210 fs: fs_cloned,
5211 backend,
5212 environment: Arc::new(environment),
5213 };
5214 let mut jobs = VecDeque::new();
5215 loop {
5216 while let Ok(Some(next_job)) = job_rx.try_next() {
5217 jobs.push_back(next_job);
5218 }
5219
5220 if let Some(job) = jobs.pop_front() {
5221 if let Some(current_key) = &job.key
5222 && jobs
5223 .iter()
5224 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5225 {
5226 continue;
5227 }
5228 (job.job)(state.clone(), cx).await;
5229 } else if let Some(job) = job_rx.next().await {
5230 jobs.push_back(job);
5231 } else {
5232 break;
5233 }
5234 }
5235 anyhow::Ok(())
5236 })
5237 .detach_and_log_err(cx);
5238
5239 job_tx
5240 }
5241
5242 fn spawn_remote_git_worker(
5243 project_id: ProjectId,
5244 client: AnyProtoClient,
5245 cx: &mut Context<Self>,
5246 ) -> mpsc::UnboundedSender<GitJob> {
5247 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5248
5249 cx.spawn(async move |_, cx| {
5250 let state = RepositoryState::Remote { project_id, client };
5251 let mut jobs = VecDeque::new();
5252 loop {
5253 while let Ok(Some(next_job)) = job_rx.try_next() {
5254 jobs.push_back(next_job);
5255 }
5256
5257 if let Some(job) = jobs.pop_front() {
5258 if let Some(current_key) = &job.key
5259 && jobs
5260 .iter()
5261 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5262 {
5263 continue;
5264 }
5265 (job.job)(state.clone(), cx).await;
5266 } else if let Some(job) = job_rx.next().await {
5267 jobs.push_back(job);
5268 } else {
5269 break;
5270 }
5271 }
5272 anyhow::Ok(())
5273 })
5274 .detach_and_log_err(cx);
5275
5276 job_tx
5277 }
5278
5279 fn load_staged_text(
5280 &mut self,
5281 buffer_id: BufferId,
5282 repo_path: RepoPath,
5283 cx: &App,
5284 ) -> Task<Result<Option<String>>> {
5285 let rx = self.send_job(None, move |state, _| async move {
5286 match state {
5287 RepositoryState::Local { backend, .. } => {
5288 anyhow::Ok(backend.load_index_text(repo_path).await)
5289 }
5290 RepositoryState::Remote { project_id, client } => {
5291 let response = client
5292 .request(proto::OpenUnstagedDiff {
5293 project_id: project_id.to_proto(),
5294 buffer_id: buffer_id.to_proto(),
5295 })
5296 .await?;
5297 Ok(response.staged_text)
5298 }
5299 }
5300 });
5301 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5302 }
5303
5304 fn load_committed_text(
5305 &mut self,
5306 buffer_id: BufferId,
5307 repo_path: RepoPath,
5308 cx: &App,
5309 ) -> Task<Result<DiffBasesChange>> {
5310 let rx = self.send_job(None, move |state, _| async move {
5311 match state {
5312 RepositoryState::Local { backend, .. } => {
5313 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5314 let staged_text = backend.load_index_text(repo_path).await;
5315 let diff_bases_change = if committed_text == staged_text {
5316 DiffBasesChange::SetBoth(committed_text)
5317 } else {
5318 DiffBasesChange::SetEach {
5319 index: staged_text,
5320 head: committed_text,
5321 }
5322 };
5323 anyhow::Ok(diff_bases_change)
5324 }
5325 RepositoryState::Remote { project_id, client } => {
5326 use proto::open_uncommitted_diff_response::Mode;
5327
5328 let response = client
5329 .request(proto::OpenUncommittedDiff {
5330 project_id: project_id.to_proto(),
5331 buffer_id: buffer_id.to_proto(),
5332 })
5333 .await?;
5334 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5335 let bases = match mode {
5336 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5337 Mode::IndexAndHead => DiffBasesChange::SetEach {
5338 head: response.committed_text,
5339 index: response.staged_text,
5340 },
5341 };
5342 Ok(bases)
5343 }
5344 }
5345 });
5346
5347 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5348 }
5349 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5350 let repository_id = self.snapshot.id;
5351 let rx = self.send_job(None, move |state, _| async move {
5352 match state {
5353 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5354 RepositoryState::Remote { client, project_id } => {
5355 let response = client
5356 .request(proto::GetBlobContent {
5357 project_id: project_id.to_proto(),
5358 repository_id: repository_id.0,
5359 oid: oid.to_string(),
5360 })
5361 .await?;
5362 Ok(response.content)
5363 }
5364 }
5365 });
5366 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5367 }
5368
5369 fn paths_changed(
5370 &mut self,
5371 paths: Vec<RepoPath>,
5372 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5373 cx: &mut Context<Self>,
5374 ) {
5375 self.paths_needing_status_update.extend(paths);
5376
5377 let this = cx.weak_entity();
5378 let _ = self.send_keyed_job(
5379 Some(GitJobKey::RefreshStatuses),
5380 None,
5381 |state, mut cx| async move {
5382 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5383 (
5384 this.snapshot.clone(),
5385 mem::take(&mut this.paths_needing_status_update),
5386 )
5387 })?;
5388 let RepositoryState::Local { backend, .. } = state else {
5389 bail!("not a local repository")
5390 };
5391
5392 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5393 if paths.is_empty() {
5394 return Ok(());
5395 }
5396 let statuses = backend.status(&paths).await?;
5397 let stash_entries = backend.stash_entries().await?;
5398
5399 let changed_path_statuses = cx
5400 .background_spawn(async move {
5401 let mut changed_path_statuses = Vec::new();
5402 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5403 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5404
5405 for (repo_path, status) in &*statuses.entries {
5406 changed_paths.remove(repo_path);
5407 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5408 && cursor.item().is_some_and(|entry| entry.status == *status)
5409 {
5410 continue;
5411 }
5412
5413 changed_path_statuses.push(Edit::Insert(StatusEntry {
5414 repo_path: repo_path.clone(),
5415 status: *status,
5416 }));
5417 }
5418 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5419 for path in changed_paths.into_iter() {
5420 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5421 changed_path_statuses
5422 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5423 }
5424 }
5425 changed_path_statuses
5426 })
5427 .await;
5428
5429 this.update(&mut cx, |this, cx| {
5430 if this.snapshot.stash_entries != stash_entries {
5431 cx.emit(RepositoryEvent::StashEntriesChanged);
5432 this.snapshot.stash_entries = stash_entries;
5433 }
5434
5435 if !changed_path_statuses.is_empty() {
5436 cx.emit(RepositoryEvent::StatusesChanged);
5437 this.snapshot
5438 .statuses_by_path
5439 .edit(changed_path_statuses, ());
5440 this.snapshot.scan_id += 1;
5441 }
5442
5443 if let Some(updates_tx) = updates_tx {
5444 updates_tx
5445 .unbounded_send(DownstreamUpdate::UpdateRepository(
5446 this.snapshot.clone(),
5447 ))
5448 .ok();
5449 }
5450 })
5451 },
5452 );
5453 }
5454
5455 /// currently running git command and when it started
5456 pub fn current_job(&self) -> Option<JobInfo> {
5457 self.active_jobs.values().next().cloned()
5458 }
5459
5460 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5461 self.send_job(None, |_, _| async {})
5462 }
5463
5464 fn spawn_job_with_tracking<AsyncFn>(
5465 &mut self,
5466 paths: Vec<RepoPath>,
5467 git_status: pending_op::GitStatus,
5468 cx: &mut Context<Self>,
5469 f: AsyncFn,
5470 ) -> Task<Result<()>>
5471 where
5472 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5473 {
5474 let ids = self.new_pending_ops_for_paths(paths, git_status);
5475
5476 cx.spawn(async move |this, cx| {
5477 let (job_status, result) = match f(this.clone(), cx).await {
5478 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5479 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5480 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5481 };
5482
5483 this.update(cx, |this, _| {
5484 let mut edits = Vec::with_capacity(ids.len());
5485 for (id, entry) in ids {
5486 if let Some(mut ops) = this
5487 .pending_ops
5488 .get(&PathKey(entry.as_ref().clone()), ())
5489 .cloned()
5490 {
5491 if let Some(op) = ops.op_by_id_mut(id) {
5492 op.job_status = job_status;
5493 }
5494 edits.push(sum_tree::Edit::Insert(ops));
5495 }
5496 }
5497 this.pending_ops.edit(edits, ());
5498 })?;
5499
5500 result
5501 })
5502 }
5503
5504 fn new_pending_ops_for_paths(
5505 &mut self,
5506 paths: Vec<RepoPath>,
5507 git_status: pending_op::GitStatus,
5508 ) -> Vec<(PendingOpId, RepoPath)> {
5509 let mut edits = Vec::with_capacity(paths.len());
5510 let mut ids = Vec::with_capacity(paths.len());
5511 for path in paths {
5512 let mut ops = self
5513 .pending_ops
5514 .get(&PathKey(path.as_ref().clone()), ())
5515 .cloned()
5516 .unwrap_or_else(|| PendingOps::new(&path));
5517 let id = ops.max_id() + 1;
5518 ops.ops.push(PendingOp {
5519 id,
5520 git_status,
5521 job_status: pending_op::JobStatus::Running,
5522 });
5523 edits.push(sum_tree::Edit::Insert(ops));
5524 ids.push((id, path));
5525 }
5526 self.pending_ops.edit(edits, ());
5527 ids
5528 }
5529}
5530
5531fn get_permalink_in_rust_registry_src(
5532 provider_registry: Arc<GitHostingProviderRegistry>,
5533 path: PathBuf,
5534 selection: Range<u32>,
5535) -> Result<url::Url> {
5536 #[derive(Deserialize)]
5537 struct CargoVcsGit {
5538 sha1: String,
5539 }
5540
5541 #[derive(Deserialize)]
5542 struct CargoVcsInfo {
5543 git: CargoVcsGit,
5544 path_in_vcs: String,
5545 }
5546
5547 #[derive(Deserialize)]
5548 struct CargoPackage {
5549 repository: String,
5550 }
5551
5552 #[derive(Deserialize)]
5553 struct CargoToml {
5554 package: CargoPackage,
5555 }
5556
5557 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5558 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5559 Some((dir, json))
5560 }) else {
5561 bail!("No .cargo_vcs_info.json found in parent directories")
5562 };
5563 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5564 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5565 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5566 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5567 .context("parsing package.repository field of manifest")?;
5568 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5569 let permalink = provider.build_permalink(
5570 remote,
5571 BuildPermalinkParams::new(
5572 &cargo_vcs_info.git.sha1,
5573 &RepoPath::from_rel_path(
5574 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5575 ),
5576 Some(selection),
5577 ),
5578 );
5579 Ok(permalink)
5580}
5581
5582fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5583 let Some(blame) = blame else {
5584 return proto::BlameBufferResponse {
5585 blame_response: None,
5586 };
5587 };
5588
5589 let entries = blame
5590 .entries
5591 .into_iter()
5592 .map(|entry| proto::BlameEntry {
5593 sha: entry.sha.as_bytes().into(),
5594 start_line: entry.range.start,
5595 end_line: entry.range.end,
5596 original_line_number: entry.original_line_number,
5597 author: entry.author,
5598 author_mail: entry.author_mail,
5599 author_time: entry.author_time,
5600 author_tz: entry.author_tz,
5601 committer: entry.committer_name,
5602 committer_mail: entry.committer_email,
5603 committer_time: entry.committer_time,
5604 committer_tz: entry.committer_tz,
5605 summary: entry.summary,
5606 previous: entry.previous,
5607 filename: entry.filename,
5608 })
5609 .collect::<Vec<_>>();
5610
5611 let messages = blame
5612 .messages
5613 .into_iter()
5614 .map(|(oid, message)| proto::CommitMessage {
5615 oid: oid.as_bytes().into(),
5616 message,
5617 })
5618 .collect::<Vec<_>>();
5619
5620 proto::BlameBufferResponse {
5621 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5622 entries,
5623 messages,
5624 remote_url: blame.remote_url,
5625 }),
5626 }
5627}
5628
5629fn deserialize_blame_buffer_response(
5630 response: proto::BlameBufferResponse,
5631) -> Option<git::blame::Blame> {
5632 let response = response.blame_response?;
5633 let entries = response
5634 .entries
5635 .into_iter()
5636 .filter_map(|entry| {
5637 Some(git::blame::BlameEntry {
5638 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5639 range: entry.start_line..entry.end_line,
5640 original_line_number: entry.original_line_number,
5641 committer_name: entry.committer,
5642 committer_time: entry.committer_time,
5643 committer_tz: entry.committer_tz,
5644 committer_email: entry.committer_mail,
5645 author: entry.author,
5646 author_mail: entry.author_mail,
5647 author_time: entry.author_time,
5648 author_tz: entry.author_tz,
5649 summary: entry.summary,
5650 previous: entry.previous,
5651 filename: entry.filename,
5652 })
5653 })
5654 .collect::<Vec<_>>();
5655
5656 let messages = response
5657 .messages
5658 .into_iter()
5659 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5660 .collect::<HashMap<_, _>>();
5661
5662 Some(Blame {
5663 entries,
5664 messages,
5665 remote_url: response.remote_url,
5666 })
5667}
5668
5669fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5670 proto::Branch {
5671 is_head: branch.is_head,
5672 ref_name: branch.ref_name.to_string(),
5673 unix_timestamp: branch
5674 .most_recent_commit
5675 .as_ref()
5676 .map(|commit| commit.commit_timestamp as u64),
5677 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5678 ref_name: upstream.ref_name.to_string(),
5679 tracking: upstream
5680 .tracking
5681 .status()
5682 .map(|upstream| proto::UpstreamTracking {
5683 ahead: upstream.ahead as u64,
5684 behind: upstream.behind as u64,
5685 }),
5686 }),
5687 most_recent_commit: branch
5688 .most_recent_commit
5689 .as_ref()
5690 .map(|commit| proto::CommitSummary {
5691 sha: commit.sha.to_string(),
5692 subject: commit.subject.to_string(),
5693 commit_timestamp: commit.commit_timestamp,
5694 author_name: commit.author_name.to_string(),
5695 }),
5696 }
5697}
5698
5699fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5700 proto::Worktree {
5701 path: worktree.path.to_string_lossy().to_string(),
5702 ref_name: worktree.ref_name.to_string(),
5703 sha: worktree.sha.to_string(),
5704 }
5705}
5706
5707fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5708 git::repository::Worktree {
5709 path: PathBuf::from(proto.path.clone()),
5710 ref_name: proto.ref_name.clone().into(),
5711 sha: proto.sha.clone().into(),
5712 }
5713}
5714
5715fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5716 git::repository::Branch {
5717 is_head: proto.is_head,
5718 ref_name: proto.ref_name.clone().into(),
5719 upstream: proto
5720 .upstream
5721 .as_ref()
5722 .map(|upstream| git::repository::Upstream {
5723 ref_name: upstream.ref_name.to_string().into(),
5724 tracking: upstream
5725 .tracking
5726 .as_ref()
5727 .map(|tracking| {
5728 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5729 ahead: tracking.ahead as u32,
5730 behind: tracking.behind as u32,
5731 })
5732 })
5733 .unwrap_or(git::repository::UpstreamTracking::Gone),
5734 }),
5735 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5736 git::repository::CommitSummary {
5737 sha: commit.sha.to_string().into(),
5738 subject: commit.subject.to_string().into(),
5739 commit_timestamp: commit.commit_timestamp,
5740 author_name: commit.author_name.to_string().into(),
5741 has_parent: true,
5742 }
5743 }),
5744 }
5745}
5746
5747fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5748 proto::GitCommitDetails {
5749 sha: commit.sha.to_string(),
5750 message: commit.message.to_string(),
5751 commit_timestamp: commit.commit_timestamp,
5752 author_email: commit.author_email.to_string(),
5753 author_name: commit.author_name.to_string(),
5754 }
5755}
5756
5757fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5758 CommitDetails {
5759 sha: proto.sha.clone().into(),
5760 message: proto.message.clone().into(),
5761 commit_timestamp: proto.commit_timestamp,
5762 author_email: proto.author_email.clone().into(),
5763 author_name: proto.author_name.clone().into(),
5764 }
5765}
5766
5767async fn compute_snapshot(
5768 id: RepositoryId,
5769 work_directory_abs_path: Arc<Path>,
5770 prev_snapshot: RepositorySnapshot,
5771 backend: Arc<dyn GitRepository>,
5772) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5773 let mut events = Vec::new();
5774 let branches = backend.branches().await?;
5775 let branch = branches.into_iter().find(|branch| branch.is_head);
5776 let statuses = backend
5777 .status(&[RepoPath::from_rel_path(
5778 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5779 )])
5780 .await?;
5781 let stash_entries = backend.stash_entries().await?;
5782 let statuses_by_path = SumTree::from_iter(
5783 statuses
5784 .entries
5785 .iter()
5786 .map(|(repo_path, status)| StatusEntry {
5787 repo_path: repo_path.clone(),
5788 status: *status,
5789 }),
5790 (),
5791 );
5792 let (merge_details, merge_heads_changed) =
5793 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5794 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5795
5796 if merge_heads_changed {
5797 events.push(RepositoryEvent::MergeHeadsChanged);
5798 }
5799
5800 if statuses_by_path != prev_snapshot.statuses_by_path {
5801 events.push(RepositoryEvent::StatusesChanged)
5802 }
5803
5804 // Useful when branch is None in detached head state
5805 let head_commit = match backend.head_sha().await {
5806 Some(head_sha) => backend.show(head_sha).await.log_err(),
5807 None => None,
5808 };
5809
5810 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5811 events.push(RepositoryEvent::BranchChanged);
5812 }
5813
5814 // Used by edit prediction data collection
5815 let remote_origin_url = backend.remote_url("origin");
5816 let remote_upstream_url = backend.remote_url("upstream");
5817
5818 let snapshot = RepositorySnapshot {
5819 id,
5820 statuses_by_path,
5821 work_directory_abs_path,
5822 path_style: prev_snapshot.path_style,
5823 scan_id: prev_snapshot.scan_id + 1,
5824 branch,
5825 head_commit,
5826 merge: merge_details,
5827 remote_origin_url,
5828 remote_upstream_url,
5829 stash_entries,
5830 };
5831
5832 Ok((snapshot, events))
5833}
5834
5835fn status_from_proto(
5836 simple_status: i32,
5837 status: Option<proto::GitFileStatus>,
5838) -> anyhow::Result<FileStatus> {
5839 use proto::git_file_status::Variant;
5840
5841 let Some(variant) = status.and_then(|status| status.variant) else {
5842 let code = proto::GitStatus::from_i32(simple_status)
5843 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5844 let result = match code {
5845 proto::GitStatus::Added => TrackedStatus {
5846 worktree_status: StatusCode::Added,
5847 index_status: StatusCode::Unmodified,
5848 }
5849 .into(),
5850 proto::GitStatus::Modified => TrackedStatus {
5851 worktree_status: StatusCode::Modified,
5852 index_status: StatusCode::Unmodified,
5853 }
5854 .into(),
5855 proto::GitStatus::Conflict => UnmergedStatus {
5856 first_head: UnmergedStatusCode::Updated,
5857 second_head: UnmergedStatusCode::Updated,
5858 }
5859 .into(),
5860 proto::GitStatus::Deleted => TrackedStatus {
5861 worktree_status: StatusCode::Deleted,
5862 index_status: StatusCode::Unmodified,
5863 }
5864 .into(),
5865 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5866 };
5867 return Ok(result);
5868 };
5869
5870 let result = match variant {
5871 Variant::Untracked(_) => FileStatus::Untracked,
5872 Variant::Ignored(_) => FileStatus::Ignored,
5873 Variant::Unmerged(unmerged) => {
5874 let [first_head, second_head] =
5875 [unmerged.first_head, unmerged.second_head].map(|head| {
5876 let code = proto::GitStatus::from_i32(head)
5877 .with_context(|| format!("Invalid git status code: {head}"))?;
5878 let result = match code {
5879 proto::GitStatus::Added => UnmergedStatusCode::Added,
5880 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5881 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5882 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5883 };
5884 Ok(result)
5885 });
5886 let [first_head, second_head] = [first_head?, second_head?];
5887 UnmergedStatus {
5888 first_head,
5889 second_head,
5890 }
5891 .into()
5892 }
5893 Variant::Tracked(tracked) => {
5894 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5895 .map(|status| {
5896 let code = proto::GitStatus::from_i32(status)
5897 .with_context(|| format!("Invalid git status code: {status}"))?;
5898 let result = match code {
5899 proto::GitStatus::Modified => StatusCode::Modified,
5900 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5901 proto::GitStatus::Added => StatusCode::Added,
5902 proto::GitStatus::Deleted => StatusCode::Deleted,
5903 proto::GitStatus::Renamed => StatusCode::Renamed,
5904 proto::GitStatus::Copied => StatusCode::Copied,
5905 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5906 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5907 };
5908 Ok(result)
5909 });
5910 let [index_status, worktree_status] = [index_status?, worktree_status?];
5911 TrackedStatus {
5912 index_status,
5913 worktree_status,
5914 }
5915 .into()
5916 }
5917 };
5918 Ok(result)
5919}
5920
5921fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5922 use proto::git_file_status::{Tracked, Unmerged, Variant};
5923
5924 let variant = match status {
5925 FileStatus::Untracked => Variant::Untracked(Default::default()),
5926 FileStatus::Ignored => Variant::Ignored(Default::default()),
5927 FileStatus::Unmerged(UnmergedStatus {
5928 first_head,
5929 second_head,
5930 }) => Variant::Unmerged(Unmerged {
5931 first_head: unmerged_status_to_proto(first_head),
5932 second_head: unmerged_status_to_proto(second_head),
5933 }),
5934 FileStatus::Tracked(TrackedStatus {
5935 index_status,
5936 worktree_status,
5937 }) => Variant::Tracked(Tracked {
5938 index_status: tracked_status_to_proto(index_status),
5939 worktree_status: tracked_status_to_proto(worktree_status),
5940 }),
5941 };
5942 proto::GitFileStatus {
5943 variant: Some(variant),
5944 }
5945}
5946
5947fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5948 match code {
5949 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5950 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5951 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5952 }
5953}
5954
5955fn tracked_status_to_proto(code: StatusCode) -> i32 {
5956 match code {
5957 StatusCode::Added => proto::GitStatus::Added as _,
5958 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5959 StatusCode::Modified => proto::GitStatus::Modified as _,
5960 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5961 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5962 StatusCode::Copied => proto::GitStatus::Copied as _,
5963 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5964 }
5965}