1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
126 /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to reflect
127 /// the operation count that prompted the write.
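///
/// A minimal sketch of the intended bookkeeping (illustrative only, not a doc
/// test; the final comparison is an assumption about how callers consume these
/// counters, not something this struct enforces):
///
/// ```ignore
/// // The user stages a hunk:
/// state.hunk_staging_operation_count += 1;            // e.g. becomes 5
/// // ...later, the asynchronous write of the new index text finishes:
/// state.hunk_staging_operation_count_as_of_write = 5; // the count that prompted the write
/// // A freshly read index text is only treated as current when
/// // state.hunk_staging_operation_count_as_of_write == state.hunk_staging_operation_count.
/// ```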
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
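/// Describes which diff base texts changed for a buffer: only the index text,
/// only the HEAD text, each to a separate value, or both to the same value.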
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
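/// The kind of buffer diff being loaded: `Unstaged` compares the working copy
/// against the index, while `Uncommitted` compares it against HEAD.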
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
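/// Backing state for a [`GitStore`]: either local repositories managed in this
/// process, or a connection to an upstream project, in both cases with an
/// optional downstream client that repository updates are forwarded to.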
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub work_directory_abs_path: Arc<Path>,
259 pub path_style: PathStyle,
260 pub branch: Option<Branch>,
261 pub head_commit: Option<CommitDetails>,
262 pub scan_id: u64,
263 pub merge: MergeDetails,
264 pub remote_origin_url: Option<String>,
265 pub remote_upstream_url: Option<String>,
266 pub stash_entries: GitStash,
267}
268
269type JobId = u64;
270
271#[derive(Clone, Debug, PartialEq, Eq)]
272pub struct JobInfo {
273 pub start: Instant,
274 pub message: SharedString,
275}
276
277pub struct Repository {
278 this: WeakEntity<Self>,
279 snapshot: RepositorySnapshot,
280 commit_message_buffer: Option<Entity<Buffer>>,
281 git_store: WeakEntity<GitStore>,
282 // For a local repository, holds paths that have had worktree events since the last status scan completed,
283 // and that should be examined during the next status scan.
284 paths_needing_status_update: BTreeSet<RepoPath>,
285 job_sender: mpsc::UnboundedSender<GitJob>,
286 active_jobs: HashMap<JobId, JobInfo>,
287 pending_ops: SumTree<PendingOps>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged,
316 MergeHeadsChanged,
317 BranchChanged,
318 StashEntriesChanged,
319 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
320}
321
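/// Event emitted by a [`Repository`] when its set of active git jobs changes.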
322#[derive(Clone, Debug)]
323pub struct JobsUpdated;
324
325#[derive(Debug)]
326pub enum GitStoreEvent {
327 ActiveRepositoryChanged(Option<RepositoryId>),
328 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
329 RepositoryAdded,
330 RepositoryRemoved(RepositoryId),
331 IndexWriteError(anyhow::Error),
332 JobsUpdated,
333 ConflictsUpdated,
334}
335
336impl EventEmitter<RepositoryEvent> for Repository {}
337impl EventEmitter<JobsUpdated> for Repository {}
338impl EventEmitter<GitStoreEvent> for GitStore {}
339
340pub struct GitJob {
341 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
342 key: Option<GitJobKey>,
343}
344
345#[derive(PartialEq, Eq)]
346enum GitJobKey {
347 WriteIndex(Vec<RepoPath>),
348 ReloadBufferDiffBases,
349 RefreshStatuses,
350 ReloadGitState,
351}
352
353impl GitStore {
354 pub fn local(
355 worktree_store: &Entity<WorktreeStore>,
356 buffer_store: Entity<BufferStore>,
357 environment: Entity<ProjectEnvironment>,
358 fs: Arc<dyn Fs>,
359 cx: &mut Context<Self>,
360 ) -> Self {
361 Self::new(
362 worktree_store.clone(),
363 buffer_store,
364 GitStoreState::Local {
365 next_repository_id: Arc::new(AtomicU64::new(1)),
366 downstream: None,
367 project_environment: environment,
368 fs,
369 },
370 cx,
371 )
372 }
373
374 pub fn remote(
375 worktree_store: &Entity<WorktreeStore>,
376 buffer_store: Entity<BufferStore>,
377 upstream_client: AnyProtoClient,
378 project_id: u64,
379 cx: &mut Context<Self>,
380 ) -> Self {
381 Self::new(
382 worktree_store.clone(),
383 buffer_store,
384 GitStoreState::Remote {
385 upstream_client,
386 upstream_project_id: project_id,
387 downstream: None,
388 },
389 cx,
390 )
391 }
392
393 fn new(
394 worktree_store: Entity<WorktreeStore>,
395 buffer_store: Entity<BufferStore>,
396 state: GitStoreState,
397 cx: &mut Context<Self>,
398 ) -> Self {
399 let _subscriptions = vec![
400 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
401 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
402 ];
403
404 GitStore {
405 state,
406 buffer_store,
407 worktree_store,
408 repositories: HashMap::default(),
409 worktree_ids: HashMap::default(),
410 active_repo_id: None,
411 _subscriptions,
412 loading_diffs: HashMap::default(),
413 shared_diffs: HashMap::default(),
414 diffs: HashMap::default(),
415 }
416 }
417
418 pub fn init(client: &AnyProtoClient) {
419 client.add_entity_request_handler(Self::handle_get_remotes);
420 client.add_entity_request_handler(Self::handle_get_branches);
421 client.add_entity_request_handler(Self::handle_get_default_branch);
422 client.add_entity_request_handler(Self::handle_change_branch);
423 client.add_entity_request_handler(Self::handle_create_branch);
424 client.add_entity_request_handler(Self::handle_rename_branch);
425 client.add_entity_request_handler(Self::handle_git_init);
426 client.add_entity_request_handler(Self::handle_push);
427 client.add_entity_request_handler(Self::handle_pull);
428 client.add_entity_request_handler(Self::handle_fetch);
429 client.add_entity_request_handler(Self::handle_stage);
430 client.add_entity_request_handler(Self::handle_unstage);
431 client.add_entity_request_handler(Self::handle_stash);
432 client.add_entity_request_handler(Self::handle_stash_pop);
433 client.add_entity_request_handler(Self::handle_stash_apply);
434 client.add_entity_request_handler(Self::handle_stash_drop);
435 client.add_entity_request_handler(Self::handle_commit);
436 client.add_entity_request_handler(Self::handle_run_hook);
437 client.add_entity_request_handler(Self::handle_reset);
438 client.add_entity_request_handler(Self::handle_show);
439 client.add_entity_request_handler(Self::handle_load_commit_diff);
440 client.add_entity_request_handler(Self::handle_checkout_files);
441 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
442 client.add_entity_request_handler(Self::handle_set_index_text);
443 client.add_entity_request_handler(Self::handle_askpass);
444 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
445 client.add_entity_request_handler(Self::handle_git_diff);
446 client.add_entity_request_handler(Self::handle_tree_diff);
447 client.add_entity_request_handler(Self::handle_get_blob_content);
448 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
449 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
450 client.add_entity_message_handler(Self::handle_update_diff_bases);
451 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
452 client.add_entity_request_handler(Self::handle_blame_buffer);
453 client.add_entity_message_handler(Self::handle_update_repository);
454 client.add_entity_message_handler(Self::handle_remove_repository);
455 client.add_entity_request_handler(Self::handle_git_clone);
456 client.add_entity_request_handler(Self::handle_get_worktrees);
457 client.add_entity_request_handler(Self::handle_create_worktree);
458 }
459
460 pub fn is_local(&self) -> bool {
461 matches!(self.state, GitStoreState::Local { .. })
462 }

463 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
464 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
465 let id = repo.read(cx).id;
466 if self.active_repo_id != Some(id) {
467 self.active_repo_id = Some(id);
468 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
469 }
470 }
471 }
472
473 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
474 match &mut self.state {
475 GitStoreState::Remote {
476 downstream: downstream_client,
477 ..
478 } => {
479 for repo in self.repositories.values() {
480 let update = repo.read(cx).snapshot.initial_update(project_id);
481 for update in split_repository_update(update) {
482 client.send(update).log_err();
483 }
484 }
485 *downstream_client = Some((client, ProjectId(project_id)));
486 }
487 GitStoreState::Local {
488 downstream: downstream_client,
489 ..
490 } => {
491 let mut snapshots = HashMap::default();
492 let (updates_tx, mut updates_rx) = mpsc::unbounded();
493 for repo in self.repositories.values() {
494 updates_tx
495 .unbounded_send(DownstreamUpdate::UpdateRepository(
496 repo.read(cx).snapshot.clone(),
497 ))
498 .ok();
499 }
500 *downstream_client = Some(LocalDownstreamState {
501 client: client.clone(),
502 project_id: ProjectId(project_id),
503 updates_tx,
504 _task: cx.spawn(async move |this, cx| {
505 cx.background_spawn(async move {
506 while let Some(update) = updates_rx.next().await {
507 match update {
508 DownstreamUpdate::UpdateRepository(snapshot) => {
509 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
510 {
511 let update =
512 snapshot.build_update(old_snapshot, project_id);
513 *old_snapshot = snapshot;
514 for update in split_repository_update(update) {
515 client.send(update)?;
516 }
517 } else {
518 let update = snapshot.initial_update(project_id);
519 for update in split_repository_update(update) {
520 client.send(update)?;
521 }
522 snapshots.insert(snapshot.id, snapshot);
523 }
524 }
525 DownstreamUpdate::RemoveRepository(id) => {
526 client.send(proto::RemoveRepository {
527 project_id,
528 id: id.to_proto(),
529 })?;
530 }
531 }
532 }
533 anyhow::Ok(())
534 })
535 .await
536 .ok();
537 this.update(cx, |this, _| {
538 if let GitStoreState::Local {
539 downstream: downstream_client,
540 ..
541 } = &mut this.state
542 {
543 downstream_client.take();
544 } else {
545 unreachable!("unshared called on remote store");
546 }
547 })
548 }),
549 });
550 }
551 }
552 }
553
554 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
555 match &mut self.state {
556 GitStoreState::Local {
557 downstream: downstream_client,
558 ..
559 } => {
560 downstream_client.take();
561 }
562 GitStoreState::Remote {
563 downstream: downstream_client,
564 ..
565 } => {
566 downstream_client.take();
567 }
568 }
569 self.shared_diffs.clear();
570 }
571
572 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
573 self.shared_diffs.remove(peer_id);
574 }
575
576 pub fn active_repository(&self) -> Option<Entity<Repository>> {
577 self.active_repo_id
578 .as_ref()
579 .map(|id| self.repositories[id].clone())
580 }
581
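/// Returns the unstaged diff (working copy vs. index) for `buffer`, reusing a
/// cached diff when one exists and otherwise loading the staged text from the
/// buffer's repository.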
582 pub fn open_unstaged_diff(
583 &mut self,
584 buffer: Entity<Buffer>,
585 cx: &mut Context<Self>,
586 ) -> Task<Result<Entity<BufferDiff>>> {
587 let buffer_id = buffer.read(cx).remote_id();
588 if let Some(diff_state) = self.diffs.get(&buffer_id)
589 && let Some(unstaged_diff) = diff_state
590 .read(cx)
591 .unstaged_diff
592 .as_ref()
593 .and_then(|weak| weak.upgrade())
594 {
595 if let Some(task) =
596 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
597 {
598 return cx.background_executor().spawn(async move {
599 task.await;
600 Ok(unstaged_diff)
601 });
602 }
603 return Task::ready(Ok(unstaged_diff));
604 }
605
606 let Some((repo, repo_path)) =
607 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
608 else {
609 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
610 };
611
612 let task = self
613 .loading_diffs
614 .entry((buffer_id, DiffKind::Unstaged))
615 .or_insert_with(|| {
616 let staged_text = repo.update(cx, |repo, cx| {
617 repo.load_staged_text(buffer_id, repo_path, cx)
618 });
619 cx.spawn(async move |this, cx| {
620 Self::open_diff_internal(
621 this,
622 DiffKind::Unstaged,
623 staged_text.await.map(DiffBasesChange::SetIndex),
624 buffer,
625 cx,
626 )
627 .await
628 .map_err(Arc::new)
629 })
630 .shared()
631 })
632 .clone();
633
634 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
635 }
636
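/// Returns a diff of `buffer` against the blob at `oid` (or against an empty
/// base when `oid` is `None`), with the buffer's unstaged diff attached as the
/// secondary diff.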
637 pub fn open_diff_since(
638 &mut self,
639 oid: Option<git::Oid>,
640 buffer: Entity<Buffer>,
641 repo: Entity<Repository>,
642 languages: Arc<LanguageRegistry>,
643 cx: &mut Context<Self>,
644 ) -> Task<Result<Entity<BufferDiff>>> {
645 cx.spawn(async move |this, cx| {
646 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
647 let content = match oid {
648 None => None,
649 Some(oid) => Some(
650 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
651 .await?,
652 ),
653 };
654 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
655
656 buffer_diff
657 .update(cx, |buffer_diff, cx| {
658 buffer_diff.set_base_text(
659 content.map(Arc::new),
660 buffer_snapshot.language().cloned(),
661 Some(languages.clone()),
662 buffer_snapshot.text,
663 cx,
664 )
665 })?
666 .await?;
667 let unstaged_diff = this
668 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
669 .await?;
670 buffer_diff.update(cx, |buffer_diff, _| {
671 buffer_diff.set_secondary_diff(unstaged_diff);
672 })?;
673
674 this.update(cx, |_, cx| {
675 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
676 .detach();
677 })?;
678
679 Ok(buffer_diff)
680 })
681 }
682
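/// Returns the uncommitted diff (working copy vs. HEAD) for `buffer`, reusing a
/// cached diff when one exists and otherwise loading the committed text from
/// the buffer's repository.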
683 pub fn open_uncommitted_diff(
684 &mut self,
685 buffer: Entity<Buffer>,
686 cx: &mut Context<Self>,
687 ) -> Task<Result<Entity<BufferDiff>>> {
688 let buffer_id = buffer.read(cx).remote_id();
689
690 if let Some(diff_state) = self.diffs.get(&buffer_id)
691 && let Some(uncommitted_diff) = diff_state
692 .read(cx)
693 .uncommitted_diff
694 .as_ref()
695 .and_then(|weak| weak.upgrade())
696 {
697 if let Some(task) =
698 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
699 {
700 return cx.background_executor().spawn(async move {
701 task.await;
702 Ok(uncommitted_diff)
703 });
704 }
705 return Task::ready(Ok(uncommitted_diff));
706 }
707
708 let Some((repo, repo_path)) =
709 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
710 else {
711 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
712 };
713
714 let task = self
715 .loading_diffs
716 .entry((buffer_id, DiffKind::Uncommitted))
717 .or_insert_with(|| {
718 let changes = repo.update(cx, |repo, cx| {
719 repo.load_committed_text(buffer_id, repo_path, cx)
720 });
721
722 // todo(lw): hot foreground spawn
723 cx.spawn(async move |this, cx| {
724 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
725 .await
726 .map_err(Arc::new)
727 })
728 .shared()
729 })
730 .clone();
731
732 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
733 }
734
735 async fn open_diff_internal(
736 this: WeakEntity<Self>,
737 kind: DiffKind,
738 texts: Result<DiffBasesChange>,
739 buffer_entity: Entity<Buffer>,
740 cx: &mut AsyncApp,
741 ) -> Result<Entity<BufferDiff>> {
742 let diff_bases_change = match texts {
743 Err(e) => {
744 this.update(cx, |this, cx| {
745 let buffer = buffer_entity.read(cx);
746 let buffer_id = buffer.remote_id();
747 this.loading_diffs.remove(&(buffer_id, kind));
748 })?;
749 return Err(e);
750 }
751 Ok(change) => change,
752 };
753
754 this.update(cx, |this, cx| {
755 let buffer = buffer_entity.read(cx);
756 let buffer_id = buffer.remote_id();
757 let language = buffer.language().cloned();
758 let language_registry = buffer.language_registry();
759 let text_snapshot = buffer.text_snapshot();
760 this.loading_diffs.remove(&(buffer_id, kind));
761
762 let git_store = cx.weak_entity();
763 let diff_state = this
764 .diffs
765 .entry(buffer_id)
766 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
767
768 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
769
770 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
771 diff_state.update(cx, |diff_state, cx| {
772 diff_state.language = language;
773 diff_state.language_registry = language_registry;
774
775 match kind {
776 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
777 DiffKind::Uncommitted => {
778 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
779 diff
780 } else {
781 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
782 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
783 unstaged_diff
784 };
785
786 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
787 diff_state.uncommitted_diff = Some(diff.downgrade())
788 }
789 }
790
791 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
792 let rx = diff_state.wait_for_recalculation();
793
794 anyhow::Ok(async move {
795 if let Some(rx) = rx {
796 rx.await;
797 }
798 Ok(diff)
799 })
800 })
801 })??
802 .await
803 }
804
805 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
806 let diff_state = self.diffs.get(&buffer_id)?;
807 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
808 }
809
810 pub fn get_uncommitted_diff(
811 &self,
812 buffer_id: BufferId,
813 cx: &App,
814 ) -> Option<Entity<BufferDiff>> {
815 let diff_state = self.diffs.get(&buffer_id)?;
816 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
817 }
818
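/// Returns the conflict set for `buffer`, creating it if necessary and
/// scheduling a reparse of the buffer's conflict markers.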
819 pub fn open_conflict_set(
820 &mut self,
821 buffer: Entity<Buffer>,
822 cx: &mut Context<Self>,
823 ) -> Entity<ConflictSet> {
824 log::debug!("open conflict set");
825 let buffer_id = buffer.read(cx).remote_id();
826
827 if let Some(git_state) = self.diffs.get(&buffer_id)
828 && let Some(conflict_set) = git_state
829 .read(cx)
830 .conflict_set
831 .as_ref()
832 .and_then(|weak| weak.upgrade())
833 {
834 let conflict_set = conflict_set;
835 let buffer_snapshot = buffer.read(cx).text_snapshot();
836
837 git_state.update(cx, |state, cx| {
838 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
839 });
840
841 return conflict_set;
842 }
843
844 let is_unmerged = self
845 .repository_and_path_for_buffer_id(buffer_id, cx)
846 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
847 let git_store = cx.weak_entity();
848 let buffer_git_state = self
849 .diffs
850 .entry(buffer_id)
851 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
852 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
853
854 self._subscriptions
855 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
856 cx.emit(GitStoreEvent::ConflictsUpdated);
857 }));
858
859 buffer_git_state.update(cx, |state, cx| {
860 state.conflict_set = Some(conflict_set.downgrade());
861 let buffer_snapshot = buffer.read(cx).text_snapshot();
862 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
863 });
864
865 conflict_set
866 }
867
868 pub fn project_path_git_status(
869 &self,
870 project_path: &ProjectPath,
871 cx: &App,
872 ) -> Option<FileStatus> {
873 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
874 Some(repo.read(cx).status_for_path(&repo_path)?.status)
875 }
876
877 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
878 let mut work_directory_abs_paths = Vec::new();
879 let mut checkpoints = Vec::new();
880 for repository in self.repositories.values() {
881 repository.update(cx, |repository, _| {
882 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
883 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
884 });
885 }
886
887 cx.background_executor().spawn(async move {
888 let checkpoints = future::try_join_all(checkpoints).await?;
889 Ok(GitStoreCheckpoint {
890 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
891 .into_iter()
892 .zip(checkpoints)
893 .collect(),
894 })
895 })
896 }
897
898 pub fn restore_checkpoint(
899 &self,
900 checkpoint: GitStoreCheckpoint,
901 cx: &mut App,
902 ) -> Task<Result<()>> {
903 let repositories_by_work_dir_abs_path = self
904 .repositories
905 .values()
906 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
907 .collect::<HashMap<_, _>>();
908
909 let mut tasks = Vec::new();
910 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
911 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
912 let restore = repository.update(cx, |repository, _| {
913 repository.restore_checkpoint(checkpoint)
914 });
915 tasks.push(async move { restore.await? });
916 }
917 }
918 cx.background_spawn(async move {
919 future::try_join_all(tasks).await?;
920 Ok(())
921 })
922 }
923
924 /// Compares two checkpoints, returning true if they are equal.
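///
/// A hedged usage sketch combining [`GitStore::checkpoint`],
/// [`GitStore::compare_checkpoints`], and [`GitStore::restore_checkpoint`]
/// (illustrative only, not a doc test; assumes an async context `cx` and an
/// existing `git_store: Entity<GitStore>`):
///
/// ```ignore
/// let before = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
/// // ... run operations that may modify the working copies ...
/// let after = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
/// let unchanged = git_store
///     .update(cx, |store, cx| store.compare_checkpoints(before.clone(), after, cx))?
///     .await?;
/// if !unchanged {
///     // Roll the repositories back to the earlier checkpoint.
///     git_store
///         .update(cx, |store, cx| store.restore_checkpoint(before, cx))?
///         .await?;
/// }
/// ```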
925 pub fn compare_checkpoints(
926 &self,
927 left: GitStoreCheckpoint,
928 mut right: GitStoreCheckpoint,
929 cx: &mut App,
930 ) -> Task<Result<bool>> {
931 let repositories_by_work_dir_abs_path = self
932 .repositories
933 .values()
934 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
935 .collect::<HashMap<_, _>>();
936
937 let mut tasks = Vec::new();
938 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
939 if let Some(right_checkpoint) = right
940 .checkpoints_by_work_dir_abs_path
941 .remove(&work_dir_abs_path)
942 {
943 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
944 {
945 let compare = repository.update(cx, |repository, _| {
946 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
947 });
948
949 tasks.push(async move { compare.await? });
950 }
951 } else {
952 return Task::ready(Ok(false));
953 }
954 }
955 cx.background_spawn(async move {
956 Ok(future::try_join_all(tasks)
957 .await?
958 .into_iter()
959 .all(|result| result))
960 })
961 }
962
963 /// Computes git blame information for the given buffer, optionally for an earlier version of its contents.
964 pub fn blame_buffer(
965 &self,
966 buffer: &Entity<Buffer>,
967 version: Option<clock::Global>,
968 cx: &mut App,
969 ) -> Task<Result<Option<Blame>>> {
970 let buffer = buffer.read(cx);
971 let Some((repo, repo_path)) =
972 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
973 else {
974 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
975 };
976 let content = match &version {
977 Some(version) => buffer.rope_for_version(version),
978 None => buffer.as_rope().clone(),
979 };
980 let version = version.unwrap_or(buffer.version());
981 let buffer_id = buffer.remote_id();
982
983 let rx = repo.update(cx, |repo, _| {
984 repo.send_job(None, move |state, _| async move {
985 match state {
986 RepositoryState::Local { backend, .. } => backend
987 .blame(repo_path.clone(), content)
988 .await
989 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
990 .map(Some),
991 RepositoryState::Remote { project_id, client } => {
992 let response = client
993 .request(proto::BlameBuffer {
994 project_id: project_id.to_proto(),
995 buffer_id: buffer_id.into(),
996 version: serialize_version(&version),
997 })
998 .await?;
999 Ok(deserialize_blame_buffer_response(response))
1000 }
1001 }
1002 })
1003 });
1004
1005 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1006 }
1007
1008 pub fn get_permalink_to_line(
1009 &self,
1010 buffer: &Entity<Buffer>,
1011 selection: Range<u32>,
1012 cx: &mut App,
1013 ) -> Task<Result<url::Url>> {
1014 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1015 return Task::ready(Err(anyhow!("buffer has no file")));
1016 };
1017
1018 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1019 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1020 cx,
1021 ) else {
1022 // If we're not in a Git repo, check whether this is a Rust source
1023 // file in the Cargo registry (presumably opened with go-to-definition
1024 // from a normal Rust file). If so, we can put together a permalink
1025 // using crate metadata.
1026 if buffer
1027 .read(cx)
1028 .language()
1029 .is_none_or(|lang| lang.name() != "Rust".into())
1030 {
1031 return Task::ready(Err(anyhow!("no permalink available")));
1032 }
1033 let file_path = file.worktree.read(cx).absolutize(&file.path);
1034 return cx.spawn(async move |cx| {
1035 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1036 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1037 .context("no permalink available")
1038 });
1039 };
1040
1041 let buffer_id = buffer.read(cx).remote_id();
1042 let branch = repo.read(cx).branch.clone();
1043 let remote = branch
1044 .as_ref()
1045 .and_then(|b| b.upstream.as_ref())
1046 .and_then(|b| b.remote_name())
1047 .unwrap_or("origin")
1048 .to_string();
1049
1050 let rx = repo.update(cx, |repo, _| {
1051 repo.send_job(None, move |state, cx| async move {
1052 match state {
1053 RepositoryState::Local { backend, .. } => {
1054 let origin_url = backend
1055 .remote_url(&remote)
1056 .with_context(|| format!("remote \"{remote}\" not found"))?;
1057
1058 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1059
1060 let provider_registry =
1061 cx.update(GitHostingProviderRegistry::default_global)?;
1062
1063 let (provider, remote) =
1064 parse_git_remote_url(provider_registry, &origin_url)
1065 .context("parsing Git remote URL")?;
1066
1067 Ok(provider.build_permalink(
1068 remote,
1069 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1070 ))
1071 }
1072 RepositoryState::Remote { project_id, client } => {
1073 let response = client
1074 .request(proto::GetPermalinkToLine {
1075 project_id: project_id.to_proto(),
1076 buffer_id: buffer_id.into(),
1077 selection: Some(proto::Range {
1078 start: selection.start as u64,
1079 end: selection.end as u64,
1080 }),
1081 })
1082 .await?;
1083
1084 url::Url::parse(&response.permalink).context("failed to parse permalink")
1085 }
1086 }
1087 })
1088 });
1089 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1090 }
1091
1092 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1093 match &self.state {
1094 GitStoreState::Local {
1095 downstream: downstream_client,
1096 ..
1097 } => downstream_client
1098 .as_ref()
1099 .map(|state| (state.client.clone(), state.project_id)),
1100 GitStoreState::Remote {
1101 downstream: downstream_client,
1102 ..
1103 } => downstream_client.clone(),
1104 }
1105 }
1106
1107 fn upstream_client(&self) -> Option<AnyProtoClient> {
1108 match &self.state {
1109 GitStoreState::Local { .. } => None,
1110 GitStoreState::Remote {
1111 upstream_client, ..
1112 } => Some(upstream_client.clone()),
1113 }
1114 }
1115
1116 fn on_worktree_store_event(
1117 &mut self,
1118 worktree_store: Entity<WorktreeStore>,
1119 event: &WorktreeStoreEvent,
1120 cx: &mut Context<Self>,
1121 ) {
1122 let GitStoreState::Local {
1123 project_environment,
1124 downstream,
1125 next_repository_id,
1126 fs,
1127 } = &self.state
1128 else {
1129 return;
1130 };
1131
1132 match event {
1133 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1134 if let Some(worktree) = self
1135 .worktree_store
1136 .read(cx)
1137 .worktree_for_id(*worktree_id, cx)
1138 {
1139 let paths_by_git_repo =
1140 self.process_updated_entries(&worktree, updated_entries, cx);
1141 let downstream = downstream
1142 .as_ref()
1143 .map(|downstream| downstream.updates_tx.clone());
1144 cx.spawn(async move |_, cx| {
1145 let paths_by_git_repo = paths_by_git_repo.await;
1146 for (repo, paths) in paths_by_git_repo {
1147 repo.update(cx, |repo, cx| {
1148 repo.paths_changed(paths, downstream.clone(), cx);
1149 })
1150 .ok();
1151 }
1152 })
1153 .detach();
1154 }
1155 }
1156 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1157 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1158 else {
1159 return;
1160 };
1161 if !worktree.read(cx).is_visible() {
1162 log::debug!(
1163 "not adding repositories for local worktree {:?} because it's not visible",
1164 worktree.read(cx).abs_path()
1165 );
1166 return;
1167 }
1168 self.update_repositories_from_worktree(
1169 *worktree_id,
1170 project_environment.clone(),
1171 next_repository_id.clone(),
1172 downstream
1173 .as_ref()
1174 .map(|downstream| downstream.updates_tx.clone()),
1175 changed_repos.clone(),
1176 fs.clone(),
1177 cx,
1178 );
1179 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1180 }
1181 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1182 let repos_without_worktree: Vec<RepositoryId> = self
1183 .worktree_ids
1184 .iter_mut()
1185 .filter_map(|(repo_id, worktree_ids)| {
1186 worktree_ids.remove(worktree_id);
1187 if worktree_ids.is_empty() {
1188 Some(*repo_id)
1189 } else {
1190 None
1191 }
1192 })
1193 .collect();
1194 let is_active_repo_removed = repos_without_worktree
1195 .iter()
1196 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1197
1198 for repo_id in repos_without_worktree {
1199 self.repositories.remove(&repo_id);
1200 self.worktree_ids.remove(&repo_id);
1201 if let Some(updates_tx) =
1202 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1203 {
1204 updates_tx
1205 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1206 .ok();
1207 }
1208 }
1209
1210 if is_active_repo_removed {
1211 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1212 self.active_repo_id = Some(repo_id);
1213 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1214 } else {
1215 self.active_repo_id = None;
1216 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1217 }
1218 }
1219 }
1220 _ => {}
1221 }
1222 }

1223 fn on_repository_event(
1224 &mut self,
1225 repo: Entity<Repository>,
1226 event: &RepositoryEvent,
1227 cx: &mut Context<Self>,
1228 ) {
1229 let id = repo.read(cx).id;
1230 let repo_snapshot = repo.read(cx).snapshot.clone();
1231 for (buffer_id, diff) in self.diffs.iter() {
1232 if let Some((buffer_repo, repo_path)) =
1233 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1234 && buffer_repo == repo
1235 {
1236 diff.update(cx, |diff, cx| {
1237 if let Some(conflict_set) = &diff.conflict_set {
1238 let conflict_status_changed =
1239 conflict_set.update(cx, |conflict_set, cx| {
1240 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1241 conflict_set.set_has_conflict(has_conflict, cx)
1242 })?;
1243 if conflict_status_changed {
1244 let buffer_store = self.buffer_store.read(cx);
1245 if let Some(buffer) = buffer_store.get(*buffer_id) {
1246 let _ = diff
1247 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1248 }
1249 }
1250 }
1251 anyhow::Ok(())
1252 })
1253 .ok();
1254 }
1255 }
1256 cx.emit(GitStoreEvent::RepositoryUpdated(
1257 id,
1258 event.clone(),
1259 self.active_repo_id == Some(id),
1260 ))
1261 }
1262
1263 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1264 cx.emit(GitStoreEvent::JobsUpdated)
1265 }
1266
1267 /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1268 fn update_repositories_from_worktree(
1269 &mut self,
1270 worktree_id: WorktreeId,
1271 project_environment: Entity<ProjectEnvironment>,
1272 next_repository_id: Arc<AtomicU64>,
1273 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1274 updated_git_repositories: UpdatedGitRepositoriesSet,
1275 fs: Arc<dyn Fs>,
1276 cx: &mut Context<Self>,
1277 ) {
1278 let mut removed_ids = Vec::new();
1279 for update in updated_git_repositories.iter() {
1280 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1281 let existing_work_directory_abs_path =
1282 repo.read(cx).work_directory_abs_path.clone();
1283 Some(&existing_work_directory_abs_path)
1284 == update.old_work_directory_abs_path.as_ref()
1285 || Some(&existing_work_directory_abs_path)
1286 == update.new_work_directory_abs_path.as_ref()
1287 }) {
1288 let repo_id = *id;
1289 if let Some(new_work_directory_abs_path) =
1290 update.new_work_directory_abs_path.clone()
1291 {
1292 self.worktree_ids
1293 .entry(repo_id)
1294 .or_insert_with(HashSet::new)
1295 .insert(worktree_id);
1296 existing.update(cx, |existing, cx| {
1297 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1298 existing.schedule_scan(updates_tx.clone(), cx);
1299 });
1300 } else {
1301 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1302 worktree_ids.remove(&worktree_id);
1303 if worktree_ids.is_empty() {
1304 removed_ids.push(repo_id);
1305 }
1306 }
1307 }
1308 } else if let UpdatedGitRepository {
1309 new_work_directory_abs_path: Some(work_directory_abs_path),
1310 dot_git_abs_path: Some(dot_git_abs_path),
1311 repository_dir_abs_path: Some(repository_dir_abs_path),
1312 common_dir_abs_path: Some(common_dir_abs_path),
1313 ..
1314 } = update
1315 {
1316 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1317 let git_store = cx.weak_entity();
1318 let repo = cx.new(|cx| {
1319 let mut repo = Repository::local(
1320 id,
1321 work_directory_abs_path.clone(),
1322 dot_git_abs_path.clone(),
1323 repository_dir_abs_path.clone(),
1324 common_dir_abs_path.clone(),
1325 project_environment.downgrade(),
1326 fs.clone(),
1327 git_store,
1328 cx,
1329 );
1330 if let Some(updates_tx) = updates_tx.as_ref() {
1331 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1332 updates_tx
1333 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1334 .ok();
1335 }
1336 repo.schedule_scan(updates_tx.clone(), cx);
1337 repo
1338 });
1339 self._subscriptions
1340 .push(cx.subscribe(&repo, Self::on_repository_event));
1341 self._subscriptions
1342 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1343 self.repositories.insert(id, repo);
1344 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1345 cx.emit(GitStoreEvent::RepositoryAdded);
1346 self.active_repo_id.get_or_insert_with(|| {
1347 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1348 id
1349 });
1350 }
1351 }
1352
1353 for id in removed_ids {
1354 if self.active_repo_id == Some(id) {
1355 self.active_repo_id = None;
1356 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1357 }
1358 self.repositories.remove(&id);
1359 if let Some(updates_tx) = updates_tx.as_ref() {
1360 updates_tx
1361 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1362 .ok();
1363 }
1364 }
1365 }
1366
1367 fn on_buffer_store_event(
1368 &mut self,
1369 _: Entity<BufferStore>,
1370 event: &BufferStoreEvent,
1371 cx: &mut Context<Self>,
1372 ) {
1373 match event {
1374 BufferStoreEvent::BufferAdded(buffer) => {
1375 cx.subscribe(buffer, |this, buffer, event, cx| {
1376 if let BufferEvent::LanguageChanged = event {
1377 let buffer_id = buffer.read(cx).remote_id();
1378 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1379 diff_state.update(cx, |diff_state, cx| {
1380 diff_state.buffer_language_changed(buffer, cx);
1381 });
1382 }
1383 }
1384 })
1385 .detach();
1386 }
1387 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1388 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1389 diffs.remove(buffer_id);
1390 }
1391 }
1392 BufferStoreEvent::BufferDropped(buffer_id) => {
1393 self.diffs.remove(buffer_id);
1394 for diffs in self.shared_diffs.values_mut() {
1395 diffs.remove(buffer_id);
1396 }
1397 }
1398 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1399 // Whenever a buffer's file path changes, it's possible that the
1400 // new path is tracked by a git repository. In that case, we want
1401 // to update the buffer's `BufferGitState` (if it already has
1402 // one) with diff bases from that repository.
1403 let buffer_id = buffer.read(cx).remote_id();
1404 let diff_state = self.diffs.get(&buffer_id);
1405 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1406
1407 if let Some(diff_state) = diff_state
1408 && let Some((repo, repo_path)) = repo
1409 {
1410 let buffer = buffer.clone();
1411 let diff_state = diff_state.clone();
1412
1413 cx.spawn(async move |_git_store, cx| {
1414 async {
1415 let diff_bases_change = repo
1416 .update(cx, |repo, cx| {
1417 repo.load_committed_text(buffer_id, repo_path, cx)
1418 })?
1419 .await?;
1420
1421 diff_state.update(cx, |diff_state, cx| {
1422 let buffer_snapshot = buffer.read(cx).text_snapshot();
1423 diff_state.diff_bases_changed(
1424 buffer_snapshot,
1425 Some(diff_bases_change),
1426 cx,
1427 );
1428 })
1429 }
1430 .await
1431 .log_err();
1432 })
1433 .detach();
1434 }
1435 }
1436 _ => {}
1437 }
1438 }
1439
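/// Recalculates the diffs and reparses the conflict markers for the given
/// buffers, returning a future that resolves once the recalculation has
/// finished.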
1440 pub fn recalculate_buffer_diffs(
1441 &mut self,
1442 buffers: Vec<Entity<Buffer>>,
1443 cx: &mut Context<Self>,
1444 ) -> impl Future<Output = ()> + use<> {
1445 let mut futures = Vec::new();
1446 for buffer in buffers {
1447 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1448 let buffer = buffer.read(cx).text_snapshot();
1449 diff_state.update(cx, |diff_state, cx| {
1450 diff_state.recalculate_diffs(buffer.clone(), cx);
1451 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1452 });
1453 futures.push(diff_state.update(cx, |diff_state, cx| {
1454 diff_state
1455 .reparse_conflict_markers(buffer, cx)
1456 .map(|_| {})
1457 .boxed()
1458 }));
1459 }
1460 }
1461 async move {
1462 futures::future::join_all(futures).await;
1463 }
1464 }
1465
1466 fn on_buffer_diff_event(
1467 &mut self,
1468 diff: Entity<buffer_diff::BufferDiff>,
1469 event: &BufferDiffEvent,
1470 cx: &mut Context<Self>,
1471 ) {
1472 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1473 let buffer_id = diff.read(cx).buffer_id;
1474 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1475 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1476 diff_state.hunk_staging_operation_count += 1;
1477 diff_state.hunk_staging_operation_count
1478 });
1479 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1480 let recv = repo.update(cx, |repo, cx| {
1481 log::debug!("hunks changed for {}", path.as_unix_str());
1482 repo.spawn_set_index_text_job(
1483 path,
1484 new_index_text.as_ref().map(|rope| rope.to_string()),
1485 Some(hunk_staging_operation_count),
1486 cx,
1487 )
1488 });
1489 let diff = diff.downgrade();
1490 cx.spawn(async move |this, cx| {
1491 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1492 diff.update(cx, |diff, cx| {
1493 diff.clear_pending_hunks(cx);
1494 })
1495 .ok();
1496 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1497 .ok();
1498 }
1499 })
1500 .detach();
1501 }
1502 }
1503 }
1504 }
1505
1506 fn local_worktree_git_repos_changed(
1507 &mut self,
1508 worktree: Entity<Worktree>,
1509 changed_repos: &UpdatedGitRepositoriesSet,
1510 cx: &mut Context<Self>,
1511 ) {
1512 log::debug!("local worktree repos changed");
1513 debug_assert!(worktree.read(cx).is_local());
1514
1515 for repository in self.repositories.values() {
1516 repository.update(cx, |repository, cx| {
1517 let repo_abs_path = &repository.work_directory_abs_path;
1518 if changed_repos.iter().any(|update| {
1519 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1520 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1521 }) {
1522 repository.reload_buffer_diff_bases(cx);
1523 }
1524 });
1525 }
1526 }
1527
1528 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1529 &self.repositories
1530 }
1531
1532 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1533 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1534 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1535 Some(status.status)
1536 }
1537
1538 pub fn repository_and_path_for_buffer_id(
1539 &self,
1540 buffer_id: BufferId,
1541 cx: &App,
1542 ) -> Option<(Entity<Repository>, RepoPath)> {
1543 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1544 let project_path = buffer.read(cx).project_path(cx)?;
1545 self.repository_and_path_for_project_path(&project_path, cx)
1546 }
1547
1548 pub fn repository_and_path_for_project_path(
1549 &self,
1550 path: &ProjectPath,
1551 cx: &App,
1552 ) -> Option<(Entity<Repository>, RepoPath)> {
1553 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1554 self.repositories
1555 .values()
1556 .filter_map(|repo| {
1557 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1558 Some((repo.clone(), repo_path))
1559 })
1560 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1561 }
1562
1563 pub fn git_init(
1564 &self,
1565 path: Arc<Path>,
1566 fallback_branch_name: String,
1567 cx: &App,
1568 ) -> Task<Result<()>> {
1569 match &self.state {
1570 GitStoreState::Local { fs, .. } => {
1571 let fs = fs.clone();
1572 cx.background_executor()
1573 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1574 }
1575 GitStoreState::Remote {
1576 upstream_client,
1577 upstream_project_id: project_id,
1578 ..
1579 } => {
1580 let client = upstream_client.clone();
1581 let project_id = *project_id;
1582 cx.background_executor().spawn(async move {
1583 client
1584 .request(proto::GitInit {
1585 project_id,
1586 abs_path: path.to_string_lossy().into_owned(),
1587 fallback_branch_name,
1588 })
1589 .await?;
1590 Ok(())
1591 })
1592 }
1593 }
1594 }
1595
1596 pub fn git_clone(
1597 &self,
1598 repo: String,
1599 path: impl Into<Arc<std::path::Path>>,
1600 cx: &App,
1601 ) -> Task<Result<()>> {
1602 let path = path.into();
1603 match &self.state {
1604 GitStoreState::Local { fs, .. } => {
1605 let fs = fs.clone();
1606 cx.background_executor()
1607 .spawn(async move { fs.git_clone(&repo, &path).await })
1608 }
1609 GitStoreState::Remote {
1610 upstream_client,
1611 upstream_project_id,
1612 ..
1613 } => {
1614 if upstream_client.is_via_collab() {
1615 return Task::ready(Err(anyhow!(
1616 "Git Clone isn't supported for project guests"
1617 )));
1618 }
1619 let request = upstream_client.request(proto::GitClone {
1620 project_id: *upstream_project_id,
1621 abs_path: path.to_string_lossy().into_owned(),
1622 remote_repo: repo,
1623 });
1624
1625 cx.background_spawn(async move {
1626 let result = request.await?;
1627
1628 match result.success {
1629 true => Ok(()),
1630 false => Err(anyhow!("Git Clone failed")),
1631 }
1632 })
1633 }
1634 }
1635 }
1636
1637 async fn handle_update_repository(
1638 this: Entity<Self>,
1639 envelope: TypedEnvelope<proto::UpdateRepository>,
1640 mut cx: AsyncApp,
1641 ) -> Result<()> {
1642 this.update(&mut cx, |this, cx| {
1643 let path_style = this.worktree_store.read(cx).path_style();
1644 let mut update = envelope.payload;
1645
1646 let id = RepositoryId::from_proto(update.id);
1647 let client = this.upstream_client().context("no upstream client")?;
1648
1649 let mut repo_subscription = None;
1650 let repo = this.repositories.entry(id).or_insert_with(|| {
1651 let git_store = cx.weak_entity();
1652 let repo = cx.new(|cx| {
1653 Repository::remote(
1654 id,
1655 Path::new(&update.abs_path).into(),
1656 path_style,
1657 ProjectId(update.project_id),
1658 client,
1659 git_store,
1660 cx,
1661 )
1662 });
1663 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1664 cx.emit(GitStoreEvent::RepositoryAdded);
1665 repo
1666 });
1667 this._subscriptions.extend(repo_subscription);
1668
1669 repo.update(cx, {
1670 let update = update.clone();
1671 |repo, cx| repo.apply_remote_update(update, cx)
1672 })?;
1673
1674 this.active_repo_id.get_or_insert_with(|| {
1675 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1676 id
1677 });
1678
1679 if let Some((client, project_id)) = this.downstream_client() {
1680 update.project_id = project_id.to_proto();
1681 client.send(update).log_err();
1682 }
1683 Ok(())
1684 })?
1685 }
1686
1687 async fn handle_remove_repository(
1688 this: Entity<Self>,
1689 envelope: TypedEnvelope<proto::RemoveRepository>,
1690 mut cx: AsyncApp,
1691 ) -> Result<()> {
1692 this.update(&mut cx, |this, cx| {
1693 let mut update = envelope.payload;
1694 let id = RepositoryId::from_proto(update.id);
1695 this.repositories.remove(&id);
1696 if let Some((client, project_id)) = this.downstream_client() {
1697 update.project_id = project_id.to_proto();
1698 client.send(update).log_err();
1699 }
1700 if this.active_repo_id == Some(id) {
1701 this.active_repo_id = None;
1702 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1703 }
1704 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1705 })
1706 }
1707
1708 async fn handle_git_init(
1709 this: Entity<Self>,
1710 envelope: TypedEnvelope<proto::GitInit>,
1711 cx: AsyncApp,
1712 ) -> Result<proto::Ack> {
1713 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1714 let name = envelope.payload.fallback_branch_name;
1715 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1716 .await?;
1717
1718 Ok(proto::Ack {})
1719 }
1720
1721 async fn handle_git_clone(
1722 this: Entity<Self>,
1723 envelope: TypedEnvelope<proto::GitClone>,
1724 cx: AsyncApp,
1725 ) -> Result<proto::GitCloneResponse> {
1726 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1727 let repo_name = envelope.payload.remote_repo;
1728 let result = cx
1729 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1730 .await;
1731
1732 Ok(proto::GitCloneResponse {
1733 success: result.is_ok(),
1734 })
1735 }
1736
1737 async fn handle_fetch(
1738 this: Entity<Self>,
1739 envelope: TypedEnvelope<proto::Fetch>,
1740 mut cx: AsyncApp,
1741 ) -> Result<proto::RemoteMessageResponse> {
1742 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1743 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1744 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1745 let askpass_id = envelope.payload.askpass_id;
1746
1747 let askpass = make_remote_delegate(
1748 this,
1749 envelope.payload.project_id,
1750 repository_id,
1751 askpass_id,
1752 &mut cx,
1753 );
1754
1755 let remote_output = repository_handle
1756 .update(&mut cx, |repository_handle, cx| {
1757 repository_handle.fetch(fetch_options, askpass, cx)
1758 })?
1759 .await??;
1760
1761 Ok(proto::RemoteMessageResponse {
1762 stdout: remote_output.stdout,
1763 stderr: remote_output.stderr,
1764 })
1765 }
1766
1767 async fn handle_push(
1768 this: Entity<Self>,
1769 envelope: TypedEnvelope<proto::Push>,
1770 mut cx: AsyncApp,
1771 ) -> Result<proto::RemoteMessageResponse> {
1772 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1773 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1774
1775 let askpass_id = envelope.payload.askpass_id;
1776 let askpass = make_remote_delegate(
1777 this,
1778 envelope.payload.project_id,
1779 repository_id,
1780 askpass_id,
1781 &mut cx,
1782 );
1783
1784 let options = envelope
1785 .payload
1786 .options
1787 .as_ref()
1788 .map(|_| match envelope.payload.options() {
1789 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1790 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1791 });
1792
1793 let branch_name = envelope.payload.branch_name.into();
1794 let remote_name = envelope.payload.remote_name.into();
1795
1796 let remote_output = repository_handle
1797 .update(&mut cx, |repository_handle, cx| {
1798 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1799 })?
1800 .await??;
1801 Ok(proto::RemoteMessageResponse {
1802 stdout: remote_output.stdout,
1803 stderr: remote_output.stderr,
1804 })
1805 }
1806
1807 async fn handle_pull(
1808 this: Entity<Self>,
1809 envelope: TypedEnvelope<proto::Pull>,
1810 mut cx: AsyncApp,
1811 ) -> Result<proto::RemoteMessageResponse> {
1812 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1813 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1814 let askpass_id = envelope.payload.askpass_id;
1815 let askpass = make_remote_delegate(
1816 this,
1817 envelope.payload.project_id,
1818 repository_id,
1819 askpass_id,
1820 &mut cx,
1821 );
1822
1823 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1824 let remote_name = envelope.payload.remote_name.into();
1825 let rebase = envelope.payload.rebase;
1826
1827 let remote_message = repository_handle
1828 .update(&mut cx, |repository_handle, cx| {
1829 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1830 })?
1831 .await??;
1832
1833 Ok(proto::RemoteMessageResponse {
1834 stdout: remote_message.stdout,
1835 stderr: remote_message.stderr,
1836 })
1837 }
1838
1839 async fn handle_stage(
1840 this: Entity<Self>,
1841 envelope: TypedEnvelope<proto::Stage>,
1842 mut cx: AsyncApp,
1843 ) -> Result<proto::Ack> {
1844 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1845 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1846
1847 let entries = envelope
1848 .payload
1849 .paths
1850 .into_iter()
1851 .map(|path| RepoPath::new(&path))
1852 .collect::<Result<Vec<_>>>()?;
1853
1854 repository_handle
1855 .update(&mut cx, |repository_handle, cx| {
1856 repository_handle.stage_entries(entries, cx)
1857 })?
1858 .await?;
1859 Ok(proto::Ack {})
1860 }
1861
1862 async fn handle_unstage(
1863 this: Entity<Self>,
1864 envelope: TypedEnvelope<proto::Unstage>,
1865 mut cx: AsyncApp,
1866 ) -> Result<proto::Ack> {
1867 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1868 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1869
1870 let entries = envelope
1871 .payload
1872 .paths
1873 .into_iter()
1874 .map(|path| RepoPath::new(&path))
1875 .collect::<Result<Vec<_>>>()?;
1876
1877 repository_handle
1878 .update(&mut cx, |repository_handle, cx| {
1879 repository_handle.unstage_entries(entries, cx)
1880 })?
1881 .await?;
1882
1883 Ok(proto::Ack {})
1884 }
1885
1886 async fn handle_stash(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::Stash>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::Ack> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893
1894 let entries = envelope
1895 .payload
1896 .paths
1897 .into_iter()
1898 .map(|path| RepoPath::new(&path))
1899 .collect::<Result<Vec<_>>>()?;
1900
1901 repository_handle
1902 .update(&mut cx, |repository_handle, cx| {
1903 repository_handle.stash_entries(entries, cx)
1904 })?
1905 .await?;
1906
1907 Ok(proto::Ack {})
1908 }
1909
1910 async fn handle_stash_pop(
1911 this: Entity<Self>,
1912 envelope: TypedEnvelope<proto::StashPop>,
1913 mut cx: AsyncApp,
1914 ) -> Result<proto::Ack> {
1915 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1916 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1917 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1918
1919 repository_handle
1920 .update(&mut cx, |repository_handle, cx| {
1921 repository_handle.stash_pop(stash_index, cx)
1922 })?
1923 .await?;
1924
1925 Ok(proto::Ack {})
1926 }
1927
1928 async fn handle_stash_apply(
1929 this: Entity<Self>,
1930 envelope: TypedEnvelope<proto::StashApply>,
1931 mut cx: AsyncApp,
1932 ) -> Result<proto::Ack> {
1933 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1934 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1935 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1936
1937 repository_handle
1938 .update(&mut cx, |repository_handle, cx| {
1939 repository_handle.stash_apply(stash_index, cx)
1940 })?
1941 .await?;
1942
1943 Ok(proto::Ack {})
1944 }
1945
1946 async fn handle_stash_drop(
1947 this: Entity<Self>,
1948 envelope: TypedEnvelope<proto::StashDrop>,
1949 mut cx: AsyncApp,
1950 ) -> Result<proto::Ack> {
1951 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1952 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1953 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1954
1955 repository_handle
1956 .update(&mut cx, |repository_handle, cx| {
1957 repository_handle.stash_drop(stash_index, cx)
1958 })?
1959 .await??;
1960
1961 Ok(proto::Ack {})
1962 }
1963
1964 async fn handle_set_index_text(
1965 this: Entity<Self>,
1966 envelope: TypedEnvelope<proto::SetIndexText>,
1967 mut cx: AsyncApp,
1968 ) -> Result<proto::Ack> {
1969 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1970 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1971 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1972
1973 repository_handle
1974 .update(&mut cx, |repository_handle, cx| {
1975 repository_handle.spawn_set_index_text_job(
1976 repo_path,
1977 envelope.payload.text,
1978 None,
1979 cx,
1980 )
1981 })?
1982 .await??;
1983 Ok(proto::Ack {})
1984 }
1985
1986 async fn handle_run_hook(
1987 this: Entity<Self>,
1988 envelope: TypedEnvelope<proto::RunGitHook>,
1989 mut cx: AsyncApp,
1990 ) -> Result<proto::Ack> {
1991 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1992 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1993 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
1994 repository_handle
1995 .update(&mut cx, |repository_handle, cx| {
1996 repository_handle.run_hook(hook, cx)
1997 })?
1998 .await??;
1999 Ok(proto::Ack {})
2000 }
2001
2002 async fn handle_commit(
2003 this: Entity<Self>,
2004 envelope: TypedEnvelope<proto::Commit>,
2005 mut cx: AsyncApp,
2006 ) -> Result<proto::Ack> {
2007 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2008 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2009 let askpass_id = envelope.payload.askpass_id;
2010
2011 let askpass = make_remote_delegate(
2012 this,
2013 envelope.payload.project_id,
2014 repository_id,
2015 askpass_id,
2016 &mut cx,
2017 );
2018
2019 let message = SharedString::from(envelope.payload.message);
2020 let name = envelope.payload.name.map(SharedString::from);
2021 let email = envelope.payload.email.map(SharedString::from);
2022 let options = envelope.payload.options.unwrap_or_default();
2023
2024 repository_handle
2025 .update(&mut cx, |repository_handle, cx| {
2026 repository_handle.commit(
2027 message,
2028 name.zip(email),
2029 CommitOptions {
2030 amend: options.amend,
2031 signoff: options.signoff,
2032 },
2033 askpass,
2034 cx,
2035 )
2036 })?
2037 .await??;
2038 Ok(proto::Ack {})
2039 }
2040
2041 async fn handle_get_remotes(
2042 this: Entity<Self>,
2043 envelope: TypedEnvelope<proto::GetRemotes>,
2044 mut cx: AsyncApp,
2045 ) -> Result<proto::GetRemotesResponse> {
2046 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2047 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2048
2049 let branch_name = envelope.payload.branch_name;
2050
2051 let remotes = repository_handle
2052 .update(&mut cx, |repository_handle, _| {
2053 repository_handle.get_remotes(branch_name)
2054 })?
2055 .await??;
2056
2057 Ok(proto::GetRemotesResponse {
2058 remotes: remotes
2059 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2062 })
2063 .collect::<Vec<_>>(),
2064 })
2065 }
2066
2067 async fn handle_get_worktrees(
2068 this: Entity<Self>,
2069 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2070 mut cx: AsyncApp,
2071 ) -> Result<proto::GitWorktreesResponse> {
2072 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2073 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2074
2075 let worktrees = repository_handle
2076 .update(&mut cx, |repository_handle, _| {
2077 repository_handle.worktrees()
2078 })?
2079 .await??;
2080
2081 Ok(proto::GitWorktreesResponse {
2082 worktrees: worktrees
2083 .into_iter()
2084 .map(|worktree| worktree_to_proto(&worktree))
2085 .collect::<Vec<_>>(),
2086 })
2087 }
2088
2089 async fn handle_create_worktree(
2090 this: Entity<Self>,
2091 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2092 mut cx: AsyncApp,
2093 ) -> Result<proto::Ack> {
2094 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2095 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2096 let directory = PathBuf::from(envelope.payload.directory);
2097 let name = envelope.payload.name;
2098 let commit = envelope.payload.commit;
2099
2100 repository_handle
2101 .update(&mut cx, |repository_handle, _| {
2102 repository_handle.create_worktree(name, directory, commit)
2103 })?
2104 .await??;
2105
2106 Ok(proto::Ack {})
2107 }
2108
2109 async fn handle_get_branches(
2110 this: Entity<Self>,
2111 envelope: TypedEnvelope<proto::GitGetBranches>,
2112 mut cx: AsyncApp,
2113 ) -> Result<proto::GitBranchesResponse> {
2114 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2115 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2116
2117 let branches = repository_handle
2118 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2119 .await??;
2120
2121 Ok(proto::GitBranchesResponse {
2122 branches: branches
2123 .into_iter()
2124 .map(|branch| branch_to_proto(&branch))
2125 .collect::<Vec<_>>(),
2126 })
    }

    async fn handle_get_default_branch(
2129 this: Entity<Self>,
2130 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2131 mut cx: AsyncApp,
2132 ) -> Result<proto::GetDefaultBranchResponse> {
2133 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2134 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2135
2136 let branch = repository_handle
2137 .update(&mut cx, |repository_handle, _| {
2138 repository_handle.default_branch()
2139 })?
2140 .await??
2141 .map(Into::into);
2142
2143 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2146 this: Entity<Self>,
2147 envelope: TypedEnvelope<proto::GitCreateBranch>,
2148 mut cx: AsyncApp,
2149 ) -> Result<proto::Ack> {
2150 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2151 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2152 let branch_name = envelope.payload.branch_name;
2153
2154 repository_handle
2155 .update(&mut cx, |repository_handle, _| {
2156 repository_handle.create_branch(branch_name, None)
2157 })?
2158 .await??;
2159
2160 Ok(proto::Ack {})
2161 }
2162
2163 async fn handle_change_branch(
2164 this: Entity<Self>,
2165 envelope: TypedEnvelope<proto::GitChangeBranch>,
2166 mut cx: AsyncApp,
2167 ) -> Result<proto::Ack> {
2168 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2169 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2170 let branch_name = envelope.payload.branch_name;
2171
2172 repository_handle
2173 .update(&mut cx, |repository_handle, _| {
2174 repository_handle.change_branch(branch_name)
2175 })?
2176 .await??;
2177
2178 Ok(proto::Ack {})
2179 }
2180
2181 async fn handle_rename_branch(
2182 this: Entity<Self>,
2183 envelope: TypedEnvelope<proto::GitRenameBranch>,
2184 mut cx: AsyncApp,
2185 ) -> Result<proto::Ack> {
2186 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2187 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2188 let branch = envelope.payload.branch;
2189 let new_name = envelope.payload.new_name;
2190
2191 repository_handle
2192 .update(&mut cx, |repository_handle, _| {
2193 repository_handle.rename_branch(branch, new_name)
2194 })?
2195 .await??;
2196
2197 Ok(proto::Ack {})
2198 }
2199
2200 async fn handle_show(
2201 this: Entity<Self>,
2202 envelope: TypedEnvelope<proto::GitShow>,
2203 mut cx: AsyncApp,
2204 ) -> Result<proto::GitCommitDetails> {
2205 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2206 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2207
2208 let commit = repository_handle
2209 .update(&mut cx, |repository_handle, _| {
2210 repository_handle.show(envelope.payload.commit)
2211 })?
2212 .await??;
2213 Ok(proto::GitCommitDetails {
2214 sha: commit.sha.into(),
2215 message: commit.message.into(),
2216 commit_timestamp: commit.commit_timestamp,
2217 author_email: commit.author_email.into(),
2218 author_name: commit.author_name.into(),
2219 })
2220 }
2221
2222 async fn handle_load_commit_diff(
2223 this: Entity<Self>,
2224 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::LoadCommitDiffResponse> {
2227 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2228 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2229
2230 let commit_diff = repository_handle
2231 .update(&mut cx, |repository_handle, _| {
2232 repository_handle.load_commit_diff(envelope.payload.commit)
2233 })?
2234 .await??;
2235 Ok(proto::LoadCommitDiffResponse {
2236 files: commit_diff
2237 .files
2238 .into_iter()
2239 .map(|file| proto::CommitFile {
2240 path: file.path.to_proto(),
2241 old_text: file.old_text,
2242 new_text: file.new_text,
2243 })
2244 .collect(),
2245 })
2246 }
2247
2248 async fn handle_reset(
2249 this: Entity<Self>,
2250 envelope: TypedEnvelope<proto::GitReset>,
2251 mut cx: AsyncApp,
2252 ) -> Result<proto::Ack> {
2253 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2254 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2255
2256 let mode = match envelope.payload.mode() {
2257 git_reset::ResetMode::Soft => ResetMode::Soft,
2258 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2259 };
2260
2261 repository_handle
2262 .update(&mut cx, |repository_handle, cx| {
2263 repository_handle.reset(envelope.payload.commit, mode, cx)
2264 })?
2265 .await??;
2266 Ok(proto::Ack {})
2267 }
2268
2269 async fn handle_checkout_files(
2270 this: Entity<Self>,
2271 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2272 mut cx: AsyncApp,
2273 ) -> Result<proto::Ack> {
2274 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2275 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2276 let paths = envelope
2277 .payload
2278 .paths
2279 .iter()
2280 .map(|s| RepoPath::from_proto(s))
2281 .collect::<Result<Vec<_>>>()?;
2282
2283 repository_handle
2284 .update(&mut cx, |repository_handle, cx| {
2285 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2286 })?
2287 .await?;
2288 Ok(proto::Ack {})
2289 }
2290
2291 async fn handle_open_commit_message_buffer(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::OpenBufferResponse> {
2296 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2297 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2298 let buffer = repository
2299 .update(&mut cx, |repository, cx| {
2300 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2301 })?
2302 .await?;
2303
2304 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2305 this.update(&mut cx, |this, cx| {
2306 this.buffer_store.update(cx, |buffer_store, cx| {
2307 buffer_store
2308 .create_buffer_for_peer(
2309 &buffer,
2310 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2311 cx,
2312 )
2313 .detach_and_log_err(cx);
2314 })
2315 })?;
2316
2317 Ok(proto::OpenBufferResponse {
2318 buffer_id: buffer_id.to_proto(),
2319 })
2320 }
2321
2322 async fn handle_askpass(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::AskPassRequest>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::AskPassResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2331 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2332 debug_panic!("no askpass found");
2333 anyhow::bail!("no askpass found");
2334 };
2335
2336 let response = askpass
2337 .ask_password(envelope.payload.prompt)
2338 .await
2339 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2340
2341 delegates
2342 .lock()
2343 .insert(envelope.payload.askpass_id, askpass);
2344
        // Note: despite the name of the token below, the askpass response is sent back to the
        // requester unencrypted here.
2346 Ok(proto::AskPassResponse {
2347 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2348 })
2349 }
2350
2351 async fn handle_check_for_pushed_commits(
2352 this: Entity<Self>,
2353 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2354 mut cx: AsyncApp,
2355 ) -> Result<proto::CheckForPushedCommitsResponse> {
2356 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2357 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2358
2359 let branches = repository_handle
2360 .update(&mut cx, |repository_handle, _| {
2361 repository_handle.check_for_pushed_commits()
2362 })?
2363 .await??;
2364 Ok(proto::CheckForPushedCommitsResponse {
2365 pushed_to: branches
2366 .into_iter()
2367 .map(|commit| commit.to_string())
2368 .collect(),
2369 })
2370 }
2371
2372 async fn handle_git_diff(
2373 this: Entity<Self>,
2374 envelope: TypedEnvelope<proto::GitDiff>,
2375 mut cx: AsyncApp,
2376 ) -> Result<proto::GitDiffResponse> {
2377 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2378 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2379 let diff_type = match envelope.payload.diff_type() {
2380 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2381 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2382 };
2383
2384 let mut diff = repository_handle
2385 .update(&mut cx, |repository_handle, cx| {
2386 repository_handle.diff(diff_type, cx)
2387 })?
2388 .await??;
2389 const ONE_MB: usize = 1_000_000;
2390 if diff.len() > ONE_MB {
2391 diff = diff.chars().take(ONE_MB).collect()
2392 }
2393
2394 Ok(proto::GitDiffResponse { diff })
2395 }
2396
2397 async fn handle_tree_diff(
2398 this: Entity<Self>,
2399 request: TypedEnvelope<proto::GetTreeDiff>,
2400 mut cx: AsyncApp,
2401 ) -> Result<proto::GetTreeDiffResponse> {
2402 let repository_id = RepositoryId(request.payload.repository_id);
2403 let diff_type = if request.payload.is_merge {
2404 DiffTreeType::MergeBase {
2405 base: request.payload.base.into(),
2406 head: request.payload.head.into(),
2407 }
2408 } else {
2409 DiffTreeType::Since {
2410 base: request.payload.base.into(),
2411 head: request.payload.head.into(),
2412 }
2413 };
2414
2415 let diff = this
2416 .update(&mut cx, |this, cx| {
2417 let repository = this.repositories().get(&repository_id)?;
2418 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2419 })?
2420 .context("missing repository")?
2421 .await??;
2422
2423 Ok(proto::GetTreeDiffResponse {
2424 entries: diff
2425 .entries
2426 .into_iter()
2427 .map(|(path, status)| proto::TreeDiffStatus {
2428 path: path.as_ref().to_proto(),
2429 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2431 TreeDiffStatus::Modified { .. } => {
2432 proto::tree_diff_status::Status::Modified.into()
2433 }
2434 TreeDiffStatus::Deleted { .. } => {
2435 proto::tree_diff_status::Status::Deleted.into()
2436 }
2437 },
2438 oid: match status {
2439 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2440 Some(old.to_string())
2441 }
2442 TreeDiffStatus::Added => None,
2443 },
2444 })
2445 .collect(),
2446 })
2447 }
2448
2449 async fn handle_get_blob_content(
2450 this: Entity<Self>,
2451 request: TypedEnvelope<proto::GetBlobContent>,
2452 mut cx: AsyncApp,
2453 ) -> Result<proto::GetBlobContentResponse> {
2454 let oid = git::Oid::from_str(&request.payload.oid)?;
2455 let repository_id = RepositoryId(request.payload.repository_id);
2456 let content = this
2457 .update(&mut cx, |this, cx| {
2458 let repository = this.repositories().get(&repository_id)?;
2459 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2460 })?
2461 .context("missing repository")?
2462 .await?;
2463 Ok(proto::GetBlobContentResponse { content })
2464 }
2465
2466 async fn handle_open_unstaged_diff(
2467 this: Entity<Self>,
2468 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2469 mut cx: AsyncApp,
2470 ) -> Result<proto::OpenUnstagedDiffResponse> {
2471 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2472 let diff = this
2473 .update(&mut cx, |this, cx| {
2474 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2475 Some(this.open_unstaged_diff(buffer, cx))
2476 })?
2477 .context("missing buffer")?
2478 .await?;
2479 this.update(&mut cx, |this, _| {
2480 let shared_diffs = this
2481 .shared_diffs
2482 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2483 .or_default();
2484 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2485 })?;
2486 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2487 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2488 }
2489
2490 async fn handle_open_uncommitted_diff(
2491 this: Entity<Self>,
2492 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2493 mut cx: AsyncApp,
2494 ) -> Result<proto::OpenUncommittedDiffResponse> {
2495 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2496 let diff = this
2497 .update(&mut cx, |this, cx| {
2498 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2499 Some(this.open_uncommitted_diff(buffer, cx))
2500 })?
2501 .context("missing buffer")?
2502 .await?;
2503 this.update(&mut cx, |this, _| {
2504 let shared_diffs = this
2505 .shared_diffs
2506 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2507 .or_default();
2508 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2509 })?;
2510 diff.read_with(&cx, |diff, cx| {
2511 use proto::open_uncommitted_diff_response::Mode;
2512
2513 let unstaged_diff = diff.secondary_diff();
2514 let index_snapshot = unstaged_diff.and_then(|diff| {
2515 let diff = diff.read(cx);
2516 diff.base_text_exists().then(|| diff.base_text())
2517 });
2518
2519 let mode;
2520 let staged_text;
2521 let committed_text;
2522 if diff.base_text_exists() {
2523 let committed_snapshot = diff.base_text();
2524 committed_text = Some(committed_snapshot.text());
2525 if let Some(index_text) = index_snapshot {
2526 if index_text.remote_id() == committed_snapshot.remote_id() {
2527 mode = Mode::IndexMatchesHead;
2528 staged_text = None;
2529 } else {
2530 mode = Mode::IndexAndHead;
2531 staged_text = Some(index_text.text());
2532 }
2533 } else {
2534 mode = Mode::IndexAndHead;
2535 staged_text = None;
2536 }
2537 } else {
2538 mode = Mode::IndexAndHead;
2539 committed_text = None;
2540 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2541 }
2542
2543 proto::OpenUncommittedDiffResponse {
2544 committed_text,
2545 staged_text,
2546 mode: mode.into(),
2547 }
2548 })
2549 }
2550
2551 async fn handle_update_diff_bases(
2552 this: Entity<Self>,
2553 request: TypedEnvelope<proto::UpdateDiffBases>,
2554 mut cx: AsyncApp,
2555 ) -> Result<()> {
2556 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2557 this.update(&mut cx, |this, cx| {
2558 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2559 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2560 {
2561 let buffer = buffer.read(cx).text_snapshot();
2562 diff_state.update(cx, |diff_state, cx| {
2563 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2564 })
2565 }
2566 })
2567 }
2568
2569 async fn handle_blame_buffer(
2570 this: Entity<Self>,
2571 envelope: TypedEnvelope<proto::BlameBuffer>,
2572 mut cx: AsyncApp,
2573 ) -> Result<proto::BlameBufferResponse> {
2574 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2575 let version = deserialize_version(&envelope.payload.version);
2576 let buffer = this.read_with(&cx, |this, cx| {
2577 this.buffer_store.read(cx).get_existing(buffer_id)
2578 })??;
2579 buffer
2580 .update(&mut cx, |buffer, _| {
2581 buffer.wait_for_version(version.clone())
2582 })?
2583 .await?;
2584 let blame = this
2585 .update(&mut cx, |this, cx| {
2586 this.blame_buffer(&buffer, Some(version), cx)
2587 })?
2588 .await?;
2589 Ok(serialize_blame_buffer_response(blame))
2590 }
2591
2592 async fn handle_get_permalink_to_line(
2593 this: Entity<Self>,
2594 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2595 mut cx: AsyncApp,
2596 ) -> Result<proto::GetPermalinkToLineResponse> {
2597 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2599 let selection = {
2600 let proto_selection = envelope
2601 .payload
2602 .selection
                .context("no selection provided to get permalink for")?;
2604 proto_selection.start as u32..proto_selection.end as u32
2605 };
2606 let buffer = this.read_with(&cx, |this, cx| {
2607 this.buffer_store.read(cx).get_existing(buffer_id)
2608 })??;
2609 let permalink = this
2610 .update(&mut cx, |this, cx| {
2611 this.get_permalink_to_line(&buffer, selection, cx)
2612 })?
2613 .await?;
2614 Ok(proto::GetPermalinkToLineResponse {
2615 permalink: permalink.to_string(),
2616 })
2617 }
2618
2619 fn repository_for_request(
2620 this: &Entity<Self>,
2621 id: RepositoryId,
2622 cx: &mut AsyncApp,
2623 ) -> Result<Entity<Repository>> {
2624 this.read_with(cx, |this, _| {
2625 this.repositories
2626 .get(&id)
2627 .context("missing repository handle")
2628 .cloned()
2629 })?
2630 }
2631
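    /// Returns the current snapshot of every repository tracked by this store, keyed by
    /// repository id.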
2632 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2633 self.repositories
2634 .iter()
2635 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2636 .collect()
2637 }
2638
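    /// Partitions the absolute paths of the updated worktree entries among the known
    /// repositories, assigning each path to its innermost containing repository. The work runs
    /// on the background executor.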
2639 fn process_updated_entries(
2640 &self,
2641 worktree: &Entity<Worktree>,
2642 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2643 cx: &mut App,
2644 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2645 let path_style = worktree.read(cx).path_style();
2646 let mut repo_paths = self
2647 .repositories
2648 .values()
2649 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2650 .collect::<Vec<_>>();
2651 let mut entries: Vec<_> = updated_entries
2652 .iter()
2653 .map(|(path, _, _)| path.clone())
2654 .collect();
2655 entries.sort();
2656 let worktree = worktree.read(cx);
2657
2658 let entries = entries
2659 .into_iter()
2660 .map(|path| worktree.absolutize(&path))
2661 .collect::<Arc<[_]>>();
2662
2663 let executor = cx.background_executor().clone();
2664 cx.background_executor().spawn(async move {
2665 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2666 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2667 let mut tasks = FuturesOrdered::new();
2668 for (repo_path, repo) in repo_paths.into_iter().rev() {
2669 let entries = entries.clone();
2670 let task = executor.spawn(async move {
2671 // Find all repository paths that belong to this repo
2672 let mut ix = entries.partition_point(|path| path < &*repo_path);
2673 if ix == entries.len() {
2674 return None;
2675 };
2676
2677 let mut paths = Vec::new();
                    // All paths prefixed by a given repo path form a contiguous range.
2679 while let Some(path) = entries.get(ix)
2680 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2681 &repo_path, path, path_style,
2682 )
2683 {
2684 paths.push((repo_path, ix));
2685 ix += 1;
2686 }
2687 if paths.is_empty() {
2688 None
2689 } else {
2690 Some((repo, paths))
2691 }
2692 });
2693 tasks.push_back(task);
2694 }
2695
2696 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2697 let mut path_was_used = vec![false; entries.len()];
2698 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned in reverse path order, so iterating them forward visits the
            // most deeply nested repositories first: we always want to assign a path to its
            // innermost repository.
2701 for t in tasks {
2702 let Some((repo, paths)) = t else {
2703 continue;
2704 };
2705 let entry = paths_by_git_repo.entry(repo).or_default();
2706 for (repo_path, ix) in paths {
2707 if path_was_used[ix] {
2708 continue;
2709 }
2710 path_was_used[ix] = true;
2711 entry.push(repo_path);
2712 }
2713 }
2714
2715 paths_by_git_repo
2716 })
2717 }
2718}
2719
2720impl BufferGitState {
2721 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2722 Self {
2723 unstaged_diff: Default::default(),
2724 uncommitted_diff: Default::default(),
2725 recalculate_diff_task: Default::default(),
2726 language: Default::default(),
2727 language_registry: Default::default(),
2728 recalculating_tx: postage::watch::channel_with(false).0,
2729 hunk_staging_operation_count: 0,
2730 hunk_staging_operation_count_as_of_write: 0,
2731 head_text: Default::default(),
2732 index_text: Default::default(),
2733 head_changed: Default::default(),
2734 index_changed: Default::default(),
2735 language_changed: Default::default(),
2736 conflict_updated_futures: Default::default(),
2737 conflict_set: Default::default(),
2738 reparse_conflict_markers_task: Default::default(),
2739 }
2740 }
2741
2742 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2743 self.language = buffer.read(cx).language().cloned();
2744 self.language_changed = true;
2745 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2746 }
2747
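    /// Re-parses conflict markers in `buffer` and updates the associated `ConflictSet`, if the
    /// buffer previously contained conflicts. The returned receiver fires once the update has
    /// been applied.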
2748 fn reparse_conflict_markers(
2749 &mut self,
2750 buffer: text::BufferSnapshot,
2751 cx: &mut Context<Self>,
2752 ) -> oneshot::Receiver<()> {
2753 let (tx, rx) = oneshot::channel();
2754
2755 let Some(conflict_set) = self
2756 .conflict_set
2757 .as_ref()
2758 .and_then(|conflict_set| conflict_set.upgrade())
2759 else {
2760 return rx;
2761 };
2762
2763 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2764 if conflict_set.has_conflict {
2765 Some(conflict_set.snapshot())
2766 } else {
2767 None
2768 }
2769 });
2770
2771 if let Some(old_snapshot) = old_snapshot {
2772 self.conflict_updated_futures.push(tx);
2773 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2774 let (snapshot, changed_range) = cx
2775 .background_spawn(async move {
2776 let new_snapshot = ConflictSet::parse(&buffer);
2777 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2778 (new_snapshot, changed_range)
2779 })
2780 .await;
2781 this.update(cx, |this, cx| {
2782 if let Some(conflict_set) = &this.conflict_set {
2783 conflict_set
2784 .update(cx, |conflict_set, cx| {
2785 conflict_set.set_snapshot(snapshot, changed_range, cx);
2786 })
2787 .ok();
2788 }
2789 let futures = std::mem::take(&mut this.conflict_updated_futures);
2790 for tx in futures {
2791 tx.send(()).ok();
2792 }
2793 })
2794 }))
2795 }
2796
2797 rx
2798 }
2799
2800 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2801 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2802 }
2803
2804 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2805 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2806 }
2807
2808 fn handle_base_texts_updated(
2809 &mut self,
2810 buffer: text::BufferSnapshot,
2811 message: proto::UpdateDiffBases,
2812 cx: &mut Context<Self>,
2813 ) {
2814 use proto::update_diff_bases::Mode;
2815
2816 let Some(mode) = Mode::from_i32(message.mode) else {
2817 return;
2818 };
2819
2820 let diff_bases_change = match mode {
2821 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2822 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2823 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2824 Mode::IndexAndHead => DiffBasesChange::SetEach {
2825 index: message.staged_text,
2826 head: message.committed_text,
2827 },
2828 };
2829
2830 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2831 }
2832
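    /// If a diff recalculation is in progress, returns a future that resolves once it completes;
    /// otherwise returns `None`.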
2833 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2834 if *self.recalculating_tx.borrow() {
2835 let mut rx = self.recalculating_tx.subscribe();
2836 Some(async move {
2837 loop {
2838 let is_recalculating = rx.recv().await;
2839 if is_recalculating != Some(true) {
2840 break;
2841 }
2842 }
2843 })
2844 } else {
2845 None
2846 }
2847 }
2848
2849 fn diff_bases_changed(
2850 &mut self,
2851 buffer: text::BufferSnapshot,
2852 diff_bases_change: Option<DiffBasesChange>,
2853 cx: &mut Context<Self>,
2854 ) {
2855 match diff_bases_change {
2856 Some(DiffBasesChange::SetIndex(index)) => {
2857 self.index_text = index.map(|mut index| {
2858 text::LineEnding::normalize(&mut index);
2859 Arc::new(index)
2860 });
2861 self.index_changed = true;
2862 }
2863 Some(DiffBasesChange::SetHead(head)) => {
2864 self.head_text = head.map(|mut head| {
2865 text::LineEnding::normalize(&mut head);
2866 Arc::new(head)
2867 });
2868 self.head_changed = true;
2869 }
2870 Some(DiffBasesChange::SetBoth(text)) => {
2871 let text = text.map(|mut text| {
2872 text::LineEnding::normalize(&mut text);
2873 Arc::new(text)
2874 });
2875 self.head_text = text.clone();
2876 self.index_text = text;
2877 self.head_changed = true;
2878 self.index_changed = true;
2879 }
2880 Some(DiffBasesChange::SetEach { index, head }) => {
2881 self.index_text = index.map(|mut index| {
2882 text::LineEnding::normalize(&mut index);
2883 Arc::new(index)
2884 });
2885 self.index_changed = true;
2886 self.head_text = head.map(|mut head| {
2887 text::LineEnding::normalize(&mut head);
2888 Arc::new(head)
2889 });
2890 self.head_changed = true;
2891 }
2892 None => {}
2893 }
2894
2895 self.recalculate_diffs(buffer, cx)
2896 }
2897
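    /// Recomputes the unstaged and uncommitted diffs for `buffer` on a background task, reusing
    /// the unstaged diff when the index matches HEAD. The update is abandoned if new hunk
    /// staging operations arrive while it is in flight.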
2898 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2899 *self.recalculating_tx.borrow_mut() = true;
2900
2901 let language = self.language.clone();
2902 let language_registry = self.language_registry.clone();
2903 let unstaged_diff = self.unstaged_diff();
2904 let uncommitted_diff = self.uncommitted_diff();
2905 let head = self.head_text.clone();
2906 let index = self.index_text.clone();
2907 let index_changed = self.index_changed;
2908 let head_changed = self.head_changed;
2909 let language_changed = self.language_changed;
2910 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2911 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2912 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2913 (None, None) => true,
2914 _ => false,
2915 };
2916 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2917 log::debug!(
2918 "start recalculating diffs for buffer {}",
2919 buffer.remote_id()
2920 );
2921
2922 let mut new_unstaged_diff = None;
2923 if let Some(unstaged_diff) = &unstaged_diff {
2924 new_unstaged_diff = Some(
2925 BufferDiff::update_diff(
2926 unstaged_diff.clone(),
2927 buffer.clone(),
2928 index,
2929 index_changed,
2930 language_changed,
2931 language.clone(),
2932 language_registry.clone(),
2933 cx,
2934 )
2935 .await?,
2936 );
2937 }
2938
2939 let mut new_uncommitted_diff = None;
2940 if let Some(uncommitted_diff) = &uncommitted_diff {
2941 new_uncommitted_diff = if index_matches_head {
2942 new_unstaged_diff.clone()
2943 } else {
2944 Some(
2945 BufferDiff::update_diff(
2946 uncommitted_diff.clone(),
2947 buffer.clone(),
2948 head,
2949 head_changed,
2950 language_changed,
2951 language.clone(),
2952 language_registry.clone(),
2953 cx,
2954 )
2955 .await?,
2956 )
2957 }
2958 }
2959
2960 let cancel = this.update(cx, |this, _| {
2961 // This checks whether all pending stage/unstage operations
2962 // have quiesced (i.e. both the corresponding write and the
2963 // read of that write have completed). If not, then we cancel
2964 // this recalculation attempt to avoid invalidating pending
2965 // state too quickly; another recalculation will come along
2966 // later and clear the pending state once the state of the index has settled.
2967 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2968 *this.recalculating_tx.borrow_mut() = false;
2969 true
2970 } else {
2971 false
2972 }
2973 })?;
2974 if cancel {
2975 log::debug!(
2976 concat!(
                        "aborting recalculating diffs for buffer {} ",
2978 "due to subsequent hunk operations",
2979 ),
2980 buffer.remote_id()
2981 );
2982 return Ok(());
2983 }
2984
2985 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2986 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2987 {
2988 unstaged_diff.update(cx, |diff, cx| {
2989 if language_changed {
2990 diff.language_changed(cx);
2991 }
2992 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2993 })?
2994 } else {
2995 None
2996 };
2997
2998 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2999 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3000 {
3001 uncommitted_diff.update(cx, |diff, cx| {
3002 if language_changed {
3003 diff.language_changed(cx);
3004 }
3005 diff.set_snapshot_with_secondary(
3006 new_uncommitted_diff,
3007 &buffer,
3008 unstaged_changed_range,
3009 true,
3010 cx,
3011 );
3012 })?;
3013 }
3014
3015 log::debug!(
3016 "finished recalculating diffs for buffer {}",
3017 buffer.remote_id()
3018 );
3019
3020 if let Some(this) = this.upgrade() {
3021 this.update(cx, |this, _| {
3022 this.index_changed = false;
3023 this.head_changed = false;
3024 this.language_changed = false;
3025 *this.recalculating_tx.borrow_mut() = false;
3026 })?;
3027 }
3028
3029 Ok(())
3030 }));
3031 }
3032}
3033
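/// Builds an `AskPassDelegate` that forwards credential prompts to the downstream client over
/// RPC and relays the response back to the waiting git command, zeroizing the plaintext
/// afterwards.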
3034fn make_remote_delegate(
3035 this: Entity<GitStore>,
3036 project_id: u64,
3037 repository_id: RepositoryId,
3038 askpass_id: u64,
3039 cx: &mut AsyncApp,
3040) -> AskPassDelegate {
3041 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3042 this.update(cx, |this, cx| {
3043 let Some((client, _)) = this.downstream_client() else {
3044 return;
3045 };
3046 let response = client.request(proto::AskPassRequest {
3047 project_id,
3048 repository_id: repository_id.to_proto(),
3049 askpass_id,
3050 prompt,
3051 });
3052 cx.spawn(async move |_, _| {
3053 let mut response = response.await?.response;
3054 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3055 .ok();
3056 response.zeroize();
3057 anyhow::Ok(())
3058 })
3059 .detach_and_log_err(cx);
3060 })
3061 .log_err();
3062 })
3063}
3064
3065impl RepositoryId {
3066 pub fn to_proto(self) -> u64 {
3067 self.0
3068 }
3069
3070 pub fn from_proto(id: u64) -> Self {
3071 RepositoryId(id)
3072 }
3073}
3074
3075impl RepositorySnapshot {
3076 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3077 Self {
3078 id,
3079 statuses_by_path: Default::default(),
3080 work_directory_abs_path,
3081 branch: None,
3082 head_commit: None,
3083 scan_id: 0,
3084 merge: Default::default(),
3085 remote_origin_url: None,
3086 remote_upstream_url: None,
3087 stash_entries: Default::default(),
3088 path_style,
3089 }
3090 }
3091
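    /// Builds a full `proto::UpdateRepository` message describing this snapshot, for sending to
    /// a collaborator that has no prior state for the repository.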
3092 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3093 proto::UpdateRepository {
3094 branch_summary: self.branch.as_ref().map(branch_to_proto),
3095 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3096 updated_statuses: self
3097 .statuses_by_path
3098 .iter()
3099 .map(|entry| entry.to_proto())
3100 .collect(),
3101 removed_statuses: Default::default(),
3102 current_merge_conflicts: self
3103 .merge
3104 .conflicted_paths
3105 .iter()
3106 .map(|repo_path| repo_path.to_proto())
3107 .collect(),
3108 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3109 project_id,
3110 id: self.id.to_proto(),
3111 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3112 entry_ids: vec![self.id.to_proto()],
3113 scan_id: self.scan_id,
3114 is_last_update: true,
3115 stash_entries: self
3116 .stash_entries
3117 .entries
3118 .iter()
3119 .map(stash_to_proto)
3120 .collect(),
3121 }
3122 }
3123
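    /// Builds an incremental `proto::UpdateRepository` message by diffing this snapshot against
    /// `old`, reporting only statuses that were added or changed and the paths whose statuses
    /// were removed.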
3124 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3125 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3126 let mut removed_statuses: Vec<String> = Vec::new();
3127
3128 let mut new_statuses = self.statuses_by_path.iter().peekable();
3129 let mut old_statuses = old.statuses_by_path.iter().peekable();
3130
3131 let mut current_new_entry = new_statuses.next();
3132 let mut current_old_entry = old_statuses.next();
3133 loop {
3134 match (current_new_entry, current_old_entry) {
3135 (Some(new_entry), Some(old_entry)) => {
3136 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3137 Ordering::Less => {
3138 updated_statuses.push(new_entry.to_proto());
3139 current_new_entry = new_statuses.next();
3140 }
3141 Ordering::Equal => {
3142 if new_entry.status != old_entry.status {
3143 updated_statuses.push(new_entry.to_proto());
3144 }
3145 current_old_entry = old_statuses.next();
3146 current_new_entry = new_statuses.next();
3147 }
3148 Ordering::Greater => {
3149 removed_statuses.push(old_entry.repo_path.to_proto());
3150 current_old_entry = old_statuses.next();
3151 }
3152 }
3153 }
3154 (None, Some(old_entry)) => {
3155 removed_statuses.push(old_entry.repo_path.to_proto());
3156 current_old_entry = old_statuses.next();
3157 }
3158 (Some(new_entry), None) => {
3159 updated_statuses.push(new_entry.to_proto());
3160 current_new_entry = new_statuses.next();
3161 }
3162 (None, None) => break,
3163 }
3164 }
3165
3166 proto::UpdateRepository {
3167 branch_summary: self.branch.as_ref().map(branch_to_proto),
3168 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3169 updated_statuses,
3170 removed_statuses,
3171 current_merge_conflicts: self
3172 .merge
3173 .conflicted_paths
3174 .iter()
3175 .map(|path| path.to_proto())
3176 .collect(),
3177 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3178 project_id,
3179 id: self.id.to_proto(),
3180 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3181 entry_ids: vec![],
3182 scan_id: self.scan_id,
3183 is_last_update: true,
3184 stash_entries: self
3185 .stash_entries
3186 .entries
3187 .iter()
3188 .map(stash_to_proto)
3189 .collect(),
3190 }
3191 }
3192
3193 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3194 self.statuses_by_path.iter().cloned()
3195 }
3196
3197 pub fn status_summary(&self) -> GitSummary {
3198 self.statuses_by_path.summary().item_summary
3199 }
3200
3201 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3202 self.statuses_by_path
3203 .get(&PathKey(path.as_ref().clone()), ())
3204 .cloned()
3205 }
3206
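    /// Converts an absolute path into a path relative to this repository's working directory,
    /// returning `None` if the path lies outside of it.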
3207 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3208 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3209 }
3210
3211 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3212 self.path_style
3213 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3214 .unwrap()
3215 .into()
3216 }
3217
3218 #[inline]
3219 fn abs_path_to_repo_path_inner(
3220 work_directory_abs_path: &Path,
3221 abs_path: &Path,
3222 path_style: PathStyle,
3223 ) -> Option<RepoPath> {
3224 abs_path
3225 .strip_prefix(&work_directory_abs_path)
3226 .ok()
3227 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3228 }
3229
3230 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3231 self.merge.conflicted_paths.contains(repo_path)
3232 }
3233
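    /// Returns whether the path either had a conflict the last time the merge heads changed, or
    /// is currently reported as conflicted by git status.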
3234 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3235 let had_conflict_on_last_merge_head_change =
3236 self.merge.conflicted_paths.contains(repo_path);
3237 let has_conflict_currently = self
3238 .status_for_path(repo_path)
3239 .is_some_and(|entry| entry.status.is_conflicted());
3240 had_conflict_on_last_merge_head_change || has_conflict_currently
3241 }
3242
    /// The name displayed for this repository in the repository selector.
3244 pub fn display_name(&self) -> SharedString {
3245 self.work_directory_abs_path
3246 .file_name()
3247 .unwrap_or_default()
3248 .to_string_lossy()
3249 .to_string()
3250 .into()
3251 }
3252}
3253
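/// Serializes a stash entry for transmission over RPC.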
3254pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3255 proto::StashEntry {
3256 oid: entry.oid.as_bytes().to_vec(),
3257 message: entry.message.clone(),
3258 branch: entry.branch.clone(),
3259 index: entry.index as u64,
3260 timestamp: entry.timestamp,
3261 }
3262}
3263
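/// Deserializes a stash entry received over RPC.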
3264pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3265 Ok(StashEntry {
3266 oid: Oid::from_bytes(&entry.oid)?,
3267 message: entry.message.clone(),
3268 index: entry.index as usize,
3269 branch: entry.branch.clone(),
3270 timestamp: entry.timestamp,
3271 })
3272}
3273
3274impl MergeDetails {
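    /// Reads the repository's current merge-related state: the merge message, the heads of any
    /// in-progress merge, cherry-pick, rebase, revert, or apply operation, and the set of
    /// conflicted paths. Returns the updated details together with whether the recorded merge
    /// heads changed relative to `prev_snapshot`.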
3275 async fn load(
3276 backend: &Arc<dyn GitRepository>,
3277 status: &SumTree<StatusEntry>,
3278 prev_snapshot: &RepositorySnapshot,
3279 ) -> Result<(MergeDetails, bool)> {
3280 log::debug!("load merge details");
3281 let message = backend.merge_message().await;
3282 let heads = backend
3283 .revparse_batch(vec![
3284 "MERGE_HEAD".into(),
3285 "CHERRY_PICK_HEAD".into(),
3286 "REBASE_HEAD".into(),
3287 "REVERT_HEAD".into(),
3288 "APPLY_HEAD".into(),
3289 ])
3290 .await
3291 .log_err()
3292 .unwrap_or_default()
3293 .into_iter()
3294 .map(|opt| opt.map(SharedString::from))
3295 .collect::<Vec<_>>();
3296 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3297 let conflicted_paths = if merge_heads_changed {
3298 let current_conflicted_paths = TreeSet::from_ordered_entries(
3299 status
3300 .iter()
3301 .filter(|entry| entry.status.is_conflicted())
3302 .map(|entry| entry.repo_path.clone()),
3303 );
3304
            // A scan can run while a lengthy merge is still in progress, before the resulting
            // conflicts are reported by `git status`. Since we currently only track the merge
            // heads for the purpose of conflict detection, don't update them until some
            // conflicts actually appear.
3310 if heads.iter().any(Option::is_some)
3311 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3312 && current_conflicted_paths.is_empty()
3313 {
3314 log::debug!("not updating merge heads because no conflicts found");
3315 return Ok((
3316 MergeDetails {
3317 message: message.map(SharedString::from),
3318 ..prev_snapshot.merge.clone()
3319 },
3320 false,
3321 ));
3322 }
3323
3324 current_conflicted_paths
3325 } else {
3326 prev_snapshot.merge.conflicted_paths.clone()
3327 };
3328 let details = MergeDetails {
3329 conflicted_paths,
3330 message: message.map(SharedString::from),
3331 heads,
3332 };
3333 Ok((details, merge_heads_changed))
3334 }
3335}
3336
3337impl Repository {
3338 pub fn snapshot(&self) -> RepositorySnapshot {
3339 self.snapshot.clone()
3340 }
3341
3342 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3343 self.pending_ops.iter().cloned()
3344 }
3345
3346 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3347 self.pending_ops.summary().clone()
3348 }
3349
3350 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3351 self.pending_ops
3352 .get(&PathKey(path.as_ref().clone()), ())
3353 .cloned()
3354 }
3355
3356 fn local(
3357 id: RepositoryId,
3358 work_directory_abs_path: Arc<Path>,
3359 dot_git_abs_path: Arc<Path>,
3360 repository_dir_abs_path: Arc<Path>,
3361 common_dir_abs_path: Arc<Path>,
3362 project_environment: WeakEntity<ProjectEnvironment>,
3363 fs: Arc<dyn Fs>,
3364 git_store: WeakEntity<GitStore>,
3365 cx: &mut Context<Self>,
3366 ) -> Self {
3367 let snapshot =
3368 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3369 Repository {
3370 this: cx.weak_entity(),
3371 git_store,
3372 snapshot,
3373 pending_ops: Default::default(),
3374 commit_message_buffer: None,
3375 askpass_delegates: Default::default(),
3376 paths_needing_status_update: Default::default(),
3377 latest_askpass_id: 0,
3378 job_sender: Repository::spawn_local_git_worker(
3379 work_directory_abs_path,
3380 dot_git_abs_path,
3381 repository_dir_abs_path,
3382 common_dir_abs_path,
3383 project_environment,
3384 fs,
3385 cx,
3386 ),
3387 job_id: 0,
3388 active_jobs: Default::default(),
3389 }
3390 }
3391
3392 fn remote(
3393 id: RepositoryId,
3394 work_directory_abs_path: Arc<Path>,
3395 path_style: PathStyle,
3396 project_id: ProjectId,
3397 client: AnyProtoClient,
3398 git_store: WeakEntity<GitStore>,
3399 cx: &mut Context<Self>,
3400 ) -> Self {
3401 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3402 Self {
3403 this: cx.weak_entity(),
3404 snapshot,
3405 commit_message_buffer: None,
3406 git_store,
3407 pending_ops: Default::default(),
3408 paths_needing_status_update: Default::default(),
3409 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3410 askpass_delegates: Default::default(),
3411 latest_askpass_id: 0,
3412 active_jobs: Default::default(),
3413 job_id: 0,
3414 }
3415 }
3416
3417 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3418 self.git_store.upgrade()
3419 }
3420
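    /// Reloads the index and HEAD texts for every open buffer belonging to this repository,
    /// forwards any changes to the downstream client, and updates the corresponding diff state.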
3421 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3422 let this = cx.weak_entity();
3423 let git_store = self.git_store.clone();
3424 let _ = self.send_keyed_job(
3425 Some(GitJobKey::ReloadBufferDiffBases),
3426 None,
3427 |state, mut cx| async move {
3428 let RepositoryState::Local { backend, .. } = state else {
3429 log::error!("tried to recompute diffs for a non-local repository");
3430 return Ok(());
3431 };
3432
3433 let Some(this) = this.upgrade() else {
3434 return Ok(());
3435 };
3436
3437 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3438 git_store.update(cx, |git_store, cx| {
3439 git_store
3440 .diffs
3441 .iter()
3442 .filter_map(|(buffer_id, diff_state)| {
3443 let buffer_store = git_store.buffer_store.read(cx);
3444 let buffer = buffer_store.get(*buffer_id)?;
3445 let file = File::from_dyn(buffer.read(cx).file())?;
3446 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3447 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3448 log::debug!(
3449 "start reload diff bases for repo path {}",
3450 repo_path.as_unix_str()
3451 );
3452 diff_state.update(cx, |diff_state, _| {
3453 let has_unstaged_diff = diff_state
3454 .unstaged_diff
3455 .as_ref()
3456 .is_some_and(|diff| diff.is_upgradable());
3457 let has_uncommitted_diff = diff_state
3458 .uncommitted_diff
3459 .as_ref()
3460 .is_some_and(|set| set.is_upgradable());
3461
3462 Some((
3463 buffer,
3464 repo_path,
3465 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3466 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3467 ))
3468 })
3469 })
3470 .collect::<Vec<_>>()
3471 })
3472 })??;
3473
3474 let buffer_diff_base_changes = cx
3475 .background_spawn(async move {
3476 let mut changes = Vec::new();
3477 for (buffer, repo_path, current_index_text, current_head_text) in
3478 &repo_diff_state_updates
3479 {
3480 let index_text = if current_index_text.is_some() {
3481 backend.load_index_text(repo_path.clone()).await
3482 } else {
3483 None
3484 };
3485 let head_text = if current_head_text.is_some() {
3486 backend.load_committed_text(repo_path.clone()).await
3487 } else {
3488 None
3489 };
3490
3491 let change =
3492 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3493 (Some(current_index), Some(current_head)) => {
3494 let index_changed =
3495 index_text.as_ref() != current_index.as_deref();
3496 let head_changed =
3497 head_text.as_ref() != current_head.as_deref();
3498 if index_changed && head_changed {
3499 if index_text == head_text {
3500 Some(DiffBasesChange::SetBoth(head_text))
3501 } else {
3502 Some(DiffBasesChange::SetEach {
3503 index: index_text,
3504 head: head_text,
3505 })
3506 }
3507 } else if index_changed {
3508 Some(DiffBasesChange::SetIndex(index_text))
3509 } else if head_changed {
3510 Some(DiffBasesChange::SetHead(head_text))
3511 } else {
3512 None
3513 }
3514 }
3515 (Some(current_index), None) => {
3516 let index_changed =
3517 index_text.as_ref() != current_index.as_deref();
3518 index_changed
3519 .then_some(DiffBasesChange::SetIndex(index_text))
3520 }
3521 (None, Some(current_head)) => {
3522 let head_changed =
3523 head_text.as_ref() != current_head.as_deref();
3524 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3525 }
3526 (None, None) => None,
3527 };
3528
3529 changes.push((buffer.clone(), change))
3530 }
3531 changes
3532 })
3533 .await;
3534
3535 git_store.update(&mut cx, |git_store, cx| {
3536 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3537 let buffer_snapshot = buffer.read(cx).text_snapshot();
3538 let buffer_id = buffer_snapshot.remote_id();
3539 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3540 continue;
3541 };
3542
3543 let downstream_client = git_store.downstream_client();
3544 diff_state.update(cx, |diff_state, cx| {
3545 use proto::update_diff_bases::Mode;
3546
3547 if let Some((diff_bases_change, (client, project_id))) =
3548 diff_bases_change.clone().zip(downstream_client)
3549 {
3550 let (staged_text, committed_text, mode) = match diff_bases_change {
3551 DiffBasesChange::SetIndex(index) => {
3552 (index, None, Mode::IndexOnly)
3553 }
3554 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3555 DiffBasesChange::SetEach { index, head } => {
3556 (index, head, Mode::IndexAndHead)
3557 }
3558 DiffBasesChange::SetBoth(text) => {
3559 (None, text, Mode::IndexMatchesHead)
3560 }
3561 };
3562 client
3563 .send(proto::UpdateDiffBases {
3564 project_id: project_id.to_proto(),
3565 buffer_id: buffer_id.to_proto(),
3566 staged_text,
3567 committed_text,
3568 mode: mode as i32,
3569 })
3570 .log_err();
3571 }
3572
3573 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3574 });
3575 }
3576 })
3577 },
3578 );
3579 }
3580
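    /// Enqueues a job on this repository's git worker. If a `status` message is provided, it is
    /// shown as an active job while the work runs. The returned receiver resolves with the
    /// job's result.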
3581 pub fn send_job<F, Fut, R>(
3582 &mut self,
3583 status: Option<SharedString>,
3584 job: F,
3585 ) -> oneshot::Receiver<R>
3586 where
3587 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3588 Fut: Future<Output = R> + 'static,
3589 R: Send + 'static,
3590 {
3591 self.send_keyed_job(None, status, job)
3592 }
3593
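    /// Like `send_job`, but additionally tags the job with an optional `GitJobKey` so the git
    /// worker can identify jobs of the same kind.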
3594 fn send_keyed_job<F, Fut, R>(
3595 &mut self,
3596 key: Option<GitJobKey>,
3597 status: Option<SharedString>,
3598 job: F,
3599 ) -> oneshot::Receiver<R>
3600 where
3601 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3602 Fut: Future<Output = R> + 'static,
3603 R: Send + 'static,
3604 {
3605 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3606 let job_id = post_inc(&mut self.job_id);
3607 let this = self.this.clone();
3608 self.job_sender
3609 .unbounded_send(GitJob {
3610 key,
3611 job: Box::new(move |state, cx: &mut AsyncApp| {
3612 let job = job(state, cx.clone());
3613 cx.spawn(async move |cx| {
3614 if let Some(s) = status.clone() {
3615 this.update(cx, |this, cx| {
3616 this.active_jobs.insert(
3617 job_id,
3618 JobInfo {
3619 start: Instant::now(),
3620 message: s.clone(),
3621 },
3622 );
3623
3624 cx.notify();
3625 })
3626 .ok();
3627 }
3628 let result = job.await;
3629
3630 this.update(cx, |this, cx| {
3631 this.active_jobs.remove(&job_id);
3632 cx.notify();
3633 })
3634 .ok();
3635
3636 result_tx.send(result).ok();
3637 })
3638 }),
3639 })
3640 .ok();
3641 result_rx
3642 }
3643
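    /// Marks this repository as the active one in its `GitStore`, emitting an
    /// `ActiveRepositoryChanged` event.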
3644 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3645 let Some(git_store) = self.git_store.upgrade() else {
3646 return;
3647 };
3648 let entity = cx.entity();
3649 git_store.update(cx, |git_store, cx| {
3650 let Some((&id, _)) = git_store
3651 .repositories
3652 .iter()
3653 .find(|(_, handle)| *handle == &entity)
3654 else {
3655 return;
3656 };
3657 git_store.active_repo_id = Some(id);
3658 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3659 });
3660 }
3661
3662 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3663 self.snapshot.status()
3664 }
3665
3666 pub fn cached_stash(&self) -> GitStash {
3667 self.snapshot.stash_entries.clone()
3668 }
3669
3670 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3671 let git_store = self.git_store.upgrade()?;
3672 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3673 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3674 let abs_path = SanitizedPath::new(&abs_path);
3675 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3676 Some(ProjectPath {
3677 worktree_id: worktree.read(cx).id(),
3678 path: relative_path,
3679 })
3680 }
3681
3682 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3683 let git_store = self.git_store.upgrade()?;
3684 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3685 let abs_path = worktree_store.absolutize(path, cx)?;
3686 self.snapshot.abs_path_to_repo_path(&abs_path)
3687 }
3688
3689 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3690 other
3691 .read(cx)
3692 .snapshot
3693 .work_directory_abs_path
3694 .starts_with(&self.snapshot.work_directory_abs_path)
3695 }
3696
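    /// Returns the buffer used for composing commit messages in this repository, creating it
    /// (locally or via the remote project) on first use and caching it thereafter.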
3697 pub fn open_commit_buffer(
3698 &mut self,
3699 languages: Option<Arc<LanguageRegistry>>,
3700 buffer_store: Entity<BufferStore>,
3701 cx: &mut Context<Self>,
3702 ) -> Task<Result<Entity<Buffer>>> {
3703 let id = self.id;
3704 if let Some(buffer) = self.commit_message_buffer.clone() {
3705 return Task::ready(Ok(buffer));
3706 }
3707 let this = cx.weak_entity();
3708
3709 let rx = self.send_job(None, move |state, mut cx| async move {
3710 let Some(this) = this.upgrade() else {
3711 bail!("git store was dropped");
3712 };
3713 match state {
3714 RepositoryState::Local { .. } => {
3715 this.update(&mut cx, |_, cx| {
3716 Self::open_local_commit_buffer(languages, buffer_store, cx)
3717 })?
3718 .await
3719 }
3720 RepositoryState::Remote { project_id, client } => {
3721 let request = client.request(proto::OpenCommitMessageBuffer {
3722 project_id: project_id.0,
3723 repository_id: id.to_proto(),
3724 });
3725 let response = request.await.context("requesting to open commit buffer")?;
3726 let buffer_id = BufferId::new(response.buffer_id)?;
3727 let buffer = buffer_store
3728 .update(&mut cx, |buffer_store, cx| {
3729 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3730 })?
3731 .await?;
3732 if let Some(language_registry) = languages {
3733 let git_commit_language =
3734 language_registry.language_for_name("Git Commit").await?;
3735 buffer.update(&mut cx, |buffer, cx| {
3736 buffer.set_language(Some(git_commit_language), cx);
3737 })?;
3738 }
3739 this.update(&mut cx, |this, _| {
3740 this.commit_message_buffer = Some(buffer.clone());
3741 })?;
3742 Ok(buffer)
3743 }
3744 }
3745 });
3746
3747 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3748 }
3749
3750 fn open_local_commit_buffer(
3751 language_registry: Option<Arc<LanguageRegistry>>,
3752 buffer_store: Entity<BufferStore>,
3753 cx: &mut Context<Self>,
3754 ) -> Task<Result<Entity<Buffer>>> {
3755 cx.spawn(async move |repository, cx| {
3756 let buffer = buffer_store
3757 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3758 .await?;
3759
3760 if let Some(language_registry) = language_registry {
3761 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3762 buffer.update(cx, |buffer, cx| {
3763 buffer.set_language(Some(git_commit_language), cx);
3764 })?;
3765 }
3766
3767 repository.update(cx, |repository, _| {
3768 repository.commit_message_buffer = Some(buffer.clone());
3769 })?;
3770 Ok(buffer)
3771 })
3772 }
3773
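    /// Restores the given paths to their contents at `commit`, tracking the operation
    /// as a pending revert while it runs.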
3774 pub fn checkout_files(
3775 &mut self,
3776 commit: &str,
3777 paths: Vec<RepoPath>,
3778 cx: &mut Context<Self>,
3779 ) -> Task<Result<()>> {
3780 let commit = commit.to_string();
3781 let id = self.id;
3782
3783 self.spawn_job_with_tracking(
3784 paths.clone(),
3785 pending_op::GitStatus::Reverted,
3786 cx,
3787 async move |this, cx| {
3788 this.update(cx, |this, _cx| {
3789 this.send_job(
3790 Some(format!("git checkout {}", commit).into()),
3791 move |git_repo, _| async move {
3792 match git_repo {
3793 RepositoryState::Local {
3794 backend,
3795 environment,
3796 ..
3797 } => {
3798 backend
3799 .checkout_files(commit, paths, environment.clone())
3800 .await
3801 }
3802 RepositoryState::Remote { project_id, client } => {
3803 client
3804 .request(proto::GitCheckoutFiles {
3805 project_id: project_id.0,
3806 repository_id: id.to_proto(),
3807 commit,
3808 paths: paths
3809 .into_iter()
3810 .map(|p| p.to_proto())
3811 .collect(),
3812 })
3813 .await?;
3814
3815 Ok(())
3816 }
3817 }
3818 },
3819 )
3820 })?
3821 .await?
3822 },
3823 )
3824 }
3825
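    /// Resets the current branch to `commit` using the given mode (soft or mixed),
    /// either via the local backend or by forwarding the request to the host.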
3826 pub fn reset(
3827 &mut self,
3828 commit: String,
3829 reset_mode: ResetMode,
3830 _cx: &mut App,
3831 ) -> oneshot::Receiver<Result<()>> {
3832 let id = self.id;
3833
3834 self.send_job(None, move |git_repo, _| async move {
3835 match git_repo {
3836 RepositoryState::Local {
3837 backend,
3838 environment,
3839 ..
3840 } => backend.reset(commit, reset_mode, environment).await,
3841 RepositoryState::Remote { project_id, client } => {
3842 client
3843 .request(proto::GitReset {
3844 project_id: project_id.0,
3845 repository_id: id.to_proto(),
3846 commit,
3847 mode: match reset_mode {
3848 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3849 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3850 },
3851 })
3852 .await?;
3853
3854 Ok(())
3855 }
3856 }
3857 })
3858 }
3859
3860 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3861 let id = self.id;
3862 self.send_job(None, move |git_repo, _cx| async move {
3863 match git_repo {
3864 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3865 RepositoryState::Remote { project_id, client } => {
3866 let resp = client
3867 .request(proto::GitShow {
3868 project_id: project_id.0,
3869 repository_id: id.to_proto(),
3870 commit,
3871 })
3872 .await?;
3873
3874 Ok(CommitDetails {
3875 sha: resp.sha.into(),
3876 message: resp.message.into(),
3877 commit_timestamp: resp.commit_timestamp,
3878 author_email: resp.author_email.into(),
3879 author_name: resp.author_name.into(),
3880 })
3881 }
3882 }
3883 })
3884 }
3885
3886 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3887 let id = self.id;
3888 self.send_job(None, move |git_repo, cx| async move {
3889 match git_repo {
3890 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3891 RepositoryState::Remote {
3892 client, project_id, ..
3893 } => {
3894 let response = client
3895 .request(proto::LoadCommitDiff {
3896 project_id: project_id.0,
3897 repository_id: id.to_proto(),
3898 commit,
3899 })
3900 .await?;
3901 Ok(CommitDiff {
3902 files: response
3903 .files
3904 .into_iter()
3905 .map(|file| {
3906 Ok(CommitFile {
3907 path: RepoPath::from_proto(&file.path)?,
3908 old_text: file.old_text,
3909 new_text: file.new_text,
3910 })
3911 })
3912 .collect::<Result<Vec<_>>>()?,
3913 })
3914 }
3915 }
3916 })
3917 }
3918
3919 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3920 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3921 }
3922
3923 fn save_buffers<'a>(
3924 &self,
3925 entries: impl IntoIterator<Item = &'a RepoPath>,
3926 cx: &mut Context<Self>,
3927 ) -> Vec<Task<anyhow::Result<()>>> {
3928 let mut save_futures = Vec::new();
3929 if let Some(buffer_store) = self.buffer_store(cx) {
3930 buffer_store.update(cx, |buffer_store, cx| {
3931 for path in entries {
3932 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3933 continue;
3934 };
3935 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3936 && buffer
3937 .read(cx)
3938 .file()
3939 .is_some_and(|file| file.disk_state().exists())
3940 && buffer.read(cx).has_unsaved_edits()
3941 {
3942 save_futures.push(buffer_store.save_buffer(buffer, cx));
3943 }
3944 }
3945 })
3946 }
3947 save_futures
3948 }
3949
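    /// Stages the given paths (`git add`), first saving any of their buffers that have
    /// unsaved edits. The index write is keyed on the affected paths, so a queued write
    /// that is superseded by a later write to the same entries is skipped.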
3950 pub fn stage_entries(
3951 &mut self,
3952 entries: Vec<RepoPath>,
3953 cx: &mut Context<Self>,
3954 ) -> Task<anyhow::Result<()>> {
3955 if entries.is_empty() {
3956 return Task::ready(Ok(()));
3957 }
3958 let id = self.id;
3959 let save_tasks = self.save_buffers(&entries, cx);
3960 let paths = entries
3961 .iter()
3962 .map(|p| p.as_unix_str())
3963 .collect::<Vec<_>>()
3964 .join(" ");
3965 let status = format!("git add {paths}");
3966 let job_key = GitJobKey::WriteIndex(entries.clone());
3967
3968 self.spawn_job_with_tracking(
3969 entries.clone(),
3970 pending_op::GitStatus::Staged,
3971 cx,
3972 async move |this, cx| {
3973 for save_task in save_tasks {
3974 save_task.await?;
3975 }
3976
3977 this.update(cx, |this, _| {
3978 this.send_keyed_job(
3979 Some(job_key),
3980 Some(status.into()),
3981 move |git_repo, _cx| async move {
3982 match git_repo {
3983 RepositoryState::Local {
3984 backend,
3985 environment,
3986 ..
3987 } => backend.stage_paths(entries, environment.clone()).await,
3988 RepositoryState::Remote { project_id, client } => {
3989 client
3990 .request(proto::Stage {
3991 project_id: project_id.0,
3992 repository_id: id.to_proto(),
3993 paths: entries
3994 .into_iter()
3995 .map(|repo_path| repo_path.to_proto())
3996 .collect(),
3997 })
3998 .await
3999 .context("sending stage request")?;
4000
4001 Ok(())
4002 }
4003 }
4004 },
4005 )
4006 })?
4007 .await?
4008 },
4009 )
4010 }
4011
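    /// Unstages the given paths (`git reset`), first saving any of their buffers that
    /// have unsaved edits, and tracks the operation as a pending unstage.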
4012 pub fn unstage_entries(
4013 &mut self,
4014 entries: Vec<RepoPath>,
4015 cx: &mut Context<Self>,
4016 ) -> Task<anyhow::Result<()>> {
4017 if entries.is_empty() {
4018 return Task::ready(Ok(()));
4019 }
4020 let id = self.id;
4021 let save_tasks = self.save_buffers(&entries, cx);
4022 let paths = entries
4023 .iter()
4024 .map(|p| p.as_unix_str())
4025 .collect::<Vec<_>>()
4026 .join(" ");
4027 let status = format!("git reset {paths}");
4028 let job_key = GitJobKey::WriteIndex(entries.clone());
4029
4030 self.spawn_job_with_tracking(
4031 entries.clone(),
4032 pending_op::GitStatus::Unstaged,
4033 cx,
4034 async move |this, cx| {
4035 for save_task in save_tasks {
4036 save_task.await?;
4037 }
4038
4039 this.update(cx, |this, _| {
4040 this.send_keyed_job(
4041 Some(job_key),
4042 Some(status.into()),
4043 move |git_repo, _cx| async move {
4044 match git_repo {
4045 RepositoryState::Local {
4046 backend,
4047 environment,
4048 ..
4049 } => backend.unstage_paths(entries, environment).await,
4050 RepositoryState::Remote { project_id, client } => {
4051 client
4052 .request(proto::Unstage {
4053 project_id: project_id.0,
4054 repository_id: id.to_proto(),
4055 paths: entries
4056 .into_iter()
4057 .map(|repo_path| repo_path.to_proto())
4058 .collect(),
4059 })
4060 .await
4061 .context("sending unstage request")?;
4062
4063 Ok(())
4064 }
4065 }
4066 },
4067 )
4068 })?
4069 .await?
4070 },
4071 )
4072 }
4073
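    /// Stages every entry in the cached status that is not already staged or currently
    /// being staged.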
4074 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4075 let to_stage = self
4076 .cached_status()
4077 .filter_map(|entry| {
4078 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4079 if ops.staging() || ops.staged() {
4080 None
4081 } else {
4082 Some(entry.repo_path)
4083 }
4084 } else if entry.status.staging().is_fully_staged() {
4085 None
4086 } else {
4087 Some(entry.repo_path)
4088 }
4089 })
4090 .collect();
4091 self.stage_entries(to_stage, cx)
4092 }
4093
4094 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4095 let to_unstage = self
4096 .cached_status()
4097 .filter_map(|entry| {
4098 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4099 if !ops.staging() && !ops.staged() {
4100 None
4101 } else {
4102 Some(entry.repo_path)
4103 }
4104 } else if entry.status.staging().is_fully_unstaged() {
4105 None
4106 } else {
4107 Some(entry.repo_path)
4108 }
4109 })
4110 .collect();
4111 self.unstage_entries(to_unstage, cx)
4112 }
4113
4114 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4115 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4116
4117 self.stash_entries(to_stash, cx)
4118 }
4119
4120 pub fn stash_entries(
4121 &mut self,
4122 entries: Vec<RepoPath>,
4123 cx: &mut Context<Self>,
4124 ) -> Task<anyhow::Result<()>> {
4125 let id = self.id;
4126
4127 cx.spawn(async move |this, cx| {
4128 this.update(cx, |this, _| {
4129 this.send_job(None, move |git_repo, _cx| async move {
4130 match git_repo {
4131 RepositoryState::Local {
4132 backend,
4133 environment,
4134 ..
4135 } => backend.stash_paths(entries, environment).await,
4136 RepositoryState::Remote { project_id, client } => {
4137 client
4138 .request(proto::Stash {
4139 project_id: project_id.0,
4140 repository_id: id.to_proto(),
4141 paths: entries
4142 .into_iter()
4143 .map(|repo_path| repo_path.to_proto())
4144 .collect(),
4145 })
4146 .await
4147 .context("sending stash request")?;
4148 Ok(())
4149 }
4150 }
4151 })
4152 })?
4153 .await??;
4154 Ok(())
4155 })
4156 }
4157
4158 pub fn stash_pop(
4159 &mut self,
4160 index: Option<usize>,
4161 cx: &mut Context<Self>,
4162 ) -> Task<anyhow::Result<()>> {
4163 let id = self.id;
4164 cx.spawn(async move |this, cx| {
4165 this.update(cx, |this, _| {
4166 this.send_job(None, move |git_repo, _cx| async move {
4167 match git_repo {
4168 RepositoryState::Local {
4169 backend,
4170 environment,
4171 ..
4172 } => backend.stash_pop(index, environment).await,
4173 RepositoryState::Remote { project_id, client } => {
4174 client
4175 .request(proto::StashPop {
4176 project_id: project_id.0,
4177 repository_id: id.to_proto(),
4178 stash_index: index.map(|i| i as u64),
4179 })
4180 .await
4181 .context("sending stash pop request")?;
4182 Ok(())
4183 }
4184 }
4185 })
4186 })?
4187 .await??;
4188 Ok(())
4189 })
4190 }
4191
4192 pub fn stash_apply(
4193 &mut self,
4194 index: Option<usize>,
4195 cx: &mut Context<Self>,
4196 ) -> Task<anyhow::Result<()>> {
4197 let id = self.id;
4198 cx.spawn(async move |this, cx| {
4199 this.update(cx, |this, _| {
4200 this.send_job(None, move |git_repo, _cx| async move {
4201 match git_repo {
4202 RepositoryState::Local {
4203 backend,
4204 environment,
4205 ..
4206 } => backend.stash_apply(index, environment).await,
4207 RepositoryState::Remote { project_id, client } => {
4208 client
4209 .request(proto::StashApply {
4210 project_id: project_id.0,
4211 repository_id: id.to_proto(),
4212 stash_index: index.map(|i| i as u64),
4213 })
4214 .await
4215 .context("sending stash apply request")?;
4216 Ok(())
4217 }
4218 }
4219 })
4220 })?
4221 .await??;
4222 Ok(())
4223 })
4224 }
4225
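    /// Drops a stash entry (optionally selected by `index`). For local repositories the
    /// stash list is reloaded afterwards and the updated snapshot is sent downstream.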
4226 pub fn stash_drop(
4227 &mut self,
4228 index: Option<usize>,
4229 cx: &mut Context<Self>,
4230 ) -> oneshot::Receiver<anyhow::Result<()>> {
4231 let id = self.id;
4232 let updates_tx = self
4233 .git_store()
4234 .and_then(|git_store| match &git_store.read(cx).state {
4235 GitStoreState::Local { downstream, .. } => downstream
4236 .as_ref()
4237 .map(|downstream| downstream.updates_tx.clone()),
4238 _ => None,
4239 });
4240 let this = cx.weak_entity();
4241 self.send_job(None, move |git_repo, mut cx| async move {
4242 match git_repo {
4243 RepositoryState::Local {
4244 backend,
4245 environment,
4246 ..
4247 } => {
4248 // TODO would be nice to not have to do this manually
4249 let result = backend.stash_drop(index, environment).await;
4250 if result.is_ok()
4251 && let Ok(stash_entries) = backend.stash_entries().await
4252 {
4253 let snapshot = this.update(&mut cx, |this, cx| {
4254 this.snapshot.stash_entries = stash_entries;
4255 cx.emit(RepositoryEvent::StashEntriesChanged);
4256 this.snapshot.clone()
4257 })?;
4258 if let Some(updates_tx) = updates_tx {
4259 updates_tx
4260 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4261 .ok();
4262 }
4263 }
4264
4265 result
4266 }
4267 RepositoryState::Remote { project_id, client } => {
4268 client
4269 .request(proto::StashDrop {
4270 project_id: project_id.0,
4271 repository_id: id.to_proto(),
4272 stash_index: index.map(|i| i as u64),
4273 })
4274 .await
                        .context("sending stash drop request")?;
4276 Ok(())
4277 }
4278 }
4279 })
4280 }
4281
4282 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4283 let id = self.id;
4284 self.send_job(
4285 Some(format!("git hook {}", hook.as_str()).into()),
4286 move |git_repo, _cx| async move {
4287 match git_repo {
4288 RepositoryState::Local {
4289 backend,
4290 environment,
4291 } => backend.run_hook(hook, environment.clone()).await,
4292 RepositoryState::Remote { project_id, client } => {
4293 client
4294 .request(proto::RunGitHook {
4295 project_id: project_id.0,
4296 repository_id: id.to_proto(),
4297 hook: hook.to_proto(),
4298 })
4299 .await?;
4300
4301 Ok(())
4302 }
4303 }
4304 },
4305 )
4306 }
4307
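    /// Runs the pre-commit hook and then commits the staged changes with the given
    /// message, optional author override, and commit options.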
4308 pub fn commit(
4309 &mut self,
4310 message: SharedString,
4311 name_and_email: Option<(SharedString, SharedString)>,
4312 options: CommitOptions,
4313 askpass: AskPassDelegate,
4314 cx: &mut App,
4315 ) -> oneshot::Receiver<Result<()>> {
4316 let id = self.id;
4317 let askpass_delegates = self.askpass_delegates.clone();
4318 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4319
4320 let rx = self.run_hook(RunHook::PreCommit, cx);
4321
4322 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4323 rx.await??;
4324
4325 match git_repo {
4326 RepositoryState::Local {
4327 backend,
4328 environment,
4329 ..
4330 } => {
4331 backend
4332 .commit(message, name_and_email, options, askpass, environment)
4333 .await
4334 }
4335 RepositoryState::Remote { project_id, client } => {
4336 askpass_delegates.lock().insert(askpass_id, askpass);
4337 let _defer = util::defer(|| {
4338 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4339 debug_assert!(askpass_delegate.is_some());
4340 });
4341 let (name, email) = name_and_email.unzip();
4342 client
4343 .request(proto::Commit {
4344 project_id: project_id.0,
4345 repository_id: id.to_proto(),
4346 message: String::from(message),
4347 name: name.map(String::from),
4348 email: email.map(String::from),
4349 options: Some(proto::commit::CommitOptions {
4350 amend: options.amend,
4351 signoff: options.signoff,
4352 }),
4353 askpass_id,
4354 })
4355 .await
4356 .context("sending commit request")?;
4357
4358 Ok(())
4359 }
4360 }
4361 })
4362 }
4363
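    /// Fetches from the remote described by `fetch_options`, using `askpass` to answer
    /// any credential prompts.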
4364 pub fn fetch(
4365 &mut self,
4366 fetch_options: FetchOptions,
4367 askpass: AskPassDelegate,
4368 _cx: &mut App,
4369 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4370 let askpass_delegates = self.askpass_delegates.clone();
4371 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4372 let id = self.id;
4373
4374 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4375 match git_repo {
4376 RepositoryState::Local {
4377 backend,
4378 environment,
4379 ..
4380 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4381 RepositoryState::Remote { project_id, client } => {
4382 askpass_delegates.lock().insert(askpass_id, askpass);
4383 let _defer = util::defer(|| {
4384 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4385 debug_assert!(askpass_delegate.is_some());
4386 });
4387
4388 let response = client
4389 .request(proto::Fetch {
4390 project_id: project_id.0,
4391 repository_id: id.to_proto(),
4392 askpass_id,
4393 remote: fetch_options.to_proto(),
4394 })
4395 .await
4396 .context("sending fetch request")?;
4397
4398 Ok(RemoteCommandOutput {
4399 stdout: response.stdout,
4400 stderr: response.stderr,
4401 })
4402 }
4403 }
4404 })
4405 }
4406
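    /// Pushes `branch` to `remote`. After a successful local push, the branch list is
    /// reloaded so upstream tracking stays current, and the updated snapshot is sent
    /// downstream.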
4407 pub fn push(
4408 &mut self,
4409 branch: SharedString,
4410 remote: SharedString,
4411 options: Option<PushOptions>,
4412 askpass: AskPassDelegate,
4413 cx: &mut Context<Self>,
4414 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4415 let askpass_delegates = self.askpass_delegates.clone();
4416 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4417 let id = self.id;
4418
4419 let args = options
4420 .map(|option| match option {
4421 PushOptions::SetUpstream => " --set-upstream",
4422 PushOptions::Force => " --force-with-lease",
4423 })
4424 .unwrap_or("");
4425
4426 let updates_tx = self
4427 .git_store()
4428 .and_then(|git_store| match &git_store.read(cx).state {
4429 GitStoreState::Local { downstream, .. } => downstream
4430 .as_ref()
4431 .map(|downstream| downstream.updates_tx.clone()),
4432 _ => None,
4433 });
4434
4435 let this = cx.weak_entity();
4436 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4438 move |git_repo, mut cx| async move {
4439 match git_repo {
4440 RepositoryState::Local {
4441 backend,
4442 environment,
4443 ..
4444 } => {
4445 let result = backend
4446 .push(
4447 branch.to_string(),
4448 remote.to_string(),
4449 options,
4450 askpass,
4451 environment.clone(),
4452 cx.clone(),
4453 )
4454 .await;
4455 // TODO would be nice to not have to do this manually
4456 if result.is_ok() {
4457 let branches = backend.branches().await?;
4458 let branch = branches.into_iter().find(|branch| branch.is_head);
4459 log::info!("head branch after scan is {branch:?}");
4460 let snapshot = this.update(&mut cx, |this, cx| {
4461 this.snapshot.branch = branch;
4462 cx.emit(RepositoryEvent::BranchChanged);
4463 this.snapshot.clone()
4464 })?;
4465 if let Some(updates_tx) = updates_tx {
4466 updates_tx
4467 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4468 .ok();
4469 }
4470 }
4471 result
4472 }
4473 RepositoryState::Remote { project_id, client } => {
4474 askpass_delegates.lock().insert(askpass_id, askpass);
4475 let _defer = util::defer(|| {
4476 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4477 debug_assert!(askpass_delegate.is_some());
4478 });
4479 let response = client
4480 .request(proto::Push {
4481 project_id: project_id.0,
4482 repository_id: id.to_proto(),
4483 askpass_id,
4484 branch_name: branch.to_string(),
4485 remote_name: remote.to_string(),
4486 options: options.map(|options| match options {
4487 PushOptions::Force => proto::push::PushOptions::Force,
4488 PushOptions::SetUpstream => {
4489 proto::push::PushOptions::SetUpstream
4490 }
4491 }
4492 as i32),
4493 })
4494 .await
4495 .context("sending push request")?;
4496
4497 Ok(RemoteCommandOutput {
4498 stdout: response.stdout,
4499 stderr: response.stderr,
4500 })
4501 }
4502 }
4503 },
4504 )
4505 }
4506
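    /// Pulls from `remote` (optionally a specific branch), with or without `--rebase`,
    /// using `askpass` to answer any credential prompts.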
4507 pub fn pull(
4508 &mut self,
4509 branch: Option<SharedString>,
4510 remote: SharedString,
4511 rebase: bool,
4512 askpass: AskPassDelegate,
4513 _cx: &mut App,
4514 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4515 let askpass_delegates = self.askpass_delegates.clone();
4516 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4517 let id = self.id;
4518
4519 let mut status = "git pull".to_string();
4520 if rebase {
4521 status.push_str(" --rebase");
4522 }
4523 status.push_str(&format!(" {}", remote));
4524 if let Some(b) = &branch {
4525 status.push_str(&format!(" {}", b));
4526 }
4527
4528 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4529 match git_repo {
4530 RepositoryState::Local {
4531 backend,
4532 environment,
4533 ..
4534 } => {
4535 backend
4536 .pull(
4537 branch.as_ref().map(|b| b.to_string()),
4538 remote.to_string(),
4539 rebase,
4540 askpass,
4541 environment.clone(),
4542 cx,
4543 )
4544 .await
4545 }
4546 RepositoryState::Remote { project_id, client } => {
4547 askpass_delegates.lock().insert(askpass_id, askpass);
4548 let _defer = util::defer(|| {
4549 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4550 debug_assert!(askpass_delegate.is_some());
4551 });
4552 let response = client
4553 .request(proto::Pull {
4554 project_id: project_id.0,
4555 repository_id: id.to_proto(),
4556 askpass_id,
4557 rebase,
4558 branch_name: branch.as_ref().map(|b| b.to_string()),
4559 remote_name: remote.to_string(),
4560 })
4561 .await
4562 .context("sending pull request")?;
4563
4564 Ok(RemoteCommandOutput {
4565 stdout: response.stdout,
4566 stderr: response.stderr,
4567 })
4568 }
4569 }
4570 })
4571 }
4572
4573 fn spawn_set_index_text_job(
4574 &mut self,
4575 path: RepoPath,
4576 content: Option<String>,
4577 hunk_staging_operation_count: Option<usize>,
4578 cx: &mut Context<Self>,
4579 ) -> oneshot::Receiver<anyhow::Result<()>> {
4580 let id = self.id;
4581 let this = cx.weak_entity();
4582 let git_store = self.git_store.clone();
4583 self.send_keyed_job(
4584 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4585 None,
4586 move |git_repo, mut cx| async move {
4587 log::debug!(
4588 "start updating index text for buffer {}",
4589 path.as_unix_str()
4590 );
4591 match git_repo {
4592 RepositoryState::Local {
4593 backend,
4594 environment,
4595 ..
4596 } => {
4597 backend
4598 .set_index_text(path.clone(), content, environment.clone())
4599 .await?;
4600 }
4601 RepositoryState::Remote { project_id, client } => {
4602 client
4603 .request(proto::SetIndexText {
4604 project_id: project_id.0,
4605 repository_id: id.to_proto(),
4606 path: path.to_proto(),
4607 text: content,
4608 })
4609 .await?;
4610 }
4611 }
4612 log::debug!(
4613 "finish updating index text for buffer {}",
4614 path.as_unix_str()
4615 );
4616
4617 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4618 let project_path = this
4619 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4620 .ok()
4621 .flatten();
4622 git_store.update(&mut cx, |git_store, cx| {
4623 let buffer_id = git_store
4624 .buffer_store
4625 .read(cx)
4626 .get_by_path(&project_path?)?
4627 .read(cx)
4628 .remote_id();
4629 let diff_state = git_store.diffs.get(&buffer_id)?;
4630 diff_state.update(cx, |diff_state, _| {
4631 diff_state.hunk_staging_operation_count_as_of_write =
4632 hunk_staging_operation_count;
4633 });
4634 Some(())
4635 })?;
4636 }
4637 Ok(())
4638 },
4639 )
4640 }
4641
4642 pub fn get_remotes(
4643 &mut self,
4644 branch_name: Option<String>,
4645 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4646 let id = self.id;
4647 self.send_job(None, move |repo, _cx| async move {
4648 match repo {
4649 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4650 RepositoryState::Remote { project_id, client } => {
4651 let response = client
4652 .request(proto::GetRemotes {
4653 project_id: project_id.0,
4654 repository_id: id.to_proto(),
4655 branch_name,
4656 })
4657 .await?;
4658
4659 let remotes = response
4660 .remotes
4661 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4665 .collect();
4666
4667 Ok(remotes)
4668 }
4669 }
4670 })
4671 }
4672
4673 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4674 let id = self.id;
4675 self.send_job(None, move |repo, _| async move {
4676 match repo {
4677 RepositoryState::Local { backend, .. } => backend.branches().await,
4678 RepositoryState::Remote { project_id, client } => {
4679 let response = client
4680 .request(proto::GitGetBranches {
4681 project_id: project_id.0,
4682 repository_id: id.to_proto(),
4683 })
4684 .await?;
4685
4686 let branches = response
4687 .branches
4688 .into_iter()
4689 .map(|branch| proto_to_branch(&branch))
4690 .collect();
4691
4692 Ok(branches)
4693 }
4694 }
4695 })
4696 }
4697
4698 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4699 let id = self.id;
4700 self.send_job(None, move |repo, _| async move {
4701 match repo {
4702 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4703 RepositoryState::Remote { project_id, client } => {
4704 let response = client
4705 .request(proto::GitGetWorktrees {
4706 project_id: project_id.0,
4707 repository_id: id.to_proto(),
4708 })
4709 .await?;
4710
4711 let worktrees = response
4712 .worktrees
4713 .into_iter()
4714 .map(|worktree| proto_to_worktree(&worktree))
4715 .collect();
4716
4717 Ok(worktrees)
4718 }
4719 }
4720 })
4721 }
4722
4723 pub fn create_worktree(
4724 &mut self,
4725 name: String,
4726 path: PathBuf,
4727 commit: Option<String>,
4728 ) -> oneshot::Receiver<Result<()>> {
4729 let id = self.id;
4730 self.send_job(
4731 Some("git worktree add".into()),
4732 move |repo, _cx| async move {
4733 match repo {
4734 RepositoryState::Local { backend, .. } => {
4735 backend.create_worktree(name, path, commit).await
4736 }
4737 RepositoryState::Remote { project_id, client } => {
4738 client
4739 .request(proto::GitCreateWorktree {
4740 project_id: project_id.0,
4741 repository_id: id.to_proto(),
4742 name,
4743 directory: path.to_string_lossy().to_string(),
4744 commit,
4745 })
4746 .await?;
4747
4748 Ok(())
4749 }
4750 }
4751 },
4752 )
4753 }
4754
4755 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4756 let id = self.id;
4757 self.send_job(None, move |repo, _| async move {
4758 match repo {
4759 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4760 RepositoryState::Remote { project_id, client } => {
4761 let response = client
4762 .request(proto::GetDefaultBranch {
4763 project_id: project_id.0,
4764 repository_id: id.to_proto(),
4765 })
4766 .await?;
4767
4768 anyhow::Ok(response.branch.map(SharedString::from))
4769 }
4770 }
4771 })
4772 }
4773
4774 pub fn diff_tree(
4775 &mut self,
4776 diff_type: DiffTreeType,
4777 _cx: &App,
4778 ) -> oneshot::Receiver<Result<TreeDiff>> {
4779 let repository_id = self.snapshot.id;
4780 self.send_job(None, move |repo, _cx| async move {
4781 match repo {
4782 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4783 RepositoryState::Remote { client, project_id } => {
4784 let response = client
4785 .request(proto::GetTreeDiff {
4786 project_id: project_id.0,
4787 repository_id: repository_id.0,
4788 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4789 base: diff_type.base().to_string(),
4790 head: diff_type.head().to_string(),
4791 })
4792 .await?;
4793
4794 let entries = response
4795 .entries
4796 .into_iter()
4797 .filter_map(|entry| {
4798 let status = match entry.status() {
4799 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4800 proto::tree_diff_status::Status::Modified => {
4801 TreeDiffStatus::Modified {
4802 old: git::Oid::from_str(
4803 &entry.oid.context("missing oid").log_err()?,
4804 )
4805 .log_err()?,
4806 }
4807 }
4808 proto::tree_diff_status::Status::Deleted => {
4809 TreeDiffStatus::Deleted {
4810 old: git::Oid::from_str(
4811 &entry.oid.context("missing oid").log_err()?,
4812 )
4813 .log_err()?,
4814 }
4815 }
4816 };
4817 Some((
4818 RepoPath::from_rel_path(
4819 &RelPath::from_proto(&entry.path).log_err()?,
4820 ),
4821 status,
4822 ))
4823 })
4824 .collect();
4825
4826 Ok(TreeDiff { entries })
4827 }
4828 }
4829 })
4830 }
4831
4832 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4833 let id = self.id;
4834 self.send_job(None, move |repo, _cx| async move {
4835 match repo {
4836 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4837 RepositoryState::Remote { project_id, client } => {
4838 let response = client
4839 .request(proto::GitDiff {
4840 project_id: project_id.0,
4841 repository_id: id.to_proto(),
4842 diff_type: match diff_type {
4843 DiffType::HeadToIndex => {
4844 proto::git_diff::DiffType::HeadToIndex.into()
4845 }
4846 DiffType::HeadToWorktree => {
4847 proto::git_diff::DiffType::HeadToWorktree.into()
4848 }
4849 },
4850 })
4851 .await?;
4852
4853 Ok(response.diff)
4854 }
4855 }
4856 })
4857 }
4858
4859 pub fn create_branch(
4860 &mut self,
4861 branch_name: String,
4862 base_branch: Option<String>,
4863 ) -> oneshot::Receiver<Result<()>> {
4864 let id = self.id;
4865 let status_msg = if let Some(ref base) = base_branch {
4866 format!("git switch -c {branch_name} {base}").into()
4867 } else {
4868 format!("git switch -c {branch_name}").into()
4869 };
4870 self.send_job(Some(status_msg), move |repo, _cx| async move {
4871 match repo {
4872 RepositoryState::Local { backend, .. } => {
4873 backend.create_branch(branch_name, base_branch).await
4874 }
4875 RepositoryState::Remote { project_id, client } => {
4876 client
4877 .request(proto::GitCreateBranch {
4878 project_id: project_id.0,
4879 repository_id: id.to_proto(),
4880 branch_name,
4881 })
4882 .await?;
4883
4884 Ok(())
4885 }
4886 }
4887 })
4888 }
4889
4890 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4891 let id = self.id;
4892 self.send_job(
4893 Some(format!("git switch {branch_name}").into()),
4894 move |repo, _cx| async move {
4895 match repo {
4896 RepositoryState::Local { backend, .. } => {
4897 backend.change_branch(branch_name).await
4898 }
4899 RepositoryState::Remote { project_id, client } => {
4900 client
4901 .request(proto::GitChangeBranch {
4902 project_id: project_id.0,
4903 repository_id: id.to_proto(),
4904 branch_name,
4905 })
4906 .await?;
4907
4908 Ok(())
4909 }
4910 }
4911 },
4912 )
4913 }
4914
4915 pub fn rename_branch(
4916 &mut self,
4917 branch: String,
4918 new_name: String,
4919 ) -> oneshot::Receiver<Result<()>> {
4920 let id = self.id;
4921 self.send_job(
4922 Some(format!("git branch -m {branch} {new_name}").into()),
4923 move |repo, _cx| async move {
4924 match repo {
4925 RepositoryState::Local { backend, .. } => {
4926 backend.rename_branch(branch, new_name).await
4927 }
4928 RepositoryState::Remote { project_id, client } => {
4929 client
4930 .request(proto::GitRenameBranch {
4931 project_id: project_id.0,
4932 repository_id: id.to_proto(),
4933 branch,
4934 new_name,
4935 })
4936 .await?;
4937
4938 Ok(())
4939 }
4940 }
4941 },
4942 )
4943 }
4944
4945 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4946 let id = self.id;
4947 self.send_job(None, move |repo, _cx| async move {
4948 match repo {
4949 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4950 RepositoryState::Remote { project_id, client } => {
4951 let response = client
4952 .request(proto::CheckForPushedCommits {
4953 project_id: project_id.0,
4954 repository_id: id.to_proto(),
4955 })
4956 .await?;
4957
4958 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4959
4960 Ok(branches)
4961 }
4962 }
4963 })
4964 }
4965
4966 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4967 self.send_job(None, |repo, _cx| async move {
4968 match repo {
4969 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4970 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4971 }
4972 })
4973 }
4974
4975 pub fn restore_checkpoint(
4976 &mut self,
4977 checkpoint: GitRepositoryCheckpoint,
4978 ) -> oneshot::Receiver<Result<()>> {
4979 self.send_job(None, move |repo, _cx| async move {
4980 match repo {
4981 RepositoryState::Local { backend, .. } => {
4982 backend.restore_checkpoint(checkpoint).await
4983 }
4984 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4985 }
4986 })
4987 }
4988
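    /// Applies a `proto::UpdateRepository` message received from the host, updating the
    /// branch, head commit, merge state, stash entries, and per-path statuses, emitting
    /// the corresponding events and pruning completed pending operations.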
4989 pub(crate) fn apply_remote_update(
4990 &mut self,
4991 update: proto::UpdateRepository,
4992 cx: &mut Context<Self>,
4993 ) -> Result<()> {
4994 let conflicted_paths = TreeSet::from_ordered_entries(
4995 update
4996 .current_merge_conflicts
4997 .into_iter()
4998 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4999 );
5000 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5001 let new_head_commit = update
5002 .head_commit_details
5003 .as_ref()
5004 .map(proto_to_commit_details);
5005 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5006 cx.emit(RepositoryEvent::BranchChanged)
5007 }
5008 self.snapshot.branch = new_branch;
5009 self.snapshot.head_commit = new_head_commit;
5010
5011 self.snapshot.merge.conflicted_paths = conflicted_paths;
5012 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5013 let new_stash_entries = GitStash {
5014 entries: update
5015 .stash_entries
5016 .iter()
5017 .filter_map(|entry| proto_to_stash(entry).ok())
5018 .collect(),
5019 };
5020 if self.snapshot.stash_entries != new_stash_entries {
5021 cx.emit(RepositoryEvent::StashEntriesChanged)
5022 }
5023 self.snapshot.stash_entries = new_stash_entries;
5024
5025 let edits = update
5026 .removed_statuses
5027 .into_iter()
5028 .filter_map(|path| {
5029 Some(sum_tree::Edit::Remove(PathKey(
5030 RelPath::from_proto(&path).log_err()?,
5031 )))
5032 })
5033 .chain(
5034 update
5035 .updated_statuses
5036 .into_iter()
5037 .filter_map(|updated_status| {
5038 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5039 }),
5040 )
5041 .collect::<Vec<_>>();
5042 if !edits.is_empty() {
5043 cx.emit(RepositoryEvent::StatusesChanged);
5044 }
5045 self.snapshot.statuses_by_path.edit(edits, ());
5046 if update.is_last_update {
5047 self.snapshot.scan_id = update.scan_id;
5048 }
5049 self.clear_pending_ops(cx);
5050 Ok(())
5051 }
5052
5053 pub fn compare_checkpoints(
5054 &mut self,
5055 left: GitRepositoryCheckpoint,
5056 right: GitRepositoryCheckpoint,
5057 ) -> oneshot::Receiver<Result<bool>> {
5058 self.send_job(None, move |repo, _cx| async move {
5059 match repo {
5060 RepositoryState::Local { backend, .. } => {
5061 backend.compare_checkpoints(left, right).await
5062 }
5063 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5064 }
5065 })
5066 }
5067
5068 pub fn diff_checkpoints(
5069 &mut self,
5070 base_checkpoint: GitRepositoryCheckpoint,
5071 target_checkpoint: GitRepositoryCheckpoint,
5072 ) -> oneshot::Receiver<Result<String>> {
5073 self.send_job(None, move |repo, _cx| async move {
5074 match repo {
5075 RepositoryState::Local { backend, .. } => {
5076 backend
5077 .diff_checkpoints(base_checkpoint, target_checkpoint)
5078 .await
5079 }
5080 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5081 }
5082 })
5083 }
5084
5085 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5086 let updated = SumTree::from_iter(
5087 self.pending_ops.iter().filter_map(|ops| {
5088 let inner_ops: Vec<PendingOp> =
5089 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5090 if inner_ops.is_empty() {
5091 None
5092 } else {
5093 Some(PendingOps {
5094 repo_path: ops.repo_path.clone(),
5095 ops: inner_ops,
5096 })
5097 }
5098 }),
5099 (),
5100 );
5101
5102 if updated != self.pending_ops {
5103 cx.emit(RepositoryEvent::PendingOpsChanged {
5104 pending_ops: self.pending_ops.clone(),
5105 })
5106 }
5107
5108 self.pending_ops = updated;
5109 }
5110
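    /// Schedules a full git status scan. The job is keyed on `ReloadGitState`, so a
    /// queued scan that has been superseded by a newer one is skipped.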
5111 fn schedule_scan(
5112 &mut self,
5113 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5114 cx: &mut Context<Self>,
5115 ) {
5116 let this = cx.weak_entity();
5117 let _ = self.send_keyed_job(
5118 Some(GitJobKey::ReloadGitState),
5119 None,
5120 |state, mut cx| async move {
5121 log::debug!("run scheduled git status scan");
5122
5123 let Some(this) = this.upgrade() else {
5124 return Ok(());
5125 };
5126 let RepositoryState::Local { backend, .. } = state else {
5127 bail!("not a local repository")
5128 };
5129 let (snapshot, events) = this
5130 .update(&mut cx, |this, _| {
5131 this.paths_needing_status_update.clear();
5132 compute_snapshot(
5133 this.id,
5134 this.work_directory_abs_path.clone(),
5135 this.snapshot.clone(),
5136 backend.clone(),
5137 )
5138 })?
5139 .await?;
5140 this.update(&mut cx, |this, cx| {
5141 this.snapshot = snapshot.clone();
5142 this.clear_pending_ops(cx);
5143 for event in events {
5144 cx.emit(event);
5145 }
5146 })?;
5147 if let Some(updates_tx) = updates_tx {
5148 updates_tx
5149 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5150 .ok();
5151 }
5152 Ok(())
5153 },
5154 );
5155 }
5156
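    /// Spawns the worker task that executes git jobs for a local repository: it resolves
    /// the project environment, opens the repository backend, and then drains the job
    /// queue, skipping keyed jobs that are superseded by a later job with the same key.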
5157 fn spawn_local_git_worker(
5158 work_directory_abs_path: Arc<Path>,
5159 dot_git_abs_path: Arc<Path>,
5160 _repository_dir_abs_path: Arc<Path>,
5161 _common_dir_abs_path: Arc<Path>,
5162 project_environment: WeakEntity<ProjectEnvironment>,
5163 fs: Arc<dyn Fs>,
5164 cx: &mut Context<Self>,
5165 ) -> mpsc::UnboundedSender<GitJob> {
5166 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5167
5168 cx.spawn(async move |_, cx| {
5169 let environment = project_environment
5170 .upgrade()
5171 .context("missing project environment")?
5172 .update(cx, |project_environment, cx| {
                    project_environment.local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
5174 })?
5175 .await
5176 .unwrap_or_else(|| {
5177 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5178 HashMap::default()
5179 });
5180 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5181 let backend = cx
5182 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
5185 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5186 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5187 })
5188 .await?;
5189
5190 if let Some(git_hosting_provider_registry) =
5191 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5192 {
5193 git_hosting_providers::register_additional_providers(
5194 git_hosting_provider_registry,
5195 backend.clone(),
5196 );
5197 }
5198
5199 let state = RepositoryState::Local {
5200 backend,
5201 environment: Arc::new(environment),
5202 };
5203 let mut jobs = VecDeque::new();
5204 loop {
5205 while let Ok(Some(next_job)) = job_rx.try_next() {
5206 jobs.push_back(next_job);
5207 }
5208
5209 if let Some(job) = jobs.pop_front() {
5210 if let Some(current_key) = &job.key
5211 && jobs
5212 .iter()
5213 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5214 {
5215 continue;
5216 }
5217 (job.job)(state.clone(), cx).await;
5218 } else if let Some(job) = job_rx.next().await {
5219 jobs.push_back(job);
5220 } else {
5221 break;
5222 }
5223 }
5224 anyhow::Ok(())
5225 })
5226 .detach_and_log_err(cx);
5227
5228 job_tx
5229 }
5230
5231 fn spawn_remote_git_worker(
5232 project_id: ProjectId,
5233 client: AnyProtoClient,
5234 cx: &mut Context<Self>,
5235 ) -> mpsc::UnboundedSender<GitJob> {
5236 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5237
5238 cx.spawn(async move |_, cx| {
5239 let state = RepositoryState::Remote { project_id, client };
5240 let mut jobs = VecDeque::new();
5241 loop {
5242 while let Ok(Some(next_job)) = job_rx.try_next() {
5243 jobs.push_back(next_job);
5244 }
5245
5246 if let Some(job) = jobs.pop_front() {
5247 if let Some(current_key) = &job.key
5248 && jobs
5249 .iter()
5250 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5251 {
5252 continue;
5253 }
5254 (job.job)(state.clone(), cx).await;
5255 } else if let Some(job) = job_rx.next().await {
5256 jobs.push_back(job);
5257 } else {
5258 break;
5259 }
5260 }
5261 anyhow::Ok(())
5262 })
5263 .detach_and_log_err(cx);
5264
5265 job_tx
5266 }
5267
5268 fn load_staged_text(
5269 &mut self,
5270 buffer_id: BufferId,
5271 repo_path: RepoPath,
5272 cx: &App,
5273 ) -> Task<Result<Option<String>>> {
5274 let rx = self.send_job(None, move |state, _| async move {
5275 match state {
5276 RepositoryState::Local { backend, .. } => {
5277 anyhow::Ok(backend.load_index_text(repo_path).await)
5278 }
5279 RepositoryState::Remote { project_id, client } => {
5280 let response = client
5281 .request(proto::OpenUnstagedDiff {
5282 project_id: project_id.to_proto(),
5283 buffer_id: buffer_id.to_proto(),
5284 })
5285 .await?;
5286 Ok(response.staged_text)
5287 }
5288 }
5289 });
5290 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5291 }
5292
5293 fn load_committed_text(
5294 &mut self,
5295 buffer_id: BufferId,
5296 repo_path: RepoPath,
5297 cx: &App,
5298 ) -> Task<Result<DiffBasesChange>> {
5299 let rx = self.send_job(None, move |state, _| async move {
5300 match state {
5301 RepositoryState::Local { backend, .. } => {
5302 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5303 let staged_text = backend.load_index_text(repo_path).await;
5304 let diff_bases_change = if committed_text == staged_text {
5305 DiffBasesChange::SetBoth(committed_text)
5306 } else {
5307 DiffBasesChange::SetEach {
5308 index: staged_text,
5309 head: committed_text,
5310 }
5311 };
5312 anyhow::Ok(diff_bases_change)
5313 }
5314 RepositoryState::Remote { project_id, client } => {
5315 use proto::open_uncommitted_diff_response::Mode;
5316
5317 let response = client
5318 .request(proto::OpenUncommittedDiff {
5319 project_id: project_id.to_proto(),
5320 buffer_id: buffer_id.to_proto(),
5321 })
5322 .await?;
5323 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5324 let bases = match mode {
5325 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5326 Mode::IndexAndHead => DiffBasesChange::SetEach {
5327 head: response.committed_text,
5328 index: response.staged_text,
5329 },
5330 };
5331 Ok(bases)
5332 }
5333 }
5334 });
5335
5336 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5337 }
5338 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5339 let repository_id = self.snapshot.id;
5340 let rx = self.send_job(None, move |state, _| async move {
5341 match state {
5342 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5343 RepositoryState::Remote { client, project_id } => {
5344 let response = client
5345 .request(proto::GetBlobContent {
5346 project_id: project_id.to_proto(),
5347 repository_id: repository_id.0,
5348 oid: oid.to_string(),
5349 })
5350 .await?;
5351 Ok(response.content)
5352 }
5353 }
5354 });
5355 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5356 }
5357
5358 fn paths_changed(
5359 &mut self,
5360 paths: Vec<RepoPath>,
5361 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5362 cx: &mut Context<Self>,
5363 ) {
5364 self.paths_needing_status_update.extend(paths);
5365
5366 let this = cx.weak_entity();
5367 let _ = self.send_keyed_job(
5368 Some(GitJobKey::RefreshStatuses),
5369 None,
5370 |state, mut cx| async move {
5371 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5372 (
5373 this.snapshot.clone(),
5374 mem::take(&mut this.paths_needing_status_update),
5375 )
5376 })?;
5377 let RepositoryState::Local { backend, .. } = state else {
5378 bail!("not a local repository")
5379 };
5380
5381 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5382 if paths.is_empty() {
5383 return Ok(());
5384 }
5385 let statuses = backend.status(&paths).await?;
5386 let stash_entries = backend.stash_entries().await?;
5387
5388 let changed_path_statuses = cx
5389 .background_spawn(async move {
5390 let mut changed_path_statuses = Vec::new();
5391 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5392 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5393
5394 for (repo_path, status) in &*statuses.entries {
5395 changed_paths.remove(repo_path);
5396 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5397 && cursor.item().is_some_and(|entry| entry.status == *status)
5398 {
5399 continue;
5400 }
5401
5402 changed_path_statuses.push(Edit::Insert(StatusEntry {
5403 repo_path: repo_path.clone(),
5404 status: *status,
5405 }));
5406 }
5407 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5408 for path in changed_paths.into_iter() {
5409 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5410 changed_path_statuses
5411 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5412 }
5413 }
5414 changed_path_statuses
5415 })
5416 .await;
5417
5418 this.update(&mut cx, |this, cx| {
5419 if this.snapshot.stash_entries != stash_entries {
5420 cx.emit(RepositoryEvent::StashEntriesChanged);
5421 this.snapshot.stash_entries = stash_entries;
5422 }
5423
5424 if !changed_path_statuses.is_empty() {
5425 cx.emit(RepositoryEvent::StatusesChanged);
5426 this.snapshot
5427 .statuses_by_path
5428 .edit(changed_path_statuses, ());
5429 this.snapshot.scan_id += 1;
5430 }
5431
5432 if let Some(updates_tx) = updates_tx {
5433 updates_tx
5434 .unbounded_send(DownstreamUpdate::UpdateRepository(
5435 this.snapshot.clone(),
5436 ))
5437 .ok();
5438 }
5439 })
5440 },
5441 );
5442 }
5443
    /// Returns a currently running git command and when it started, if any.
5445 pub fn current_job(&self) -> Option<JobInfo> {
5446 self.active_jobs.values().next().cloned()
5447 }
5448
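    /// Returns a receiver that resolves once every job queued before this call has been
    /// processed.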
5449 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5450 self.send_job(None, |_, _| async {})
5451 }
5452
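    /// Records a pending operation with the given git status for each path, runs `f`,
    /// and then marks those operations as finished, skipped (on cancellation), or
    /// errored.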
5453 fn spawn_job_with_tracking<AsyncFn>(
5454 &mut self,
5455 paths: Vec<RepoPath>,
5456 git_status: pending_op::GitStatus,
5457 cx: &mut Context<Self>,
5458 f: AsyncFn,
5459 ) -> Task<Result<()>>
5460 where
5461 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5462 {
5463 let ids = self.new_pending_ops_for_paths(paths, git_status);
5464
5465 cx.spawn(async move |this, cx| {
5466 let (job_status, result) = match f(this.clone(), cx).await {
5467 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5468 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5469 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5470 };
5471
5472 this.update(cx, |this, _| {
5473 let mut edits = Vec::with_capacity(ids.len());
5474 for (id, entry) in ids {
5475 if let Some(mut ops) = this
5476 .pending_ops
5477 .get(&PathKey(entry.as_ref().clone()), ())
5478 .cloned()
5479 {
5480 if let Some(op) = ops.op_by_id_mut(id) {
5481 op.job_status = job_status;
5482 }
5483 edits.push(sum_tree::Edit::Insert(ops));
5484 }
5485 }
5486 this.pending_ops.edit(edits, ());
5487 })?;
5488
5489 result
5490 })
5491 }
5492
5493 fn new_pending_ops_for_paths(
5494 &mut self,
5495 paths: Vec<RepoPath>,
5496 git_status: pending_op::GitStatus,
5497 ) -> Vec<(PendingOpId, RepoPath)> {
5498 let mut edits = Vec::with_capacity(paths.len());
5499 let mut ids = Vec::with_capacity(paths.len());
5500 for path in paths {
5501 let mut ops = self
5502 .pending_ops
5503 .get(&PathKey(path.as_ref().clone()), ())
5504 .cloned()
5505 .unwrap_or_else(|| PendingOps::new(&path));
5506 let id = ops.max_id() + 1;
5507 ops.ops.push(PendingOp {
5508 id,
5509 git_status,
5510 job_status: pending_op::JobStatus::Running,
5511 });
5512 edits.push(sum_tree::Edit::Insert(ops));
5513 ids.push((id, path));
5514 }
5515 self.pending_ops.edit(edits, ());
5516 ids
5517 }
5518}
5519
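/// Builds a permalink into the upstream repository for a file inside a Cargo registry
/// `src` checkout, using the `.cargo_vcs_info.json` and `Cargo.toml` files published
/// with the crate to recover the commit SHA and the file's path within the repository.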
5520fn get_permalink_in_rust_registry_src(
5521 provider_registry: Arc<GitHostingProviderRegistry>,
5522 path: PathBuf,
5523 selection: Range<u32>,
5524) -> Result<url::Url> {
5525 #[derive(Deserialize)]
5526 struct CargoVcsGit {
5527 sha1: String,
5528 }
5529
5530 #[derive(Deserialize)]
5531 struct CargoVcsInfo {
5532 git: CargoVcsGit,
5533 path_in_vcs: String,
5534 }
5535
5536 #[derive(Deserialize)]
5537 struct CargoPackage {
5538 repository: String,
5539 }
5540
5541 #[derive(Deserialize)]
5542 struct CargoToml {
5543 package: CargoPackage,
5544 }
5545
5546 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5547 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5548 Some((dir, json))
5549 }) else {
5550 bail!("No .cargo_vcs_info.json found in parent directories")
5551 };
5552 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5553 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5554 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5555 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5556 .context("parsing package.repository field of manifest")?;
5557 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5558 let permalink = provider.build_permalink(
5559 remote,
5560 BuildPermalinkParams::new(
5561 &cargo_vcs_info.git.sha1,
5562 &RepoPath::from_rel_path(
5563 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5564 ),
5565 Some(selection),
5566 ),
5567 );
5568 Ok(permalink)
5569}
5570
5571fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5572 let Some(blame) = blame else {
5573 return proto::BlameBufferResponse {
5574 blame_response: None,
5575 };
5576 };
5577
5578 let entries = blame
5579 .entries
5580 .into_iter()
5581 .map(|entry| proto::BlameEntry {
5582 sha: entry.sha.as_bytes().into(),
5583 start_line: entry.range.start,
5584 end_line: entry.range.end,
5585 original_line_number: entry.original_line_number,
5586 author: entry.author,
5587 author_mail: entry.author_mail,
5588 author_time: entry.author_time,
5589 author_tz: entry.author_tz,
5590 committer: entry.committer_name,
5591 committer_mail: entry.committer_email,
5592 committer_time: entry.committer_time,
5593 committer_tz: entry.committer_tz,
5594 summary: entry.summary,
5595 previous: entry.previous,
5596 filename: entry.filename,
5597 })
5598 .collect::<Vec<_>>();
5599
5600 let messages = blame
5601 .messages
5602 .into_iter()
5603 .map(|(oid, message)| proto::CommitMessage {
5604 oid: oid.as_bytes().into(),
5605 message,
5606 })
5607 .collect::<Vec<_>>();
5608
5609 proto::BlameBufferResponse {
5610 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5611 entries,
5612 messages,
5613 remote_url: blame.remote_url,
5614 }),
5615 }
5616}
5617
5618fn deserialize_blame_buffer_response(
5619 response: proto::BlameBufferResponse,
5620) -> Option<git::blame::Blame> {
5621 let response = response.blame_response?;
5622 let entries = response
5623 .entries
5624 .into_iter()
5625 .filter_map(|entry| {
5626 Some(git::blame::BlameEntry {
5627 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5628 range: entry.start_line..entry.end_line,
5629 original_line_number: entry.original_line_number,
5630 committer_name: entry.committer,
5631 committer_time: entry.committer_time,
5632 committer_tz: entry.committer_tz,
5633 committer_email: entry.committer_mail,
5634 author: entry.author,
5635 author_mail: entry.author_mail,
5636 author_time: entry.author_time,
5637 author_tz: entry.author_tz,
5638 summary: entry.summary,
5639 previous: entry.previous,
5640 filename: entry.filename,
5641 })
5642 })
5643 .collect::<Vec<_>>();
5644
5645 let messages = response
5646 .messages
5647 .into_iter()
5648 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5649 .collect::<HashMap<_, _>>();
5650
5651 Some(Blame {
5652 entries,
5653 messages,
5654 remote_url: response.remote_url,
5655 })
5656}
5657
5658fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5659 proto::Branch {
5660 is_head: branch.is_head,
5661 ref_name: branch.ref_name.to_string(),
5662 unix_timestamp: branch
5663 .most_recent_commit
5664 .as_ref()
5665 .map(|commit| commit.commit_timestamp as u64),
5666 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5667 ref_name: upstream.ref_name.to_string(),
5668 tracking: upstream
5669 .tracking
5670 .status()
5671 .map(|upstream| proto::UpstreamTracking {
5672 ahead: upstream.ahead as u64,
5673 behind: upstream.behind as u64,
5674 }),
5675 }),
5676 most_recent_commit: branch
5677 .most_recent_commit
5678 .as_ref()
5679 .map(|commit| proto::CommitSummary {
5680 sha: commit.sha.to_string(),
5681 subject: commit.subject.to_string(),
5682 commit_timestamp: commit.commit_timestamp,
5683 author_name: commit.author_name.to_string(),
5684 }),
5685 }
5686}
5687
5688fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5689 proto::Worktree {
5690 path: worktree.path.to_string_lossy().to_string(),
5691 ref_name: worktree.ref_name.to_string(),
5692 sha: worktree.sha.to_string(),
5693 }
5694}
5695
5696fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5697 git::repository::Worktree {
5698 path: PathBuf::from(proto.path.clone()),
5699 ref_name: proto.ref_name.clone().into(),
5700 sha: proto.sha.clone().into(),
5701 }
5702}
5703
5704fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5705 git::repository::Branch {
5706 is_head: proto.is_head,
5707 ref_name: proto.ref_name.clone().into(),
5708 upstream: proto
5709 .upstream
5710 .as_ref()
5711 .map(|upstream| git::repository::Upstream {
5712 ref_name: upstream.ref_name.to_string().into(),
5713 tracking: upstream
5714 .tracking
5715 .as_ref()
5716 .map(|tracking| {
5717 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5718 ahead: tracking.ahead as u32,
5719 behind: tracking.behind as u32,
5720 })
5721 })
5722 .unwrap_or(git::repository::UpstreamTracking::Gone),
5723 }),
5724 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5725 git::repository::CommitSummary {
5726 sha: commit.sha.to_string().into(),
5727 subject: commit.subject.to_string().into(),
5728 commit_timestamp: commit.commit_timestamp,
5729 author_name: commit.author_name.to_string().into(),
5730 has_parent: true,
5731 }
5732 }),
5733 }
5734}
5735
5736fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5737 proto::GitCommitDetails {
5738 sha: commit.sha.to_string(),
5739 message: commit.message.to_string(),
5740 commit_timestamp: commit.commit_timestamp,
5741 author_email: commit.author_email.to_string(),
5742 author_name: commit.author_name.to_string(),
5743 }
5744}
5745
5746fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5747 CommitDetails {
5748 sha: proto.sha.clone().into(),
5749 message: proto.message.clone().into(),
5750 commit_timestamp: proto.commit_timestamp,
5751 author_email: proto.author_email.clone().into(),
5752 author_name: proto.author_name.clone().into(),
5753 }
5754}
5755
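/// Recomputes a full repository snapshot (branch, head commit, statuses, stash, and
/// merge details) from the backend, returning it along with events describing what
/// changed relative to `prev_snapshot`.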
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged);
    }

    // Resolve the head commit directly; this is still useful when `branch` is
    // `None`, e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    // The remote URLs are captured for edit prediction data collection.
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

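/// Decodes a `FileStatus` from the wire format, falling back to the legacy
/// `simple_status` code when no structured variant is present.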
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

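/// Encodes a `FileStatus` as its structured wire representation.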
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

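/// Maps an `UnmergedStatusCode` to the corresponding `proto::GitStatus` value.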
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

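/// Maps a tracked `StatusCode` to the corresponding `proto::GitStatus` value.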
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}