1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
126 /// text do disk, the `operation count as of write` is updated to reflect
127 /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub pending_ops_by_path: SumTree<PendingOps>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged {
316 // TODO could report which statuses changed here
317 full_scan: bool,
318 },
319 MergeHeadsChanged,
320 BranchChanged,
321 StashEntriesChanged,
322 PendingOpsChanged {
323 pending_ops: SumTree<pending_op::PendingOps>,
324 },
325}
326
327#[derive(Clone, Debug)]
328pub struct JobsUpdated;
329
330#[derive(Debug)]
331pub enum GitStoreEvent {
332 ActiveRepositoryChanged(Option<RepositoryId>),
333 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
334 RepositoryAdded,
335 RepositoryRemoved(RepositoryId),
336 IndexWriteError(anyhow::Error),
337 JobsUpdated,
338 ConflictsUpdated,
339}
340
341impl EventEmitter<RepositoryEvent> for Repository {}
342impl EventEmitter<JobsUpdated> for Repository {}
343impl EventEmitter<GitStoreEvent> for GitStore {}
344
345pub struct GitJob {
346 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
347 key: Option<GitJobKey>,
348}
349
350#[derive(PartialEq, Eq)]
351enum GitJobKey {
352 WriteIndex(Vec<RepoPath>),
353 ReloadBufferDiffBases,
354 RefreshStatuses,
355 ReloadGitState,
356}
357
358impl GitStore {
359 pub fn local(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 environment: Entity<ProjectEnvironment>,
363 fs: Arc<dyn Fs>,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Local {
370 next_repository_id: Arc::new(AtomicU64::new(1)),
371 downstream: None,
372 project_environment: environment,
373 fs,
374 },
375 cx,
376 )
377 }
378
379 pub fn remote(
380 worktree_store: &Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 upstream_client: AnyProtoClient,
383 project_id: u64,
384 cx: &mut Context<Self>,
385 ) -> Self {
386 Self::new(
387 worktree_store.clone(),
388 buffer_store,
389 GitStoreState::Remote {
390 upstream_client,
391 upstream_project_id: project_id,
392 downstream: None,
393 },
394 cx,
395 )
396 }
397
398 fn new(
399 worktree_store: Entity<WorktreeStore>,
400 buffer_store: Entity<BufferStore>,
401 state: GitStoreState,
402 cx: &mut Context<Self>,
403 ) -> Self {
404 let _subscriptions = vec![
405 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
406 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
407 ];
408
409 GitStore {
410 state,
411 buffer_store,
412 worktree_store,
413 repositories: HashMap::default(),
414 worktree_ids: HashMap::default(),
415 active_repo_id: None,
416 _subscriptions,
417 loading_diffs: HashMap::default(),
418 shared_diffs: HashMap::default(),
419 diffs: HashMap::default(),
420 }
421 }
422
423 pub fn init(client: &AnyProtoClient) {
424 client.add_entity_request_handler(Self::handle_get_remotes);
425 client.add_entity_request_handler(Self::handle_get_branches);
426 client.add_entity_request_handler(Self::handle_get_default_branch);
427 client.add_entity_request_handler(Self::handle_change_branch);
428 client.add_entity_request_handler(Self::handle_create_branch);
429 client.add_entity_request_handler(Self::handle_rename_branch);
430 client.add_entity_request_handler(Self::handle_git_init);
431 client.add_entity_request_handler(Self::handle_push);
432 client.add_entity_request_handler(Self::handle_pull);
433 client.add_entity_request_handler(Self::handle_fetch);
434 client.add_entity_request_handler(Self::handle_stage);
435 client.add_entity_request_handler(Self::handle_unstage);
436 client.add_entity_request_handler(Self::handle_stash);
437 client.add_entity_request_handler(Self::handle_stash_pop);
438 client.add_entity_request_handler(Self::handle_stash_apply);
439 client.add_entity_request_handler(Self::handle_stash_drop);
440 client.add_entity_request_handler(Self::handle_commit);
441 client.add_entity_request_handler(Self::handle_reset);
442 client.add_entity_request_handler(Self::handle_show);
443 client.add_entity_request_handler(Self::handle_load_commit_diff);
444 client.add_entity_request_handler(Self::handle_checkout_files);
445 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
446 client.add_entity_request_handler(Self::handle_set_index_text);
447 client.add_entity_request_handler(Self::handle_askpass);
448 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
449 client.add_entity_request_handler(Self::handle_git_diff);
450 client.add_entity_request_handler(Self::handle_tree_diff);
451 client.add_entity_request_handler(Self::handle_get_blob_content);
452 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
453 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
454 client.add_entity_message_handler(Self::handle_update_diff_bases);
455 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
456 client.add_entity_request_handler(Self::handle_blame_buffer);
457 client.add_entity_message_handler(Self::handle_update_repository);
458 client.add_entity_message_handler(Self::handle_remove_repository);
459 client.add_entity_request_handler(Self::handle_git_clone);
460 client.add_entity_request_handler(Self::handle_get_worktrees);
461 client.add_entity_request_handler(Self::handle_create_worktree);
462 }
463
464 pub fn is_local(&self) -> bool {
465 matches!(self.state, GitStoreState::Local { .. })
466 }
467 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
468 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
469 let id = repo.read(cx).id;
470 if self.active_repo_id != Some(id) {
471 self.active_repo_id = Some(id);
472 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
473 }
474 }
475 }
476
477 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
478 match &mut self.state {
479 GitStoreState::Remote {
480 downstream: downstream_client,
481 ..
482 } => {
483 for repo in self.repositories.values() {
484 let update = repo.read(cx).snapshot.initial_update(project_id);
485 for update in split_repository_update(update) {
486 client.send(update).log_err();
487 }
488 }
489 *downstream_client = Some((client, ProjectId(project_id)));
490 }
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 let mut snapshots = HashMap::default();
496 let (updates_tx, mut updates_rx) = mpsc::unbounded();
497 for repo in self.repositories.values() {
498 updates_tx
499 .unbounded_send(DownstreamUpdate::UpdateRepository(
500 repo.read(cx).snapshot.clone(),
501 ))
502 .ok();
503 }
504 *downstream_client = Some(LocalDownstreamState {
505 client: client.clone(),
506 project_id: ProjectId(project_id),
507 updates_tx,
508 _task: cx.spawn(async move |this, cx| {
509 cx.background_spawn(async move {
510 while let Some(update) = updates_rx.next().await {
511 match update {
512 DownstreamUpdate::UpdateRepository(snapshot) => {
513 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
514 {
515 let update =
516 snapshot.build_update(old_snapshot, project_id);
517 *old_snapshot = snapshot;
518 for update in split_repository_update(update) {
519 client.send(update)?;
520 }
521 } else {
522 let update = snapshot.initial_update(project_id);
523 for update in split_repository_update(update) {
524 client.send(update)?;
525 }
526 snapshots.insert(snapshot.id, snapshot);
527 }
528 }
529 DownstreamUpdate::RemoveRepository(id) => {
530 client.send(proto::RemoveRepository {
531 project_id,
532 id: id.to_proto(),
533 })?;
534 }
535 }
536 }
537 anyhow::Ok(())
538 })
539 .await
540 .ok();
541 this.update(cx, |this, _| {
542 if let GitStoreState::Local {
543 downstream: downstream_client,
544 ..
545 } = &mut this.state
546 {
547 downstream_client.take();
548 } else {
549 unreachable!("unshared called on remote store");
550 }
551 })
552 }),
553 });
554 }
555 }
556 }
557
558 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
559 match &mut self.state {
560 GitStoreState::Local {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 GitStoreState::Remote {
567 downstream: downstream_client,
568 ..
569 } => {
570 downstream_client.take();
571 }
572 }
573 self.shared_diffs.clear();
574 }
575
576 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
577 self.shared_diffs.remove(peer_id);
578 }
579
580 pub fn active_repository(&self) -> Option<Entity<Repository>> {
581 self.active_repo_id
582 .as_ref()
583 .map(|id| self.repositories[id].clone())
584 }
585
586 pub fn open_unstaged_diff(
587 &mut self,
588 buffer: Entity<Buffer>,
589 cx: &mut Context<Self>,
590 ) -> Task<Result<Entity<BufferDiff>>> {
591 let buffer_id = buffer.read(cx).remote_id();
592 if let Some(diff_state) = self.diffs.get(&buffer_id)
593 && let Some(unstaged_diff) = diff_state
594 .read(cx)
595 .unstaged_diff
596 .as_ref()
597 .and_then(|weak| weak.upgrade())
598 {
599 if let Some(task) =
600 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
601 {
602 return cx.background_executor().spawn(async move {
603 task.await;
604 Ok(unstaged_diff)
605 });
606 }
607 return Task::ready(Ok(unstaged_diff));
608 }
609
610 let Some((repo, repo_path)) =
611 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
612 else {
613 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
614 };
615
616 let task = self
617 .loading_diffs
618 .entry((buffer_id, DiffKind::Unstaged))
619 .or_insert_with(|| {
620 let staged_text = repo.update(cx, |repo, cx| {
621 repo.load_staged_text(buffer_id, repo_path, cx)
622 });
623 cx.spawn(async move |this, cx| {
624 Self::open_diff_internal(
625 this,
626 DiffKind::Unstaged,
627 staged_text.await.map(DiffBasesChange::SetIndex),
628 buffer,
629 cx,
630 )
631 .await
632 .map_err(Arc::new)
633 })
634 .shared()
635 })
636 .clone();
637
638 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
639 }
640
641 pub fn open_diff_since(
642 &mut self,
643 oid: Option<git::Oid>,
644 buffer: Entity<Buffer>,
645 repo: Entity<Repository>,
646 languages: Arc<LanguageRegistry>,
647 cx: &mut Context<Self>,
648 ) -> Task<Result<Entity<BufferDiff>>> {
649 cx.spawn(async move |this, cx| {
650 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
651 let content = match oid {
652 None => None,
653 Some(oid) => Some(
654 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
655 .await?,
656 ),
657 };
658 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
659
660 buffer_diff
661 .update(cx, |buffer_diff, cx| {
662 buffer_diff.set_base_text(
663 content.map(Arc::new),
664 buffer_snapshot.language().cloned(),
665 Some(languages.clone()),
666 buffer_snapshot.text,
667 cx,
668 )
669 })?
670 .await?;
671 let unstaged_diff = this
672 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
673 .await?;
674 buffer_diff.update(cx, |buffer_diff, _| {
675 buffer_diff.set_secondary_diff(unstaged_diff);
676 })?;
677
678 this.update(cx, |_, cx| {
679 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
680 .detach();
681 })?;
682
683 Ok(buffer_diff)
684 })
685 }
686
687 pub fn open_uncommitted_diff(
688 &mut self,
689 buffer: Entity<Buffer>,
690 cx: &mut Context<Self>,
691 ) -> Task<Result<Entity<BufferDiff>>> {
692 let buffer_id = buffer.read(cx).remote_id();
693
694 if let Some(diff_state) = self.diffs.get(&buffer_id)
695 && let Some(uncommitted_diff) = diff_state
696 .read(cx)
697 .uncommitted_diff
698 .as_ref()
699 .and_then(|weak| weak.upgrade())
700 {
701 if let Some(task) =
702 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
703 {
704 return cx.background_executor().spawn(async move {
705 task.await;
706 Ok(uncommitted_diff)
707 });
708 }
709 return Task::ready(Ok(uncommitted_diff));
710 }
711
712 let Some((repo, repo_path)) =
713 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
714 else {
715 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
716 };
717
718 let task = self
719 .loading_diffs
720 .entry((buffer_id, DiffKind::Uncommitted))
721 .or_insert_with(|| {
722 let changes = repo.update(cx, |repo, cx| {
723 repo.load_committed_text(buffer_id, repo_path, cx)
724 });
725
726 // todo(lw): hot foreground spawn
727 cx.spawn(async move |this, cx| {
728 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
729 .await
730 .map_err(Arc::new)
731 })
732 .shared()
733 })
734 .clone();
735
736 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
737 }
738
739 async fn open_diff_internal(
740 this: WeakEntity<Self>,
741 kind: DiffKind,
742 texts: Result<DiffBasesChange>,
743 buffer_entity: Entity<Buffer>,
744 cx: &mut AsyncApp,
745 ) -> Result<Entity<BufferDiff>> {
746 let diff_bases_change = match texts {
747 Err(e) => {
748 this.update(cx, |this, cx| {
749 let buffer = buffer_entity.read(cx);
750 let buffer_id = buffer.remote_id();
751 this.loading_diffs.remove(&(buffer_id, kind));
752 })?;
753 return Err(e);
754 }
755 Ok(change) => change,
756 };
757
758 this.update(cx, |this, cx| {
759 let buffer = buffer_entity.read(cx);
760 let buffer_id = buffer.remote_id();
761 let language = buffer.language().cloned();
762 let language_registry = buffer.language_registry();
763 let text_snapshot = buffer.text_snapshot();
764 this.loading_diffs.remove(&(buffer_id, kind));
765
766 let git_store = cx.weak_entity();
767 let diff_state = this
768 .diffs
769 .entry(buffer_id)
770 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
771
772 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
773
774 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
775 diff_state.update(cx, |diff_state, cx| {
776 diff_state.language = language;
777 diff_state.language_registry = language_registry;
778
779 match kind {
780 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
781 DiffKind::Uncommitted => {
782 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
783 diff
784 } else {
785 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
786 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
787 unstaged_diff
788 };
789
790 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
791 diff_state.uncommitted_diff = Some(diff.downgrade())
792 }
793 }
794
795 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
796 let rx = diff_state.wait_for_recalculation();
797
798 anyhow::Ok(async move {
799 if let Some(rx) = rx {
800 rx.await;
801 }
802 Ok(diff)
803 })
804 })
805 })??
806 .await
807 }
808
809 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
810 let diff_state = self.diffs.get(&buffer_id)?;
811 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
812 }
813
814 pub fn get_uncommitted_diff(
815 &self,
816 buffer_id: BufferId,
817 cx: &App,
818 ) -> Option<Entity<BufferDiff>> {
819 let diff_state = self.diffs.get(&buffer_id)?;
820 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
821 }
822
823 pub fn open_conflict_set(
824 &mut self,
825 buffer: Entity<Buffer>,
826 cx: &mut Context<Self>,
827 ) -> Entity<ConflictSet> {
828 log::debug!("open conflict set");
829 let buffer_id = buffer.read(cx).remote_id();
830
831 if let Some(git_state) = self.diffs.get(&buffer_id)
832 && let Some(conflict_set) = git_state
833 .read(cx)
834 .conflict_set
835 .as_ref()
836 .and_then(|weak| weak.upgrade())
837 {
838 let conflict_set = conflict_set;
839 let buffer_snapshot = buffer.read(cx).text_snapshot();
840
841 git_state.update(cx, |state, cx| {
842 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
843 });
844
845 return conflict_set;
846 }
847
848 let is_unmerged = self
849 .repository_and_path_for_buffer_id(buffer_id, cx)
850 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
851 let git_store = cx.weak_entity();
852 let buffer_git_state = self
853 .diffs
854 .entry(buffer_id)
855 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
856 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
857
858 self._subscriptions
859 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
860 cx.emit(GitStoreEvent::ConflictsUpdated);
861 }));
862
863 buffer_git_state.update(cx, |state, cx| {
864 state.conflict_set = Some(conflict_set.downgrade());
865 let buffer_snapshot = buffer.read(cx).text_snapshot();
866 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
867 });
868
869 conflict_set
870 }
871
872 pub fn project_path_git_status(
873 &self,
874 project_path: &ProjectPath,
875 cx: &App,
876 ) -> Option<FileStatus> {
877 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
878 Some(repo.read(cx).status_for_path(&repo_path)?.status)
879 }
880
881 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
882 let mut work_directory_abs_paths = Vec::new();
883 let mut checkpoints = Vec::new();
884 for repository in self.repositories.values() {
885 repository.update(cx, |repository, _| {
886 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
887 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
888 });
889 }
890
891 cx.background_executor().spawn(async move {
892 let checkpoints = future::try_join_all(checkpoints).await?;
893 Ok(GitStoreCheckpoint {
894 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
895 .into_iter()
896 .zip(checkpoints)
897 .collect(),
898 })
899 })
900 }
901
902 pub fn restore_checkpoint(
903 &self,
904 checkpoint: GitStoreCheckpoint,
905 cx: &mut App,
906 ) -> Task<Result<()>> {
907 let repositories_by_work_dir_abs_path = self
908 .repositories
909 .values()
910 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
911 .collect::<HashMap<_, _>>();
912
913 let mut tasks = Vec::new();
914 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
915 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
916 let restore = repository.update(cx, |repository, _| {
917 repository.restore_checkpoint(checkpoint)
918 });
919 tasks.push(async move { restore.await? });
920 }
921 }
922 cx.background_spawn(async move {
923 future::try_join_all(tasks).await?;
924 Ok(())
925 })
926 }
927
928 /// Compares two checkpoints, returning true if they are equal.
929 pub fn compare_checkpoints(
930 &self,
931 left: GitStoreCheckpoint,
932 mut right: GitStoreCheckpoint,
933 cx: &mut App,
934 ) -> Task<Result<bool>> {
935 let repositories_by_work_dir_abs_path = self
936 .repositories
937 .values()
938 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
939 .collect::<HashMap<_, _>>();
940
941 let mut tasks = Vec::new();
942 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
943 if let Some(right_checkpoint) = right
944 .checkpoints_by_work_dir_abs_path
945 .remove(&work_dir_abs_path)
946 {
947 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
948 {
949 let compare = repository.update(cx, |repository, _| {
950 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
951 });
952
953 tasks.push(async move { compare.await? });
954 }
955 } else {
956 return Task::ready(Ok(false));
957 }
958 }
959 cx.background_spawn(async move {
960 Ok(future::try_join_all(tasks)
961 .await?
962 .into_iter()
963 .all(|result| result))
964 })
965 }
966
967 /// Blames a buffer.
968 pub fn blame_buffer(
969 &self,
970 buffer: &Entity<Buffer>,
971 version: Option<clock::Global>,
972 cx: &mut App,
973 ) -> Task<Result<Option<Blame>>> {
974 let buffer = buffer.read(cx);
975 let Some((repo, repo_path)) =
976 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
977 else {
978 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
979 };
980 let content = match &version {
981 Some(version) => buffer.rope_for_version(version),
982 None => buffer.as_rope().clone(),
983 };
984 let version = version.unwrap_or(buffer.version());
985 let buffer_id = buffer.remote_id();
986
987 let rx = repo.update(cx, |repo, _| {
988 repo.send_job(None, move |state, _| async move {
989 match state {
990 RepositoryState::Local { backend, .. } => backend
991 .blame(repo_path.clone(), content)
992 .await
993 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
994 .map(Some),
995 RepositoryState::Remote { project_id, client } => {
996 let response = client
997 .request(proto::BlameBuffer {
998 project_id: project_id.to_proto(),
999 buffer_id: buffer_id.into(),
1000 version: serialize_version(&version),
1001 })
1002 .await?;
1003 Ok(deserialize_blame_buffer_response(response))
1004 }
1005 }
1006 })
1007 });
1008
1009 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1010 }
1011
1012 pub fn file_history(
1013 &self,
1014 repo: &Entity<Repository>,
1015 path: RepoPath,
1016 cx: &mut App,
1017 ) -> Task<Result<git::repository::FileHistory>> {
1018 let rx = repo.update(cx, |repo, _| {
1019 repo.send_job(None, move |state, _| async move {
1020 match state {
1021 RepositoryState::Local { backend, .. } => backend.file_history(path).await,
1022 RepositoryState::Remote { .. } => Err(anyhow!(
1023 "file history not supported for remote repositories yet"
1024 )),
1025 }
1026 })
1027 });
1028
1029 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1030 }
1031
1032 pub fn get_permalink_to_line(
1033 &self,
1034 buffer: &Entity<Buffer>,
1035 selection: Range<u32>,
1036 cx: &mut App,
1037 ) -> Task<Result<url::Url>> {
1038 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1039 return Task::ready(Err(anyhow!("buffer has no file")));
1040 };
1041
1042 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1043 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1044 cx,
1045 ) else {
1046 // If we're not in a Git repo, check whether this is a Rust source
1047 // file in the Cargo registry (presumably opened with go-to-definition
1048 // from a normal Rust file). If so, we can put together a permalink
1049 // using crate metadata.
1050 if buffer
1051 .read(cx)
1052 .language()
1053 .is_none_or(|lang| lang.name() != "Rust".into())
1054 {
1055 return Task::ready(Err(anyhow!("no permalink available")));
1056 }
1057 let file_path = file.worktree.read(cx).absolutize(&file.path);
1058 return cx.spawn(async move |cx| {
1059 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1060 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1061 .context("no permalink available")
1062 });
1063 };
1064
1065 let buffer_id = buffer.read(cx).remote_id();
1066 let branch = repo.read(cx).branch.clone();
1067 let remote = branch
1068 .as_ref()
1069 .and_then(|b| b.upstream.as_ref())
1070 .and_then(|b| b.remote_name())
1071 .unwrap_or("origin")
1072 .to_string();
1073
1074 let rx = repo.update(cx, |repo, _| {
1075 repo.send_job(None, move |state, cx| async move {
1076 match state {
1077 RepositoryState::Local { backend, .. } => {
1078 let origin_url = backend
1079 .remote_url(&remote)
1080 .with_context(|| format!("remote \"{remote}\" not found"))?;
1081
1082 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1083
1084 let provider_registry =
1085 cx.update(GitHostingProviderRegistry::default_global)?;
1086
1087 let (provider, remote) =
1088 parse_git_remote_url(provider_registry, &origin_url)
1089 .context("parsing Git remote URL")?;
1090
1091 Ok(provider.build_permalink(
1092 remote,
1093 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1094 ))
1095 }
1096 RepositoryState::Remote { project_id, client } => {
1097 let response = client
1098 .request(proto::GetPermalinkToLine {
1099 project_id: project_id.to_proto(),
1100 buffer_id: buffer_id.into(),
1101 selection: Some(proto::Range {
1102 start: selection.start as u64,
1103 end: selection.end as u64,
1104 }),
1105 })
1106 .await?;
1107
1108 url::Url::parse(&response.permalink).context("failed to parse permalink")
1109 }
1110 }
1111 })
1112 });
1113 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1114 }
1115
1116 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1117 match &self.state {
1118 GitStoreState::Local {
1119 downstream: downstream_client,
1120 ..
1121 } => downstream_client
1122 .as_ref()
1123 .map(|state| (state.client.clone(), state.project_id)),
1124 GitStoreState::Remote {
1125 downstream: downstream_client,
1126 ..
1127 } => downstream_client.clone(),
1128 }
1129 }
1130
1131 fn upstream_client(&self) -> Option<AnyProtoClient> {
1132 match &self.state {
1133 GitStoreState::Local { .. } => None,
1134 GitStoreState::Remote {
1135 upstream_client, ..
1136 } => Some(upstream_client.clone()),
1137 }
1138 }
1139
1140 fn on_worktree_store_event(
1141 &mut self,
1142 worktree_store: Entity<WorktreeStore>,
1143 event: &WorktreeStoreEvent,
1144 cx: &mut Context<Self>,
1145 ) {
1146 let GitStoreState::Local {
1147 project_environment,
1148 downstream,
1149 next_repository_id,
1150 fs,
1151 } = &self.state
1152 else {
1153 return;
1154 };
1155
1156 match event {
1157 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1158 if let Some(worktree) = self
1159 .worktree_store
1160 .read(cx)
1161 .worktree_for_id(*worktree_id, cx)
1162 {
1163 let paths_by_git_repo =
1164 self.process_updated_entries(&worktree, updated_entries, cx);
1165 let downstream = downstream
1166 .as_ref()
1167 .map(|downstream| downstream.updates_tx.clone());
1168 cx.spawn(async move |_, cx| {
1169 let paths_by_git_repo = paths_by_git_repo.await;
1170 for (repo, paths) in paths_by_git_repo {
1171 repo.update(cx, |repo, cx| {
1172 repo.paths_changed(paths, downstream.clone(), cx);
1173 })
1174 .ok();
1175 }
1176 })
1177 .detach();
1178 }
1179 }
1180 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1181 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1182 else {
1183 return;
1184 };
1185 if !worktree.read(cx).is_visible() {
1186 log::debug!(
1187 "not adding repositories for local worktree {:?} because it's not visible",
1188 worktree.read(cx).abs_path()
1189 );
1190 return;
1191 }
1192 self.update_repositories_from_worktree(
1193 *worktree_id,
1194 project_environment.clone(),
1195 next_repository_id.clone(),
1196 downstream
1197 .as_ref()
1198 .map(|downstream| downstream.updates_tx.clone()),
1199 changed_repos.clone(),
1200 fs.clone(),
1201 cx,
1202 );
1203 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1204 }
1205 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1206 let repos_without_worktree: Vec<RepositoryId> = self
1207 .worktree_ids
1208 .iter_mut()
1209 .filter_map(|(repo_id, worktree_ids)| {
1210 worktree_ids.remove(worktree_id);
1211 if worktree_ids.is_empty() {
1212 Some(*repo_id)
1213 } else {
1214 None
1215 }
1216 })
1217 .collect();
1218 let is_active_repo_removed = repos_without_worktree
1219 .iter()
1220 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1221
1222 for repo_id in repos_without_worktree {
1223 self.repositories.remove(&repo_id);
1224 self.worktree_ids.remove(&repo_id);
1225 if let Some(updates_tx) =
1226 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1227 {
1228 updates_tx
1229 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1230 .ok();
1231 }
1232 }
1233
1234 if is_active_repo_removed {
1235 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1236 self.active_repo_id = Some(repo_id);
1237 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1238 } else {
1239 self.active_repo_id = None;
1240 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1241 }
1242 }
1243 }
1244 _ => {}
1245 }
1246 }
1247 fn on_repository_event(
1248 &mut self,
1249 repo: Entity<Repository>,
1250 event: &RepositoryEvent,
1251 cx: &mut Context<Self>,
1252 ) {
1253 let id = repo.read(cx).id;
1254 let repo_snapshot = repo.read(cx).snapshot.clone();
1255 for (buffer_id, diff) in self.diffs.iter() {
1256 if let Some((buffer_repo, repo_path)) =
1257 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1258 && buffer_repo == repo
1259 {
1260 diff.update(cx, |diff, cx| {
1261 if let Some(conflict_set) = &diff.conflict_set {
1262 let conflict_status_changed =
1263 conflict_set.update(cx, |conflict_set, cx| {
1264 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1265 conflict_set.set_has_conflict(has_conflict, cx)
1266 })?;
1267 if conflict_status_changed {
1268 let buffer_store = self.buffer_store.read(cx);
1269 if let Some(buffer) = buffer_store.get(*buffer_id) {
1270 let _ = diff
1271 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1272 }
1273 }
1274 }
1275 anyhow::Ok(())
1276 })
1277 .ok();
1278 }
1279 }
1280 cx.emit(GitStoreEvent::RepositoryUpdated(
1281 id,
1282 event.clone(),
1283 self.active_repo_id == Some(id),
1284 ))
1285 }
1286
1287 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1288 cx.emit(GitStoreEvent::JobsUpdated)
1289 }
1290
1291 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1292 fn update_repositories_from_worktree(
1293 &mut self,
1294 worktree_id: WorktreeId,
1295 project_environment: Entity<ProjectEnvironment>,
1296 next_repository_id: Arc<AtomicU64>,
1297 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1298 updated_git_repositories: UpdatedGitRepositoriesSet,
1299 fs: Arc<dyn Fs>,
1300 cx: &mut Context<Self>,
1301 ) {
1302 let mut removed_ids = Vec::new();
1303 for update in updated_git_repositories.iter() {
1304 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1305 let existing_work_directory_abs_path =
1306 repo.read(cx).work_directory_abs_path.clone();
1307 Some(&existing_work_directory_abs_path)
1308 == update.old_work_directory_abs_path.as_ref()
1309 || Some(&existing_work_directory_abs_path)
1310 == update.new_work_directory_abs_path.as_ref()
1311 }) {
1312 let repo_id = *id;
1313 if let Some(new_work_directory_abs_path) =
1314 update.new_work_directory_abs_path.clone()
1315 {
1316 self.worktree_ids
1317 .entry(repo_id)
1318 .or_insert_with(HashSet::new)
1319 .insert(worktree_id);
1320 existing.update(cx, |existing, cx| {
1321 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1322 existing.schedule_scan(updates_tx.clone(), cx);
1323 });
1324 } else {
1325 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1326 worktree_ids.remove(&worktree_id);
1327 if worktree_ids.is_empty() {
1328 removed_ids.push(repo_id);
1329 }
1330 }
1331 }
1332 } else if let UpdatedGitRepository {
1333 new_work_directory_abs_path: Some(work_directory_abs_path),
1334 dot_git_abs_path: Some(dot_git_abs_path),
1335 repository_dir_abs_path: Some(repository_dir_abs_path),
1336 common_dir_abs_path: Some(common_dir_abs_path),
1337 ..
1338 } = update
1339 {
1340 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1341 let git_store = cx.weak_entity();
1342 let repo = cx.new(|cx| {
1343 let mut repo = Repository::local(
1344 id,
1345 work_directory_abs_path.clone(),
1346 dot_git_abs_path.clone(),
1347 repository_dir_abs_path.clone(),
1348 common_dir_abs_path.clone(),
1349 project_environment.downgrade(),
1350 fs.clone(),
1351 git_store,
1352 cx,
1353 );
1354 if let Some(updates_tx) = updates_tx.as_ref() {
1355 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1356 updates_tx
1357 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1358 .ok();
1359 }
1360 repo.schedule_scan(updates_tx.clone(), cx);
1361 repo
1362 });
1363 self._subscriptions
1364 .push(cx.subscribe(&repo, Self::on_repository_event));
1365 self._subscriptions
1366 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1367 self.repositories.insert(id, repo);
1368 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1369 cx.emit(GitStoreEvent::RepositoryAdded);
1370 self.active_repo_id.get_or_insert_with(|| {
1371 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1372 id
1373 });
1374 }
1375 }
1376
1377 for id in removed_ids {
1378 if self.active_repo_id == Some(id) {
1379 self.active_repo_id = None;
1380 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1381 }
1382 self.repositories.remove(&id);
1383 if let Some(updates_tx) = updates_tx.as_ref() {
1384 updates_tx
1385 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1386 .ok();
1387 }
1388 }
1389 }
1390
1391 fn on_buffer_store_event(
1392 &mut self,
1393 _: Entity<BufferStore>,
1394 event: &BufferStoreEvent,
1395 cx: &mut Context<Self>,
1396 ) {
1397 match event {
1398 BufferStoreEvent::BufferAdded(buffer) => {
1399 cx.subscribe(buffer, |this, buffer, event, cx| {
1400 if let BufferEvent::LanguageChanged = event {
1401 let buffer_id = buffer.read(cx).remote_id();
1402 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1403 diff_state.update(cx, |diff_state, cx| {
1404 diff_state.buffer_language_changed(buffer, cx);
1405 });
1406 }
1407 }
1408 })
1409 .detach();
1410 }
1411 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1412 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1413 diffs.remove(buffer_id);
1414 }
1415 }
1416 BufferStoreEvent::BufferDropped(buffer_id) => {
1417 self.diffs.remove(buffer_id);
1418 for diffs in self.shared_diffs.values_mut() {
1419 diffs.remove(buffer_id);
1420 }
1421 }
1422 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1423 // Whenever a buffer's file path changes, it's possible that the
1424 // new path is actually a path that is being tracked by a git
1425 // repository. In that case, we'll want to update the buffer's
1426 // `BufferDiffState`, in case it already has one.
1427 let buffer_id = buffer.read(cx).remote_id();
1428 let diff_state = self.diffs.get(&buffer_id);
1429 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1430
1431 if let Some(diff_state) = diff_state
1432 && let Some((repo, repo_path)) = repo
1433 {
1434 let buffer = buffer.clone();
1435 let diff_state = diff_state.clone();
1436
1437 cx.spawn(async move |_git_store, cx| {
1438 async {
1439 let diff_bases_change = repo
1440 .update(cx, |repo, cx| {
1441 repo.load_committed_text(buffer_id, repo_path, cx)
1442 })?
1443 .await?;
1444
1445 diff_state.update(cx, |diff_state, cx| {
1446 let buffer_snapshot = buffer.read(cx).text_snapshot();
1447 diff_state.diff_bases_changed(
1448 buffer_snapshot,
1449 Some(diff_bases_change),
1450 cx,
1451 );
1452 })
1453 }
1454 .await
1455 .log_err();
1456 })
1457 .detach();
1458 }
1459 }
1460 _ => {}
1461 }
1462 }
1463
1464 pub fn recalculate_buffer_diffs(
1465 &mut self,
1466 buffers: Vec<Entity<Buffer>>,
1467 cx: &mut Context<Self>,
1468 ) -> impl Future<Output = ()> + use<> {
1469 let mut futures = Vec::new();
1470 for buffer in buffers {
1471 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1472 let buffer = buffer.read(cx).text_snapshot();
1473 diff_state.update(cx, |diff_state, cx| {
1474 diff_state.recalculate_diffs(buffer.clone(), cx);
1475 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1476 });
1477 futures.push(diff_state.update(cx, |diff_state, cx| {
1478 diff_state
1479 .reparse_conflict_markers(buffer, cx)
1480 .map(|_| {})
1481 .boxed()
1482 }));
1483 }
1484 }
1485 async move {
1486 futures::future::join_all(futures).await;
1487 }
1488 }
1489
1490 fn on_buffer_diff_event(
1491 &mut self,
1492 diff: Entity<buffer_diff::BufferDiff>,
1493 event: &BufferDiffEvent,
1494 cx: &mut Context<Self>,
1495 ) {
1496 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1497 let buffer_id = diff.read(cx).buffer_id;
1498 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1499 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1500 diff_state.hunk_staging_operation_count += 1;
1501 diff_state.hunk_staging_operation_count
1502 });
1503 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1504 let recv = repo.update(cx, |repo, cx| {
1505 log::debug!("hunks changed for {}", path.as_unix_str());
1506 repo.spawn_set_index_text_job(
1507 path,
1508 new_index_text.as_ref().map(|rope| rope.to_string()),
1509 Some(hunk_staging_operation_count),
1510 cx,
1511 )
1512 });
1513 let diff = diff.downgrade();
1514 cx.spawn(async move |this, cx| {
1515 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1516 diff.update(cx, |diff, cx| {
1517 diff.clear_pending_hunks(cx);
1518 })
1519 .ok();
1520 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1521 .ok();
1522 }
1523 })
1524 .detach();
1525 }
1526 }
1527 }
1528 }
1529
1530 fn local_worktree_git_repos_changed(
1531 &mut self,
1532 worktree: Entity<Worktree>,
1533 changed_repos: &UpdatedGitRepositoriesSet,
1534 cx: &mut Context<Self>,
1535 ) {
1536 log::debug!("local worktree repos changed");
1537 debug_assert!(worktree.read(cx).is_local());
1538
1539 for repository in self.repositories.values() {
1540 repository.update(cx, |repository, cx| {
1541 let repo_abs_path = &repository.work_directory_abs_path;
1542 if changed_repos.iter().any(|update| {
1543 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1544 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1545 }) {
1546 repository.reload_buffer_diff_bases(cx);
1547 }
1548 });
1549 }
1550 }
1551
1552 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1553 &self.repositories
1554 }
1555
1556 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1557 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1558 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1559 Some(status.status)
1560 }
1561
1562 pub fn repository_and_path_for_buffer_id(
1563 &self,
1564 buffer_id: BufferId,
1565 cx: &App,
1566 ) -> Option<(Entity<Repository>, RepoPath)> {
1567 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1568 let project_path = buffer.read(cx).project_path(cx)?;
1569 self.repository_and_path_for_project_path(&project_path, cx)
1570 }
1571
1572 pub fn repository_and_path_for_project_path(
1573 &self,
1574 path: &ProjectPath,
1575 cx: &App,
1576 ) -> Option<(Entity<Repository>, RepoPath)> {
1577 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1578 self.repositories
1579 .values()
1580 .filter_map(|repo| {
1581 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1582 Some((repo.clone(), repo_path))
1583 })
1584 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1585 }
1586
1587 pub fn git_init(
1588 &self,
1589 path: Arc<Path>,
1590 fallback_branch_name: String,
1591 cx: &App,
1592 ) -> Task<Result<()>> {
1593 match &self.state {
1594 GitStoreState::Local { fs, .. } => {
1595 let fs = fs.clone();
1596 cx.background_executor()
1597 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1598 }
1599 GitStoreState::Remote {
1600 upstream_client,
1601 upstream_project_id: project_id,
1602 ..
1603 } => {
1604 let client = upstream_client.clone();
1605 let project_id = *project_id;
1606 cx.background_executor().spawn(async move {
1607 client
1608 .request(proto::GitInit {
1609 project_id: project_id,
1610 abs_path: path.to_string_lossy().into_owned(),
1611 fallback_branch_name,
1612 })
1613 .await?;
1614 Ok(())
1615 })
1616 }
1617 }
1618 }
1619
1620 pub fn git_clone(
1621 &self,
1622 repo: String,
1623 path: impl Into<Arc<std::path::Path>>,
1624 cx: &App,
1625 ) -> Task<Result<()>> {
1626 let path = path.into();
1627 match &self.state {
1628 GitStoreState::Local { fs, .. } => {
1629 let fs = fs.clone();
1630 cx.background_executor()
1631 .spawn(async move { fs.git_clone(&repo, &path).await })
1632 }
1633 GitStoreState::Remote {
1634 upstream_client,
1635 upstream_project_id,
1636 ..
1637 } => {
1638 if upstream_client.is_via_collab() {
1639 return Task::ready(Err(anyhow!(
1640 "Git Clone isn't supported for project guests"
1641 )));
1642 }
1643 let request = upstream_client.request(proto::GitClone {
1644 project_id: *upstream_project_id,
1645 abs_path: path.to_string_lossy().into_owned(),
1646 remote_repo: repo,
1647 });
1648
1649 cx.background_spawn(async move {
1650 let result = request.await?;
1651
1652 match result.success {
1653 true => Ok(()),
1654 false => Err(anyhow!("Git Clone failed")),
1655 }
1656 })
1657 }
1658 }
1659 }
1660
1661 async fn handle_update_repository(
1662 this: Entity<Self>,
1663 envelope: TypedEnvelope<proto::UpdateRepository>,
1664 mut cx: AsyncApp,
1665 ) -> Result<()> {
1666 this.update(&mut cx, |this, cx| {
1667 let path_style = this.worktree_store.read(cx).path_style();
1668 let mut update = envelope.payload;
1669
1670 let id = RepositoryId::from_proto(update.id);
1671 let client = this.upstream_client().context("no upstream client")?;
1672
1673 let mut repo_subscription = None;
1674 let repo = this.repositories.entry(id).or_insert_with(|| {
1675 let git_store = cx.weak_entity();
1676 let repo = cx.new(|cx| {
1677 Repository::remote(
1678 id,
1679 Path::new(&update.abs_path).into(),
1680 path_style,
1681 ProjectId(update.project_id),
1682 client,
1683 git_store,
1684 cx,
1685 )
1686 });
1687 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1688 cx.emit(GitStoreEvent::RepositoryAdded);
1689 repo
1690 });
1691 this._subscriptions.extend(repo_subscription);
1692
1693 repo.update(cx, {
1694 let update = update.clone();
1695 |repo, cx| repo.apply_remote_update(update, cx)
1696 })?;
1697
1698 this.active_repo_id.get_or_insert_with(|| {
1699 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1700 id
1701 });
1702
1703 if let Some((client, project_id)) = this.downstream_client() {
1704 update.project_id = project_id.to_proto();
1705 client.send(update).log_err();
1706 }
1707 Ok(())
1708 })?
1709 }
1710
1711 async fn handle_remove_repository(
1712 this: Entity<Self>,
1713 envelope: TypedEnvelope<proto::RemoveRepository>,
1714 mut cx: AsyncApp,
1715 ) -> Result<()> {
1716 this.update(&mut cx, |this, cx| {
1717 let mut update = envelope.payload;
1718 let id = RepositoryId::from_proto(update.id);
1719 this.repositories.remove(&id);
1720 if let Some((client, project_id)) = this.downstream_client() {
1721 update.project_id = project_id.to_proto();
1722 client.send(update).log_err();
1723 }
1724 if this.active_repo_id == Some(id) {
1725 this.active_repo_id = None;
1726 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1727 }
1728 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1729 })
1730 }
1731
1732 async fn handle_git_init(
1733 this: Entity<Self>,
1734 envelope: TypedEnvelope<proto::GitInit>,
1735 cx: AsyncApp,
1736 ) -> Result<proto::Ack> {
1737 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1738 let name = envelope.payload.fallback_branch_name;
1739 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1740 .await?;
1741
1742 Ok(proto::Ack {})
1743 }
1744
1745 async fn handle_git_clone(
1746 this: Entity<Self>,
1747 envelope: TypedEnvelope<proto::GitClone>,
1748 cx: AsyncApp,
1749 ) -> Result<proto::GitCloneResponse> {
1750 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1751 let repo_name = envelope.payload.remote_repo;
1752 let result = cx
1753 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1754 .await;
1755
1756 Ok(proto::GitCloneResponse {
1757 success: result.is_ok(),
1758 })
1759 }
1760
1761 async fn handle_fetch(
1762 this: Entity<Self>,
1763 envelope: TypedEnvelope<proto::Fetch>,
1764 mut cx: AsyncApp,
1765 ) -> Result<proto::RemoteMessageResponse> {
1766 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1767 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1768 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1769 let askpass_id = envelope.payload.askpass_id;
1770
1771 let askpass = make_remote_delegate(
1772 this,
1773 envelope.payload.project_id,
1774 repository_id,
1775 askpass_id,
1776 &mut cx,
1777 );
1778
1779 let remote_output = repository_handle
1780 .update(&mut cx, |repository_handle, cx| {
1781 repository_handle.fetch(fetch_options, askpass, cx)
1782 })?
1783 .await??;
1784
1785 Ok(proto::RemoteMessageResponse {
1786 stdout: remote_output.stdout,
1787 stderr: remote_output.stderr,
1788 })
1789 }
1790
1791 async fn handle_push(
1792 this: Entity<Self>,
1793 envelope: TypedEnvelope<proto::Push>,
1794 mut cx: AsyncApp,
1795 ) -> Result<proto::RemoteMessageResponse> {
1796 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1797 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1798
1799 let askpass_id = envelope.payload.askpass_id;
1800 let askpass = make_remote_delegate(
1801 this,
1802 envelope.payload.project_id,
1803 repository_id,
1804 askpass_id,
1805 &mut cx,
1806 );
1807
1808 let options = envelope
1809 .payload
1810 .options
1811 .as_ref()
1812 .map(|_| match envelope.payload.options() {
1813 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1814 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1815 });
1816
1817 let branch_name = envelope.payload.branch_name.into();
1818 let remote_name = envelope.payload.remote_name.into();
1819
1820 let remote_output = repository_handle
1821 .update(&mut cx, |repository_handle, cx| {
1822 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1823 })?
1824 .await??;
1825 Ok(proto::RemoteMessageResponse {
1826 stdout: remote_output.stdout,
1827 stderr: remote_output.stderr,
1828 })
1829 }
1830
1831 async fn handle_pull(
1832 this: Entity<Self>,
1833 envelope: TypedEnvelope<proto::Pull>,
1834 mut cx: AsyncApp,
1835 ) -> Result<proto::RemoteMessageResponse> {
1836 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1837 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1838 let askpass_id = envelope.payload.askpass_id;
1839 let askpass = make_remote_delegate(
1840 this,
1841 envelope.payload.project_id,
1842 repository_id,
1843 askpass_id,
1844 &mut cx,
1845 );
1846
1847 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1848 let remote_name = envelope.payload.remote_name.into();
1849 let rebase = envelope.payload.rebase;
1850
1851 let remote_message = repository_handle
1852 .update(&mut cx, |repository_handle, cx| {
1853 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1854 })?
1855 .await??;
1856
1857 Ok(proto::RemoteMessageResponse {
1858 stdout: remote_message.stdout,
1859 stderr: remote_message.stderr,
1860 })
1861 }
1862
1863 async fn handle_stage(
1864 this: Entity<Self>,
1865 envelope: TypedEnvelope<proto::Stage>,
1866 mut cx: AsyncApp,
1867 ) -> Result<proto::Ack> {
1868 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1869 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1870
1871 let entries = envelope
1872 .payload
1873 .paths
1874 .into_iter()
1875 .map(|path| RepoPath::new(&path))
1876 .collect::<Result<Vec<_>>>()?;
1877
1878 repository_handle
1879 .update(&mut cx, |repository_handle, cx| {
1880 repository_handle.stage_entries(entries, cx)
1881 })?
1882 .await?;
1883 Ok(proto::Ack {})
1884 }
1885
1886 async fn handle_unstage(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::Unstage>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::Ack> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893
1894 let entries = envelope
1895 .payload
1896 .paths
1897 .into_iter()
1898 .map(|path| RepoPath::new(&path))
1899 .collect::<Result<Vec<_>>>()?;
1900
1901 repository_handle
1902 .update(&mut cx, |repository_handle, cx| {
1903 repository_handle.unstage_entries(entries, cx)
1904 })?
1905 .await?;
1906
1907 Ok(proto::Ack {})
1908 }
1909
1910 async fn handle_stash(
1911 this: Entity<Self>,
1912 envelope: TypedEnvelope<proto::Stash>,
1913 mut cx: AsyncApp,
1914 ) -> Result<proto::Ack> {
1915 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1916 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1917
1918 let entries = envelope
1919 .payload
1920 .paths
1921 .into_iter()
1922 .map(|path| RepoPath::new(&path))
1923 .collect::<Result<Vec<_>>>()?;
1924
1925 repository_handle
1926 .update(&mut cx, |repository_handle, cx| {
1927 repository_handle.stash_entries(entries, cx)
1928 })?
1929 .await?;
1930
1931 Ok(proto::Ack {})
1932 }
1933
1934 async fn handle_stash_pop(
1935 this: Entity<Self>,
1936 envelope: TypedEnvelope<proto::StashPop>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::Ack> {
1939 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1940 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1941 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1942
1943 repository_handle
1944 .update(&mut cx, |repository_handle, cx| {
1945 repository_handle.stash_pop(stash_index, cx)
1946 })?
1947 .await?;
1948
1949 Ok(proto::Ack {})
1950 }
1951
1952 async fn handle_stash_apply(
1953 this: Entity<Self>,
1954 envelope: TypedEnvelope<proto::StashApply>,
1955 mut cx: AsyncApp,
1956 ) -> Result<proto::Ack> {
1957 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1958 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1959 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1960
1961 repository_handle
1962 .update(&mut cx, |repository_handle, cx| {
1963 repository_handle.stash_apply(stash_index, cx)
1964 })?
1965 .await?;
1966
1967 Ok(proto::Ack {})
1968 }
1969
1970 async fn handle_stash_drop(
1971 this: Entity<Self>,
1972 envelope: TypedEnvelope<proto::StashDrop>,
1973 mut cx: AsyncApp,
1974 ) -> Result<proto::Ack> {
1975 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1976 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1977 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1978
1979 repository_handle
1980 .update(&mut cx, |repository_handle, cx| {
1981 repository_handle.stash_drop(stash_index, cx)
1982 })?
1983 .await??;
1984
1985 Ok(proto::Ack {})
1986 }
1987
1988 async fn handle_set_index_text(
1989 this: Entity<Self>,
1990 envelope: TypedEnvelope<proto::SetIndexText>,
1991 mut cx: AsyncApp,
1992 ) -> Result<proto::Ack> {
1993 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1994 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1995 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1996
1997 repository_handle
1998 .update(&mut cx, |repository_handle, cx| {
1999 repository_handle.spawn_set_index_text_job(
2000 repo_path,
2001 envelope.payload.text,
2002 None,
2003 cx,
2004 )
2005 })?
2006 .await??;
2007 Ok(proto::Ack {})
2008 }
2009
2010 async fn handle_commit(
2011 this: Entity<Self>,
2012 envelope: TypedEnvelope<proto::Commit>,
2013 mut cx: AsyncApp,
2014 ) -> Result<proto::Ack> {
2015 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2016 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2017 let askpass_id = envelope.payload.askpass_id;
2018
2019 let askpass = make_remote_delegate(
2020 this,
2021 envelope.payload.project_id,
2022 repository_id,
2023 askpass_id,
2024 &mut cx,
2025 );
2026
2027 let message = SharedString::from(envelope.payload.message);
2028 let name = envelope.payload.name.map(SharedString::from);
2029 let email = envelope.payload.email.map(SharedString::from);
2030 let options = envelope.payload.options.unwrap_or_default();
2031
2032 repository_handle
2033 .update(&mut cx, |repository_handle, cx| {
2034 repository_handle.commit(
2035 message,
2036 name.zip(email),
2037 CommitOptions {
2038 amend: options.amend,
2039 signoff: options.signoff,
2040 },
2041 askpass,
2042 cx,
2043 )
2044 })?
2045 .await??;
2046 Ok(proto::Ack {})
2047 }
2048
2049 async fn handle_get_remotes(
2050 this: Entity<Self>,
2051 envelope: TypedEnvelope<proto::GetRemotes>,
2052 mut cx: AsyncApp,
2053 ) -> Result<proto::GetRemotesResponse> {
2054 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2055 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2056
2057 let branch_name = envelope.payload.branch_name;
2058
2059 let remotes = repository_handle
2060 .update(&mut cx, |repository_handle, _| {
2061 repository_handle.get_remotes(branch_name)
2062 })?
2063 .await??;
2064
2065 Ok(proto::GetRemotesResponse {
2066 remotes: remotes
2067 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
                })
2071 .collect::<Vec<_>>(),
2072 })
2073 }
2074
2075 async fn handle_get_worktrees(
2076 this: Entity<Self>,
2077 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2078 mut cx: AsyncApp,
2079 ) -> Result<proto::GitWorktreesResponse> {
2080 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2081 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2082
2083 let worktrees = repository_handle
2084 .update(&mut cx, |repository_handle, _| {
2085 repository_handle.worktrees()
2086 })?
2087 .await??;
2088
2089 Ok(proto::GitWorktreesResponse {
2090 worktrees: worktrees
2091 .into_iter()
2092 .map(|worktree| worktree_to_proto(&worktree))
2093 .collect::<Vec<_>>(),
2094 })
2095 }
2096
2097 async fn handle_create_worktree(
2098 this: Entity<Self>,
2099 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2100 mut cx: AsyncApp,
2101 ) -> Result<proto::Ack> {
2102 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2103 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2104 let directory = PathBuf::from(envelope.payload.directory);
2105 let name = envelope.payload.name;
2106 let commit = envelope.payload.commit;
2107
2108 repository_handle
2109 .update(&mut cx, |repository_handle, _| {
2110 repository_handle.create_worktree(name, directory, commit)
2111 })?
2112 .await??;
2113
2114 Ok(proto::Ack {})
2115 }
2116
2117 async fn handle_get_branches(
2118 this: Entity<Self>,
2119 envelope: TypedEnvelope<proto::GitGetBranches>,
2120 mut cx: AsyncApp,
2121 ) -> Result<proto::GitBranchesResponse> {
2122 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2123 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2124
2125 let branches = repository_handle
2126 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2127 .await??;
2128
2129 Ok(proto::GitBranchesResponse {
2130 branches: branches
2131 .into_iter()
2132 .map(|branch| branch_to_proto(&branch))
2133 .collect::<Vec<_>>(),
2134 })
2135 }

    async fn handle_get_default_branch(
2137 this: Entity<Self>,
2138 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2139 mut cx: AsyncApp,
2140 ) -> Result<proto::GetDefaultBranchResponse> {
2141 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2142 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2143
2144 let branch = repository_handle
2145 .update(&mut cx, |repository_handle, _| {
2146 repository_handle.default_branch()
2147 })?
2148 .await??
2149 .map(Into::into);
2150
2151 Ok(proto::GetDefaultBranchResponse { branch })
2152 }

    async fn handle_create_branch(
2154 this: Entity<Self>,
2155 envelope: TypedEnvelope<proto::GitCreateBranch>,
2156 mut cx: AsyncApp,
2157 ) -> Result<proto::Ack> {
2158 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2159 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2160 let branch_name = envelope.payload.branch_name;
2161
2162 repository_handle
2163 .update(&mut cx, |repository_handle, _| {
2164 repository_handle.create_branch(branch_name, None)
2165 })?
2166 .await??;
2167
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_change_branch(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::GitChangeBranch>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::Ack> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178 let branch_name = envelope.payload.branch_name;
2179
2180 repository_handle
2181 .update(&mut cx, |repository_handle, _| {
2182 repository_handle.change_branch(branch_name)
2183 })?
2184 .await??;
2185
2186 Ok(proto::Ack {})
2187 }
2188
2189 async fn handle_rename_branch(
2190 this: Entity<Self>,
2191 envelope: TypedEnvelope<proto::GitRenameBranch>,
2192 mut cx: AsyncApp,
2193 ) -> Result<proto::Ack> {
2194 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2195 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2196 let branch = envelope.payload.branch;
2197 let new_name = envelope.payload.new_name;
2198
2199 repository_handle
2200 .update(&mut cx, |repository_handle, _| {
2201 repository_handle.rename_branch(branch, new_name)
2202 })?
2203 .await??;
2204
2205 Ok(proto::Ack {})
2206 }
2207
2208 async fn handle_show(
2209 this: Entity<Self>,
2210 envelope: TypedEnvelope<proto::GitShow>,
2211 mut cx: AsyncApp,
2212 ) -> Result<proto::GitCommitDetails> {
2213 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2214 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2215
2216 let commit = repository_handle
2217 .update(&mut cx, |repository_handle, _| {
2218 repository_handle.show(envelope.payload.commit)
2219 })?
2220 .await??;
2221 Ok(proto::GitCommitDetails {
2222 sha: commit.sha.into(),
2223 message: commit.message.into(),
2224 commit_timestamp: commit.commit_timestamp,
2225 author_email: commit.author_email.into(),
2226 author_name: commit.author_name.into(),
2227 })
2228 }
2229
2230 async fn handle_load_commit_diff(
2231 this: Entity<Self>,
2232 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2233 mut cx: AsyncApp,
2234 ) -> Result<proto::LoadCommitDiffResponse> {
2235 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2236 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2237
2238 let commit_diff = repository_handle
2239 .update(&mut cx, |repository_handle, _| {
2240 repository_handle.load_commit_diff(envelope.payload.commit)
2241 })?
2242 .await??;
2243 Ok(proto::LoadCommitDiffResponse {
2244 files: commit_diff
2245 .files
2246 .into_iter()
2247 .map(|file| proto::CommitFile {
2248 path: file.path.to_proto(),
2249 old_text: file.old_text,
2250 new_text: file.new_text,
2251 })
2252 .collect(),
2253 })
2254 }
2255
2256 async fn handle_reset(
2257 this: Entity<Self>,
2258 envelope: TypedEnvelope<proto::GitReset>,
2259 mut cx: AsyncApp,
2260 ) -> Result<proto::Ack> {
2261 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2262 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2263
2264 let mode = match envelope.payload.mode() {
2265 git_reset::ResetMode::Soft => ResetMode::Soft,
2266 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2267 };
2268
2269 repository_handle
2270 .update(&mut cx, |repository_handle, cx| {
2271 repository_handle.reset(envelope.payload.commit, mode, cx)
2272 })?
2273 .await??;
2274 Ok(proto::Ack {})
2275 }
2276
2277 async fn handle_checkout_files(
2278 this: Entity<Self>,
2279 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2280 mut cx: AsyncApp,
2281 ) -> Result<proto::Ack> {
2282 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2283 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2284 let paths = envelope
2285 .payload
2286 .paths
2287 .iter()
2288 .map(|s| RepoPath::from_proto(s))
2289 .collect::<Result<Vec<_>>>()?;
2290
2291 repository_handle
2292 .update(&mut cx, |repository_handle, cx| {
2293 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2294 })?
2295 .await?;
2296 Ok(proto::Ack {})
2297 }
2298
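    /// Opens (or reuses) the repository's commit message buffer and registers it with the
    /// buffer store so it can be replicated to the requesting peer.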
2299 async fn handle_open_commit_message_buffer(
2300 this: Entity<Self>,
2301 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2302 mut cx: AsyncApp,
2303 ) -> Result<proto::OpenBufferResponse> {
2304 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2305 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2306 let buffer = repository
2307 .update(&mut cx, |repository, cx| {
2308 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2309 })?
2310 .await?;
2311
2312 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2313 this.update(&mut cx, |this, cx| {
2314 this.buffer_store.update(cx, |buffer_store, cx| {
2315 buffer_store
2316 .create_buffer_for_peer(
2317 &buffer,
2318 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2319 cx,
2320 )
2321 .detach_and_log_err(cx);
2322 })
2323 })?;
2324
2325 Ok(proto::OpenBufferResponse {
2326 buffer_id: buffer_id.to_proto(),
2327 })
2328 }
2329
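    /// Relays a credential prompt to the askpass delegate registered for this operation,
    /// removing the delegate from the map while the prompt is outstanding and putting it
    /// back once a response arrives.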
2330 async fn handle_askpass(
2331 this: Entity<Self>,
2332 envelope: TypedEnvelope<proto::AskPassRequest>,
2333 mut cx: AsyncApp,
2334 ) -> Result<proto::AskPassResponse> {
2335 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2336 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2337
2338 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2339 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2340 debug_panic!("no askpass found");
2341 anyhow::bail!("no askpass found");
2342 };
2343
2344 let response = askpass
2345 .ask_password(envelope.payload.prompt)
2346 .await
2347 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2348
2349 delegates
2350 .lock()
2351 .insert(envelope.payload.askpass_id, askpass);
2352
        // Despite the marker type's name, the askpass response is decrypted here and sent
        // back to the requesting peer unencrypted over the RPC channel.
2354 Ok(proto::AskPassResponse {
2355 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2356 })
2357 }
2358
2359 async fn handle_check_for_pushed_commits(
2360 this: Entity<Self>,
2361 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2362 mut cx: AsyncApp,
2363 ) -> Result<proto::CheckForPushedCommitsResponse> {
2364 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2365 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2366
2367 let branches = repository_handle
2368 .update(&mut cx, |repository_handle, _| {
2369 repository_handle.check_for_pushed_commits()
2370 })?
2371 .await??;
2372 Ok(proto::CheckForPushedCommitsResponse {
2373 pushed_to: branches
2374 .into_iter()
2375 .map(|commit| commit.to_string())
2376 .collect(),
2377 })
2378 }
2379
2380 async fn handle_git_diff(
2381 this: Entity<Self>,
2382 envelope: TypedEnvelope<proto::GitDiff>,
2383 mut cx: AsyncApp,
2384 ) -> Result<proto::GitDiffResponse> {
2385 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2386 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2387 let diff_type = match envelope.payload.diff_type() {
2388 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2389 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2390 };
2391
2392 let mut diff = repository_handle
2393 .update(&mut cx, |repository_handle, cx| {
2394 repository_handle.diff(diff_type, cx)
2395 })?
2396 .await??;
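        // Cap the diff payload sent over RPC at roughly one megabyte. Note that `take`
        // counts characters, so multi-byte UTF-8 content may still exceed ONE_MB bytes.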
2397 const ONE_MB: usize = 1_000_000;
2398 if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
2400 }
2401
2402 Ok(proto::GitDiffResponse { diff })
2403 }
2404
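    /// Computes a tree diff between two revisions, either directly (`Since`) or relative to
    /// their merge base (`MergeBase`), and serializes the per-path statuses along with the
    /// old blob OIDs for modified and deleted files.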
2405 async fn handle_tree_diff(
2406 this: Entity<Self>,
2407 request: TypedEnvelope<proto::GetTreeDiff>,
2408 mut cx: AsyncApp,
2409 ) -> Result<proto::GetTreeDiffResponse> {
2410 let repository_id = RepositoryId(request.payload.repository_id);
2411 let diff_type = if request.payload.is_merge {
2412 DiffTreeType::MergeBase {
2413 base: request.payload.base.into(),
2414 head: request.payload.head.into(),
2415 }
2416 } else {
2417 DiffTreeType::Since {
2418 base: request.payload.base.into(),
2419 head: request.payload.head.into(),
2420 }
2421 };
2422
2423 let diff = this
2424 .update(&mut cx, |this, cx| {
2425 let repository = this.repositories().get(&repository_id)?;
2426 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2427 })?
2428 .context("missing repository")?
2429 .await??;
2430
2431 Ok(proto::GetTreeDiffResponse {
2432 entries: diff
2433 .entries
2434 .into_iter()
2435 .map(|(path, status)| proto::TreeDiffStatus {
2436 path: path.as_ref().to_proto(),
2437 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2439 TreeDiffStatus::Modified { .. } => {
2440 proto::tree_diff_status::Status::Modified.into()
2441 }
2442 TreeDiffStatus::Deleted { .. } => {
2443 proto::tree_diff_status::Status::Deleted.into()
2444 }
2445 },
2446 oid: match status {
2447 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2448 Some(old.to_string())
2449 }
2450 TreeDiffStatus::Added => None,
2451 },
2452 })
2453 .collect(),
2454 })
2455 }
2456
2457 async fn handle_get_blob_content(
2458 this: Entity<Self>,
2459 request: TypedEnvelope<proto::GetBlobContent>,
2460 mut cx: AsyncApp,
2461 ) -> Result<proto::GetBlobContentResponse> {
2462 let oid = git::Oid::from_str(&request.payload.oid)?;
2463 let repository_id = RepositoryId(request.payload.repository_id);
2464 let content = this
2465 .update(&mut cx, |this, cx| {
2466 let repository = this.repositories().get(&repository_id)?;
2467 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2468 })?
2469 .context("missing repository")?
2470 .await?;
2471 Ok(proto::GetBlobContentResponse { content })
2472 }
2473
2474 async fn handle_open_unstaged_diff(
2475 this: Entity<Self>,
2476 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2477 mut cx: AsyncApp,
2478 ) -> Result<proto::OpenUnstagedDiffResponse> {
2479 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2480 let diff = this
2481 .update(&mut cx, |this, cx| {
2482 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2483 Some(this.open_unstaged_diff(buffer, cx))
2484 })?
2485 .context("missing buffer")?
2486 .await?;
2487 this.update(&mut cx, |this, _| {
2488 let shared_diffs = this
2489 .shared_diffs
2490 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2491 .or_default();
2492 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2493 })?;
2494 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2495 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2496 }
2497
2498 async fn handle_open_uncommitted_diff(
2499 this: Entity<Self>,
2500 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2501 mut cx: AsyncApp,
2502 ) -> Result<proto::OpenUncommittedDiffResponse> {
2503 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2504 let diff = this
2505 .update(&mut cx, |this, cx| {
2506 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2507 Some(this.open_uncommitted_diff(buffer, cx))
2508 })?
2509 .context("missing buffer")?
2510 .await?;
2511 this.update(&mut cx, |this, _| {
2512 let shared_diffs = this
2513 .shared_diffs
2514 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2515 .or_default();
2516 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2517 })?;
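        // Describe how the diff bases relate so the peer can reconstruct both diffs from a
        // single response: when the index blob matches HEAD, only the committed text is sent
        // (`IndexMatchesHead`); otherwise each base text that exists is included.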
2518 diff.read_with(&cx, |diff, cx| {
2519 use proto::open_uncommitted_diff_response::Mode;
2520
2521 let unstaged_diff = diff.secondary_diff();
2522 let index_snapshot = unstaged_diff.and_then(|diff| {
2523 let diff = diff.read(cx);
2524 diff.base_text_exists().then(|| diff.base_text())
2525 });
2526
2527 let mode;
2528 let staged_text;
2529 let committed_text;
2530 if diff.base_text_exists() {
2531 let committed_snapshot = diff.base_text();
2532 committed_text = Some(committed_snapshot.text());
2533 if let Some(index_text) = index_snapshot {
2534 if index_text.remote_id() == committed_snapshot.remote_id() {
2535 mode = Mode::IndexMatchesHead;
2536 staged_text = None;
2537 } else {
2538 mode = Mode::IndexAndHead;
2539 staged_text = Some(index_text.text());
2540 }
2541 } else {
2542 mode = Mode::IndexAndHead;
2543 staged_text = None;
2544 }
2545 } else {
2546 mode = Mode::IndexAndHead;
2547 committed_text = None;
2548 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2549 }
2550
2551 proto::OpenUncommittedDiffResponse {
2552 committed_text,
2553 staged_text,
2554 mode: mode.into(),
2555 }
2556 })
2557 }
2558
2559 async fn handle_update_diff_bases(
2560 this: Entity<Self>,
2561 request: TypedEnvelope<proto::UpdateDiffBases>,
2562 mut cx: AsyncApp,
2563 ) -> Result<()> {
2564 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2565 this.update(&mut cx, |this, cx| {
2566 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2567 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2568 {
2569 let buffer = buffer.read(cx).text_snapshot();
2570 diff_state.update(cx, |diff_state, cx| {
2571 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2572 })
2573 }
2574 })
2575 }
2576
2577 async fn handle_blame_buffer(
2578 this: Entity<Self>,
2579 envelope: TypedEnvelope<proto::BlameBuffer>,
2580 mut cx: AsyncApp,
2581 ) -> Result<proto::BlameBufferResponse> {
2582 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2583 let version = deserialize_version(&envelope.payload.version);
2584 let buffer = this.read_with(&cx, |this, cx| {
2585 this.buffer_store.read(cx).get_existing(buffer_id)
2586 })??;
2587 buffer
2588 .update(&mut cx, |buffer, _| {
2589 buffer.wait_for_version(version.clone())
2590 })?
2591 .await?;
2592 let blame = this
2593 .update(&mut cx, |this, cx| {
2594 this.blame_buffer(&buffer, Some(version), cx)
2595 })?
2596 .await?;
2597 Ok(serialize_blame_buffer_response(blame))
2598 }
2599
2600 async fn handle_get_permalink_to_line(
2601 this: Entity<Self>,
2602 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2603 mut cx: AsyncApp,
2604 ) -> Result<proto::GetPermalinkToLineResponse> {
2605 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2606 // let version = deserialize_version(&envelope.payload.version);
2607 let selection = {
2608 let proto_selection = envelope
2609 .payload
2610 .selection
2611 .context("no selection to get permalink for defined")?;
2612 proto_selection.start as u32..proto_selection.end as u32
2613 };
2614 let buffer = this.read_with(&cx, |this, cx| {
2615 this.buffer_store.read(cx).get_existing(buffer_id)
2616 })??;
2617 let permalink = this
2618 .update(&mut cx, |this, cx| {
2619 this.get_permalink_to_line(&buffer, selection, cx)
2620 })?
2621 .await?;
2622 Ok(proto::GetPermalinkToLineResponse {
2623 permalink: permalink.to_string(),
2624 })
2625 }
2626
2627 fn repository_for_request(
2628 this: &Entity<Self>,
2629 id: RepositoryId,
2630 cx: &mut AsyncApp,
2631 ) -> Result<Entity<Repository>> {
2632 this.read_with(cx, |this, _| {
2633 this.repositories
2634 .get(&id)
2635 .context("missing repository handle")
2636 .cloned()
2637 })?
2638 }
2639
2640 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2641 self.repositories
2642 .iter()
2643 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2644 .collect()
2645 }
2646
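    /// Groups a batch of updated worktree entries by the repository whose working directory
    /// contains them, converting each absolute path into a `RepoPath`. Paths contained in
    /// multiple nested repositories are attributed only to the innermost one; for example,
    /// with repositories at `/a` and `/a/b`, an updated path `/a/b/c.txt` belongs to `/a/b`.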
2647 fn process_updated_entries(
2648 &self,
2649 worktree: &Entity<Worktree>,
2650 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2651 cx: &mut App,
2652 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2653 let path_style = worktree.read(cx).path_style();
2654 let mut repo_paths = self
2655 .repositories
2656 .values()
2657 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2658 .collect::<Vec<_>>();
2659 let mut entries: Vec<_> = updated_entries
2660 .iter()
2661 .map(|(path, _, _)| path.clone())
2662 .collect();
2663 entries.sort();
2664 let worktree = worktree.read(cx);
2665
2666 let entries = entries
2667 .into_iter()
2668 .map(|path| worktree.absolutize(&path))
2669 .collect::<Arc<[_]>>();
2670
2671 let executor = cx.background_executor().clone();
2672 cx.background_executor().spawn(async move {
2673 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2674 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2675 let mut tasks = FuturesOrdered::new();
2676 for (repo_path, repo) in repo_paths.into_iter().rev() {
2677 let entries = entries.clone();
2678 let task = executor.spawn(async move {
                    // Find the updated paths that fall under this repository's working directory.
2680 let mut ix = entries.partition_point(|path| path < &*repo_path);
2681 if ix == entries.len() {
2682 return None;
2683 };
2684
2685 let mut paths = Vec::new();
                    // All paths under a given repository's working directory form a contiguous
                    // range in the sorted list.
2687 while let Some(path) = entries.get(ix)
2688 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2689 &repo_path, path, path_style,
2690 )
2691 {
2692 paths.push((repo_path, ix));
2693 ix += 1;
2694 }
2695 if paths.is_empty() {
2696 None
2697 } else {
2698 Some((repo, paths))
2699 }
2700 });
2701 tasks.push_back(task);
2702 }
2703
2704 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2705 let mut path_was_used = vec![false; entries.len()];
2706 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were queued with repositories in reverse path order, so more deeply
            // nested repositories come first. We always want to assign a path to its
            // innermost repository.
2709 for t in tasks {
2710 let Some((repo, paths)) = t else {
2711 continue;
2712 };
2713 let entry = paths_by_git_repo.entry(repo).or_default();
2714 for (repo_path, ix) in paths {
2715 if path_was_used[ix] {
2716 continue;
2717 }
2718 path_was_used[ix] = true;
2719 entry.push(repo_path);
2720 }
2721 }
2722
2723 paths_by_git_repo
2724 })
2725 }
2726}
2727
2728impl BufferGitState {
2729 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2730 Self {
2731 unstaged_diff: Default::default(),
2732 uncommitted_diff: Default::default(),
2733 recalculate_diff_task: Default::default(),
2734 language: Default::default(),
2735 language_registry: Default::default(),
2736 recalculating_tx: postage::watch::channel_with(false).0,
2737 hunk_staging_operation_count: 0,
2738 hunk_staging_operation_count_as_of_write: 0,
2739 head_text: Default::default(),
2740 index_text: Default::default(),
2741 head_changed: Default::default(),
2742 index_changed: Default::default(),
2743 language_changed: Default::default(),
2744 conflict_updated_futures: Default::default(),
2745 conflict_set: Default::default(),
2746 reparse_conflict_markers_task: Default::default(),
2747 }
2748 }
2749
2750 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2751 self.language = buffer.read(cx).language().cloned();
2752 self.language_changed = true;
2753 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2754 }
2755
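    /// Re-parses conflict markers from the buffer snapshot on a background task and updates
    /// the associated `ConflictSet`, if any. The returned receiver resolves once the conflict
    /// set has been updated; its sender is dropped when there is nothing to reparse.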
2756 fn reparse_conflict_markers(
2757 &mut self,
2758 buffer: text::BufferSnapshot,
2759 cx: &mut Context<Self>,
2760 ) -> oneshot::Receiver<()> {
2761 let (tx, rx) = oneshot::channel();
2762
2763 let Some(conflict_set) = self
2764 .conflict_set
2765 .as_ref()
2766 .and_then(|conflict_set| conflict_set.upgrade())
2767 else {
2768 return rx;
2769 };
2770
2771 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2772 if conflict_set.has_conflict {
2773 Some(conflict_set.snapshot())
2774 } else {
2775 None
2776 }
2777 });
2778
2779 if let Some(old_snapshot) = old_snapshot {
2780 self.conflict_updated_futures.push(tx);
2781 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2782 let (snapshot, changed_range) = cx
2783 .background_spawn(async move {
2784 let new_snapshot = ConflictSet::parse(&buffer);
2785 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2786 (new_snapshot, changed_range)
2787 })
2788 .await;
2789 this.update(cx, |this, cx| {
2790 if let Some(conflict_set) = &this.conflict_set {
2791 conflict_set
2792 .update(cx, |conflict_set, cx| {
2793 conflict_set.set_snapshot(snapshot, changed_range, cx);
2794 })
2795 .ok();
2796 }
2797 let futures = std::mem::take(&mut this.conflict_updated_futures);
2798 for tx in futures {
2799 tx.send(()).ok();
2800 }
2801 })
2802 }))
2803 }
2804
2805 rx
2806 }
2807
2808 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2809 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2810 }
2811
2812 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2813 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2814 }
2815
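    /// Applies a `proto::UpdateDiffBases` message from the host, translating its mode into a
    /// `DiffBasesChange` and triggering a diff recalculation for the buffer.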
2816 fn handle_base_texts_updated(
2817 &mut self,
2818 buffer: text::BufferSnapshot,
2819 message: proto::UpdateDiffBases,
2820 cx: &mut Context<Self>,
2821 ) {
2822 use proto::update_diff_bases::Mode;
2823
2824 let Some(mode) = Mode::from_i32(message.mode) else {
2825 return;
2826 };
2827
2828 let diff_bases_change = match mode {
2829 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2830 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2831 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2832 Mode::IndexAndHead => DiffBasesChange::SetEach {
2833 index: message.staged_text,
2834 head: message.committed_text,
2835 },
2836 };
2837
2838 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2839 }
2840
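    /// Returns a future that resolves once the in-progress diff recalculation (if any)
    /// finishes, or `None` if no recalculation is currently running.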
2841 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2842 if *self.recalculating_tx.borrow() {
2843 let mut rx = self.recalculating_tx.subscribe();
2844 Some(async move {
2845 loop {
2846 let is_recalculating = rx.recv().await;
2847 if is_recalculating != Some(true) {
2848 break;
2849 }
2850 }
2851 })
2852 } else {
2853 None
2854 }
2855 }
2856
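    /// Records new index and/or HEAD base texts, normalizing their line endings, and then
    /// recalculates diffs against the given buffer snapshot.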
2857 fn diff_bases_changed(
2858 &mut self,
2859 buffer: text::BufferSnapshot,
2860 diff_bases_change: Option<DiffBasesChange>,
2861 cx: &mut Context<Self>,
2862 ) {
2863 match diff_bases_change {
2864 Some(DiffBasesChange::SetIndex(index)) => {
2865 self.index_text = index.map(|mut index| {
2866 text::LineEnding::normalize(&mut index);
2867 Arc::new(index)
2868 });
2869 self.index_changed = true;
2870 }
2871 Some(DiffBasesChange::SetHead(head)) => {
2872 self.head_text = head.map(|mut head| {
2873 text::LineEnding::normalize(&mut head);
2874 Arc::new(head)
2875 });
2876 self.head_changed = true;
2877 }
2878 Some(DiffBasesChange::SetBoth(text)) => {
2879 let text = text.map(|mut text| {
2880 text::LineEnding::normalize(&mut text);
2881 Arc::new(text)
2882 });
2883 self.head_text = text.clone();
2884 self.index_text = text;
2885 self.head_changed = true;
2886 self.index_changed = true;
2887 }
2888 Some(DiffBasesChange::SetEach { index, head }) => {
2889 self.index_text = index.map(|mut index| {
2890 text::LineEnding::normalize(&mut index);
2891 Arc::new(index)
2892 });
2893 self.index_changed = true;
2894 self.head_text = head.map(|mut head| {
2895 text::LineEnding::normalize(&mut head);
2896 Arc::new(head)
2897 });
2898 self.head_changed = true;
2899 }
2900 None => {}
2901 }
2902
2903 self.recalculate_diffs(buffer, cx)
2904 }
2905
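    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background task.
    /// When the index matches HEAD the unstaged snapshot is reused for the uncommitted diff,
    /// and the whole pass is abandoned if new hunk staging operations began in the meantime.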
2906 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2907 *self.recalculating_tx.borrow_mut() = true;
2908
2909 let language = self.language.clone();
2910 let language_registry = self.language_registry.clone();
2911 let unstaged_diff = self.unstaged_diff();
2912 let uncommitted_diff = self.uncommitted_diff();
2913 let head = self.head_text.clone();
2914 let index = self.index_text.clone();
2915 let index_changed = self.index_changed;
2916 let head_changed = self.head_changed;
2917 let language_changed = self.language_changed;
2918 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2919 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2920 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2921 (None, None) => true,
2922 _ => false,
2923 };
2924 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2925 log::debug!(
2926 "start recalculating diffs for buffer {}",
2927 buffer.remote_id()
2928 );
2929
2930 let mut new_unstaged_diff = None;
2931 if let Some(unstaged_diff) = &unstaged_diff {
2932 new_unstaged_diff = Some(
2933 BufferDiff::update_diff(
2934 unstaged_diff.clone(),
2935 buffer.clone(),
2936 index,
2937 index_changed,
2938 language_changed,
2939 language.clone(),
2940 language_registry.clone(),
2941 cx,
2942 )
2943 .await?,
2944 );
2945 }
2946
2947 let mut new_uncommitted_diff = None;
2948 if let Some(uncommitted_diff) = &uncommitted_diff {
2949 new_uncommitted_diff = if index_matches_head {
2950 new_unstaged_diff.clone()
2951 } else {
2952 Some(
2953 BufferDiff::update_diff(
2954 uncommitted_diff.clone(),
2955 buffer.clone(),
2956 head,
2957 head_changed,
2958 language_changed,
2959 language.clone(),
2960 language_registry.clone(),
2961 cx,
2962 )
2963 .await?,
2964 )
2965 }
2966 }
2967
2968 let cancel = this.update(cx, |this, _| {
2969 // This checks whether all pending stage/unstage operations
2970 // have quiesced (i.e. both the corresponding write and the
2971 // read of that write have completed). If not, then we cancel
2972 // this recalculation attempt to avoid invalidating pending
2973 // state too quickly; another recalculation will come along
2974 // later and clear the pending state once the state of the index has settled.
2975 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2976 *this.recalculating_tx.borrow_mut() = false;
2977 true
2978 } else {
2979 false
2980 }
2981 })?;
2982 if cancel {
2983 log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
2988 buffer.remote_id()
2989 );
2990 return Ok(());
2991 }
2992
2993 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2994 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2995 {
2996 unstaged_diff.update(cx, |diff, cx| {
2997 if language_changed {
2998 diff.language_changed(cx);
2999 }
3000 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3001 })?
3002 } else {
3003 None
3004 };
3005
3006 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3007 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3008 {
3009 uncommitted_diff.update(cx, |diff, cx| {
3010 if language_changed {
3011 diff.language_changed(cx);
3012 }
3013 diff.set_snapshot_with_secondary(
3014 new_uncommitted_diff,
3015 &buffer,
3016 unstaged_changed_range,
3017 true,
3018 cx,
3019 );
3020 })?;
3021 }
3022
3023 log::debug!(
3024 "finished recalculating diffs for buffer {}",
3025 buffer.remote_id()
3026 );
3027
3028 if let Some(this) = this.upgrade() {
3029 this.update(cx, |this, _| {
3030 this.index_changed = false;
3031 this.head_changed = false;
3032 this.language_changed = false;
3033 *this.recalculating_tx.borrow_mut() = false;
3034 })?;
3035 }
3036
3037 Ok(())
3038 }));
3039 }
3040}
3041
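/// Builds an `AskPassDelegate` that forwards credential prompts for a remote repository
/// operation to the downstream client and relays the response back to the waiting git job.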
3042fn make_remote_delegate(
3043 this: Entity<GitStore>,
3044 project_id: u64,
3045 repository_id: RepositoryId,
3046 askpass_id: u64,
3047 cx: &mut AsyncApp,
3048) -> AskPassDelegate {
3049 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3050 this.update(cx, |this, cx| {
3051 let Some((client, _)) = this.downstream_client() else {
3052 return;
3053 };
3054 let response = client.request(proto::AskPassRequest {
3055 project_id,
3056 repository_id: repository_id.to_proto(),
3057 askpass_id,
3058 prompt,
3059 });
3060 cx.spawn(async move |_, _| {
3061 let mut response = response.await?.response;
3062 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3063 .ok();
3064 response.zeroize();
3065 anyhow::Ok(())
3066 })
3067 .detach_and_log_err(cx);
3068 })
3069 .log_err();
3070 })
3071}
3072
3073impl RepositoryId {
3074 pub fn to_proto(self) -> u64 {
3075 self.0
3076 }
3077
3078 pub fn from_proto(id: u64) -> Self {
3079 RepositoryId(id)
3080 }
3081}
3082
3083impl RepositorySnapshot {
3084 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3085 Self {
3086 id,
3087 statuses_by_path: Default::default(),
3088 pending_ops_by_path: Default::default(),
3089 work_directory_abs_path,
3090 branch: None,
3091 head_commit: None,
3092 scan_id: 0,
3093 merge: Default::default(),
3094 remote_origin_url: None,
3095 remote_upstream_url: None,
3096 stash_entries: Default::default(),
3097 path_style,
3098 }
3099 }
3100
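    /// Builds the full `UpdateRepository` message sent when this repository's state is first
    /// shared with a remote peer.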
3101 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3102 proto::UpdateRepository {
3103 branch_summary: self.branch.as_ref().map(branch_to_proto),
3104 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3105 updated_statuses: self
3106 .statuses_by_path
3107 .iter()
3108 .map(|entry| entry.to_proto())
3109 .collect(),
3110 removed_statuses: Default::default(),
3111 current_merge_conflicts: self
3112 .merge
3113 .conflicted_paths
3114 .iter()
3115 .map(|repo_path| repo_path.to_proto())
3116 .collect(),
3117 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3118 project_id,
3119 id: self.id.to_proto(),
3120 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3121 entry_ids: vec![self.id.to_proto()],
3122 scan_id: self.scan_id,
3123 is_last_update: true,
3124 stash_entries: self
3125 .stash_entries
3126 .entries
3127 .iter()
3128 .map(stash_to_proto)
3129 .collect(),
3130 }
3131 }
3132
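    /// Compares this snapshot with `old` and builds an incremental `UpdateRepository`
    /// message containing only the status entries that were added, changed, or removed.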
3133 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3134 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3135 let mut removed_statuses: Vec<String> = Vec::new();
3136
3137 let mut new_statuses = self.statuses_by_path.iter().peekable();
3138 let mut old_statuses = old.statuses_by_path.iter().peekable();
3139
3140 let mut current_new_entry = new_statuses.next();
3141 let mut current_old_entry = old_statuses.next();
3142 loop {
3143 match (current_new_entry, current_old_entry) {
3144 (Some(new_entry), Some(old_entry)) => {
3145 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3146 Ordering::Less => {
3147 updated_statuses.push(new_entry.to_proto());
3148 current_new_entry = new_statuses.next();
3149 }
3150 Ordering::Equal => {
3151 if new_entry.status != old_entry.status {
3152 updated_statuses.push(new_entry.to_proto());
3153 }
3154 current_old_entry = old_statuses.next();
3155 current_new_entry = new_statuses.next();
3156 }
3157 Ordering::Greater => {
3158 removed_statuses.push(old_entry.repo_path.to_proto());
3159 current_old_entry = old_statuses.next();
3160 }
3161 }
3162 }
3163 (None, Some(old_entry)) => {
3164 removed_statuses.push(old_entry.repo_path.to_proto());
3165 current_old_entry = old_statuses.next();
3166 }
3167 (Some(new_entry), None) => {
3168 updated_statuses.push(new_entry.to_proto());
3169 current_new_entry = new_statuses.next();
3170 }
3171 (None, None) => break,
3172 }
3173 }
3174
3175 proto::UpdateRepository {
3176 branch_summary: self.branch.as_ref().map(branch_to_proto),
3177 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3178 updated_statuses,
3179 removed_statuses,
3180 current_merge_conflicts: self
3181 .merge
3182 .conflicted_paths
3183 .iter()
3184 .map(|path| path.to_proto())
3185 .collect(),
3186 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3187 project_id,
3188 id: self.id.to_proto(),
3189 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3190 entry_ids: vec![],
3191 scan_id: self.scan_id,
3192 is_last_update: true,
3193 stash_entries: self
3194 .stash_entries
3195 .entries
3196 .iter()
3197 .map(stash_to_proto)
3198 .collect(),
3199 }
3200 }
3201
3202 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3203 self.statuses_by_path.iter().cloned()
3204 }
3205
3206 pub fn status_summary(&self) -> GitSummary {
3207 self.statuses_by_path.summary().item_summary
3208 }
3209
3210 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3211 self.statuses_by_path
3212 .get(&PathKey(path.as_ref().clone()), ())
3213 .cloned()
3214 }
3215
3216 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3217 self.pending_ops_by_path
3218 .get(&PathKey(path.as_ref().clone()), ())
3219 .cloned()
3220 }
3221
3222 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3223 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3224 }
3225
3226 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3227 self.path_style
3228 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3229 .unwrap()
3230 .into()
3231 }
3232
3233 #[inline]
3234 fn abs_path_to_repo_path_inner(
3235 work_directory_abs_path: &Path,
3236 abs_path: &Path,
3237 path_style: PathStyle,
3238 ) -> Option<RepoPath> {
3239 abs_path
3240 .strip_prefix(&work_directory_abs_path)
3241 .ok()
3242 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3243 }
3244
3245 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3246 self.merge.conflicted_paths.contains(repo_path)
3247 }
3248
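    /// Returns true if the path currently has a conflicted status, or had one when the
    /// merge heads last changed.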
3249 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3250 let had_conflict_on_last_merge_head_change =
3251 self.merge.conflicted_paths.contains(repo_path);
3252 let has_conflict_currently = self
3253 .status_for_path(repo_path)
3254 .is_some_and(|entry| entry.status.is_conflicted());
3255 had_conflict_on_last_merge_head_change || has_conflict_currently
3256 }
3257
3258 /// This is the name that will be displayed in the repository selector for this repository.
3259 pub fn display_name(&self) -> SharedString {
3260 self.work_directory_abs_path
3261 .file_name()
3262 .unwrap_or_default()
3263 .to_string_lossy()
3264 .to_string()
3265 .into()
3266 }
3267}
3268
3269pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3270 proto::StashEntry {
3271 oid: entry.oid.as_bytes().to_vec(),
3272 message: entry.message.clone(),
3273 branch: entry.branch.clone(),
3274 index: entry.index as u64,
3275 timestamp: entry.timestamp,
3276 }
3277}
3278
3279pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3280 Ok(StashEntry {
3281 oid: Oid::from_bytes(&entry.oid)?,
3282 message: entry.message.clone(),
3283 index: entry.index as usize,
3284 branch: entry.branch.clone(),
3285 timestamp: entry.timestamp,
3286 })
3287}
3288
3289impl MergeDetails {
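    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD, and so on)
    /// and, when they have changed, recomputes the set of conflicted paths from the current
    /// status. Returns the new details along with whether the merge heads changed.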
3290 async fn load(
3291 backend: &Arc<dyn GitRepository>,
3292 status: &SumTree<StatusEntry>,
3293 prev_snapshot: &RepositorySnapshot,
3294 ) -> Result<(MergeDetails, bool)> {
3295 log::debug!("load merge details");
3296 let message = backend.merge_message().await;
3297 let heads = backend
3298 .revparse_batch(vec![
3299 "MERGE_HEAD".into(),
3300 "CHERRY_PICK_HEAD".into(),
3301 "REBASE_HEAD".into(),
3302 "REVERT_HEAD".into(),
3303 "APPLY_HEAD".into(),
3304 ])
3305 .await
3306 .log_err()
3307 .unwrap_or_default()
3308 .into_iter()
3309 .map(|opt| opt.map(SharedString::from))
3310 .collect::<Vec<_>>();
3311 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3312 let conflicted_paths = if merge_heads_changed {
3313 let current_conflicted_paths = TreeSet::from_ordered_entries(
3314 status
3315 .iter()
3316 .filter(|entry| entry.status.is_conflicted())
3317 .map(|entry| entry.repo_path.clone()),
3318 );
3319
3320 // It can happen that we run a scan while a lengthy merge is in progress
3321 // that will eventually result in conflicts, but before those conflicts
3322 // are reported by `git status`. Since for the moment we only care about
3323 // the merge heads state for the purposes of tracking conflicts, don't update
3324 // this state until we see some conflicts.
3325 if heads.iter().any(Option::is_some)
3326 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3327 && current_conflicted_paths.is_empty()
3328 {
3329 log::debug!("not updating merge heads because no conflicts found");
3330 return Ok((
3331 MergeDetails {
3332 message: message.map(SharedString::from),
3333 ..prev_snapshot.merge.clone()
3334 },
3335 false,
3336 ));
3337 }
3338
3339 current_conflicted_paths
3340 } else {
3341 prev_snapshot.merge.conflicted_paths.clone()
3342 };
3343 let details = MergeDetails {
3344 conflicted_paths,
3345 message: message.map(SharedString::from),
3346 heads,
3347 };
3348 Ok((details, merge_heads_changed))
3349 }
3350}
3351
3352impl Repository {
3353 pub fn snapshot(&self) -> RepositorySnapshot {
3354 self.snapshot.clone()
3355 }
3356
3357 fn local(
3358 id: RepositoryId,
3359 work_directory_abs_path: Arc<Path>,
3360 dot_git_abs_path: Arc<Path>,
3361 repository_dir_abs_path: Arc<Path>,
3362 common_dir_abs_path: Arc<Path>,
3363 project_environment: WeakEntity<ProjectEnvironment>,
3364 fs: Arc<dyn Fs>,
3365 git_store: WeakEntity<GitStore>,
3366 cx: &mut Context<Self>,
3367 ) -> Self {
3368 let snapshot =
3369 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3370 Repository {
3371 this: cx.weak_entity(),
3372 git_store,
3373 snapshot,
3374 commit_message_buffer: None,
3375 askpass_delegates: Default::default(),
3376 paths_needing_status_update: Default::default(),
3377 latest_askpass_id: 0,
3378 job_sender: Repository::spawn_local_git_worker(
3379 work_directory_abs_path,
3380 dot_git_abs_path,
3381 repository_dir_abs_path,
3382 common_dir_abs_path,
3383 project_environment,
3384 fs,
3385 cx,
3386 ),
3387 job_id: 0,
3388 active_jobs: Default::default(),
3389 }
3390 }
3391
3392 fn remote(
3393 id: RepositoryId,
3394 work_directory_abs_path: Arc<Path>,
3395 path_style: PathStyle,
3396 project_id: ProjectId,
3397 client: AnyProtoClient,
3398 git_store: WeakEntity<GitStore>,
3399 cx: &mut Context<Self>,
3400 ) -> Self {
3401 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3402 Self {
3403 this: cx.weak_entity(),
3404 snapshot,
3405 commit_message_buffer: None,
3406 git_store,
3407 paths_needing_status_update: Default::default(),
3408 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3409 askpass_delegates: Default::default(),
3410 latest_askpass_id: 0,
3411 active_jobs: Default::default(),
3412 job_id: 0,
3413 }
3414 }
3415
3416 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3417 self.git_store.upgrade()
3418 }
3419
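    /// Reloads the index and HEAD base texts for the open buffers that belong to this
    /// repository and have live diffs, forwards any changes to downstream clients, and
    /// triggers diff recalculation for the affected buffers.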
3420 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3421 let this = cx.weak_entity();
3422 let git_store = self.git_store.clone();
3423 let _ = self.send_keyed_job(
3424 Some(GitJobKey::ReloadBufferDiffBases),
3425 None,
3426 |state, mut cx| async move {
3427 let RepositoryState::Local { backend, .. } = state else {
3428 log::error!("tried to recompute diffs for a non-local repository");
3429 return Ok(());
3430 };
3431
3432 let Some(this) = this.upgrade() else {
3433 return Ok(());
3434 };
3435
3436 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3437 git_store.update(cx, |git_store, cx| {
3438 git_store
3439 .diffs
3440 .iter()
3441 .filter_map(|(buffer_id, diff_state)| {
3442 let buffer_store = git_store.buffer_store.read(cx);
3443 let buffer = buffer_store.get(*buffer_id)?;
3444 let file = File::from_dyn(buffer.read(cx).file())?;
3445 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3446 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3447 log::debug!(
3448 "start reload diff bases for repo path {}",
3449 repo_path.as_unix_str()
3450 );
3451 diff_state.update(cx, |diff_state, _| {
3452 let has_unstaged_diff = diff_state
3453 .unstaged_diff
3454 .as_ref()
3455 .is_some_and(|diff| diff.is_upgradable());
3456 let has_uncommitted_diff = diff_state
3457 .uncommitted_diff
3458 .as_ref()
3459 .is_some_and(|set| set.is_upgradable());
3460
3461 Some((
3462 buffer,
3463 repo_path,
3464 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3465 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3466 ))
3467 })
3468 })
3469 .collect::<Vec<_>>()
3470 })
3471 })??;
3472
3473 let buffer_diff_base_changes = cx
3474 .background_spawn(async move {
3475 let mut changes = Vec::new();
3476 for (buffer, repo_path, current_index_text, current_head_text) in
3477 &repo_diff_state_updates
3478 {
3479 let index_text = if current_index_text.is_some() {
3480 backend.load_index_text(repo_path.clone()).await
3481 } else {
3482 None
3483 };
3484 let head_text = if current_head_text.is_some() {
3485 backend.load_committed_text(repo_path.clone()).await
3486 } else {
3487 None
3488 };
3489
3490 let change =
3491 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3492 (Some(current_index), Some(current_head)) => {
3493 let index_changed =
3494 index_text.as_ref() != current_index.as_deref();
3495 let head_changed =
3496 head_text.as_ref() != current_head.as_deref();
3497 if index_changed && head_changed {
3498 if index_text == head_text {
3499 Some(DiffBasesChange::SetBoth(head_text))
3500 } else {
3501 Some(DiffBasesChange::SetEach {
3502 index: index_text,
3503 head: head_text,
3504 })
3505 }
3506 } else if index_changed {
3507 Some(DiffBasesChange::SetIndex(index_text))
3508 } else if head_changed {
3509 Some(DiffBasesChange::SetHead(head_text))
3510 } else {
3511 None
3512 }
3513 }
3514 (Some(current_index), None) => {
3515 let index_changed =
3516 index_text.as_ref() != current_index.as_deref();
3517 index_changed
3518 .then_some(DiffBasesChange::SetIndex(index_text))
3519 }
3520 (None, Some(current_head)) => {
3521 let head_changed =
3522 head_text.as_ref() != current_head.as_deref();
3523 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3524 }
3525 (None, None) => None,
3526 };
3527
3528 changes.push((buffer.clone(), change))
3529 }
3530 changes
3531 })
3532 .await;
3533
3534 git_store.update(&mut cx, |git_store, cx| {
3535 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3536 let buffer_snapshot = buffer.read(cx).text_snapshot();
3537 let buffer_id = buffer_snapshot.remote_id();
3538 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3539 continue;
3540 };
3541
3542 let downstream_client = git_store.downstream_client();
3543 diff_state.update(cx, |diff_state, cx| {
3544 use proto::update_diff_bases::Mode;
3545
3546 if let Some((diff_bases_change, (client, project_id))) =
3547 diff_bases_change.clone().zip(downstream_client)
3548 {
3549 let (staged_text, committed_text, mode) = match diff_bases_change {
3550 DiffBasesChange::SetIndex(index) => {
3551 (index, None, Mode::IndexOnly)
3552 }
3553 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3554 DiffBasesChange::SetEach { index, head } => {
3555 (index, head, Mode::IndexAndHead)
3556 }
3557 DiffBasesChange::SetBoth(text) => {
3558 (None, text, Mode::IndexMatchesHead)
3559 }
3560 };
3561 client
3562 .send(proto::UpdateDiffBases {
3563 project_id: project_id.to_proto(),
3564 buffer_id: buffer_id.to_proto(),
3565 staged_text,
3566 committed_text,
3567 mode: mode as i32,
3568 })
3569 .log_err();
3570 }
3571
3572 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3573 });
3574 }
3575 })
3576 },
3577 );
3578 }
3579
3580 pub fn send_job<F, Fut, R>(
3581 &mut self,
3582 status: Option<SharedString>,
3583 job: F,
3584 ) -> oneshot::Receiver<R>
3585 where
3586 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3587 Fut: Future<Output = R> + 'static,
3588 R: Send + 'static,
3589 {
3590 self.send_keyed_job(None, status, job)
3591 }
3592
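    /// Enqueues a job on this repository's git worker, optionally tagged with a `GitJobKey`.
    /// If `status` is provided, the job is recorded in `active_jobs` with a start time and
    /// message for as long as it runs.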
3593 fn send_keyed_job<F, Fut, R>(
3594 &mut self,
3595 key: Option<GitJobKey>,
3596 status: Option<SharedString>,
3597 job: F,
3598 ) -> oneshot::Receiver<R>
3599 where
3600 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3601 Fut: Future<Output = R> + 'static,
3602 R: Send + 'static,
3603 {
3604 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3605 let job_id = post_inc(&mut self.job_id);
3606 let this = self.this.clone();
3607 self.job_sender
3608 .unbounded_send(GitJob {
3609 key,
3610 job: Box::new(move |state, cx: &mut AsyncApp| {
3611 let job = job(state, cx.clone());
3612 cx.spawn(async move |cx| {
3613 if let Some(s) = status.clone() {
3614 this.update(cx, |this, cx| {
3615 this.active_jobs.insert(
3616 job_id,
3617 JobInfo {
3618 start: Instant::now(),
3619 message: s.clone(),
3620 },
3621 );
3622
3623 cx.notify();
3624 })
3625 .ok();
3626 }
3627 let result = job.await;
3628
3629 this.update(cx, |this, cx| {
3630 this.active_jobs.remove(&job_id);
3631 cx.notify();
3632 })
3633 .ok();
3634
3635 result_tx.send(result).ok();
3636 })
3637 }),
3638 })
3639 .ok();
3640 result_rx
3641 }
3642
3643 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3644 let Some(git_store) = self.git_store.upgrade() else {
3645 return;
3646 };
3647 let entity = cx.entity();
3648 git_store.update(cx, |git_store, cx| {
3649 let Some((&id, _)) = git_store
3650 .repositories
3651 .iter()
3652 .find(|(_, handle)| *handle == &entity)
3653 else {
3654 return;
3655 };
3656 git_store.active_repo_id = Some(id);
3657 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3658 });
3659 }
3660
3661 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3662 self.snapshot.status()
3663 }
3664
3665 pub fn cached_stash(&self) -> GitStash {
3666 self.snapshot.stash_entries.clone()
3667 }
3668
3669 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3670 let git_store = self.git_store.upgrade()?;
3671 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3672 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3673 let abs_path = SanitizedPath::new(&abs_path);
3674 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3675 Some(ProjectPath {
3676 worktree_id: worktree.read(cx).id(),
3677 path: relative_path,
3678 })
3679 }
3680
3681 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3682 let git_store = self.git_store.upgrade()?;
3683 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3684 let abs_path = worktree_store.absolutize(path, cx)?;
3685 self.snapshot.abs_path_to_repo_path(&abs_path)
3686 }
3687
3688 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3689 other
3690 .read(cx)
3691 .snapshot
3692 .work_directory_abs_path
3693 .starts_with(&self.snapshot.work_directory_abs_path)
3694 }
3695
3696 pub fn open_commit_buffer(
3697 &mut self,
3698 languages: Option<Arc<LanguageRegistry>>,
3699 buffer_store: Entity<BufferStore>,
3700 cx: &mut Context<Self>,
3701 ) -> Task<Result<Entity<Buffer>>> {
3702 let id = self.id;
3703 if let Some(buffer) = self.commit_message_buffer.clone() {
3704 return Task::ready(Ok(buffer));
3705 }
3706 let this = cx.weak_entity();
3707
3708 let rx = self.send_job(None, move |state, mut cx| async move {
3709 let Some(this) = this.upgrade() else {
3710 bail!("git store was dropped");
3711 };
3712 match state {
3713 RepositoryState::Local { .. } => {
3714 this.update(&mut cx, |_, cx| {
3715 Self::open_local_commit_buffer(languages, buffer_store, cx)
3716 })?
3717 .await
3718 }
3719 RepositoryState::Remote { project_id, client } => {
3720 let request = client.request(proto::OpenCommitMessageBuffer {
3721 project_id: project_id.0,
3722 repository_id: id.to_proto(),
3723 });
3724 let response = request.await.context("requesting to open commit buffer")?;
3725 let buffer_id = BufferId::new(response.buffer_id)?;
3726 let buffer = buffer_store
3727 .update(&mut cx, |buffer_store, cx| {
3728 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3729 })?
3730 .await?;
3731 if let Some(language_registry) = languages {
3732 let git_commit_language =
3733 language_registry.language_for_name("Git Commit").await?;
3734 buffer.update(&mut cx, |buffer, cx| {
3735 buffer.set_language(Some(git_commit_language), cx);
3736 })?;
3737 }
3738 this.update(&mut cx, |this, _| {
3739 this.commit_message_buffer = Some(buffer.clone());
3740 })?;
3741 Ok(buffer)
3742 }
3743 }
3744 });
3745
3746 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3747 }
3748
3749 fn open_local_commit_buffer(
3750 language_registry: Option<Arc<LanguageRegistry>>,
3751 buffer_store: Entity<BufferStore>,
3752 cx: &mut Context<Self>,
3753 ) -> Task<Result<Entity<Buffer>>> {
3754 cx.spawn(async move |repository, cx| {
3755 let buffer = buffer_store
3756 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3757 .await?;
3758
3759 if let Some(language_registry) = language_registry {
3760 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3761 buffer.update(cx, |buffer, cx| {
3762 buffer.set_language(Some(git_commit_language), cx);
3763 })?;
3764 }
3765
3766 repository.update(cx, |repository, _| {
3767 repository.commit_message_buffer = Some(buffer.clone());
3768 })?;
3769 Ok(buffer)
3770 })
3771 }
3772
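    /// Restores the given paths to their contents at `commit`, tracking a
    /// pending revert operation for each path while the job runs.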
3773 pub fn checkout_files(
3774 &mut self,
3775 commit: &str,
3776 paths: Vec<RepoPath>,
3777 cx: &mut Context<Self>,
3778 ) -> Task<Result<()>> {
3779 let commit = commit.to_string();
3780 let id = self.id;
3781
3782 self.spawn_job_with_tracking(
3783 paths.clone(),
3784 pending_op::GitStatus::Reverted,
3785 cx,
3786 async move |this, cx| {
3787 this.update(cx, |this, _cx| {
3788 this.send_job(
3789 Some(format!("git checkout {}", commit).into()),
3790 move |git_repo, _| async move {
3791 match git_repo {
3792 RepositoryState::Local {
3793 backend,
3794 environment,
3795 ..
3796 } => {
3797 backend
3798 .checkout_files(commit, paths, environment.clone())
3799 .await
3800 }
3801 RepositoryState::Remote { project_id, client } => {
3802 client
3803 .request(proto::GitCheckoutFiles {
3804 project_id: project_id.0,
3805 repository_id: id.to_proto(),
3806 commit,
3807 paths: paths
3808 .into_iter()
3809 .map(|p| p.to_proto())
3810 .collect(),
3811 })
3812 .await?;
3813
3814 Ok(())
3815 }
3816 }
3817 },
3818 )
3819 })?
3820 .await?
3821 },
3822 )
3823 }
3824
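    /// Resets `HEAD` to `commit` with the given `ResetMode` (soft or mixed),
    /// either via the local backend or over RPC for remote projects.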
3825 pub fn reset(
3826 &mut self,
3827 commit: String,
3828 reset_mode: ResetMode,
3829 _cx: &mut App,
3830 ) -> oneshot::Receiver<Result<()>> {
3831 let id = self.id;
3832
3833 self.send_job(None, move |git_repo, _| async move {
3834 match git_repo {
3835 RepositoryState::Local {
3836 backend,
3837 environment,
3838 ..
3839 } => backend.reset(commit, reset_mode, environment).await,
3840 RepositoryState::Remote { project_id, client } => {
3841 client
3842 .request(proto::GitReset {
3843 project_id: project_id.0,
3844 repository_id: id.to_proto(),
3845 commit,
3846 mode: match reset_mode {
3847 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3848 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3849 },
3850 })
3851 .await?;
3852
3853 Ok(())
3854 }
3855 }
3856 })
3857 }
3858
3859 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3860 let id = self.id;
3861 self.send_job(None, move |git_repo, _cx| async move {
3862 match git_repo {
3863 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3864 RepositoryState::Remote { project_id, client } => {
3865 let resp = client
3866 .request(proto::GitShow {
3867 project_id: project_id.0,
3868 repository_id: id.to_proto(),
3869 commit,
3870 })
3871 .await?;
3872
3873 Ok(CommitDetails {
3874 sha: resp.sha.into(),
3875 message: resp.message.into(),
3876 commit_timestamp: resp.commit_timestamp,
3877 author_email: resp.author_email.into(),
3878 author_name: resp.author_name.into(),
3879 })
3880 }
3881 }
3882 })
3883 }
3884
3885 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3886 let id = self.id;
3887 self.send_job(None, move |git_repo, cx| async move {
3888 match git_repo {
3889 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3890 RepositoryState::Remote {
3891 client, project_id, ..
3892 } => {
3893 let response = client
3894 .request(proto::LoadCommitDiff {
3895 project_id: project_id.0,
3896 repository_id: id.to_proto(),
3897 commit,
3898 })
3899 .await?;
3900 Ok(CommitDiff {
3901 files: response
3902 .files
3903 .into_iter()
3904 .map(|file| {
3905 Ok(CommitFile {
3906 path: RepoPath::from_proto(&file.path)?,
3907 old_text: file.old_text,
3908 new_text: file.new_text,
3909 })
3910 })
3911 .collect::<Result<Vec<_>>>()?,
3912 })
3913 }
3914 }
3915 })
3916 }
3917
3918 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3919 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3920 }
3921
3922 fn save_buffers<'a>(
3923 &self,
3924 entries: impl IntoIterator<Item = &'a RepoPath>,
3925 cx: &mut Context<Self>,
3926 ) -> Vec<Task<anyhow::Result<()>>> {
3927 let mut save_futures = Vec::new();
3928 if let Some(buffer_store) = self.buffer_store(cx) {
3929 buffer_store.update(cx, |buffer_store, cx| {
3930 for path in entries {
3931 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3932 continue;
3933 };
3934 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3935 && buffer
3936 .read(cx)
3937 .file()
3938 .is_some_and(|file| file.disk_state().exists())
3939 && buffer.read(cx).has_unsaved_edits()
3940 {
3941 save_futures.push(buffer_store.save_buffer(buffer, cx));
3942 }
3943 }
3944 })
3945 }
3946 save_futures
3947 }
3948
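    /// Stages the given paths (`git add`), first saving any dirty open buffers
    /// for them. The work is recorded as a pending staging operation and runs
    /// as a keyed index-write job, so a newer write for the same paths
    /// supersedes an older queued one.
    ///
    /// A rough usage sketch (not a doctest; `repository`, `repo_path`, and the
    /// surrounding `cx` are placeholders for whatever the call site has in scope):
    ///
    /// ```ignore
    /// let task = repository.update(cx, |repo, cx| {
    ///     repo.stage_entries(vec![repo_path], cx)
    /// });
    /// task.detach_and_log_err(cx);
    /// ```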
3949 pub fn stage_entries(
3950 &mut self,
3951 entries: Vec<RepoPath>,
3952 cx: &mut Context<Self>,
3953 ) -> Task<anyhow::Result<()>> {
3954 if entries.is_empty() {
3955 return Task::ready(Ok(()));
3956 }
3957 let id = self.id;
3958 let save_tasks = self.save_buffers(&entries, cx);
3959 let paths = entries
3960 .iter()
3961 .map(|p| p.as_unix_str())
3962 .collect::<Vec<_>>()
3963 .join(" ");
3964 let status = format!("git add {paths}");
3965 let job_key = GitJobKey::WriteIndex(entries.clone());
3966
3967 self.spawn_job_with_tracking(
3968 entries.clone(),
3969 pending_op::GitStatus::Staged,
3970 cx,
3971 async move |this, cx| {
3972 for save_task in save_tasks {
3973 save_task.await?;
3974 }
3975
3976 this.update(cx, |this, _| {
3977 this.send_keyed_job(
3978 Some(job_key),
3979 Some(status.into()),
3980 move |git_repo, _cx| async move {
3981 match git_repo {
3982 RepositoryState::Local {
3983 backend,
3984 environment,
3985 ..
3986 } => backend.stage_paths(entries, environment.clone()).await,
3987 RepositoryState::Remote { project_id, client } => {
3988 client
3989 .request(proto::Stage {
3990 project_id: project_id.0,
3991 repository_id: id.to_proto(),
3992 paths: entries
3993 .into_iter()
3994 .map(|repo_path| repo_path.to_proto())
3995 .collect(),
3996 })
3997 .await
3998 .context("sending stage request")?;
3999
4000 Ok(())
4001 }
4002 }
4003 },
4004 )
4005 })?
4006 .await?
4007 },
4008 )
4009 }
4010
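    /// Unstages the given paths (`git reset`), first saving any dirty open
    /// buffers for them; tracked and keyed the same way as `stage_entries`.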
4011 pub fn unstage_entries(
4012 &mut self,
4013 entries: Vec<RepoPath>,
4014 cx: &mut Context<Self>,
4015 ) -> Task<anyhow::Result<()>> {
4016 if entries.is_empty() {
4017 return Task::ready(Ok(()));
4018 }
4019 let id = self.id;
4020 let save_tasks = self.save_buffers(&entries, cx);
4021 let paths = entries
4022 .iter()
4023 .map(|p| p.as_unix_str())
4024 .collect::<Vec<_>>()
4025 .join(" ");
4026 let status = format!("git reset {paths}");
4027 let job_key = GitJobKey::WriteIndex(entries.clone());
4028
4029 self.spawn_job_with_tracking(
4030 entries.clone(),
4031 pending_op::GitStatus::Unstaged,
4032 cx,
4033 async move |this, cx| {
4034 for save_task in save_tasks {
4035 save_task.await?;
4036 }
4037
4038 this.update(cx, |this, _| {
4039 this.send_keyed_job(
4040 Some(job_key),
4041 Some(status.into()),
4042 move |git_repo, _cx| async move {
4043 match git_repo {
4044 RepositoryState::Local {
4045 backend,
4046 environment,
4047 ..
4048 } => backend.unstage_paths(entries, environment).await,
4049 RepositoryState::Remote { project_id, client } => {
4050 client
4051 .request(proto::Unstage {
4052 project_id: project_id.0,
4053 repository_id: id.to_proto(),
4054 paths: entries
4055 .into_iter()
4056 .map(|repo_path| repo_path.to_proto())
4057 .collect(),
4058 })
4059 .await
4060 .context("sending unstage request")?;
4061
4062 Ok(())
4063 }
4064 }
4065 },
4066 )
4067 })?
4068 .await?
4069 },
4070 )
4071 }
4072
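    /// Stages every cached status entry that has no staged changes yet and no
    /// pending staging operation.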
4073 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4074 let to_stage = self
4075 .cached_status()
4076 .filter_map(|entry| {
4077 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4078 if ops.staging() || ops.staged() {
4079 None
4080 } else {
4081 Some(entry.repo_path)
4082 }
4083 } else if entry.status.staging().has_staged() {
4084 None
4085 } else {
4086 Some(entry.repo_path)
4087 }
4088 })
4089 .collect();
4090 self.stage_entries(to_stage, cx)
4091 }
4092
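    /// Unstages every cached status entry that is fully staged or has a
    /// pending staging operation; the inverse of `stage_all`.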
4093 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4094 let to_unstage = self
4095 .cached_status()
4096 .filter_map(|entry| {
4097 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4098 if !ops.staging() && !ops.staged() {
4099 None
4100 } else {
4101 Some(entry.repo_path)
4102 }
4103 } else if entry.status.staging().has_unstaged() {
4104 None
4105 } else {
4106 Some(entry.repo_path)
4107 }
4108 })
4109 .collect();
4110 self.unstage_entries(to_unstage, cx)
4111 }
4112
4113 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4114 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4115
4116 self.stash_entries(to_stash, cx)
4117 }
4118
4119 pub fn stash_entries(
4120 &mut self,
4121 entries: Vec<RepoPath>,
4122 cx: &mut Context<Self>,
4123 ) -> Task<anyhow::Result<()>> {
4124 let id = self.id;
4125
4126 cx.spawn(async move |this, cx| {
4127 this.update(cx, |this, _| {
4128 this.send_job(None, move |git_repo, _cx| async move {
4129 match git_repo {
4130 RepositoryState::Local {
4131 backend,
4132 environment,
4133 ..
4134 } => backend.stash_paths(entries, environment).await,
4135 RepositoryState::Remote { project_id, client } => {
4136 client
4137 .request(proto::Stash {
4138 project_id: project_id.0,
4139 repository_id: id.to_proto(),
4140 paths: entries
4141 .into_iter()
4142 .map(|repo_path| repo_path.to_proto())
4143 .collect(),
4144 })
4145 .await
4146 .context("sending stash request")?;
4147 Ok(())
4148 }
4149 }
4150 })
4151 })?
4152 .await??;
4153 Ok(())
4154 })
4155 }
4156
4157 pub fn stash_pop(
4158 &mut self,
4159 index: Option<usize>,
4160 cx: &mut Context<Self>,
4161 ) -> Task<anyhow::Result<()>> {
4162 let id = self.id;
4163 cx.spawn(async move |this, cx| {
4164 this.update(cx, |this, _| {
4165 this.send_job(None, move |git_repo, _cx| async move {
4166 match git_repo {
4167 RepositoryState::Local {
4168 backend,
4169 environment,
4170 ..
4171 } => backend.stash_pop(index, environment).await,
4172 RepositoryState::Remote { project_id, client } => {
4173 client
4174 .request(proto::StashPop {
4175 project_id: project_id.0,
4176 repository_id: id.to_proto(),
4177 stash_index: index.map(|i| i as u64),
4178 })
4179 .await
4180 .context("sending stash pop request")?;
4181 Ok(())
4182 }
4183 }
4184 })
4185 })?
4186 .await??;
4187 Ok(())
4188 })
4189 }
4190
4191 pub fn stash_apply(
4192 &mut self,
4193 index: Option<usize>,
4194 cx: &mut Context<Self>,
4195 ) -> Task<anyhow::Result<()>> {
4196 let id = self.id;
4197 cx.spawn(async move |this, cx| {
4198 this.update(cx, |this, _| {
4199 this.send_job(None, move |git_repo, _cx| async move {
4200 match git_repo {
4201 RepositoryState::Local {
4202 backend,
4203 environment,
4204 ..
4205 } => backend.stash_apply(index, environment).await,
4206 RepositoryState::Remote { project_id, client } => {
4207 client
4208 .request(proto::StashApply {
4209 project_id: project_id.0,
4210 repository_id: id.to_proto(),
4211 stash_index: index.map(|i| i as u64),
4212 })
4213 .await
4214 .context("sending stash apply request")?;
4215 Ok(())
4216 }
4217 }
4218 })
4219 })?
4220 .await??;
4221 Ok(())
4222 })
4223 }
4224
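    /// Drops the stash entry at `index` (or the most recent one when `None`).
    /// For local repositories the stash list is re-read afterwards so the
    /// snapshot, and any downstream clients, see the change immediately.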
4225 pub fn stash_drop(
4226 &mut self,
4227 index: Option<usize>,
4228 cx: &mut Context<Self>,
4229 ) -> oneshot::Receiver<anyhow::Result<()>> {
4230 let id = self.id;
4231 let updates_tx = self
4232 .git_store()
4233 .and_then(|git_store| match &git_store.read(cx).state {
4234 GitStoreState::Local { downstream, .. } => downstream
4235 .as_ref()
4236 .map(|downstream| downstream.updates_tx.clone()),
4237 _ => None,
4238 });
4239 let this = cx.weak_entity();
4240 self.send_job(None, move |git_repo, mut cx| async move {
4241 match git_repo {
4242 RepositoryState::Local {
4243 backend,
4244 environment,
4245 ..
4246 } => {
4247                    // TODO: it would be nice not to have to refresh this manually
4248 let result = backend.stash_drop(index, environment).await;
4249 if result.is_ok()
4250 && let Ok(stash_entries) = backend.stash_entries().await
4251 {
4252 let snapshot = this.update(&mut cx, |this, cx| {
4253 this.snapshot.stash_entries = stash_entries;
4254 cx.emit(RepositoryEvent::StashEntriesChanged);
4255 this.snapshot.clone()
4256 })?;
4257 if let Some(updates_tx) = updates_tx {
4258 updates_tx
4259 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4260 .ok();
4261 }
4262 }
4263
4264 result
4265 }
4266 RepositoryState::Remote { project_id, client } => {
4267 client
4268 .request(proto::StashDrop {
4269 project_id: project_id.0,
4270 repository_id: id.to_proto(),
4271 stash_index: index.map(|i| i as u64),
4272 })
4273 .await
4274                        .context("sending stash drop request")?;
4275 Ok(())
4276 }
4277 }
4278 })
4279 }
4280
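    /// Creates a commit with the given message and options. For remote
    /// projects an askpass delegate is registered for the duration of the
    /// request so credential prompts can be answered by this peer.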
4281 pub fn commit(
4282 &mut self,
4283 message: SharedString,
4284 name_and_email: Option<(SharedString, SharedString)>,
4285 options: CommitOptions,
4286 askpass: AskPassDelegate,
4287 _cx: &mut App,
4288 ) -> oneshot::Receiver<Result<()>> {
4289 let id = self.id;
4290 let askpass_delegates = self.askpass_delegates.clone();
4291 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4292
4293 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4294 match git_repo {
4295 RepositoryState::Local {
4296 backend,
4297 environment,
4298 ..
4299 } => {
4300 backend
4301 .commit(message, name_and_email, options, askpass, environment)
4302 .await
4303 }
4304 RepositoryState::Remote { project_id, client } => {
4305 askpass_delegates.lock().insert(askpass_id, askpass);
4306 let _defer = util::defer(|| {
4307 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4308 debug_assert!(askpass_delegate.is_some());
4309 });
4310 let (name, email) = name_and_email.unzip();
4311 client
4312 .request(proto::Commit {
4313 project_id: project_id.0,
4314 repository_id: id.to_proto(),
4315 message: String::from(message),
4316 name: name.map(String::from),
4317 email: email.map(String::from),
4318 options: Some(proto::commit::CommitOptions {
4319 amend: options.amend,
4320 signoff: options.signoff,
4321 }),
4322 askpass_id,
4323 })
4324 .await
4325 .context("sending commit request")?;
4326
4327 Ok(())
4328 }
4329 }
4330 })
4331 }
4332
4333 pub fn fetch(
4334 &mut self,
4335 fetch_options: FetchOptions,
4336 askpass: AskPassDelegate,
4337 _cx: &mut App,
4338 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4339 let askpass_delegates = self.askpass_delegates.clone();
4340 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4341 let id = self.id;
4342
4343 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4344 match git_repo {
4345 RepositoryState::Local {
4346 backend,
4347 environment,
4348 ..
4349 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4350 RepositoryState::Remote { project_id, client } => {
4351 askpass_delegates.lock().insert(askpass_id, askpass);
4352 let _defer = util::defer(|| {
4353 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4354 debug_assert!(askpass_delegate.is_some());
4355 });
4356
4357 let response = client
4358 .request(proto::Fetch {
4359 project_id: project_id.0,
4360 repository_id: id.to_proto(),
4361 askpass_id,
4362 remote: fetch_options.to_proto(),
4363 })
4364 .await
4365 .context("sending fetch request")?;
4366
4367 Ok(RemoteCommandOutput {
4368 stdout: response.stdout,
4369 stderr: response.stderr,
4370 })
4371 }
4372 }
4373 })
4374 }
4375
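    /// Pushes `branch` to `remote` with the given options. For local
    /// repositories the branch list is re-read on success so the snapshot's
    /// upstream tracking information stays current, and the updated snapshot
    /// is forwarded to downstream clients.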
4376 pub fn push(
4377 &mut self,
4378 branch: SharedString,
4379 remote: SharedString,
4380 options: Option<PushOptions>,
4381 askpass: AskPassDelegate,
4382 cx: &mut Context<Self>,
4383 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4384 let askpass_delegates = self.askpass_delegates.clone();
4385 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4386 let id = self.id;
4387
4388 let args = options
4389 .map(|option| match option {
4390 PushOptions::SetUpstream => " --set-upstream",
4391 PushOptions::Force => " --force-with-lease",
4392 })
4393 .unwrap_or("");
4394
4395 let updates_tx = self
4396 .git_store()
4397 .and_then(|git_store| match &git_store.read(cx).state {
4398 GitStoreState::Local { downstream, .. } => downstream
4399 .as_ref()
4400 .map(|downstream| downstream.updates_tx.clone()),
4401 _ => None,
4402 });
4403
4404 let this = cx.weak_entity();
4405 self.send_job(
4406            Some(format!("git push{} {} {}", args, remote, branch).into()),
4407 move |git_repo, mut cx| async move {
4408 match git_repo {
4409 RepositoryState::Local {
4410 backend,
4411 environment,
4412 ..
4413 } => {
4414 let result = backend
4415 .push(
4416 branch.to_string(),
4417 remote.to_string(),
4418 options,
4419 askpass,
4420 environment.clone(),
4421 cx.clone(),
4422 )
4423 .await;
4424                        // TODO: it would be nice not to have to refresh this manually
4425 if result.is_ok() {
4426 let branches = backend.branches().await?;
4427 let branch = branches.into_iter().find(|branch| branch.is_head);
4428                            log::info!("head branch after push is {branch:?}");
4429 let snapshot = this.update(&mut cx, |this, cx| {
4430 this.snapshot.branch = branch;
4431 cx.emit(RepositoryEvent::BranchChanged);
4432 this.snapshot.clone()
4433 })?;
4434 if let Some(updates_tx) = updates_tx {
4435 updates_tx
4436 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4437 .ok();
4438 }
4439 }
4440 result
4441 }
4442 RepositoryState::Remote { project_id, client } => {
4443 askpass_delegates.lock().insert(askpass_id, askpass);
4444 let _defer = util::defer(|| {
4445 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4446 debug_assert!(askpass_delegate.is_some());
4447 });
4448 let response = client
4449 .request(proto::Push {
4450 project_id: project_id.0,
4451 repository_id: id.to_proto(),
4452 askpass_id,
4453 branch_name: branch.to_string(),
4454 remote_name: remote.to_string(),
4455 options: options.map(|options| match options {
4456 PushOptions::Force => proto::push::PushOptions::Force,
4457 PushOptions::SetUpstream => {
4458 proto::push::PushOptions::SetUpstream
4459 }
4460 }
4461 as i32),
4462 })
4463 .await
4464 .context("sending push request")?;
4465
4466 Ok(RemoteCommandOutput {
4467 stdout: response.stdout,
4468 stderr: response.stderr,
4469 })
4470 }
4471 }
4472 },
4473 )
4474 }
4475
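    /// Pulls from `remote`, optionally restricted to `branch` and optionally
    /// with `--rebase`, forwarding askpass prompts when the project is remote.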
4476 pub fn pull(
4477 &mut self,
4478 branch: Option<SharedString>,
4479 remote: SharedString,
4480 rebase: bool,
4481 askpass: AskPassDelegate,
4482 _cx: &mut App,
4483 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4484 let askpass_delegates = self.askpass_delegates.clone();
4485 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4486 let id = self.id;
4487
4488 let mut status = "git pull".to_string();
4489 if rebase {
4490 status.push_str(" --rebase");
4491 }
4492 status.push_str(&format!(" {}", remote));
4493 if let Some(b) = &branch {
4494 status.push_str(&format!(" {}", b));
4495 }
4496
4497 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4498 match git_repo {
4499 RepositoryState::Local {
4500 backend,
4501 environment,
4502 ..
4503 } => {
4504 backend
4505 .pull(
4506 branch.as_ref().map(|b| b.to_string()),
4507 remote.to_string(),
4508 rebase,
4509 askpass,
4510 environment.clone(),
4511 cx,
4512 )
4513 .await
4514 }
4515 RepositoryState::Remote { project_id, client } => {
4516 askpass_delegates.lock().insert(askpass_id, askpass);
4517 let _defer = util::defer(|| {
4518 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4519 debug_assert!(askpass_delegate.is_some());
4520 });
4521 let response = client
4522 .request(proto::Pull {
4523 project_id: project_id.0,
4524 repository_id: id.to_proto(),
4525 askpass_id,
4526 rebase,
4527 branch_name: branch.as_ref().map(|b| b.to_string()),
4528 remote_name: remote.to_string(),
4529 })
4530 .await
4531 .context("sending pull request")?;
4532
4533 Ok(RemoteCommandOutput {
4534 stdout: response.stdout,
4535 stderr: response.stderr,
4536 })
4537 }
4538 }
4539 })
4540 }
4541
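    /// Enqueues a keyed index write for `path`, so that a newer write for the
    /// same path supersedes an older queued one. Once the write lands, the
    /// buffer's diff state records the hunk-staging operation count this write
    /// corresponds to.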
4542 fn spawn_set_index_text_job(
4543 &mut self,
4544 path: RepoPath,
4545 content: Option<String>,
4546 hunk_staging_operation_count: Option<usize>,
4547 cx: &mut Context<Self>,
4548 ) -> oneshot::Receiver<anyhow::Result<()>> {
4549 let id = self.id;
4550 let this = cx.weak_entity();
4551 let git_store = self.git_store.clone();
4552 self.send_keyed_job(
4553 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4554 None,
4555 move |git_repo, mut cx| async move {
4556 log::debug!(
4557 "start updating index text for buffer {}",
4558 path.as_unix_str()
4559 );
4560 match git_repo {
4561 RepositoryState::Local {
4562 backend,
4563 environment,
4564 ..
4565 } => {
4566 backend
4567 .set_index_text(path.clone(), content, environment.clone())
4568 .await?;
4569 }
4570 RepositoryState::Remote { project_id, client } => {
4571 client
4572 .request(proto::SetIndexText {
4573 project_id: project_id.0,
4574 repository_id: id.to_proto(),
4575 path: path.to_proto(),
4576 text: content,
4577 })
4578 .await?;
4579 }
4580 }
4581 log::debug!(
4582 "finish updating index text for buffer {}",
4583 path.as_unix_str()
4584 );
4585
4586 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4587 let project_path = this
4588 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4589 .ok()
4590 .flatten();
4591 git_store.update(&mut cx, |git_store, cx| {
4592 let buffer_id = git_store
4593 .buffer_store
4594 .read(cx)
4595 .get_by_path(&project_path?)?
4596 .read(cx)
4597 .remote_id();
4598 let diff_state = git_store.diffs.get(&buffer_id)?;
4599 diff_state.update(cx, |diff_state, _| {
4600 diff_state.hunk_staging_operation_count_as_of_write =
4601 hunk_staging_operation_count;
4602 });
4603 Some(())
4604 })?;
4605 }
4606 Ok(())
4607 },
4608 )
4609 }
4610
4611 pub fn get_remotes(
4612 &mut self,
4613 branch_name: Option<String>,
4614 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4615 let id = self.id;
4616 self.send_job(None, move |repo, _cx| async move {
4617 match repo {
4618 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4619 RepositoryState::Remote { project_id, client } => {
4620 let response = client
4621 .request(proto::GetRemotes {
4622 project_id: project_id.0,
4623 repository_id: id.to_proto(),
4624 branch_name,
4625 })
4626 .await?;
4627
4628 let remotes = response
4629 .remotes
4630 .into_iter()
4631 .map(|remotes| git::repository::Remote {
4632 name: remotes.name.into(),
4633 })
4634 .collect();
4635
4636 Ok(remotes)
4637 }
4638 }
4639 })
4640 }
4641
4642 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4643 let id = self.id;
4644 self.send_job(None, move |repo, _| async move {
4645 match repo {
4646 RepositoryState::Local { backend, .. } => backend.branches().await,
4647 RepositoryState::Remote { project_id, client } => {
4648 let response = client
4649 .request(proto::GitGetBranches {
4650 project_id: project_id.0,
4651 repository_id: id.to_proto(),
4652 })
4653 .await?;
4654
4655 let branches = response
4656 .branches
4657 .into_iter()
4658 .map(|branch| proto_to_branch(&branch))
4659 .collect();
4660
4661 Ok(branches)
4662 }
4663 }
4664 })
4665 }
4666
4667 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4668 let id = self.id;
4669 self.send_job(None, move |repo, _| async move {
4670 match repo {
4671 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4672 RepositoryState::Remote { project_id, client } => {
4673 let response = client
4674 .request(proto::GitGetWorktrees {
4675 project_id: project_id.0,
4676 repository_id: id.to_proto(),
4677 })
4678 .await?;
4679
4680 let worktrees = response
4681 .worktrees
4682 .into_iter()
4683 .map(|worktree| proto_to_worktree(&worktree))
4684 .collect();
4685
4686 Ok(worktrees)
4687 }
4688 }
4689 })
4690 }
4691
4692 pub fn create_worktree(
4693 &mut self,
4694 name: String,
4695 path: PathBuf,
4696 commit: Option<String>,
4697 ) -> oneshot::Receiver<Result<()>> {
4698 let id = self.id;
4699 self.send_job(
4700 Some("git worktree add".into()),
4701 move |repo, _cx| async move {
4702 match repo {
4703 RepositoryState::Local { backend, .. } => {
4704 backend.create_worktree(name, path, commit).await
4705 }
4706 RepositoryState::Remote { project_id, client } => {
4707 client
4708 .request(proto::GitCreateWorktree {
4709 project_id: project_id.0,
4710 repository_id: id.to_proto(),
4711 name,
4712 directory: path.to_string_lossy().to_string(),
4713 commit,
4714 })
4715 .await?;
4716
4717 Ok(())
4718 }
4719 }
4720 },
4721 )
4722 }
4723
4724 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4725 let id = self.id;
4726 self.send_job(None, move |repo, _| async move {
4727 match repo {
4728 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4729 RepositoryState::Remote { project_id, client } => {
4730 let response = client
4731 .request(proto::GetDefaultBranch {
4732 project_id: project_id.0,
4733 repository_id: id.to_proto(),
4734 })
4735 .await?;
4736
4737 anyhow::Ok(response.branch.map(SharedString::from))
4738 }
4739 }
4740 })
4741 }
4742
4743 pub fn diff_tree(
4744 &mut self,
4745 diff_type: DiffTreeType,
4746 _cx: &App,
4747 ) -> oneshot::Receiver<Result<TreeDiff>> {
4748 let repository_id = self.snapshot.id;
4749 self.send_job(None, move |repo, _cx| async move {
4750 match repo {
4751 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4752 RepositoryState::Remote { client, project_id } => {
4753 let response = client
4754 .request(proto::GetTreeDiff {
4755 project_id: project_id.0,
4756 repository_id: repository_id.0,
4757 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4758 base: diff_type.base().to_string(),
4759 head: diff_type.head().to_string(),
4760 })
4761 .await?;
4762
4763 let entries = response
4764 .entries
4765 .into_iter()
4766 .filter_map(|entry| {
4767 let status = match entry.status() {
4768 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4769 proto::tree_diff_status::Status::Modified => {
4770 TreeDiffStatus::Modified {
4771 old: git::Oid::from_str(
4772 &entry.oid.context("missing oid").log_err()?,
4773 )
4774 .log_err()?,
4775 }
4776 }
4777 proto::tree_diff_status::Status::Deleted => {
4778 TreeDiffStatus::Deleted {
4779 old: git::Oid::from_str(
4780 &entry.oid.context("missing oid").log_err()?,
4781 )
4782 .log_err()?,
4783 }
4784 }
4785 };
4786 Some((
4787 RepoPath::from_rel_path(
4788 &RelPath::from_proto(&entry.path).log_err()?,
4789 ),
4790 status,
4791 ))
4792 })
4793 .collect();
4794
4795 Ok(TreeDiff { entries })
4796 }
4797 }
4798 })
4799 }
4800
4801 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4802 let id = self.id;
4803 self.send_job(None, move |repo, _cx| async move {
4804 match repo {
4805 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4806 RepositoryState::Remote { project_id, client } => {
4807 let response = client
4808 .request(proto::GitDiff {
4809 project_id: project_id.0,
4810 repository_id: id.to_proto(),
4811 diff_type: match diff_type {
4812 DiffType::HeadToIndex => {
4813 proto::git_diff::DiffType::HeadToIndex.into()
4814 }
4815 DiffType::HeadToWorktree => {
4816 proto::git_diff::DiffType::HeadToWorktree.into()
4817 }
4818 },
4819 })
4820 .await?;
4821
4822 Ok(response.diff)
4823 }
4824 }
4825 })
4826 }
4827
4828 pub fn create_branch(
4829 &mut self,
4830 branch_name: String,
4831 base_branch: Option<String>,
4832 ) -> oneshot::Receiver<Result<()>> {
4833 let id = self.id;
4834 let status_msg = if let Some(ref base) = base_branch {
4835 format!("git switch -c {branch_name} {base}").into()
4836 } else {
4837 format!("git switch -c {branch_name}").into()
4838 };
4839 self.send_job(Some(status_msg), move |repo, _cx| async move {
4840 match repo {
4841 RepositoryState::Local { backend, .. } => {
4842 backend.create_branch(branch_name, base_branch).await
4843 }
4844 RepositoryState::Remote { project_id, client } => {
4845 client
4846 .request(proto::GitCreateBranch {
4847 project_id: project_id.0,
4848 repository_id: id.to_proto(),
4849 branch_name,
4850 })
4851 .await?;
4852
4853 Ok(())
4854 }
4855 }
4856 })
4857 }
4858
4859 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4860 let id = self.id;
4861 self.send_job(
4862 Some(format!("git switch {branch_name}").into()),
4863 move |repo, _cx| async move {
4864 match repo {
4865 RepositoryState::Local { backend, .. } => {
4866 backend.change_branch(branch_name).await
4867 }
4868 RepositoryState::Remote { project_id, client } => {
4869 client
4870 .request(proto::GitChangeBranch {
4871 project_id: project_id.0,
4872 repository_id: id.to_proto(),
4873 branch_name,
4874 })
4875 .await?;
4876
4877 Ok(())
4878 }
4879 }
4880 },
4881 )
4882 }
4883
4884 pub fn rename_branch(
4885 &mut self,
4886 branch: String,
4887 new_name: String,
4888 ) -> oneshot::Receiver<Result<()>> {
4889 let id = self.id;
4890 self.send_job(
4891 Some(format!("git branch -m {branch} {new_name}").into()),
4892 move |repo, _cx| async move {
4893 match repo {
4894 RepositoryState::Local { backend, .. } => {
4895 backend.rename_branch(branch, new_name).await
4896 }
4897 RepositoryState::Remote { project_id, client } => {
4898 client
4899 .request(proto::GitRenameBranch {
4900 project_id: project_id.0,
4901 repository_id: id.to_proto(),
4902 branch,
4903 new_name,
4904 })
4905 .await?;
4906
4907 Ok(())
4908 }
4909 }
4910 },
4911 )
4912 }
4913
4914 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4915 let id = self.id;
4916 self.send_job(None, move |repo, _cx| async move {
4917 match repo {
4918 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4919 RepositoryState::Remote { project_id, client } => {
4920 let response = client
4921 .request(proto::CheckForPushedCommits {
4922 project_id: project_id.0,
4923 repository_id: id.to_proto(),
4924 })
4925 .await?;
4926
4927 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4928
4929 Ok(branches)
4930 }
4931 }
4932 })
4933 }
4934
4935 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4936 self.send_job(None, |repo, _cx| async move {
4937 match repo {
4938 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4939 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4940 }
4941 })
4942 }
4943
4944 pub fn restore_checkpoint(
4945 &mut self,
4946 checkpoint: GitRepositoryCheckpoint,
4947 ) -> oneshot::Receiver<Result<()>> {
4948 self.send_job(None, move |repo, _cx| async move {
4949 match repo {
4950 RepositoryState::Local { backend, .. } => {
4951 backend.restore_checkpoint(checkpoint).await
4952 }
4953 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4954 }
4955 })
4956 }
4957
4958 pub(crate) fn apply_remote_update(
4959 &mut self,
4960 update: proto::UpdateRepository,
4961 cx: &mut Context<Self>,
4962 ) -> Result<()> {
4963 let conflicted_paths = TreeSet::from_ordered_entries(
4964 update
4965 .current_merge_conflicts
4966 .into_iter()
4967 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4968 );
4969 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4970 let new_head_commit = update
4971 .head_commit_details
4972 .as_ref()
4973 .map(proto_to_commit_details);
4974 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4975 cx.emit(RepositoryEvent::BranchChanged)
4976 }
4977 self.snapshot.branch = new_branch;
4978 self.snapshot.head_commit = new_head_commit;
4979
4980 self.snapshot.merge.conflicted_paths = conflicted_paths;
4981 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4982 let new_stash_entries = GitStash {
4983 entries: update
4984 .stash_entries
4985 .iter()
4986 .filter_map(|entry| proto_to_stash(entry).ok())
4987 .collect(),
4988 };
4989 if self.snapshot.stash_entries != new_stash_entries {
4990 cx.emit(RepositoryEvent::StashEntriesChanged)
4991 }
4992 self.snapshot.stash_entries = new_stash_entries;
4993
4994 let edits = update
4995 .removed_statuses
4996 .into_iter()
4997 .filter_map(|path| {
4998 Some(sum_tree::Edit::Remove(PathKey(
4999 RelPath::from_proto(&path).log_err()?,
5000 )))
5001 })
5002 .chain(
5003 update
5004 .updated_statuses
5005 .into_iter()
5006 .filter_map(|updated_status| {
5007 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5008 }),
5009 )
5010 .collect::<Vec<_>>();
5011 if !edits.is_empty() {
5012 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
5013 }
5014 self.snapshot.statuses_by_path.edit(edits, ());
5015 if update.is_last_update {
5016 self.snapshot.scan_id = update.scan_id;
5017 }
5018 Ok(())
5019 }
5020
5021 pub fn compare_checkpoints(
5022 &mut self,
5023 left: GitRepositoryCheckpoint,
5024 right: GitRepositoryCheckpoint,
5025 ) -> oneshot::Receiver<Result<bool>> {
5026 self.send_job(None, move |repo, _cx| async move {
5027 match repo {
5028 RepositoryState::Local { backend, .. } => {
5029 backend.compare_checkpoints(left, right).await
5030 }
5031 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5032 }
5033 })
5034 }
5035
5036 pub fn diff_checkpoints(
5037 &mut self,
5038 base_checkpoint: GitRepositoryCheckpoint,
5039 target_checkpoint: GitRepositoryCheckpoint,
5040 ) -> oneshot::Receiver<Result<String>> {
5041 self.send_job(None, move |repo, _cx| async move {
5042 match repo {
5043 RepositoryState::Local { backend, .. } => {
5044 backend
5045 .diff_checkpoints(base_checkpoint, target_checkpoint)
5046 .await
5047 }
5048 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5049 }
5050 })
5051 }
5052
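    /// Enqueues a full git status re-scan, keyed so redundant scans collapse.
    /// The resulting snapshot replaces the current one and is forwarded to
    /// downstream clients when a sender is provided.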
5053 fn schedule_scan(
5054 &mut self,
5055 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5056 cx: &mut Context<Self>,
5057 ) {
5058 let this = cx.weak_entity();
5059 let _ = self.send_keyed_job(
5060 Some(GitJobKey::ReloadGitState),
5061 None,
5062 |state, mut cx| async move {
5063 log::debug!("run scheduled git status scan");
5064
5065 let Some(this) = this.upgrade() else {
5066 return Ok(());
5067 };
5068 let RepositoryState::Local { backend, .. } = state else {
5069 bail!("not a local repository")
5070 };
5071 let (snapshot, events) = this
5072 .update(&mut cx, |this, _| {
5073 this.paths_needing_status_update.clear();
5074 compute_snapshot(
5075 this.id,
5076 this.work_directory_abs_path.clone(),
5077 this.snapshot.clone(),
5078 backend.clone(),
5079 )
5080 })?
5081 .await?;
5082 this.update(&mut cx, |this, cx| {
5083 this.snapshot = snapshot.clone();
5084 for event in events {
5085 cx.emit(event);
5086 }
5087 })?;
5088 if let Some(updates_tx) = updates_tx {
5089 updates_tx
5090 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5091 .ok();
5092 }
5093 Ok(())
5094 },
5095 );
5096 }
5097
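    /// Spawns the background task that owns the local repository handle: it
    /// loads the project's shell environment, locates a system `git` binary,
    /// opens the repository, and then drains queued jobs one at a time,
    /// skipping a keyed job when a newer job with the same key is already
    /// waiting.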
5098 fn spawn_local_git_worker(
5099 work_directory_abs_path: Arc<Path>,
5100 dot_git_abs_path: Arc<Path>,
5101 _repository_dir_abs_path: Arc<Path>,
5102 _common_dir_abs_path: Arc<Path>,
5103 project_environment: WeakEntity<ProjectEnvironment>,
5104 fs: Arc<dyn Fs>,
5105 cx: &mut Context<Self>,
5106 ) -> mpsc::UnboundedSender<GitJob> {
5107 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5108
5109 cx.spawn(async move |_, cx| {
5110 let environment = project_environment
5111 .upgrade()
5112 .context("missing project environment")?
5113 .update(cx, |project_environment, cx| {
5114 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
5115 })?
5116 .await
5117 .unwrap_or_else(|| {
5118 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5119 HashMap::default()
5120 });
5121 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5122 let backend = cx
5123 .background_spawn(async move {
5124 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
5125 .or_else(|| which::which("git").ok());
5126 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5127 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5128 })
5129 .await?;
5130
5131 if let Some(git_hosting_provider_registry) =
5132 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5133 {
5134 git_hosting_providers::register_additional_providers(
5135 git_hosting_provider_registry,
5136 backend.clone(),
5137 );
5138 }
5139
5140 let state = RepositoryState::Local {
5141 backend,
5142 environment: Arc::new(environment),
5143 };
5144 let mut jobs = VecDeque::new();
5145 loop {
5146 while let Ok(Some(next_job)) = job_rx.try_next() {
5147 jobs.push_back(next_job);
5148 }
5149
5150 if let Some(job) = jobs.pop_front() {
5151 if let Some(current_key) = &job.key
5152 && jobs
5153 .iter()
5154 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5155 {
5156 continue;
5157 }
5158 (job.job)(state.clone(), cx).await;
5159 } else if let Some(job) = job_rx.next().await {
5160 jobs.push_back(job);
5161 } else {
5162 break;
5163 }
5164 }
5165 anyhow::Ok(())
5166 })
5167 .detach_and_log_err(cx);
5168
5169 job_tx
5170 }
5171
5172 fn spawn_remote_git_worker(
5173 project_id: ProjectId,
5174 client: AnyProtoClient,
5175 cx: &mut Context<Self>,
5176 ) -> mpsc::UnboundedSender<GitJob> {
5177 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5178
5179 cx.spawn(async move |_, cx| {
5180 let state = RepositoryState::Remote { project_id, client };
5181 let mut jobs = VecDeque::new();
5182 loop {
5183 while let Ok(Some(next_job)) = job_rx.try_next() {
5184 jobs.push_back(next_job);
5185 }
5186
5187 if let Some(job) = jobs.pop_front() {
5188 if let Some(current_key) = &job.key
5189 && jobs
5190 .iter()
5191 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5192 {
5193 continue;
5194 }
5195 (job.job)(state.clone(), cx).await;
5196 } else if let Some(job) = job_rx.next().await {
5197 jobs.push_back(job);
5198 } else {
5199 break;
5200 }
5201 }
5202 anyhow::Ok(())
5203 })
5204 .detach_and_log_err(cx);
5205
5206 job_tx
5207 }
5208
5209 fn load_staged_text(
5210 &mut self,
5211 buffer_id: BufferId,
5212 repo_path: RepoPath,
5213 cx: &App,
5214 ) -> Task<Result<Option<String>>> {
5215 let rx = self.send_job(None, move |state, _| async move {
5216 match state {
5217 RepositoryState::Local { backend, .. } => {
5218 anyhow::Ok(backend.load_index_text(repo_path).await)
5219 }
5220 RepositoryState::Remote { project_id, client } => {
5221 let response = client
5222 .request(proto::OpenUnstagedDiff {
5223 project_id: project_id.to_proto(),
5224 buffer_id: buffer_id.to_proto(),
5225 })
5226 .await?;
5227 Ok(response.staged_text)
5228 }
5229 }
5230 });
5231 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5232 }
5233
5234 fn load_committed_text(
5235 &mut self,
5236 buffer_id: BufferId,
5237 repo_path: RepoPath,
5238 cx: &App,
5239 ) -> Task<Result<DiffBasesChange>> {
5240 let rx = self.send_job(None, move |state, _| async move {
5241 match state {
5242 RepositoryState::Local { backend, .. } => {
5243 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5244 let staged_text = backend.load_index_text(repo_path).await;
5245 let diff_bases_change = if committed_text == staged_text {
5246 DiffBasesChange::SetBoth(committed_text)
5247 } else {
5248 DiffBasesChange::SetEach {
5249 index: staged_text,
5250 head: committed_text,
5251 }
5252 };
5253 anyhow::Ok(diff_bases_change)
5254 }
5255 RepositoryState::Remote { project_id, client } => {
5256 use proto::open_uncommitted_diff_response::Mode;
5257
5258 let response = client
5259 .request(proto::OpenUncommittedDiff {
5260 project_id: project_id.to_proto(),
5261 buffer_id: buffer_id.to_proto(),
5262 })
5263 .await?;
5264 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5265 let bases = match mode {
5266 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5267 Mode::IndexAndHead => DiffBasesChange::SetEach {
5268 head: response.committed_text,
5269 index: response.staged_text,
5270 },
5271 };
5272 Ok(bases)
5273 }
5274 }
5275 });
5276
5277 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5278    }

5279    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5280 let repository_id = self.snapshot.id;
5281 let rx = self.send_job(None, move |state, _| async move {
5282 match state {
5283 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5284 RepositoryState::Remote { client, project_id } => {
5285 let response = client
5286 .request(proto::GetBlobContent {
5287 project_id: project_id.to_proto(),
5288 repository_id: repository_id.0,
5289 oid: oid.to_string(),
5290 })
5291 .await?;
5292 Ok(response.content)
5293 }
5294 }
5295 });
5296 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5297 }
5298
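    /// Records that the given paths may have changed on disk and enqueues a
    /// keyed refresh job that re-reads their statuses and the stash list,
    /// applying only the entries that actually differ from the snapshot.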
5299 fn paths_changed(
5300 &mut self,
5301 paths: Vec<RepoPath>,
5302 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5303 cx: &mut Context<Self>,
5304 ) {
5305 self.paths_needing_status_update.extend(paths);
5306
5307 let this = cx.weak_entity();
5308 let _ = self.send_keyed_job(
5309 Some(GitJobKey::RefreshStatuses),
5310 None,
5311 |state, mut cx| async move {
5312 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5313 (
5314 this.snapshot.clone(),
5315 mem::take(&mut this.paths_needing_status_update),
5316 )
5317 })?;
5318 let RepositoryState::Local { backend, .. } = state else {
5319 bail!("not a local repository")
5320 };
5321
5322 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5323 if paths.is_empty() {
5324 return Ok(());
5325 }
5326 let statuses = backend.status(&paths).await?;
5327 let stash_entries = backend.stash_entries().await?;
5328
5329 let changed_path_statuses = cx
5330 .background_spawn(async move {
5331 let mut changed_path_statuses = Vec::new();
5332 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5333 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5334
5335 for (repo_path, status) in &*statuses.entries {
5336 changed_paths.remove(repo_path);
5337 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5338 && cursor.item().is_some_and(|entry| entry.status == *status)
5339 {
5340 continue;
5341 }
5342
5343 changed_path_statuses.push(Edit::Insert(StatusEntry {
5344 repo_path: repo_path.clone(),
5345 status: *status,
5346 }));
5347 }
5348 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5349 for path in changed_paths.into_iter() {
5350 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5351 changed_path_statuses
5352 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5353 }
5354 }
5355 changed_path_statuses
5356 })
5357 .await;
5358
5359 this.update(&mut cx, |this, cx| {
5360 if this.snapshot.stash_entries != stash_entries {
5361 cx.emit(RepositoryEvent::StashEntriesChanged);
5362 this.snapshot.stash_entries = stash_entries;
5363 }
5364
5365 if !changed_path_statuses.is_empty() {
5366 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5367 this.snapshot
5368 .statuses_by_path
5369 .edit(changed_path_statuses, ());
5370 this.snapshot.scan_id += 1;
5371 }
5372
5373 if let Some(updates_tx) = updates_tx {
5374 updates_tx
5375 .unbounded_send(DownstreamUpdate::UpdateRepository(
5376 this.snapshot.clone(),
5377 ))
5378 .ok();
5379 }
5380 })
5381 },
5382 );
5383 }
5384
5385    /// Returns the currently running git command and when it started, if any.
5386 pub fn current_job(&self) -> Option<JobInfo> {
5387 self.active_jobs.values().next().cloned()
5388 }
5389
5390 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5391 self.send_job(None, |_, _| async {})
5392 }
5393
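    /// Registers a pending operation with `git_status` for each path, runs `f`,
    /// and then marks those operations as finished, skipped (when the job was
    /// canceled), or errored.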
5394 fn spawn_job_with_tracking<AsyncFn>(
5395 &mut self,
5396 paths: Vec<RepoPath>,
5397 git_status: pending_op::GitStatus,
5398 cx: &mut Context<Self>,
5399 f: AsyncFn,
5400 ) -> Task<Result<()>>
5401 where
5402 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5403 {
5404 let ids = self.new_pending_ops_for_paths(paths, git_status);
5405
5406 cx.spawn(async move |this, cx| {
5407 let (job_status, result) = match f(this.clone(), cx).await {
5408 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5409 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5410 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5411 };
5412
5413 this.update(cx, |this, _| {
5414 let mut edits = Vec::with_capacity(ids.len());
5415 for (id, entry) in ids {
5416 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5417 if let Some(op) = ops.op_by_id_mut(id) {
5418 op.job_status = job_status;
5419 }
5420 edits.push(sum_tree::Edit::Insert(ops));
5421 }
5422 }
5423 this.snapshot.pending_ops_by_path.edit(edits, ());
5424 })?;
5425
5426 result
5427 })
5428 }
5429
5430 fn new_pending_ops_for_paths(
5431 &mut self,
5432 paths: Vec<RepoPath>,
5433 git_status: pending_op::GitStatus,
5434 ) -> Vec<(PendingOpId, RepoPath)> {
5435 let mut edits = Vec::with_capacity(paths.len());
5436 let mut ids = Vec::with_capacity(paths.len());
5437 for path in paths {
5438 let mut ops = self
5439 .snapshot
5440 .pending_ops_for_path(&path)
5441 .unwrap_or_else(|| PendingOps::new(&path));
5442 let id = ops.max_id() + 1;
5443 ops.ops.push(PendingOp {
5444 id,
5445 git_status,
5446 job_status: pending_op::JobStatus::Running,
5447 });
5448 edits.push(sum_tree::Edit::Insert(ops));
5449 ids.push((id, path));
5450 }
5451 self.snapshot.pending_ops_by_path.edit(edits, ());
5452 ids
5453 }
5454}
5455
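/// Builds a permalink into the upstream repository for a file inside a crate's
/// registry source checkout, using the `.cargo_vcs_info.json` and `Cargo.toml`
/// that cargo includes in published packages.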
5456fn get_permalink_in_rust_registry_src(
5457 provider_registry: Arc<GitHostingProviderRegistry>,
5458 path: PathBuf,
5459 selection: Range<u32>,
5460) -> Result<url::Url> {
5461 #[derive(Deserialize)]
5462 struct CargoVcsGit {
5463 sha1: String,
5464 }
5465
5466 #[derive(Deserialize)]
5467 struct CargoVcsInfo {
5468 git: CargoVcsGit,
5469 path_in_vcs: String,
5470 }
5471
5472 #[derive(Deserialize)]
5473 struct CargoPackage {
5474 repository: String,
5475 }
5476
5477 #[derive(Deserialize)]
5478 struct CargoToml {
5479 package: CargoPackage,
5480 }
5481
5482 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5483 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5484 Some((dir, json))
5485 }) else {
5486 bail!("No .cargo_vcs_info.json found in parent directories")
5487 };
5488 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5489 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5490 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5491 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5492 .context("parsing package.repository field of manifest")?;
5493 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5494 let permalink = provider.build_permalink(
5495 remote,
5496 BuildPermalinkParams::new(
5497 &cargo_vcs_info.git.sha1,
5498 &RepoPath::from_rel_path(
5499 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5500 ),
5501 Some(selection),
5502 ),
5503 );
5504 Ok(permalink)
5505}
5506
5507fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5508 let Some(blame) = blame else {
5509 return proto::BlameBufferResponse {
5510 blame_response: None,
5511 };
5512 };
5513
5514 let entries = blame
5515 .entries
5516 .into_iter()
5517 .map(|entry| proto::BlameEntry {
5518 sha: entry.sha.as_bytes().into(),
5519 start_line: entry.range.start,
5520 end_line: entry.range.end,
5521 original_line_number: entry.original_line_number,
5522 author: entry.author,
5523 author_mail: entry.author_mail,
5524 author_time: entry.author_time,
5525 author_tz: entry.author_tz,
5526 committer: entry.committer_name,
5527 committer_mail: entry.committer_email,
5528 committer_time: entry.committer_time,
5529 committer_tz: entry.committer_tz,
5530 summary: entry.summary,
5531 previous: entry.previous,
5532 filename: entry.filename,
5533 })
5534 .collect::<Vec<_>>();
5535
5536 let messages = blame
5537 .messages
5538 .into_iter()
5539 .map(|(oid, message)| proto::CommitMessage {
5540 oid: oid.as_bytes().into(),
5541 message,
5542 })
5543 .collect::<Vec<_>>();
5544
5545 proto::BlameBufferResponse {
5546 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5547 entries,
5548 messages,
5549 remote_url: blame.remote_url,
5550 }),
5551 }
5552}
5553
5554fn deserialize_blame_buffer_response(
5555 response: proto::BlameBufferResponse,
5556) -> Option<git::blame::Blame> {
5557 let response = response.blame_response?;
5558 let entries = response
5559 .entries
5560 .into_iter()
5561 .filter_map(|entry| {
5562 Some(git::blame::BlameEntry {
5563 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5564 range: entry.start_line..entry.end_line,
5565 original_line_number: entry.original_line_number,
5566 committer_name: entry.committer,
5567 committer_time: entry.committer_time,
5568 committer_tz: entry.committer_tz,
5569 committer_email: entry.committer_mail,
5570 author: entry.author,
5571 author_mail: entry.author_mail,
5572 author_time: entry.author_time,
5573 author_tz: entry.author_tz,
5574 summary: entry.summary,
5575 previous: entry.previous,
5576 filename: entry.filename,
5577 })
5578 })
5579 .collect::<Vec<_>>();
5580
5581 let messages = response
5582 .messages
5583 .into_iter()
5584 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5585 .collect::<HashMap<_, _>>();
5586
5587 Some(Blame {
5588 entries,
5589 messages,
5590 remote_url: response.remote_url,
5591 })
5592}
5593
5594fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5595 proto::Branch {
5596 is_head: branch.is_head,
5597 ref_name: branch.ref_name.to_string(),
5598 unix_timestamp: branch
5599 .most_recent_commit
5600 .as_ref()
5601 .map(|commit| commit.commit_timestamp as u64),
5602 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5603 ref_name: upstream.ref_name.to_string(),
5604 tracking: upstream
5605 .tracking
5606 .status()
5607 .map(|upstream| proto::UpstreamTracking {
5608 ahead: upstream.ahead as u64,
5609 behind: upstream.behind as u64,
5610 }),
5611 }),
5612 most_recent_commit: branch
5613 .most_recent_commit
5614 .as_ref()
5615 .map(|commit| proto::CommitSummary {
5616 sha: commit.sha.to_string(),
5617 subject: commit.subject.to_string(),
5618 commit_timestamp: commit.commit_timestamp,
5619 author_name: commit.author_name.to_string(),
5620 }),
5621 }
5622}
5623
5624fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5625 proto::Worktree {
5626 path: worktree.path.to_string_lossy().to_string(),
5627 ref_name: worktree.ref_name.to_string(),
5628 sha: worktree.sha.to_string(),
5629 }
5630}
5631
5632fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5633 git::repository::Worktree {
5634 path: PathBuf::from(proto.path.clone()),
5635 ref_name: proto.ref_name.clone().into(),
5636 sha: proto.sha.clone().into(),
5637 }
5638}
5639
5640fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5641 git::repository::Branch {
5642 is_head: proto.is_head,
5643 ref_name: proto.ref_name.clone().into(),
5644 upstream: proto
5645 .upstream
5646 .as_ref()
5647 .map(|upstream| git::repository::Upstream {
5648 ref_name: upstream.ref_name.to_string().into(),
5649 tracking: upstream
5650 .tracking
5651 .as_ref()
5652 .map(|tracking| {
5653 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5654 ahead: tracking.ahead as u32,
5655 behind: tracking.behind as u32,
5656 })
5657 })
5658 .unwrap_or(git::repository::UpstreamTracking::Gone),
5659 }),
5660 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5661 git::repository::CommitSummary {
5662 sha: commit.sha.to_string().into(),
5663 subject: commit.subject.to_string().into(),
5664 commit_timestamp: commit.commit_timestamp,
5665 author_name: commit.author_name.to_string().into(),
5666 has_parent: true,
5667 }
5668 }),
5669 }
5670}
5671
5672fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5673 proto::GitCommitDetails {
5674 sha: commit.sha.to_string(),
5675 message: commit.message.to_string(),
5676 commit_timestamp: commit.commit_timestamp,
5677 author_email: commit.author_email.to_string(),
5678 author_name: commit.author_name.to_string(),
5679 }
5680}
5681
5682fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5683 CommitDetails {
5684 sha: proto.sha.clone().into(),
5685 message: proto.message.clone().into(),
5686 commit_timestamp: proto.commit_timestamp,
5687 author_email: proto.author_email.clone().into(),
5688 author_name: proto.author_name.clone().into(),
5689 }
5690}
5691
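/// Queries the backend for branches, statuses, stash entries, and merge
/// details, and builds a new `RepositorySnapshot` along with the
/// `RepositoryEvent`s describing how it differs from `prev_snapshot`.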
5692async fn compute_snapshot(
5693 id: RepositoryId,
5694 work_directory_abs_path: Arc<Path>,
5695 prev_snapshot: RepositorySnapshot,
5696 backend: Arc<dyn GitRepository>,
5697) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5698 let mut events = Vec::new();
5699 let branches = backend.branches().await?;
5700 let branch = branches.into_iter().find(|branch| branch.is_head);
5701 let statuses = backend
5702 .status(&[RepoPath::from_rel_path(
5703 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5704 )])
5705 .await?;
5706 let stash_entries = backend.stash_entries().await?;
5707 let statuses_by_path = SumTree::from_iter(
5708 statuses
5709 .entries
5710 .iter()
5711 .map(|(repo_path, status)| StatusEntry {
5712 repo_path: repo_path.clone(),
5713 status: *status,
5714 }),
5715 (),
5716 );
5717 let (merge_details, merge_heads_changed) =
5718 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5719 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5720
5721 let pending_ops_by_path = SumTree::from_iter(
5722 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5723 let inner_ops: Vec<PendingOp> =
5724 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5725 if inner_ops.is_empty() {
5726 None
5727 } else {
5728 Some(PendingOps {
5729 repo_path: ops.repo_path.clone(),
5730 ops: inner_ops,
5731 })
5732 }
5733 }),
5734 (),
5735 );
5736
5737 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5738 events.push(RepositoryEvent::PendingOpsChanged {
5739 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5740 })
5741 }
5742
5743 if merge_heads_changed {
5744 events.push(RepositoryEvent::MergeHeadsChanged);
5745 }
5746
5747 if statuses_by_path != prev_snapshot.statuses_by_path {
5748 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5749 }
5750
5751    // Useful when `branch` is `None`, e.g. in a detached HEAD state
5752 let head_commit = match backend.head_sha().await {
5753 Some(head_sha) => backend.show(head_sha).await.log_err(),
5754 None => None,
5755 };
5756
5757 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5758 events.push(RepositoryEvent::BranchChanged);
5759 }
5760
5761 // Used by edit prediction data collection
5762 let remote_origin_url = backend.remote_url("origin");
5763 let remote_upstream_url = backend.remote_url("upstream");
5764
5765 let snapshot = RepositorySnapshot {
5766 id,
5767 statuses_by_path,
5768 pending_ops_by_path,
5769 work_directory_abs_path,
5770 path_style: prev_snapshot.path_style,
5771 scan_id: prev_snapshot.scan_id + 1,
5772 branch,
5773 head_commit,
5774 merge: merge_details,
5775 remote_origin_url,
5776 remote_upstream_url,
5777 stash_entries,
5778 };
5779
5780 Ok((snapshot, events))
5781}
5782
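/// Decodes a file's git status from its RPC representation, falling back to
/// the coarse `simple_status` code when no detailed variant is present.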
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

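/// Encodes a file's git status into its RPC representation.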
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

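/// Maps an [`UnmergedStatusCode`] to the corresponding `proto::GitStatus` value.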
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

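/// Maps a [`StatusCode`] to the corresponding `proto::GitStatus` value.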
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}