1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
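/// Tracks the git repositories associated with a project's worktrees, along with
/// per-buffer diff and conflict state, and synchronizes that state with remote
/// collaborators when the project is shared.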
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
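/// Git-related state for a single buffer: weak handles to its unstaged and
/// uncommitted diffs and its conflict set, plus the bookkeeping needed to
/// recalculate them when the buffer, index, or HEAD text changes.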
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
/// text to disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
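/// Describes a change to a buffer's diff bases: the index text, the HEAD text,
/// each independently, or both set to the same text.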
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
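/// The kind of diff tracked for a buffer: unstaged (against the index) or
/// uncommitted (against HEAD).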
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
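/// Distinguishes a store backed by local repositories on disk from one that
/// proxies an upstream project, and tracks any downstream client the store is
/// currently shared with.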
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
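/// A checkpoint of the state of every repository in the store, keyed by each
/// repository's working directory path.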
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
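/// Details of an in-progress merge, including the set of conflicted paths and
/// the merge heads.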
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
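/// An immutable view of a repository's state: path statuses, current branch,
/// head commit, merge state, remote URLs, and stash entries.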
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
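/// A single git repository tracked by the `GitStore`. Work is performed by
/// sending jobs over `job_sender`, and the latest known state is cached in
/// `snapshot`.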
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
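/// Where a repository's git operations are executed: directly against a local
/// repository, or forwarded to an upstream project over RPC.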
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
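/// Events emitted by a `Repository` when parts of its snapshot or its pending
/// operations change.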
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
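/// Events emitted by the `GitStore`, covering repository lifecycle, active
/// repository changes, index write errors, job updates, and conflict updates.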
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
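    /// Begins sharing this store's repositories with a downstream client,
    /// sending an initial update for each repository and streaming subsequent
    /// changes.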
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
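    /// Returns a diff of the buffer's contents against its text in the git
    /// index, creating and caching the diff if it doesn't already exist.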
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
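    /// Builds a diff of the buffer's contents against the blob identified by
    /// `oid` (or with no base text when `oid` is `None`), attaching the
    /// unstaged diff as its secondary diff.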
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
700 let content = match oid {
701 None => None,
702 Some(oid) => Some(
703 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
704 .await?,
705 ),
706 };
707 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
708
709 buffer_diff
710 .update(cx, |buffer_diff, cx| {
711 buffer_diff.set_base_text(
712 content.map(|s| s.as_str().into()),
713 buffer_snapshot.language().cloned(),
714 buffer_snapshot.text,
715 cx,
716 )
717 })?
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 })?;
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
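    /// Returns a diff of the buffer's contents against its text at HEAD,
    /// creating and caching the diff if it doesn't already exist.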
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language_changed = true;
825 diff_state.language = language;
826 diff_state.language_registry = language_registry;
827
828 match kind {
829 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
830 DiffKind::Uncommitted => {
831 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
832 diff
833 } else {
834 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
835 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
836 unstaged_diff
837 };
838
839 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
840 diff_state.uncommitted_diff = Some(diff.downgrade())
841 }
842 }
843
844 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
845 let rx = diff_state.wait_for_recalculation();
846
847 anyhow::Ok(async move {
848 if let Some(rx) = rx {
849 rx.await;
850 }
851 Ok(diff)
852 })
853 })
854 })??
855 .await
856 }
857
858 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
859 let diff_state = self.diffs.get(&buffer_id)?;
860 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
861 }
862
863 pub fn get_uncommitted_diff(
864 &self,
865 buffer_id: BufferId,
866 cx: &App,
867 ) -> Option<Entity<BufferDiff>> {
868 let diff_state = self.diffs.get(&buffer_id)?;
869 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
870 }
871
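    /// Returns the conflict set for the given buffer, creating it if necessary,
    /// and schedules a reparse of the buffer's conflict markers.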
872 pub fn open_conflict_set(
873 &mut self,
874 buffer: Entity<Buffer>,
875 cx: &mut Context<Self>,
876 ) -> Entity<ConflictSet> {
877 log::debug!("open conflict set");
878 let buffer_id = buffer.read(cx).remote_id();
879
880 if let Some(git_state) = self.diffs.get(&buffer_id)
881 && let Some(conflict_set) = git_state
882 .read(cx)
883 .conflict_set
884 .as_ref()
885 .and_then(|weak| weak.upgrade())
886 {
887 let conflict_set = conflict_set;
888 let buffer_snapshot = buffer.read(cx).text_snapshot();
889
890 git_state.update(cx, |state, cx| {
891 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
892 });
893
894 return conflict_set;
895 }
896
897 let is_unmerged = self
898 .repository_and_path_for_buffer_id(buffer_id, cx)
899 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
900 let git_store = cx.weak_entity();
901 let buffer_git_state = self
902 .diffs
903 .entry(buffer_id)
904 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
905 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
906
907 self._subscriptions
908 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
909 cx.emit(GitStoreEvent::ConflictsUpdated);
910 }));
911
912 buffer_git_state.update(cx, |state, cx| {
913 state.conflict_set = Some(conflict_set.downgrade());
914 let buffer_snapshot = buffer.read(cx).text_snapshot();
915 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
916 });
917
918 conflict_set
919 }
920
921 pub fn project_path_git_status(
922 &self,
923 project_path: &ProjectPath,
924 cx: &App,
925 ) -> Option<FileStatus> {
926 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
927 Some(repo.read(cx).status_for_path(&repo_path)?.status)
928 }
929
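    /// Captures a checkpoint of every repository in the store.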
930 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
931 let mut work_directory_abs_paths = Vec::new();
932 let mut checkpoints = Vec::new();
933 for repository in self.repositories.values() {
934 repository.update(cx, |repository, _| {
935 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
936 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
937 });
938 }
939
940 cx.background_executor().spawn(async move {
941 let checkpoints = future::try_join_all(checkpoints).await?;
942 Ok(GitStoreCheckpoint {
943 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
944 .into_iter()
945 .zip(checkpoints)
946 .collect(),
947 })
948 })
949 }
950
951 pub fn restore_checkpoint(
952 &self,
953 checkpoint: GitStoreCheckpoint,
954 cx: &mut App,
955 ) -> Task<Result<()>> {
956 let repositories_by_work_dir_abs_path = self
957 .repositories
958 .values()
959 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
960 .collect::<HashMap<_, _>>();
961
962 let mut tasks = Vec::new();
963 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
964 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
965 let restore = repository.update(cx, |repository, _| {
966 repository.restore_checkpoint(checkpoint)
967 });
968 tasks.push(async move { restore.await? });
969 }
970 }
971 cx.background_spawn(async move {
972 future::try_join_all(tasks).await?;
973 Ok(())
974 })
975 }
976
977 /// Compares two checkpoints, returning true if they are equal.
978 pub fn compare_checkpoints(
979 &self,
980 left: GitStoreCheckpoint,
981 mut right: GitStoreCheckpoint,
982 cx: &mut App,
983 ) -> Task<Result<bool>> {
984 let repositories_by_work_dir_abs_path = self
985 .repositories
986 .values()
987 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
988 .collect::<HashMap<_, _>>();
989
990 let mut tasks = Vec::new();
991 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
992 if let Some(right_checkpoint) = right
993 .checkpoints_by_work_dir_abs_path
994 .remove(&work_dir_abs_path)
995 {
996 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
997 {
998 let compare = repository.update(cx, |repository, _| {
999 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1000 });
1001
1002 tasks.push(async move { compare.await? });
1003 }
1004 } else {
1005 return Task::ready(Ok(false));
1006 }
1007 }
1008 cx.background_spawn(async move {
1009 Ok(future::try_join_all(tasks)
1010 .await?
1011 .into_iter()
1012 .all(|result| result))
1013 })
1014 }
1015
1016 /// Blames a buffer.
1017 pub fn blame_buffer(
1018 &self,
1019 buffer: &Entity<Buffer>,
1020 version: Option<clock::Global>,
1021 cx: &mut Context<Self>,
1022 ) -> Task<Result<Option<Blame>>> {
1023 let buffer = buffer.read(cx);
1024 let Some((repo, repo_path)) =
1025 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1026 else {
1027 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1028 };
1029 let content = match &version {
1030 Some(version) => buffer.rope_for_version(version),
1031 None => buffer.as_rope().clone(),
1032 };
1033 let line_ending = buffer.line_ending();
1034 let version = version.unwrap_or(buffer.version());
1035 let buffer_id = buffer.remote_id();
1036
1037 let repo = repo.downgrade();
1038 cx.spawn(async move |_, cx| {
1039 let repository_state = repo
1040 .update(cx, |repo, _| repo.repository_state.clone())?
1041 .await
1042 .map_err(|err| anyhow::anyhow!(err))?;
1043 match repository_state {
1044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1045 .blame(repo_path.clone(), content, line_ending)
1046 .await
1047 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1048 .map(Some),
1049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1050 let response = client
1051 .request(proto::BlameBuffer {
1052 project_id: project_id.to_proto(),
1053 buffer_id: buffer_id.into(),
1054 version: serialize_version(&version),
1055 })
1056 .await?;
1057 Ok(deserialize_blame_buffer_response(response))
1058 }
1059 }
1060 })
1061 }
1062
1063 pub fn file_history(
1064 &self,
1065 repo: &Entity<Repository>,
1066 path: RepoPath,
1067 cx: &mut App,
1068 ) -> Task<Result<git::repository::FileHistory>> {
1069 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1070
1071 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1072 }
1073
1074 pub fn file_history_paginated(
1075 &self,
1076 repo: &Entity<Repository>,
1077 path: RepoPath,
1078 skip: usize,
1079 limit: Option<usize>,
1080 cx: &mut App,
1081 ) -> Task<Result<git::repository::FileHistory>> {
1082 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1083
1084 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1085 }
1086
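    /// Builds a permalink URL to the given line range of a buffer on its git
    /// hosting provider, falling back to Cargo registry metadata for Rust
    /// sources that are outside of any repository.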
1087 pub fn get_permalink_to_line(
1088 &self,
1089 buffer: &Entity<Buffer>,
1090 selection: Range<u32>,
1091 cx: &mut App,
1092 ) -> Task<Result<url::Url>> {
1093 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1094 return Task::ready(Err(anyhow!("buffer has no file")));
1095 };
1096
1097 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1098 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1099 cx,
1100 ) else {
1101 // If we're not in a Git repo, check whether this is a Rust source
1102 // file in the Cargo registry (presumably opened with go-to-definition
1103 // from a normal Rust file). If so, we can put together a permalink
1104 // using crate metadata.
1105 if buffer
1106 .read(cx)
1107 .language()
1108 .is_none_or(|lang| lang.name() != "Rust".into())
1109 {
1110 return Task::ready(Err(anyhow!("no permalink available")));
1111 }
1112 let file_path = file.worktree.read(cx).absolutize(&file.path);
1113 return cx.spawn(async move |cx| {
1114 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1115 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1116 .context("no permalink available")
1117 });
1118 };
1119
1120 let buffer_id = buffer.read(cx).remote_id();
1121 let branch = repo.read(cx).branch.clone();
1122 let remote = branch
1123 .as_ref()
1124 .and_then(|b| b.upstream.as_ref())
1125 .and_then(|b| b.remote_name())
1126 .unwrap_or("origin")
1127 .to_string();
1128
1129 let rx = repo.update(cx, |repo, _| {
1130 repo.send_job(None, move |state, cx| async move {
1131 match state {
1132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1133 let origin_url = backend
1134 .remote_url(&remote)
1135 .await
1136 .with_context(|| format!("remote \"{remote}\" not found"))?;
1137
1138 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1139
1140 let provider_registry =
1141 cx.update(GitHostingProviderRegistry::default_global)?;
1142
1143 let (provider, remote) =
1144 parse_git_remote_url(provider_registry, &origin_url)
1145 .context("parsing Git remote URL")?;
1146
1147 Ok(provider.build_permalink(
1148 remote,
1149 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1150 ))
1151 }
1152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1153 let response = client
1154 .request(proto::GetPermalinkToLine {
1155 project_id: project_id.to_proto(),
1156 buffer_id: buffer_id.into(),
1157 selection: Some(proto::Range {
1158 start: selection.start as u64,
1159 end: selection.end as u64,
1160 }),
1161 })
1162 .await?;
1163
1164 url::Url::parse(&response.permalink).context("failed to parse permalink")
1165 }
1166 }
1167 })
1168 });
1169 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1170 }
1171
1172 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1173 match &self.state {
1174 GitStoreState::Local {
1175 downstream: downstream_client,
1176 ..
1177 } => downstream_client
1178 .as_ref()
1179 .map(|state| (state.client.clone(), state.project_id)),
1180 GitStoreState::Remote {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client.clone(),
1184 }
1185 }
1186
1187 fn upstream_client(&self) -> Option<AnyProtoClient> {
1188 match &self.state {
1189 GitStoreState::Local { .. } => None,
1190 GitStoreState::Remote {
1191 upstream_client, ..
1192 } => Some(upstream_client.clone()),
1193 }
1194 }
1195
1196 fn on_worktree_store_event(
1197 &mut self,
1198 worktree_store: Entity<WorktreeStore>,
1199 event: &WorktreeStoreEvent,
1200 cx: &mut Context<Self>,
1201 ) {
1202 let GitStoreState::Local {
1203 project_environment,
1204 downstream,
1205 next_repository_id,
1206 fs,
1207 } = &self.state
1208 else {
1209 return;
1210 };
1211
1212 match event {
1213 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1214 if let Some(worktree) = self
1215 .worktree_store
1216 .read(cx)
1217 .worktree_for_id(*worktree_id, cx)
1218 {
1219 let paths_by_git_repo =
1220 self.process_updated_entries(&worktree, updated_entries, cx);
1221 let downstream = downstream
1222 .as_ref()
1223 .map(|downstream| downstream.updates_tx.clone());
1224 cx.spawn(async move |_, cx| {
1225 let paths_by_git_repo = paths_by_git_repo.await;
1226 for (repo, paths) in paths_by_git_repo {
1227 repo.update(cx, |repo, cx| {
1228 repo.paths_changed(paths, downstream.clone(), cx);
1229 })
1230 .ok();
1231 }
1232 })
1233 .detach();
1234 }
1235 }
1236 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1237 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1238 else {
1239 return;
1240 };
1241 if !worktree.read(cx).is_visible() {
1242 log::debug!(
1243 "not adding repositories for local worktree {:?} because it's not visible",
1244 worktree.read(cx).abs_path()
1245 );
1246 return;
1247 }
1248 self.update_repositories_from_worktree(
1249 *worktree_id,
1250 project_environment.clone(),
1251 next_repository_id.clone(),
1252 downstream
1253 .as_ref()
1254 .map(|downstream| downstream.updates_tx.clone()),
1255 changed_repos.clone(),
1256 fs.clone(),
1257 cx,
1258 );
1259 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1260 }
1261 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1262 let repos_without_worktree: Vec<RepositoryId> = self
1263 .worktree_ids
1264 .iter_mut()
1265 .filter_map(|(repo_id, worktree_ids)| {
1266 worktree_ids.remove(worktree_id);
1267 if worktree_ids.is_empty() {
1268 Some(*repo_id)
1269 } else {
1270 None
1271 }
1272 })
1273 .collect();
1274 let is_active_repo_removed = repos_without_worktree
1275 .iter()
1276 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1277
1278 for repo_id in repos_without_worktree {
1279 self.repositories.remove(&repo_id);
1280 self.worktree_ids.remove(&repo_id);
1281 if let Some(updates_tx) =
1282 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1283 {
1284 updates_tx
1285 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1286 .ok();
1287 }
1288 }
1289
1290 if is_active_repo_removed {
1291 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1292 self.active_repo_id = Some(repo_id);
1293 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1294 } else {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 }
1299 }
1300 _ => {}
1301 }
1302 }
1303 fn on_repository_event(
1304 &mut self,
1305 repo: Entity<Repository>,
1306 event: &RepositoryEvent,
1307 cx: &mut Context<Self>,
1308 ) {
1309 let id = repo.read(cx).id;
1310 let repo_snapshot = repo.read(cx).snapshot.clone();
1311 for (buffer_id, diff) in self.diffs.iter() {
1312 if let Some((buffer_repo, repo_path)) =
1313 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1314 && buffer_repo == repo
1315 {
1316 diff.update(cx, |diff, cx| {
1317 if let Some(conflict_set) = &diff.conflict_set {
1318 let conflict_status_changed =
1319 conflict_set.update(cx, |conflict_set, cx| {
1320 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1321 conflict_set.set_has_conflict(has_conflict, cx)
1322 })?;
1323 if conflict_status_changed {
1324 let buffer_store = self.buffer_store.read(cx);
1325 if let Some(buffer) = buffer_store.get(*buffer_id) {
1326 let _ = diff
1327 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1328 }
1329 }
1330 }
1331 anyhow::Ok(())
1332 })
1333 .ok();
1334 }
1335 }
1336 cx.emit(GitStoreEvent::RepositoryUpdated(
1337 id,
1338 event.clone(),
1339 self.active_repo_id == Some(id),
1340 ))
1341 }
1342
1343 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1344 cx.emit(GitStoreEvent::JobsUpdated)
1345 }
1346
/// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1348 fn update_repositories_from_worktree(
1349 &mut self,
1350 worktree_id: WorktreeId,
1351 project_environment: Entity<ProjectEnvironment>,
1352 next_repository_id: Arc<AtomicU64>,
1353 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1354 updated_git_repositories: UpdatedGitRepositoriesSet,
1355 fs: Arc<dyn Fs>,
1356 cx: &mut Context<Self>,
1357 ) {
1358 let mut removed_ids = Vec::new();
1359 for update in updated_git_repositories.iter() {
1360 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1361 let existing_work_directory_abs_path =
1362 repo.read(cx).work_directory_abs_path.clone();
1363 Some(&existing_work_directory_abs_path)
1364 == update.old_work_directory_abs_path.as_ref()
1365 || Some(&existing_work_directory_abs_path)
1366 == update.new_work_directory_abs_path.as_ref()
1367 }) {
1368 let repo_id = *id;
1369 if let Some(new_work_directory_abs_path) =
1370 update.new_work_directory_abs_path.clone()
1371 {
1372 self.worktree_ids
1373 .entry(repo_id)
1374 .or_insert_with(HashSet::new)
1375 .insert(worktree_id);
1376 existing.update(cx, |existing, cx| {
1377 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1378 existing.schedule_scan(updates_tx.clone(), cx);
1379 });
1380 } else {
1381 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1382 worktree_ids.remove(&worktree_id);
1383 if worktree_ids.is_empty() {
1384 removed_ids.push(repo_id);
1385 }
1386 }
1387 }
1388 } else if let UpdatedGitRepository {
1389 new_work_directory_abs_path: Some(work_directory_abs_path),
1390 dot_git_abs_path: Some(dot_git_abs_path),
1391 repository_dir_abs_path: Some(_repository_dir_abs_path),
1392 common_dir_abs_path: Some(_common_dir_abs_path),
1393 ..
1394 } = update
1395 {
1396 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1397 let git_store = cx.weak_entity();
1398 let repo = cx.new(|cx| {
1399 let mut repo = Repository::local(
1400 id,
1401 work_directory_abs_path.clone(),
1402 dot_git_abs_path.clone(),
1403 project_environment.downgrade(),
1404 fs.clone(),
1405 git_store,
1406 cx,
1407 );
1408 if let Some(updates_tx) = updates_tx.as_ref() {
1409 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1410 updates_tx
1411 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1412 .ok();
1413 }
1414 repo.schedule_scan(updates_tx.clone(), cx);
1415 repo
1416 });
1417 self._subscriptions
1418 .push(cx.subscribe(&repo, Self::on_repository_event));
1419 self._subscriptions
1420 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1421 self.repositories.insert(id, repo);
1422 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1423 cx.emit(GitStoreEvent::RepositoryAdded);
1424 self.active_repo_id.get_or_insert_with(|| {
1425 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1426 id
1427 });
1428 }
1429 }
1430
1431 for id in removed_ids {
1432 if self.active_repo_id == Some(id) {
1433 self.active_repo_id = None;
1434 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1435 }
1436 self.repositories.remove(&id);
1437 if let Some(updates_tx) = updates_tx.as_ref() {
1438 updates_tx
1439 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1440 .ok();
1441 }
1442 }
1443 }
1444
1445 fn on_buffer_store_event(
1446 &mut self,
1447 _: Entity<BufferStore>,
1448 event: &BufferStoreEvent,
1449 cx: &mut Context<Self>,
1450 ) {
1451 match event {
1452 BufferStoreEvent::BufferAdded(buffer) => {
1453 cx.subscribe(buffer, |this, buffer, event, cx| {
1454 if let BufferEvent::LanguageChanged(_) = event {
1455 let buffer_id = buffer.read(cx).remote_id();
1456 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1457 diff_state.update(cx, |diff_state, cx| {
1458 diff_state.buffer_language_changed(buffer, cx);
1459 });
1460 }
1461 }
1462 })
1463 .detach();
1464 }
1465 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1466 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1467 diffs.remove(buffer_id);
1468 }
1469 }
1470 BufferStoreEvent::BufferDropped(buffer_id) => {
1471 self.diffs.remove(buffer_id);
1472 for diffs in self.shared_diffs.values_mut() {
1473 diffs.remove(buffer_id);
1474 }
1475 }
1476 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1477 // Whenever a buffer's file path changes, it's possible that the
1478 // new path is actually a path that is being tracked by a git
1479 // repository. In that case, we'll want to update the buffer's
// `BufferGitState`, in case it already has one.
1481 let buffer_id = buffer.read(cx).remote_id();
1482 let diff_state = self.diffs.get(&buffer_id);
1483 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1484
1485 if let Some(diff_state) = diff_state
1486 && let Some((repo, repo_path)) = repo
1487 {
1488 let buffer = buffer.clone();
1489 let diff_state = diff_state.clone();
1490
1491 cx.spawn(async move |_git_store, cx| {
1492 async {
1493 let diff_bases_change = repo
1494 .update(cx, |repo, cx| {
1495 repo.load_committed_text(buffer_id, repo_path, cx)
1496 })?
1497 .await?;
1498
1499 diff_state.update(cx, |diff_state, cx| {
1500 let buffer_snapshot = buffer.read(cx).text_snapshot();
1501 diff_state.diff_bases_changed(
1502 buffer_snapshot,
1503 Some(diff_bases_change),
1504 cx,
1505 );
1506 })
1507 }
1508 .await
1509 .log_err();
1510 })
1511 .detach();
1512 }
1513 }
1514 _ => {}
1515 }
1516 }
1517
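    /// Recalculates the diffs and reparses the conflict markers for the given
    /// buffers, returning a future that resolves once all recalculations finish.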
1518 pub fn recalculate_buffer_diffs(
1519 &mut self,
1520 buffers: Vec<Entity<Buffer>>,
1521 cx: &mut Context<Self>,
1522 ) -> impl Future<Output = ()> + use<> {
1523 let mut futures = Vec::new();
1524 for buffer in buffers {
1525 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1526 let buffer = buffer.read(cx).text_snapshot();
1527 diff_state.update(cx, |diff_state, cx| {
1528 diff_state.recalculate_diffs(buffer.clone(), cx);
1529 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1530 });
1531 futures.push(diff_state.update(cx, |diff_state, cx| {
1532 diff_state
1533 .reparse_conflict_markers(buffer, cx)
1534 .map(|_| {})
1535 .boxed()
1536 }));
1537 }
1538 }
1539 async move {
1540 futures::future::join_all(futures).await;
1541 }
1542 }
1543
1544 fn on_buffer_diff_event(
1545 &mut self,
1546 diff: Entity<buffer_diff::BufferDiff>,
1547 event: &BufferDiffEvent,
1548 cx: &mut Context<Self>,
1549 ) {
1550 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1551 let buffer_id = diff.read(cx).buffer_id;
1552 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1553 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1554 diff_state.hunk_staging_operation_count += 1;
1555 diff_state.hunk_staging_operation_count
1556 });
1557 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1558 let recv = repo.update(cx, |repo, cx| {
1559 log::debug!("hunks changed for {}", path.as_unix_str());
1560 repo.spawn_set_index_text_job(
1561 path,
1562 new_index_text.as_ref().map(|rope| rope.to_string()),
1563 Some(hunk_staging_operation_count),
1564 cx,
1565 )
1566 });
1567 let diff = diff.downgrade();
1568 cx.spawn(async move |this, cx| {
1569 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1570 diff.update(cx, |diff, cx| {
1571 diff.clear_pending_hunks(cx);
1572 })
1573 .ok();
1574 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1575 .ok();
1576 }
1577 })
1578 .detach();
1579 }
1580 }
1581 }
1582 }
1583
1584 fn local_worktree_git_repos_changed(
1585 &mut self,
1586 worktree: Entity<Worktree>,
1587 changed_repos: &UpdatedGitRepositoriesSet,
1588 cx: &mut Context<Self>,
1589 ) {
1590 log::debug!("local worktree repos changed");
1591 debug_assert!(worktree.read(cx).is_local());
1592
1593 for repository in self.repositories.values() {
1594 repository.update(cx, |repository, cx| {
1595 let repo_abs_path = &repository.work_directory_abs_path;
1596 if changed_repos.iter().any(|update| {
1597 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1598 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 }) {
1600 repository.reload_buffer_diff_bases(cx);
1601 }
1602 });
1603 }
1604 }
1605
1606 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1607 &self.repositories
1608 }
1609
1610 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1611 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1612 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1613 Some(status.status)
1614 }
1615
1616 pub fn repository_and_path_for_buffer_id(
1617 &self,
1618 buffer_id: BufferId,
1619 cx: &App,
1620 ) -> Option<(Entity<Repository>, RepoPath)> {
1621 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1622 let project_path = buffer.read(cx).project_path(cx)?;
1623 self.repository_and_path_for_project_path(&project_path, cx)
1624 }
1625
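    /// Finds the repository whose working directory contains the given project
    /// path (preferring the innermost repository when repositories are nested),
    /// along with the corresponding repository-relative path.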
1626 pub fn repository_and_path_for_project_path(
1627 &self,
1628 path: &ProjectPath,
1629 cx: &App,
1630 ) -> Option<(Entity<Repository>, RepoPath)> {
1631 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1632 self.repositories
1633 .values()
1634 .filter_map(|repo| {
1635 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1636 Some((repo.clone(), repo_path))
1637 })
1638 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1639 }
1640
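    /// Initializes a new git repository at the given path with the given
    /// fallback branch name, either locally or by forwarding the request
    /// upstream.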
1641 pub fn git_init(
1642 &self,
1643 path: Arc<Path>,
1644 fallback_branch_name: String,
1645 cx: &App,
1646 ) -> Task<Result<()>> {
1647 match &self.state {
1648 GitStoreState::Local { fs, .. } => {
1649 let fs = fs.clone();
1650 cx.background_executor()
1651 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1652 }
1653 GitStoreState::Remote {
1654 upstream_client,
1655 upstream_project_id: project_id,
1656 ..
1657 } => {
1658 let client = upstream_client.clone();
1659 let project_id = *project_id;
1660 cx.background_executor().spawn(async move {
1661 client
1662 .request(proto::GitInit {
1663 project_id: project_id,
1664 abs_path: path.to_string_lossy().into_owned(),
1665 fallback_branch_name,
1666 })
1667 .await?;
1668 Ok(())
1669 })
1670 }
1671 }
1672 }
1673
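    /// Clones the given remote repository into `path`, either locally or by
    /// forwarding the request upstream; not supported for collab guests.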
1674 pub fn git_clone(
1675 &self,
1676 repo: String,
1677 path: impl Into<Arc<std::path::Path>>,
1678 cx: &App,
1679 ) -> Task<Result<()>> {
1680 let path = path.into();
1681 match &self.state {
1682 GitStoreState::Local { fs, .. } => {
1683 let fs = fs.clone();
1684 cx.background_executor()
1685 .spawn(async move { fs.git_clone(&repo, &path).await })
1686 }
1687 GitStoreState::Remote {
1688 upstream_client,
1689 upstream_project_id,
1690 ..
1691 } => {
1692 if upstream_client.is_via_collab() {
1693 return Task::ready(Err(anyhow!(
1694 "Git Clone isn't supported for project guests"
1695 )));
1696 }
1697 let request = upstream_client.request(proto::GitClone {
1698 project_id: *upstream_project_id,
1699 abs_path: path.to_string_lossy().into_owned(),
1700 remote_repo: repo,
1701 });
1702
1703 cx.background_spawn(async move {
1704 let result = request.await?;
1705
1706 match result.success {
1707 true => Ok(()),
1708 false => Err(anyhow!("Git Clone failed")),
1709 }
1710 })
1711 }
1712 }
1713 }
1714
1715 async fn handle_update_repository(
1716 this: Entity<Self>,
1717 envelope: TypedEnvelope<proto::UpdateRepository>,
1718 mut cx: AsyncApp,
1719 ) -> Result<()> {
1720 this.update(&mut cx, |this, cx| {
1721 let path_style = this.worktree_store.read(cx).path_style();
1722 let mut update = envelope.payload;
1723
1724 let id = RepositoryId::from_proto(update.id);
1725 let client = this.upstream_client().context("no upstream client")?;
1726
1727 let mut repo_subscription = None;
1728 let repo = this.repositories.entry(id).or_insert_with(|| {
1729 let git_store = cx.weak_entity();
1730 let repo = cx.new(|cx| {
1731 Repository::remote(
1732 id,
1733 Path::new(&update.abs_path).into(),
1734 path_style,
1735 ProjectId(update.project_id),
1736 client,
1737 git_store,
1738 cx,
1739 )
1740 });
1741 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1742 cx.emit(GitStoreEvent::RepositoryAdded);
1743 repo
1744 });
1745 this._subscriptions.extend(repo_subscription);
1746
1747 repo.update(cx, {
1748 let update = update.clone();
1749 |repo, cx| repo.apply_remote_update(update, cx)
1750 })?;
1751
1752 this.active_repo_id.get_or_insert_with(|| {
1753 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1754 id
1755 });
1756
1757 if let Some((client, project_id)) = this.downstream_client() {
1758 update.project_id = project_id.to_proto();
1759 client.send(update).log_err();
1760 }
1761 Ok(())
1762 })?
1763 }
1764
1765 async fn handle_remove_repository(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::RemoveRepository>,
1768 mut cx: AsyncApp,
1769 ) -> Result<()> {
1770 this.update(&mut cx, |this, cx| {
1771 let mut update = envelope.payload;
1772 let id = RepositoryId::from_proto(update.id);
1773 this.repositories.remove(&id);
1774 if let Some((client, project_id)) = this.downstream_client() {
1775 update.project_id = project_id.to_proto();
1776 client.send(update).log_err();
1777 }
1778 if this.active_repo_id == Some(id) {
1779 this.active_repo_id = None;
1780 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1781 }
1782 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1783 })
1784 }
1785
1786 async fn handle_git_init(
1787 this: Entity<Self>,
1788 envelope: TypedEnvelope<proto::GitInit>,
1789 cx: AsyncApp,
1790 ) -> Result<proto::Ack> {
1791 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1792 let name = envelope.payload.fallback_branch_name;
1793 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1794 .await?;
1795
1796 Ok(proto::Ack {})
1797 }
1798
1799 async fn handle_git_clone(
1800 this: Entity<Self>,
1801 envelope: TypedEnvelope<proto::GitClone>,
1802 cx: AsyncApp,
1803 ) -> Result<proto::GitCloneResponse> {
1804 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1805 let repo_name = envelope.payload.remote_repo;
1806 let result = cx
1807 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1808 .await;
1809
1810 Ok(proto::GitCloneResponse {
1811 success: result.is_ok(),
1812 })
1813 }
1814
1815 async fn handle_fetch(
1816 this: Entity<Self>,
1817 envelope: TypedEnvelope<proto::Fetch>,
1818 mut cx: AsyncApp,
1819 ) -> Result<proto::RemoteMessageResponse> {
1820 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1821 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1822 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1823 let askpass_id = envelope.payload.askpass_id;
1824
1825 let askpass = make_remote_delegate(
1826 this,
1827 envelope.payload.project_id,
1828 repository_id,
1829 askpass_id,
1830 &mut cx,
1831 );
1832
1833 let remote_output = repository_handle
1834 .update(&mut cx, |repository_handle, cx| {
1835 repository_handle.fetch(fetch_options, askpass, cx)
1836 })?
1837 .await??;
1838
1839 Ok(proto::RemoteMessageResponse {
1840 stdout: remote_output.stdout,
1841 stderr: remote_output.stderr,
1842 })
1843 }
1844
1845 async fn handle_push(
1846 this: Entity<Self>,
1847 envelope: TypedEnvelope<proto::Push>,
1848 mut cx: AsyncApp,
1849 ) -> Result<proto::RemoteMessageResponse> {
1850 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1851 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1852
1853 let askpass_id = envelope.payload.askpass_id;
1854 let askpass = make_remote_delegate(
1855 this,
1856 envelope.payload.project_id,
1857 repository_id,
1858 askpass_id,
1859 &mut cx,
1860 );
1861
1862 let options = envelope
1863 .payload
1864 .options
1865 .as_ref()
1866 .map(|_| match envelope.payload.options() {
1867 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1868 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1869 });
1870
1871 let branch_name = envelope.payload.branch_name.into();
1872 let remote_name = envelope.payload.remote_name.into();
1873
1874 let remote_output = repository_handle
1875 .update(&mut cx, |repository_handle, cx| {
1876 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1877 })?
1878 .await??;
1879 Ok(proto::RemoteMessageResponse {
1880 stdout: remote_output.stdout,
1881 stderr: remote_output.stderr,
1882 })
1883 }
1884
1885 async fn handle_pull(
1886 this: Entity<Self>,
1887 envelope: TypedEnvelope<proto::Pull>,
1888 mut cx: AsyncApp,
1889 ) -> Result<proto::RemoteMessageResponse> {
1890 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1891 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1892 let askpass_id = envelope.payload.askpass_id;
1893 let askpass = make_remote_delegate(
1894 this,
1895 envelope.payload.project_id,
1896 repository_id,
1897 askpass_id,
1898 &mut cx,
1899 );
1900
1901 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1902 let remote_name = envelope.payload.remote_name.into();
1903 let rebase = envelope.payload.rebase;
1904
1905 let remote_message = repository_handle
1906 .update(&mut cx, |repository_handle, cx| {
1907 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1908 })?
1909 .await??;
1910
1911 Ok(proto::RemoteMessageResponse {
1912 stdout: remote_message.stdout,
1913 stderr: remote_message.stderr,
1914 })
1915 }
1916
1917 async fn handle_stage(
1918 this: Entity<Self>,
1919 envelope: TypedEnvelope<proto::Stage>,
1920 mut cx: AsyncApp,
1921 ) -> Result<proto::Ack> {
1922 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1923 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1924
1925 let entries = envelope
1926 .payload
1927 .paths
1928 .into_iter()
1929 .map(|path| RepoPath::new(&path))
1930 .collect::<Result<Vec<_>>>()?;
1931
1932 repository_handle
1933 .update(&mut cx, |repository_handle, cx| {
1934 repository_handle.stage_entries(entries, cx)
1935 })?
1936 .await?;
1937 Ok(proto::Ack {})
1938 }
1939
1940 async fn handle_unstage(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::Unstage>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947
1948 let entries = envelope
1949 .payload
1950 .paths
1951 .into_iter()
1952 .map(|path| RepoPath::new(&path))
1953 .collect::<Result<Vec<_>>>()?;
1954
1955 repository_handle
1956 .update(&mut cx, |repository_handle, cx| {
1957 repository_handle.unstage_entries(entries, cx)
1958 })?
1959 .await?;
1960
1961 Ok(proto::Ack {})
1962 }
1963
1964 async fn handle_stash(
1965 this: Entity<Self>,
1966 envelope: TypedEnvelope<proto::Stash>,
1967 mut cx: AsyncApp,
1968 ) -> Result<proto::Ack> {
1969 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1970 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1971
1972 let entries = envelope
1973 .payload
1974 .paths
1975 .into_iter()
1976 .map(|path| RepoPath::new(&path))
1977 .collect::<Result<Vec<_>>>()?;
1978
1979 repository_handle
1980 .update(&mut cx, |repository_handle, cx| {
1981 repository_handle.stash_entries(entries, cx)
1982 })?
1983 .await?;
1984
1985 Ok(proto::Ack {})
1986 }
1987
1988 async fn handle_stash_pop(
1989 this: Entity<Self>,
1990 envelope: TypedEnvelope<proto::StashPop>,
1991 mut cx: AsyncApp,
1992 ) -> Result<proto::Ack> {
1993 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1994 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1995 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1996
1997 repository_handle
1998 .update(&mut cx, |repository_handle, cx| {
1999 repository_handle.stash_pop(stash_index, cx)
2000 })?
2001 .await?;
2002
2003 Ok(proto::Ack {})
2004 }
2005
2006 async fn handle_stash_apply(
2007 this: Entity<Self>,
2008 envelope: TypedEnvelope<proto::StashApply>,
2009 mut cx: AsyncApp,
2010 ) -> Result<proto::Ack> {
2011 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2012 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2013 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2014
2015 repository_handle
2016 .update(&mut cx, |repository_handle, cx| {
2017 repository_handle.stash_apply(stash_index, cx)
2018 })?
2019 .await?;
2020
2021 Ok(proto::Ack {})
2022 }
2023
2024 async fn handle_stash_drop(
2025 this: Entity<Self>,
2026 envelope: TypedEnvelope<proto::StashDrop>,
2027 mut cx: AsyncApp,
2028 ) -> Result<proto::Ack> {
2029 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2030 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2031 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2032
2033 repository_handle
2034 .update(&mut cx, |repository_handle, cx| {
2035 repository_handle.stash_drop(stash_index, cx)
2036 })?
2037 .await??;
2038
2039 Ok(proto::Ack {})
2040 }
2041
2042 async fn handle_set_index_text(
2043 this: Entity<Self>,
2044 envelope: TypedEnvelope<proto::SetIndexText>,
2045 mut cx: AsyncApp,
2046 ) -> Result<proto::Ack> {
2047 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2048 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2049 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2050
2051 repository_handle
2052 .update(&mut cx, |repository_handle, cx| {
2053 repository_handle.spawn_set_index_text_job(
2054 repo_path,
2055 envelope.payload.text,
2056 None,
2057 cx,
2058 )
2059 })?
2060 .await??;
2061 Ok(proto::Ack {})
2062 }
2063
2064 async fn handle_run_hook(
2065 this: Entity<Self>,
2066 envelope: TypedEnvelope<proto::RunGitHook>,
2067 mut cx: AsyncApp,
2068 ) -> Result<proto::Ack> {
2069 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2070 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2071 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2072 repository_handle
2073 .update(&mut cx, |repository_handle, cx| {
2074 repository_handle.run_hook(hook, cx)
2075 })?
2076 .await??;
2077 Ok(proto::Ack {})
2078 }
2079
2080 async fn handle_commit(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::Commit>,
2083 mut cx: AsyncApp,
2084 ) -> Result<proto::Ack> {
2085 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2086 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2087 let askpass_id = envelope.payload.askpass_id;
2088
2089 let askpass = make_remote_delegate(
2090 this,
2091 envelope.payload.project_id,
2092 repository_id,
2093 askpass_id,
2094 &mut cx,
2095 );
2096
2097 let message = SharedString::from(envelope.payload.message);
2098 let name = envelope.payload.name.map(SharedString::from);
2099 let email = envelope.payload.email.map(SharedString::from);
2100 let options = envelope.payload.options.unwrap_or_default();
2101
2102 repository_handle
2103 .update(&mut cx, |repository_handle, cx| {
2104 repository_handle.commit(
2105 message,
2106 name.zip(email),
2107 CommitOptions {
2108 amend: options.amend,
2109 signoff: options.signoff,
2110 },
2111 askpass,
2112 cx,
2113 )
2114 })?
2115 .await??;
2116 Ok(proto::Ack {})
2117 }
2118
2119 async fn handle_get_remotes(
2120 this: Entity<Self>,
2121 envelope: TypedEnvelope<proto::GetRemotes>,
2122 mut cx: AsyncApp,
2123 ) -> Result<proto::GetRemotesResponse> {
2124 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2125 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2126
2127 let branch_name = envelope.payload.branch_name;
2128 let is_push = envelope.payload.is_push;
2129
2130 let remotes = repository_handle
2131 .update(&mut cx, |repository_handle, _| {
2132 repository_handle.get_remotes(branch_name, is_push)
2133 })?
2134 .await??;
2135
2136 Ok(proto::GetRemotesResponse {
2137 remotes: remotes
2138 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
                })
2142 .collect::<Vec<_>>(),
2143 })
2144 }
2145
2146 async fn handle_get_worktrees(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::GitWorktreesResponse> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153
2154 let worktrees = repository_handle
2155 .update(&mut cx, |repository_handle, _| {
2156 repository_handle.worktrees()
2157 })?
2158 .await??;
2159
2160 Ok(proto::GitWorktreesResponse {
2161 worktrees: worktrees
2162 .into_iter()
2163 .map(|worktree| worktree_to_proto(&worktree))
2164 .collect::<Vec<_>>(),
2165 })
2166 }
2167
2168 async fn handle_create_worktree(
2169 this: Entity<Self>,
2170 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2171 mut cx: AsyncApp,
2172 ) -> Result<proto::Ack> {
2173 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2174 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2175 let directory = PathBuf::from(envelope.payload.directory);
2176 let name = envelope.payload.name;
2177 let commit = envelope.payload.commit;
2178
2179 repository_handle
2180 .update(&mut cx, |repository_handle, _| {
2181 repository_handle.create_worktree(name, directory, commit)
2182 })?
2183 .await??;
2184
2185 Ok(proto::Ack {})
2186 }
2187
2188 async fn handle_get_branches(
2189 this: Entity<Self>,
2190 envelope: TypedEnvelope<proto::GitGetBranches>,
2191 mut cx: AsyncApp,
2192 ) -> Result<proto::GitBranchesResponse> {
2193 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2194 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2195
2196 let branches = repository_handle
2197 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2198 .await??;
2199
2200 Ok(proto::GitBranchesResponse {
2201 branches: branches
2202 .into_iter()
2203 .map(|branch| branch_to_proto(&branch))
2204 .collect::<Vec<_>>(),
2205 })
2206 }
2207 async fn handle_get_default_branch(
2208 this: Entity<Self>,
2209 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2210 mut cx: AsyncApp,
2211 ) -> Result<proto::GetDefaultBranchResponse> {
2212 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2213 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2214
2215 let branch = repository_handle
2216 .update(&mut cx, |repository_handle, _| {
2217 repository_handle.default_branch()
2218 })?
2219 .await??
2220 .map(Into::into);
2221
2222 Ok(proto::GetDefaultBranchResponse { branch })
2223 }
2224 async fn handle_create_branch(
2225 this: Entity<Self>,
2226 envelope: TypedEnvelope<proto::GitCreateBranch>,
2227 mut cx: AsyncApp,
2228 ) -> Result<proto::Ack> {
2229 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2230 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2231 let branch_name = envelope.payload.branch_name;
2232
2233 repository_handle
2234 .update(&mut cx, |repository_handle, _| {
2235 repository_handle.create_branch(branch_name, None)
2236 })?
2237 .await??;
2238
2239 Ok(proto::Ack {})
2240 }
2241
2242 async fn handle_change_branch(
2243 this: Entity<Self>,
2244 envelope: TypedEnvelope<proto::GitChangeBranch>,
2245 mut cx: AsyncApp,
2246 ) -> Result<proto::Ack> {
2247 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2248 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2249 let branch_name = envelope.payload.branch_name;
2250
2251 repository_handle
2252 .update(&mut cx, |repository_handle, _| {
2253 repository_handle.change_branch(branch_name)
2254 })?
2255 .await??;
2256
2257 Ok(proto::Ack {})
2258 }
2259
2260 async fn handle_rename_branch(
2261 this: Entity<Self>,
2262 envelope: TypedEnvelope<proto::GitRenameBranch>,
2263 mut cx: AsyncApp,
2264 ) -> Result<proto::Ack> {
2265 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2266 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2267 let branch = envelope.payload.branch;
2268 let new_name = envelope.payload.new_name;
2269
2270 repository_handle
2271 .update(&mut cx, |repository_handle, _| {
2272 repository_handle.rename_branch(branch, new_name)
2273 })?
2274 .await??;
2275
2276 Ok(proto::Ack {})
2277 }
2278
2279 async fn handle_create_remote(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::GitCreateRemote>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::Ack> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let remote_name = envelope.payload.remote_name;
2287 let remote_url = envelope.payload.remote_url;
2288
2289 repository_handle
2290 .update(&mut cx, |repository_handle, _| {
2291 repository_handle.create_remote(remote_name, remote_url)
2292 })?
2293 .await??;
2294
2295 Ok(proto::Ack {})
2296 }
2297
2298 async fn handle_delete_branch(
2299 this: Entity<Self>,
2300 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2301 mut cx: AsyncApp,
2302 ) -> Result<proto::Ack> {
2303 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2304 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2305 let branch_name = envelope.payload.branch_name;
2306
2307 repository_handle
2308 .update(&mut cx, |repository_handle, _| {
2309 repository_handle.delete_branch(branch_name)
2310 })?
2311 .await??;
2312
2313 Ok(proto::Ack {})
2314 }
2315
2316 async fn handle_remove_remote(
2317 this: Entity<Self>,
2318 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2319 mut cx: AsyncApp,
2320 ) -> Result<proto::Ack> {
2321 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2322 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2323 let remote_name = envelope.payload.remote_name;
2324
2325 repository_handle
2326 .update(&mut cx, |repository_handle, _| {
2327 repository_handle.remove_remote(remote_name)
2328 })?
2329 .await??;
2330
2331 Ok(proto::Ack {})
2332 }
2333
2334 async fn handle_show(
2335 this: Entity<Self>,
2336 envelope: TypedEnvelope<proto::GitShow>,
2337 mut cx: AsyncApp,
2338 ) -> Result<proto::GitCommitDetails> {
2339 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2340 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2341
2342 let commit = repository_handle
2343 .update(&mut cx, |repository_handle, _| {
2344 repository_handle.show(envelope.payload.commit)
2345 })?
2346 .await??;
2347 Ok(proto::GitCommitDetails {
2348 sha: commit.sha.into(),
2349 message: commit.message.into(),
2350 commit_timestamp: commit.commit_timestamp,
2351 author_email: commit.author_email.into(),
2352 author_name: commit.author_name.into(),
2353 })
2354 }
2355
2356 async fn handle_load_commit_diff(
2357 this: Entity<Self>,
2358 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2359 mut cx: AsyncApp,
2360 ) -> Result<proto::LoadCommitDiffResponse> {
2361 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2362 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2363
2364 let commit_diff = repository_handle
2365 .update(&mut cx, |repository_handle, _| {
2366 repository_handle.load_commit_diff(envelope.payload.commit)
2367 })?
2368 .await??;
2369 Ok(proto::LoadCommitDiffResponse {
2370 files: commit_diff
2371 .files
2372 .into_iter()
2373 .map(|file| proto::CommitFile {
2374 path: file.path.to_proto(),
2375 old_text: file.old_text,
2376 new_text: file.new_text,
2377 })
2378 .collect(),
2379 })
2380 }
2381
2382 async fn handle_file_history(
2383 this: Entity<Self>,
2384 envelope: TypedEnvelope<proto::GitFileHistory>,
2385 mut cx: AsyncApp,
2386 ) -> Result<proto::GitFileHistoryResponse> {
2387 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2388 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2389 let path = RepoPath::from_proto(&envelope.payload.path)?;
2390 let skip = envelope.payload.skip as usize;
2391 let limit = envelope.payload.limit.map(|l| l as usize);
2392
2393 let file_history = repository_handle
2394 .update(&mut cx, |repository_handle, _| {
2395 repository_handle.file_history_paginated(path, skip, limit)
2396 })?
2397 .await??;
2398
2399 Ok(proto::GitFileHistoryResponse {
2400 entries: file_history
2401 .entries
2402 .into_iter()
2403 .map(|entry| proto::FileHistoryEntry {
2404 sha: entry.sha.to_string(),
2405 subject: entry.subject.to_string(),
2406 message: entry.message.to_string(),
2407 commit_timestamp: entry.commit_timestamp,
2408 author_name: entry.author_name.to_string(),
2409 author_email: entry.author_email.to_string(),
2410 })
2411 .collect(),
2412 path: file_history.path.to_proto(),
2413 })
2414 }
2415
2416 async fn handle_reset(
2417 this: Entity<Self>,
2418 envelope: TypedEnvelope<proto::GitReset>,
2419 mut cx: AsyncApp,
2420 ) -> Result<proto::Ack> {
2421 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2422 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2423
2424 let mode = match envelope.payload.mode() {
2425 git_reset::ResetMode::Soft => ResetMode::Soft,
2426 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2427 };
2428
2429 repository_handle
2430 .update(&mut cx, |repository_handle, cx| {
2431 repository_handle.reset(envelope.payload.commit, mode, cx)
2432 })?
2433 .await??;
2434 Ok(proto::Ack {})
2435 }
2436
2437 async fn handle_checkout_files(
2438 this: Entity<Self>,
2439 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2440 mut cx: AsyncApp,
2441 ) -> Result<proto::Ack> {
2442 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2443 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2444 let paths = envelope
2445 .payload
2446 .paths
2447 .iter()
2448 .map(|s| RepoPath::from_proto(s))
2449 .collect::<Result<Vec<_>>>()?;
2450
2451 repository_handle
2452 .update(&mut cx, |repository_handle, cx| {
2453 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2454 })?
2455 .await?;
2456 Ok(proto::Ack {})
2457 }
2458
2459 async fn handle_open_commit_message_buffer(
2460 this: Entity<Self>,
2461 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2462 mut cx: AsyncApp,
2463 ) -> Result<proto::OpenBufferResponse> {
2464 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2465 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2466 let buffer = repository
2467 .update(&mut cx, |repository, cx| {
2468 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2469 })?
2470 .await?;
2471
2472 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2473 this.update(&mut cx, |this, cx| {
2474 this.buffer_store.update(cx, |buffer_store, cx| {
2475 buffer_store
2476 .create_buffer_for_peer(
2477 &buffer,
2478 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2479 cx,
2480 )
2481 .detach_and_log_err(cx);
2482 })
2483 })?;
2484
2485 Ok(proto::OpenBufferResponse {
2486 buffer_id: buffer_id.to_proto(),
2487 })
2488 }
2489
2490 async fn handle_askpass(
2491 this: Entity<Self>,
2492 envelope: TypedEnvelope<proto::AskPassRequest>,
2493 mut cx: AsyncApp,
2494 ) -> Result<proto::AskPassResponse> {
2495 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2496 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2497
2498 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2499 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2500 debug_panic!("no askpass found");
2501 anyhow::bail!("no askpass found");
2502 };
2503
2504 let response = askpass
2505 .ask_password(envelope.payload.prompt)
2506 .await
2507 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2508
2509 delegates
2510 .lock()
2511 .insert(envelope.payload.askpass_id, askpass);
2512
        // Despite the marker type below, the askpass response is decrypted here and sent
        // back over the wire in plain text.
2514 Ok(proto::AskPassResponse {
2515 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2516 })
2517 }
2518
2519 async fn handle_check_for_pushed_commits(
2520 this: Entity<Self>,
2521 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2522 mut cx: AsyncApp,
2523 ) -> Result<proto::CheckForPushedCommitsResponse> {
2524 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2525 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2526
2527 let branches = repository_handle
2528 .update(&mut cx, |repository_handle, _| {
2529 repository_handle.check_for_pushed_commits()
2530 })?
2531 .await??;
2532 Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|branch| branch.to_string())
                .collect(),
2537 })
2538 }
2539
2540 async fn handle_git_diff(
2541 this: Entity<Self>,
2542 envelope: TypedEnvelope<proto::GitDiff>,
2543 mut cx: AsyncApp,
2544 ) -> Result<proto::GitDiffResponse> {
2545 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2546 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2547 let diff_type = match envelope.payload.diff_type() {
2548 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2549 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2550 };
2551
2552 let mut diff = repository_handle
2553 .update(&mut cx, |repository_handle, cx| {
2554 repository_handle.diff(diff_type, cx)
2555 })?
2556 .await??;
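        // Cap the size of the payload sent downstream: truncate to at most ONE_MB characters,
        // taking whole chars so we never split a UTF-8 boundary.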
2557 const ONE_MB: usize = 1_000_000;
2558 if diff.len() > ONE_MB {
2559 diff = diff.chars().take(ONE_MB).collect()
2560 }
2561
2562 Ok(proto::GitDiffResponse { diff })
2563 }
2564
2565 async fn handle_tree_diff(
2566 this: Entity<Self>,
2567 request: TypedEnvelope<proto::GetTreeDiff>,
2568 mut cx: AsyncApp,
2569 ) -> Result<proto::GetTreeDiffResponse> {
2570 let repository_id = RepositoryId(request.payload.repository_id);
2571 let diff_type = if request.payload.is_merge {
2572 DiffTreeType::MergeBase {
2573 base: request.payload.base.into(),
2574 head: request.payload.head.into(),
2575 }
2576 } else {
2577 DiffTreeType::Since {
2578 base: request.payload.base.into(),
2579 head: request.payload.head.into(),
2580 }
2581 };
2582
2583 let diff = this
2584 .update(&mut cx, |this, cx| {
2585 let repository = this.repositories().get(&repository_id)?;
2586 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2587 })?
2588 .context("missing repository")?
2589 .await??;
2590
2591 Ok(proto::GetTreeDiffResponse {
2592 entries: diff
2593 .entries
2594 .into_iter()
2595 .map(|(path, status)| proto::TreeDiffStatus {
2596 path: path.as_ref().to_proto(),
2597 status: match status {
2598 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2599 TreeDiffStatus::Modified { .. } => {
2600 proto::tree_diff_status::Status::Modified.into()
2601 }
2602 TreeDiffStatus::Deleted { .. } => {
2603 proto::tree_diff_status::Status::Deleted.into()
2604 }
2605 },
2606 oid: match status {
2607 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2608 Some(old.to_string())
2609 }
2610 TreeDiffStatus::Added => None,
2611 },
2612 })
2613 .collect(),
2614 })
2615 }
2616
2617 async fn handle_get_blob_content(
2618 this: Entity<Self>,
2619 request: TypedEnvelope<proto::GetBlobContent>,
2620 mut cx: AsyncApp,
2621 ) -> Result<proto::GetBlobContentResponse> {
2622 let oid = git::Oid::from_str(&request.payload.oid)?;
2623 let repository_id = RepositoryId(request.payload.repository_id);
2624 let content = this
2625 .update(&mut cx, |this, cx| {
2626 let repository = this.repositories().get(&repository_id)?;
2627 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2628 })?
2629 .context("missing repository")?
2630 .await?;
2631 Ok(proto::GetBlobContentResponse { content })
2632 }
2633
2634 async fn handle_open_unstaged_diff(
2635 this: Entity<Self>,
2636 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2637 mut cx: AsyncApp,
2638 ) -> Result<proto::OpenUnstagedDiffResponse> {
2639 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2640 let diff = this
2641 .update(&mut cx, |this, cx| {
2642 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2643 Some(this.open_unstaged_diff(buffer, cx))
2644 })?
2645 .context("missing buffer")?
2646 .await?;
2647 this.update(&mut cx, |this, _| {
2648 let shared_diffs = this
2649 .shared_diffs
2650 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2651 .or_default();
2652 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2653 })?;
2654 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
2655 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2656 }
2657
2658 async fn handle_open_uncommitted_diff(
2659 this: Entity<Self>,
2660 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2661 mut cx: AsyncApp,
2662 ) -> Result<proto::OpenUncommittedDiffResponse> {
2663 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2664 let diff = this
2665 .update(&mut cx, |this, cx| {
2666 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2667 Some(this.open_uncommitted_diff(buffer, cx))
2668 })?
2669 .context("missing buffer")?
2670 .await?;
2671 this.update(&mut cx, |this, _| {
2672 let shared_diffs = this
2673 .shared_diffs
2674 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2675 .or_default();
2676 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2677 })?;
2678 diff.read_with(&cx, |diff, cx| {
2679 use proto::open_uncommitted_diff_response::Mode;
2680
2681 let unstaged_diff = diff.secondary_diff();
2682 let index_snapshot = unstaged_diff.and_then(|diff| {
2683 let diff = diff.read(cx);
2684 diff.base_text_exists().then(|| diff.base_text(cx))
2685 });
2686
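            // Decide which base texts to send: when the index snapshot is literally the same
            // text buffer as HEAD's, report `IndexMatchesHead` and omit the staged text so the
            // client can reuse the committed text.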
2687 let mode;
2688 let staged_text;
2689 let committed_text;
2690 if diff.base_text_exists() {
2691 let committed_snapshot = diff.base_text(cx);
2692 committed_text = Some(committed_snapshot.text());
2693 if let Some(index_text) = index_snapshot {
2694 if index_text.remote_id() == committed_snapshot.remote_id() {
2695 mode = Mode::IndexMatchesHead;
2696 staged_text = None;
2697 } else {
2698 mode = Mode::IndexAndHead;
2699 staged_text = Some(index_text.text());
2700 }
2701 } else {
2702 mode = Mode::IndexAndHead;
2703 staged_text = None;
2704 }
2705 } else {
2706 mode = Mode::IndexAndHead;
2707 committed_text = None;
2708 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2709 }
2710
2711 proto::OpenUncommittedDiffResponse {
2712 committed_text,
2713 staged_text,
2714 mode: mode.into(),
2715 }
2716 })
2717 }
2718
2719 async fn handle_update_diff_bases(
2720 this: Entity<Self>,
2721 request: TypedEnvelope<proto::UpdateDiffBases>,
2722 mut cx: AsyncApp,
2723 ) -> Result<()> {
2724 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2725 this.update(&mut cx, |this, cx| {
2726 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2727 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2728 {
2729 let buffer = buffer.read(cx).text_snapshot();
2730 diff_state.update(cx, |diff_state, cx| {
2731 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2732 })
2733 }
2734 })
2735 }
2736
2737 async fn handle_blame_buffer(
2738 this: Entity<Self>,
2739 envelope: TypedEnvelope<proto::BlameBuffer>,
2740 mut cx: AsyncApp,
2741 ) -> Result<proto::BlameBufferResponse> {
2742 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2743 let version = deserialize_version(&envelope.payload.version);
2744 let buffer = this.read_with(&cx, |this, cx| {
2745 this.buffer_store.read(cx).get_existing(buffer_id)
2746 })??;
2747 buffer
2748 .update(&mut cx, |buffer, _| {
2749 buffer.wait_for_version(version.clone())
2750 })?
2751 .await?;
2752 let blame = this
2753 .update(&mut cx, |this, cx| {
2754 this.blame_buffer(&buffer, Some(version), cx)
2755 })?
2756 .await?;
2757 Ok(serialize_blame_buffer_response(blame))
2758 }
2759
2760 async fn handle_get_permalink_to_line(
2761 this: Entity<Self>,
2762 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2763 mut cx: AsyncApp,
2764 ) -> Result<proto::GetPermalinkToLineResponse> {
2765 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2766 // let version = deserialize_version(&envelope.payload.version);
2767 let selection = {
2768 let proto_selection = envelope
2769 .payload
2770 .selection
                .context("no selection provided to get permalink for")?;
2772 proto_selection.start as u32..proto_selection.end as u32
2773 };
2774 let buffer = this.read_with(&cx, |this, cx| {
2775 this.buffer_store.read(cx).get_existing(buffer_id)
2776 })??;
2777 let permalink = this
2778 .update(&mut cx, |this, cx| {
2779 this.get_permalink_to_line(&buffer, selection, cx)
2780 })?
2781 .await?;
2782 Ok(proto::GetPermalinkToLineResponse {
2783 permalink: permalink.to_string(),
2784 })
2785 }
2786
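    /// Resolves the repository referenced by an incoming RPC request, failing if it is no
    /// longer present in this store.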
2787 fn repository_for_request(
2788 this: &Entity<Self>,
2789 id: RepositoryId,
2790 cx: &mut AsyncApp,
2791 ) -> Result<Entity<Repository>> {
2792 this.read_with(cx, |this, _| {
2793 this.repositories
2794 .get(&id)
2795 .context("missing repository handle")
2796 .cloned()
2797 })?
2798 }
2799
2800 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2801 self.repositories
2802 .iter()
2803 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2804 .collect()
2805 }
2806
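    /// Groups a batch of updated worktree entries by the repository whose work directory
    /// contains them. When repositories are nested, each path is assigned to its innermost
    /// repository.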
2807 fn process_updated_entries(
2808 &self,
2809 worktree: &Entity<Worktree>,
2810 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2811 cx: &mut App,
2812 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2813 let path_style = worktree.read(cx).path_style();
2814 let mut repo_paths = self
2815 .repositories
2816 .values()
2817 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2818 .collect::<Vec<_>>();
2819 let mut entries: Vec<_> = updated_entries
2820 .iter()
2821 .map(|(path, _, _)| path.clone())
2822 .collect();
2823 entries.sort();
2824 let worktree = worktree.read(cx);
2825
2826 let entries = entries
2827 .into_iter()
2828 .map(|path| worktree.absolutize(&path))
2829 .collect::<Arc<[_]>>();
2830
2831 let executor = cx.background_executor().clone();
2832 cx.background_executor().spawn(async move {
2833 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2834 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2835 let mut tasks = FuturesOrdered::new();
            for (work_dir_abs_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find the updated paths that fall under this repository's work directory.
                    let mut ix = entries.partition_point(|path| path < &*work_dir_abs_path);
                    if ix == entries.len() {
                        return None;
                    }

                    let mut paths = Vec::new();
                    // All paths under a given work directory form a contiguous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &work_dir_abs_path,
                            path,
                            path_style,
                        )
                    {
2852 paths.push((repo_path, ix));
2853 ix += 1;
2854 }
2855 if paths.is_empty() {
2856 None
2857 } else {
2858 Some((repo, paths))
2859 }
2860 });
2861 tasks.push_back(task);
2862 }
2863
            // Filter out "duplicate" entries that were matched by more than one repository,
            // which can happen when repositories are nested.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were enqueued in reverse path order, so more-specific work directories
            // are processed first and each path is assigned to its innermost repository.
2869 for t in tasks {
2870 let Some((repo, paths)) = t else {
2871 continue;
2872 };
2873 let entry = paths_by_git_repo.entry(repo).or_default();
2874 for (repo_path, ix) in paths {
2875 if path_was_used[ix] {
2876 continue;
2877 }
2878 path_was_used[ix] = true;
2879 entry.push(repo_path);
2880 }
2881 }
2882
2883 paths_by_git_repo
2884 })
2885 }
2886}
2887
2888impl BufferGitState {
2889 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2890 Self {
2891 unstaged_diff: Default::default(),
2892 uncommitted_diff: Default::default(),
2893 recalculate_diff_task: Default::default(),
2894 language: Default::default(),
2895 language_registry: Default::default(),
2896 recalculating_tx: postage::watch::channel_with(false).0,
2897 hunk_staging_operation_count: 0,
2898 hunk_staging_operation_count_as_of_write: 0,
2899 head_text: Default::default(),
2900 index_text: Default::default(),
2901 head_changed: Default::default(),
2902 index_changed: Default::default(),
2903 language_changed: Default::default(),
2904 conflict_updated_futures: Default::default(),
2905 conflict_set: Default::default(),
2906 reparse_conflict_markers_task: Default::default(),
2907 }
2908 }
2909
2910 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2911 self.language = buffer.read(cx).language().cloned();
2912 self.language_changed = true;
2913 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2914 }
2915
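    /// Schedules a background reparse of the buffer's conflict markers and returns a receiver
    /// that fires once the conflict set has been updated. If there is no live conflict set, or
    /// it currently records no conflict, nothing is scheduled and the sender side of the
    /// returned receiver is dropped immediately.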
2916 fn reparse_conflict_markers(
2917 &mut self,
2918 buffer: text::BufferSnapshot,
2919 cx: &mut Context<Self>,
2920 ) -> oneshot::Receiver<()> {
2921 let (tx, rx) = oneshot::channel();
2922
2923 let Some(conflict_set) = self
2924 .conflict_set
2925 .as_ref()
2926 .and_then(|conflict_set| conflict_set.upgrade())
2927 else {
2928 return rx;
2929 };
2930
2931 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2932 if conflict_set.has_conflict {
2933 Some(conflict_set.snapshot())
2934 } else {
2935 None
2936 }
2937 });
2938
2939 if let Some(old_snapshot) = old_snapshot {
2940 self.conflict_updated_futures.push(tx);
2941 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2942 let (snapshot, changed_range) = cx
2943 .background_spawn(async move {
2944 let new_snapshot = ConflictSet::parse(&buffer);
2945 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2946 (new_snapshot, changed_range)
2947 })
2948 .await;
2949 this.update(cx, |this, cx| {
2950 if let Some(conflict_set) = &this.conflict_set {
2951 conflict_set
2952 .update(cx, |conflict_set, cx| {
2953 conflict_set.set_snapshot(snapshot, changed_range, cx);
2954 })
2955 .ok();
2956 }
2957 let futures = std::mem::take(&mut this.conflict_updated_futures);
2958 for tx in futures {
2959 tx.send(()).ok();
2960 }
2961 })
2962 }))
2963 }
2964
2965 rx
2966 }
2967
2968 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2969 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2970 }
2971
2972 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2973 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2974 }
2975
2976 fn handle_base_texts_updated(
2977 &mut self,
2978 buffer: text::BufferSnapshot,
2979 message: proto::UpdateDiffBases,
2980 cx: &mut Context<Self>,
2981 ) {
2982 use proto::update_diff_bases::Mode;
2983
2984 let Some(mode) = Mode::from_i32(message.mode) else {
2985 return;
2986 };
2987
2988 let diff_bases_change = match mode {
2989 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2990 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2991 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2992 Mode::IndexAndHead => DiffBasesChange::SetEach {
2993 index: message.staged_text,
2994 head: message.committed_text,
2995 },
2996 };
2997
2998 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2999 }
3000
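    /// If a diff recalculation is currently in progress, returns a future that resolves once
    /// it finishes (or is cancelled); otherwise returns `None`.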
3001 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3002 if *self.recalculating_tx.borrow() {
3003 let mut rx = self.recalculating_tx.subscribe();
3004 Some(async move {
3005 loop {
3006 let is_recalculating = rx.recv().await;
3007 if is_recalculating != Some(true) {
3008 break;
3009 }
3010 }
3011 })
3012 } else {
3013 None
3014 }
3015 }
3016
3017 fn diff_bases_changed(
3018 &mut self,
3019 buffer: text::BufferSnapshot,
3020 diff_bases_change: Option<DiffBasesChange>,
3021 cx: &mut Context<Self>,
3022 ) {
3023 match diff_bases_change {
3024 Some(DiffBasesChange::SetIndex(index)) => {
3025 self.index_text = index.map(|mut index| {
3026 text::LineEnding::normalize(&mut index);
3027 Arc::from(index.as_str())
3028 });
3029 self.index_changed = true;
3030 }
3031 Some(DiffBasesChange::SetHead(head)) => {
3032 self.head_text = head.map(|mut head| {
3033 text::LineEnding::normalize(&mut head);
3034 Arc::from(head.as_str())
3035 });
3036 self.head_changed = true;
3037 }
3038 Some(DiffBasesChange::SetBoth(text)) => {
3039 let text = text.map(|mut text| {
3040 text::LineEnding::normalize(&mut text);
3041 Arc::from(text.as_str())
3042 });
3043 self.head_text = text.clone();
3044 self.index_text = text;
3045 self.head_changed = true;
3046 self.index_changed = true;
3047 }
3048 Some(DiffBasesChange::SetEach { index, head }) => {
3049 self.index_text = index.map(|mut index| {
3050 text::LineEnding::normalize(&mut index);
3051 Arc::from(index.as_str())
3052 });
3053 self.index_changed = true;
3054 self.head_text = head.map(|mut head| {
3055 text::LineEnding::normalize(&mut head);
3056 Arc::from(head.as_str())
3057 });
3058 self.head_changed = true;
3059 }
3060 None => {}
3061 }
3062
3063 self.recalculate_diffs(buffer, cx)
3064 }
3065
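    /// Spawns a task that recomputes the unstaged and uncommitted diffs from the current index
    /// and HEAD texts. When the index matches HEAD the unstaged diff is reused for the
    /// uncommitted diff, and the recalculation is abandoned if hunk stage/unstage operations
    /// are still pending when it completes.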
3066 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3067 *self.recalculating_tx.borrow_mut() = true;
3068
3069 let language = self.language.clone();
3070 let language_registry = self.language_registry.clone();
3071 let unstaged_diff = self.unstaged_diff();
3072 let uncommitted_diff = self.uncommitted_diff();
3073 let head = self.head_text.clone();
3074 let index = self.index_text.clone();
3075 let index_changed = self.index_changed;
3076 let head_changed = self.head_changed;
3077 let language_changed = self.language_changed;
3078 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3079 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3080 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3081 (None, None) => true,
3082 _ => false,
3083 };
3084 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3085 log::debug!(
3086 "start recalculating diffs for buffer {}",
3087 buffer.remote_id()
3088 );
3089
3090 let mut new_unstaged_diff = None;
3091 if let Some(unstaged_diff) = &unstaged_diff {
3092 new_unstaged_diff = Some(
3093 cx.update(|cx| {
3094 unstaged_diff.read(cx).update_diff(
3095 buffer.clone(),
3096 index,
3097 index_changed,
3098 language.clone(),
3099 cx,
3100 )
3101 })?
3102 .await,
3103 );
3104 }
3105
3106 // Dropping BufferDiff can be expensive, so yield back to the event loop
3107 // for a bit
3108 yield_now().await;
3109
3110 let mut new_uncommitted_diff = None;
3111 if let Some(uncommitted_diff) = &uncommitted_diff {
3112 new_uncommitted_diff = if index_matches_head {
3113 new_unstaged_diff.clone()
3114 } else {
3115 Some(
3116 cx.update(|cx| {
3117 uncommitted_diff.read(cx).update_diff(
3118 buffer.clone(),
3119 head,
3120 head_changed,
3121 language.clone(),
3122 cx,
3123 )
3124 })?
3125 .await,
3126 )
3127 }
3128 }
3129
3130 // Dropping BufferDiff can be expensive, so yield back to the event loop
3131 // for a bit
3132 yield_now().await;
3133
3134 let cancel = this.update(cx, |this, _| {
3135 // This checks whether all pending stage/unstage operations
3136 // have quiesced (i.e. both the corresponding write and the
3137 // read of that write have completed). If not, then we cancel
3138 // this recalculation attempt to avoid invalidating pending
3139 // state too quickly; another recalculation will come along
3140 // later and clear the pending state once the state of the index has settled.
3141 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3142 *this.recalculating_tx.borrow_mut() = false;
3143 true
3144 } else {
3145 false
3146 }
3147 })?;
3148 if cancel {
                log::debug!(
                    "aborting recalculating diffs for buffer {} due to subsequent hunk operations",
                    buffer.remote_id()
                );
3156 return Ok(());
3157 }
3158
3159 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3160 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3161 {
3162 unstaged_diff.update(cx, |diff, cx| {
3163 if language_changed {
3164 diff.language_changed(language.clone(), language_registry.clone(), cx);
3165 }
3166 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3167 })?
3168 } else {
3169 None
3170 };
3171
3172 yield_now().await;
3173
3174 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3175 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3176 {
3177 uncommitted_diff.update(cx, |diff, cx| {
3178 if language_changed {
3179 diff.language_changed(language, language_registry, cx);
3180 }
3181 diff.set_snapshot_with_secondary(
3182 new_uncommitted_diff,
3183 &buffer,
3184 unstaged_changed_range,
3185 true,
3186 cx,
3187 );
3188 })?;
3189 }
3190
3191 log::debug!(
3192 "finished recalculating diffs for buffer {}",
3193 buffer.remote_id()
3194 );
3195
3196 if let Some(this) = this.upgrade() {
3197 this.update(cx, |this, _| {
3198 this.index_changed = false;
3199 this.head_changed = false;
3200 this.language_changed = false;
3201 *this.recalculating_tx.borrow_mut() = false;
3202 })?;
3203 }
3204
3205 Ok(())
3206 }));
3207 }
3208}
3209
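/// Builds an `AskPassDelegate` that forwards credential prompts to the downstream client as
/// `AskPassRequest` messages, relays the response back to the waiting git operation, and
/// zeroizes the plaintext response afterwards.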
3210fn make_remote_delegate(
3211 this: Entity<GitStore>,
3212 project_id: u64,
3213 repository_id: RepositoryId,
3214 askpass_id: u64,
3215 cx: &mut AsyncApp,
3216) -> AskPassDelegate {
3217 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3218 this.update(cx, |this, cx| {
3219 let Some((client, _)) = this.downstream_client() else {
3220 return;
3221 };
3222 let response = client.request(proto::AskPassRequest {
3223 project_id,
3224 repository_id: repository_id.to_proto(),
3225 askpass_id,
3226 prompt,
3227 });
3228 cx.spawn(async move |_, _| {
3229 let mut response = response.await?.response;
3230 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3231 .ok();
3232 response.zeroize();
3233 anyhow::Ok(())
3234 })
3235 .detach_and_log_err(cx);
3236 })
3237 .log_err();
3238 })
3239}
3240
3241impl RepositoryId {
3242 pub fn to_proto(self) -> u64 {
3243 self.0
3244 }
3245
3246 pub fn from_proto(id: u64) -> Self {
3247 RepositoryId(id)
3248 }
3249}
3250
3251impl RepositorySnapshot {
3252 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3253 Self {
3254 id,
3255 statuses_by_path: Default::default(),
3256 work_directory_abs_path,
3257 branch: None,
3258 head_commit: None,
3259 scan_id: 0,
3260 merge: Default::default(),
3261 remote_origin_url: None,
3262 remote_upstream_url: None,
3263 stash_entries: Default::default(),
3264 path_style,
3265 }
3266 }
3267
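    /// Builds the full `UpdateRepository` message describing this repository's current state,
    /// used to seed a downstream client.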
3268 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3269 proto::UpdateRepository {
3270 branch_summary: self.branch.as_ref().map(branch_to_proto),
3271 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3272 updated_statuses: self
3273 .statuses_by_path
3274 .iter()
3275 .map(|entry| entry.to_proto())
3276 .collect(),
3277 removed_statuses: Default::default(),
3278 current_merge_conflicts: self
3279 .merge
3280 .conflicted_paths
3281 .iter()
3282 .map(|repo_path| repo_path.to_proto())
3283 .collect(),
3284 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3285 project_id,
3286 id: self.id.to_proto(),
3287 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3288 entry_ids: vec![self.id.to_proto()],
3289 scan_id: self.scan_id,
3290 is_last_update: true,
3291 stash_entries: self
3292 .stash_entries
3293 .entries
3294 .iter()
3295 .map(stash_to_proto)
3296 .collect(),
3297 remote_upstream_url: self.remote_upstream_url.clone(),
3298 remote_origin_url: self.remote_origin_url.clone(),
3299 }
3300 }
3301
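    /// Builds an incremental `UpdateRepository` message relative to `old`, walking both sorted
    /// status lists in lockstep and emitting only added, changed, and removed statuses.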
3302 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3303 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3304 let mut removed_statuses: Vec<String> = Vec::new();
3305
3306 let mut new_statuses = self.statuses_by_path.iter().peekable();
3307 let mut old_statuses = old.statuses_by_path.iter().peekable();
3308
3309 let mut current_new_entry = new_statuses.next();
3310 let mut current_old_entry = old_statuses.next();
3311 loop {
3312 match (current_new_entry, current_old_entry) {
3313 (Some(new_entry), Some(old_entry)) => {
3314 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3315 Ordering::Less => {
3316 updated_statuses.push(new_entry.to_proto());
3317 current_new_entry = new_statuses.next();
3318 }
3319 Ordering::Equal => {
3320 if new_entry.status != old_entry.status {
3321 updated_statuses.push(new_entry.to_proto());
3322 }
3323 current_old_entry = old_statuses.next();
3324 current_new_entry = new_statuses.next();
3325 }
3326 Ordering::Greater => {
3327 removed_statuses.push(old_entry.repo_path.to_proto());
3328 current_old_entry = old_statuses.next();
3329 }
3330 }
3331 }
3332 (None, Some(old_entry)) => {
3333 removed_statuses.push(old_entry.repo_path.to_proto());
3334 current_old_entry = old_statuses.next();
3335 }
3336 (Some(new_entry), None) => {
3337 updated_statuses.push(new_entry.to_proto());
3338 current_new_entry = new_statuses.next();
3339 }
3340 (None, None) => break,
3341 }
3342 }
3343
3344 proto::UpdateRepository {
3345 branch_summary: self.branch.as_ref().map(branch_to_proto),
3346 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3347 updated_statuses,
3348 removed_statuses,
3349 current_merge_conflicts: self
3350 .merge
3351 .conflicted_paths
3352 .iter()
3353 .map(|path| path.to_proto())
3354 .collect(),
3355 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3356 project_id,
3357 id: self.id.to_proto(),
3358 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3359 entry_ids: vec![],
3360 scan_id: self.scan_id,
3361 is_last_update: true,
3362 stash_entries: self
3363 .stash_entries
3364 .entries
3365 .iter()
3366 .map(stash_to_proto)
3367 .collect(),
3368 remote_upstream_url: self.remote_upstream_url.clone(),
3369 remote_origin_url: self.remote_origin_url.clone(),
3370 }
3371 }
3372
3373 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3374 self.statuses_by_path.iter().cloned()
3375 }
3376
3377 pub fn status_summary(&self) -> GitSummary {
3378 self.statuses_by_path.summary().item_summary
3379 }
3380
3381 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3382 self.statuses_by_path
3383 .get(&PathKey(path.as_ref().clone()), ())
3384 .cloned()
3385 }
3386
3387 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3388 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3389 }
3390
3391 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3392 self.path_style
3393 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3394 .unwrap()
3395 .into()
3396 }
3397
3398 #[inline]
3399 fn abs_path_to_repo_path_inner(
3400 work_directory_abs_path: &Path,
3401 abs_path: &Path,
3402 path_style: PathStyle,
3403 ) -> Option<RepoPath> {
3404 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3405 Some(RepoPath::from_rel_path(&rel_path))
3406 }
3407
3408 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3409 self.merge.conflicted_paths.contains(repo_path)
3410 }
3411
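    /// Returns true if the path is currently reported as conflicted, or was conflicted when
    /// the merge heads last changed.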
3412 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3413 let had_conflict_on_last_merge_head_change =
3414 self.merge.conflicted_paths.contains(repo_path);
3415 let has_conflict_currently = self
3416 .status_for_path(repo_path)
3417 .is_some_and(|entry| entry.status.is_conflicted());
3418 had_conflict_on_last_merge_head_change || has_conflict_currently
3419 }
3420
3421 /// This is the name that will be displayed in the repository selector for this repository.
3422 pub fn display_name(&self) -> SharedString {
3423 self.work_directory_abs_path
3424 .file_name()
3425 .unwrap_or_default()
3426 .to_string_lossy()
3427 .to_string()
3428 .into()
3429 }
3430}
3431
3432pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3433 proto::StashEntry {
3434 oid: entry.oid.as_bytes().to_vec(),
3435 message: entry.message.clone(),
3436 branch: entry.branch.clone(),
3437 index: entry.index as u64,
3438 timestamp: entry.timestamp,
3439 }
3440}
3441
3442pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3443 Ok(StashEntry {
3444 oid: Oid::from_bytes(&entry.oid)?,
3445 message: entry.message.clone(),
3446 index: entry.index as usize,
3447 branch: entry.branch.clone(),
3448 timestamp: entry.timestamp,
3449 })
3450}
3451
3452impl MergeDetails {
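    /// Loads the repository's merge state (merge/cherry-pick/rebase/revert/apply heads, merge
    /// message, and conflicted paths), reusing the previous snapshot's conflicted paths when
    /// the recorded heads are unchanged. Returns the details along with whether the recorded
    /// merge heads changed.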
3453 async fn load(
3454 backend: &Arc<dyn GitRepository>,
3455 status: &SumTree<StatusEntry>,
3456 prev_snapshot: &RepositorySnapshot,
3457 ) -> Result<(MergeDetails, bool)> {
3458 log::debug!("load merge details");
3459 let message = backend.merge_message().await;
3460 let heads = backend
3461 .revparse_batch(vec![
3462 "MERGE_HEAD".into(),
3463 "CHERRY_PICK_HEAD".into(),
3464 "REBASE_HEAD".into(),
3465 "REVERT_HEAD".into(),
3466 "APPLY_HEAD".into(),
3467 ])
3468 .await
3469 .log_err()
3470 .unwrap_or_default()
3471 .into_iter()
3472 .map(|opt| opt.map(SharedString::from))
3473 .collect::<Vec<_>>();
3474 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3475 let conflicted_paths = if merge_heads_changed {
3476 let current_conflicted_paths = TreeSet::from_ordered_entries(
3477 status
3478 .iter()
3479 .filter(|entry| entry.status.is_conflicted())
3480 .map(|entry| entry.repo_path.clone()),
3481 );
3482
            // It can happen that we run a scan while a lengthy merge is in progress that
            // will eventually result in conflicts, but before those conflicts have been
            // reported by `git status`. Since, for now, we only care about the merge heads
            // for the purpose of tracking conflicts, don't update this state until we
            // actually see some conflicts.
3488 if heads.iter().any(Option::is_some)
3489 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3490 && current_conflicted_paths.is_empty()
3491 {
3492 log::debug!("not updating merge heads because no conflicts found");
3493 return Ok((
3494 MergeDetails {
3495 message: message.map(SharedString::from),
3496 ..prev_snapshot.merge.clone()
3497 },
3498 false,
3499 ));
3500 }
3501
3502 current_conflicted_paths
3503 } else {
3504 prev_snapshot.merge.conflicted_paths.clone()
3505 };
3506 let details = MergeDetails {
3507 conflicted_paths,
3508 message: message.map(SharedString::from),
3509 heads,
3510 };
3511 Ok((details, merge_heads_changed))
3512 }
3513}
3514
3515impl Repository {
3516 pub fn snapshot(&self) -> RepositorySnapshot {
3517 self.snapshot.clone()
3518 }
3519
3520 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3521 self.pending_ops.iter().cloned()
3522 }
3523
3524 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3525 self.pending_ops.summary().clone()
3526 }
3527
3528 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3529 self.pending_ops
3530 .get(&PathKey(path.as_ref().clone()), ())
3531 .cloned()
3532 }
3533
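    /// Constructs a repository backed by a local working copy, initializing its git backend
    /// asynchronously and spawning the worker that executes git jobs for it.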
3534 fn local(
3535 id: RepositoryId,
3536 work_directory_abs_path: Arc<Path>,
3537 dot_git_abs_path: Arc<Path>,
3538 project_environment: WeakEntity<ProjectEnvironment>,
3539 fs: Arc<dyn Fs>,
3540 git_store: WeakEntity<GitStore>,
3541 cx: &mut Context<Self>,
3542 ) -> Self {
3543 let snapshot =
3544 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3545 let state = cx
3546 .spawn(async move |_, cx| {
3547 LocalRepositoryState::new(
3548 work_directory_abs_path,
3549 dot_git_abs_path,
3550 project_environment,
3551 fs,
3552 cx,
3553 )
3554 .await
3555 .map_err(|err| err.to_string())
3556 })
3557 .shared();
3558 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3559 let state = cx
3560 .spawn(async move |_, _| {
3561 let state = state.await?;
3562 Ok(RepositoryState::Local(state))
3563 })
3564 .shared();
3565
3566 Repository {
3567 this: cx.weak_entity(),
3568 git_store,
3569 snapshot,
3570 pending_ops: Default::default(),
3571 repository_state: state,
3572 commit_message_buffer: None,
3573 askpass_delegates: Default::default(),
3574 paths_needing_status_update: Default::default(),
3575 latest_askpass_id: 0,
3576 job_sender,
3577 job_id: 0,
3578 active_jobs: Default::default(),
3579 }
3580 }
3581
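    /// Constructs a repository whose git operations are forwarded over RPC to the remote peer
    /// that owns the working copy.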
3582 fn remote(
3583 id: RepositoryId,
3584 work_directory_abs_path: Arc<Path>,
3585 path_style: PathStyle,
3586 project_id: ProjectId,
3587 client: AnyProtoClient,
3588 git_store: WeakEntity<GitStore>,
3589 cx: &mut Context<Self>,
3590 ) -> Self {
3591 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3592 let repository_state = RemoteRepositoryState { project_id, client };
3593 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3594 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3595 Self {
3596 this: cx.weak_entity(),
3597 snapshot,
3598 commit_message_buffer: None,
3599 git_store,
3600 pending_ops: Default::default(),
3601 paths_needing_status_update: Default::default(),
3602 job_sender,
3603 repository_state,
3604 askpass_delegates: Default::default(),
3605 latest_askpass_id: 0,
3606 active_jobs: Default::default(),
3607 job_id: 0,
3608 }
3609 }
3610
3611 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3612 self.git_store.upgrade()
3613 }
3614
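    /// Reloads the index and HEAD base texts for every open buffer belonging to this
    /// repository, applies any changes to the corresponding diff state, and forwards the new
    /// bases to the downstream client when one is connected.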
3615 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3616 let this = cx.weak_entity();
3617 let git_store = self.git_store.clone();
3618 let _ = self.send_keyed_job(
3619 Some(GitJobKey::ReloadBufferDiffBases),
3620 None,
3621 |state, mut cx| async move {
3622 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3623 log::error!("tried to recompute diffs for a non-local repository");
3624 return Ok(());
3625 };
3626
3627 let Some(this) = this.upgrade() else {
3628 return Ok(());
3629 };
3630
3631 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3632 git_store.update(cx, |git_store, cx| {
3633 git_store
3634 .diffs
3635 .iter()
3636 .filter_map(|(buffer_id, diff_state)| {
3637 let buffer_store = git_store.buffer_store.read(cx);
3638 let buffer = buffer_store.get(*buffer_id)?;
3639 let file = File::from_dyn(buffer.read(cx).file())?;
3640 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3641 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3642 log::debug!(
3643 "start reload diff bases for repo path {}",
3644 repo_path.as_unix_str()
3645 );
3646 diff_state.update(cx, |diff_state, _| {
3647 let has_unstaged_diff = diff_state
3648 .unstaged_diff
3649 .as_ref()
3650 .is_some_and(|diff| diff.is_upgradable());
3651 let has_uncommitted_diff = diff_state
3652 .uncommitted_diff
3653 .as_ref()
3654 .is_some_and(|set| set.is_upgradable());
3655
3656 Some((
3657 buffer,
3658 repo_path,
3659 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3660 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3661 ))
3662 })
3663 })
3664 .collect::<Vec<_>>()
3665 })
3666 })??;
3667
3668 let buffer_diff_base_changes = cx
3669 .background_spawn(async move {
3670 let mut changes = Vec::new();
3671 for (buffer, repo_path, current_index_text, current_head_text) in
3672 &repo_diff_state_updates
3673 {
3674 let index_text = if current_index_text.is_some() {
3675 backend.load_index_text(repo_path.clone()).await
3676 } else {
3677 None
3678 };
3679 let head_text = if current_head_text.is_some() {
3680 backend.load_committed_text(repo_path.clone()).await
3681 } else {
3682 None
3683 };
3684
3685 let change =
3686 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3687 (Some(current_index), Some(current_head)) => {
3688 let index_changed =
3689 index_text.as_deref() != current_index.as_deref();
3690 let head_changed =
3691 head_text.as_deref() != current_head.as_deref();
3692 if index_changed && head_changed {
3693 if index_text == head_text {
3694 Some(DiffBasesChange::SetBoth(head_text))
3695 } else {
3696 Some(DiffBasesChange::SetEach {
3697 index: index_text,
3698 head: head_text,
3699 })
3700 }
3701 } else if index_changed {
3702 Some(DiffBasesChange::SetIndex(index_text))
3703 } else if head_changed {
3704 Some(DiffBasesChange::SetHead(head_text))
3705 } else {
3706 None
3707 }
3708 }
3709 (Some(current_index), None) => {
3710 let index_changed =
3711 index_text.as_deref() != current_index.as_deref();
3712 index_changed
3713 .then_some(DiffBasesChange::SetIndex(index_text))
3714 }
3715 (None, Some(current_head)) => {
3716 let head_changed =
3717 head_text.as_deref() != current_head.as_deref();
3718 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3719 }
3720 (None, None) => None,
3721 };
3722
3723 changes.push((buffer.clone(), change))
3724 }
3725 changes
3726 })
3727 .await;
3728
3729 git_store.update(&mut cx, |git_store, cx| {
3730 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3731 let buffer_snapshot = buffer.read(cx).text_snapshot();
3732 let buffer_id = buffer_snapshot.remote_id();
3733 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3734 continue;
3735 };
3736
3737 let downstream_client = git_store.downstream_client();
3738 diff_state.update(cx, |diff_state, cx| {
3739 use proto::update_diff_bases::Mode;
3740
3741 if let Some((diff_bases_change, (client, project_id))) =
3742 diff_bases_change.clone().zip(downstream_client)
3743 {
3744 let (staged_text, committed_text, mode) = match diff_bases_change {
3745 DiffBasesChange::SetIndex(index) => {
3746 (index, None, Mode::IndexOnly)
3747 }
3748 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3749 DiffBasesChange::SetEach { index, head } => {
3750 (index, head, Mode::IndexAndHead)
3751 }
3752 DiffBasesChange::SetBoth(text) => {
3753 (None, text, Mode::IndexMatchesHead)
3754 }
3755 };
3756 client
3757 .send(proto::UpdateDiffBases {
3758 project_id: project_id.to_proto(),
3759 buffer_id: buffer_id.to_proto(),
3760 staged_text,
3761 committed_text,
3762 mode: mode as i32,
3763 })
3764 .log_err();
3765 }
3766
3767 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3768 });
3769 }
3770 })
3771 },
3772 );
3773 }
3774
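    /// Enqueues `job` on this repository's sequential git worker and returns a
    /// receiver for its result. The closure is handed the resolved
    /// `RepositoryState` (local backend or remote client) together with an
    /// `AsyncApp`; when a `status` message is provided it is surfaced via
    /// `active_jobs` for the duration of the job.
    ///
    /// Hedged usage sketch (not a doctest; assumes a `&mut Repository` named
    /// `repository` is in scope):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git branch --list".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
    ///             backend.branches().await
    ///         }
    ///         RepositoryState::Remote(..) => anyhow::bail!("remote path omitted from this sketch"),
    ///     }
    /// });
    /// // Awaiting `rx` yields `Err(Canceled)` if the worker is dropped before the job runs.
    /// ```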
3775 pub fn send_job<F, Fut, R>(
3776 &mut self,
3777 status: Option<SharedString>,
3778 job: F,
3779 ) -> oneshot::Receiver<R>
3780 where
3781 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3782 Fut: Future<Output = R> + 'static,
3783 R: Send + 'static,
3784 {
3785 self.send_keyed_job(None, status, job)
3786 }
3787
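    /// Like `send_job`, but tags the job with an optional `GitJobKey`. The
    /// worker skips a keyed job when a newer job with the same key is already
    /// queued behind it (see `spawn_local_git_worker`), so repeated keyed
    /// submissions coalesce to the most recent one.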
3788 fn send_keyed_job<F, Fut, R>(
3789 &mut self,
3790 key: Option<GitJobKey>,
3791 status: Option<SharedString>,
3792 job: F,
3793 ) -> oneshot::Receiver<R>
3794 where
3795 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3796 Fut: Future<Output = R> + 'static,
3797 R: Send + 'static,
3798 {
3799 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3800 let job_id = post_inc(&mut self.job_id);
3801 let this = self.this.clone();
3802 self.job_sender
3803 .unbounded_send(GitJob {
3804 key,
3805 job: Box::new(move |state, cx: &mut AsyncApp| {
3806 let job = job(state, cx.clone());
3807 cx.spawn(async move |cx| {
3808 if let Some(s) = status.clone() {
3809 this.update(cx, |this, cx| {
3810 this.active_jobs.insert(
3811 job_id,
3812 JobInfo {
3813 start: Instant::now(),
3814 message: s.clone(),
3815 },
3816 );
3817
3818 cx.notify();
3819 })
3820 .ok();
3821 }
3822 let result = job.await;
3823
3824 this.update(cx, |this, cx| {
3825 this.active_jobs.remove(&job_id);
3826 cx.notify();
3827 })
3828 .ok();
3829
3830 result_tx.send(result).ok();
3831 })
3832 }),
3833 })
3834 .ok();
3835 result_rx
3836 }
3837
3838 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3839 let Some(git_store) = self.git_store.upgrade() else {
3840 return;
3841 };
3842 let entity = cx.entity();
3843 git_store.update(cx, |git_store, cx| {
3844 let Some((&id, _)) = git_store
3845 .repositories
3846 .iter()
3847 .find(|(_, handle)| *handle == &entity)
3848 else {
3849 return;
3850 };
3851 git_store.active_repo_id = Some(id);
3852 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3853 });
3854 }
3855
3856 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3857 self.snapshot.status()
3858 }
3859
3860 pub fn cached_stash(&self) -> GitStash {
3861 self.snapshot.stash_entries.clone()
3862 }
3863
3864 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3865 let git_store = self.git_store.upgrade()?;
3866 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3867 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3868 let abs_path = SanitizedPath::new(&abs_path);
3869 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3870 Some(ProjectPath {
3871 worktree_id: worktree.read(cx).id(),
3872 path: relative_path,
3873 })
3874 }
3875
3876 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3877 let git_store = self.git_store.upgrade()?;
3878 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3879 let abs_path = worktree_store.absolutize(path, cx)?;
3880 self.snapshot.abs_path_to_repo_path(&abs_path)
3881 }
3882
3883 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3884 other
3885 .read(cx)
3886 .snapshot
3887 .work_directory_abs_path
3888 .starts_with(&self.snapshot.work_directory_abs_path)
3889 }
3890
3891 pub fn open_commit_buffer(
3892 &mut self,
3893 languages: Option<Arc<LanguageRegistry>>,
3894 buffer_store: Entity<BufferStore>,
3895 cx: &mut Context<Self>,
3896 ) -> Task<Result<Entity<Buffer>>> {
3897 let id = self.id;
3898 if let Some(buffer) = self.commit_message_buffer.clone() {
3899 return Task::ready(Ok(buffer));
3900 }
3901 let this = cx.weak_entity();
3902
3903 let rx = self.send_job(None, move |state, mut cx| async move {
3904 let Some(this) = this.upgrade() else {
3905 bail!("git store was dropped");
3906 };
3907 match state {
3908 RepositoryState::Local(..) => {
3909 this.update(&mut cx, |_, cx| {
3910 Self::open_local_commit_buffer(languages, buffer_store, cx)
3911 })?
3912 .await
3913 }
3914 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3915 let request = client.request(proto::OpenCommitMessageBuffer {
3916 project_id: project_id.0,
3917 repository_id: id.to_proto(),
3918 });
3919 let response = request.await.context("requesting to open commit buffer")?;
3920 let buffer_id = BufferId::new(response.buffer_id)?;
3921 let buffer = buffer_store
3922 .update(&mut cx, |buffer_store, cx| {
3923 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3924 })?
3925 .await?;
3926 if let Some(language_registry) = languages {
3927 let git_commit_language =
3928 language_registry.language_for_name("Git Commit").await?;
3929 buffer.update(&mut cx, |buffer, cx| {
3930 buffer.set_language(Some(git_commit_language), cx);
3931 })?;
3932 }
3933 this.update(&mut cx, |this, _| {
3934 this.commit_message_buffer = Some(buffer.clone());
3935 })?;
3936 Ok(buffer)
3937 }
3938 }
3939 });
3940
3941 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3942 }
3943
3944 fn open_local_commit_buffer(
3945 language_registry: Option<Arc<LanguageRegistry>>,
3946 buffer_store: Entity<BufferStore>,
3947 cx: &mut Context<Self>,
3948 ) -> Task<Result<Entity<Buffer>>> {
3949 cx.spawn(async move |repository, cx| {
3950 let buffer = buffer_store
3951 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3952 .await?;
3953
3954 if let Some(language_registry) = language_registry {
3955 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3956 buffer.update(cx, |buffer, cx| {
3957 buffer.set_language(Some(git_commit_language), cx);
3958 })?;
3959 }
3960
3961 repository.update(cx, |repository, _| {
3962 repository.commit_message_buffer = Some(buffer.clone());
3963 })?;
3964 Ok(buffer)
3965 })
3966 }
3967
3968 pub fn checkout_files(
3969 &mut self,
3970 commit: &str,
3971 paths: Vec<RepoPath>,
3972 cx: &mut Context<Self>,
3973 ) -> Task<Result<()>> {
3974 let commit = commit.to_string();
3975 let id = self.id;
3976
3977 self.spawn_job_with_tracking(
3978 paths.clone(),
3979 pending_op::GitStatus::Reverted,
3980 cx,
3981 async move |this, cx| {
3982 this.update(cx, |this, _cx| {
3983 this.send_job(
3984 Some(format!("git checkout {}", commit).into()),
3985 move |git_repo, _| async move {
3986 match git_repo {
3987 RepositoryState::Local(LocalRepositoryState {
3988 backend,
3989 environment,
3990 ..
3991 }) => {
3992 backend
3993 .checkout_files(commit, paths, environment.clone())
3994 .await
3995 }
3996 RepositoryState::Remote(RemoteRepositoryState {
3997 project_id,
3998 client,
3999 }) => {
4000 client
4001 .request(proto::GitCheckoutFiles {
4002 project_id: project_id.0,
4003 repository_id: id.to_proto(),
4004 commit,
4005 paths: paths
4006 .into_iter()
4007 .map(|p| p.to_proto())
4008 .collect(),
4009 })
4010 .await?;
4011
4012 Ok(())
4013 }
4014 }
4015 },
4016 )
4017 })?
4018 .await?
4019 },
4020 )
4021 }
4022
4023 pub fn reset(
4024 &mut self,
4025 commit: String,
4026 reset_mode: ResetMode,
4027 _cx: &mut App,
4028 ) -> oneshot::Receiver<Result<()>> {
4029 let id = self.id;
4030
4031 self.send_job(None, move |git_repo, _| async move {
4032 match git_repo {
4033 RepositoryState::Local(LocalRepositoryState {
4034 backend,
4035 environment,
4036 ..
4037 }) => backend.reset(commit, reset_mode, environment).await,
4038 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4039 client
4040 .request(proto::GitReset {
4041 project_id: project_id.0,
4042 repository_id: id.to_proto(),
4043 commit,
4044 mode: match reset_mode {
4045 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4046 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4047 },
4048 })
4049 .await?;
4050
4051 Ok(())
4052 }
4053 }
4054 })
4055 }
4056
4057 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4058 let id = self.id;
4059 self.send_job(None, move |git_repo, _cx| async move {
4060 match git_repo {
4061 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4062 backend.show(commit).await
4063 }
4064 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4065 let resp = client
4066 .request(proto::GitShow {
4067 project_id: project_id.0,
4068 repository_id: id.to_proto(),
4069 commit,
4070 })
4071 .await?;
4072
4073 Ok(CommitDetails {
4074 sha: resp.sha.into(),
4075 message: resp.message.into(),
4076 commit_timestamp: resp.commit_timestamp,
4077 author_email: resp.author_email.into(),
4078 author_name: resp.author_name.into(),
4079 })
4080 }
4081 }
4082 })
4083 }
4084
4085 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4086 let id = self.id;
4087 self.send_job(None, move |git_repo, cx| async move {
4088 match git_repo {
4089 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4090 backend.load_commit(commit, cx).await
4091 }
4092 RepositoryState::Remote(RemoteRepositoryState {
4093 client, project_id, ..
4094 }) => {
4095 let response = client
4096 .request(proto::LoadCommitDiff {
4097 project_id: project_id.0,
4098 repository_id: id.to_proto(),
4099 commit,
4100 })
4101 .await?;
4102 Ok(CommitDiff {
4103 files: response
4104 .files
4105 .into_iter()
4106 .map(|file| {
4107 Ok(CommitFile {
4108 path: RepoPath::from_proto(&file.path)?,
4109 old_text: file.old_text,
4110 new_text: file.new_text,
4111 })
4112 })
4113 .collect::<Result<Vec<_>>>()?,
4114 })
4115 }
4116 }
4117 })
4118 }
4119
4120 pub fn file_history(
4121 &mut self,
4122 path: RepoPath,
4123 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4124 self.file_history_paginated(path, 0, None)
4125 }
4126
4127 pub fn file_history_paginated(
4128 &mut self,
4129 path: RepoPath,
4130 skip: usize,
4131 limit: Option<usize>,
4132 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4133 let id = self.id;
4134 self.send_job(None, move |git_repo, _cx| async move {
4135 match git_repo {
4136 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4137 backend.file_history_paginated(path, skip, limit).await
4138 }
4139 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4140 let response = client
4141 .request(proto::GitFileHistory {
4142 project_id: project_id.0,
4143 repository_id: id.to_proto(),
4144 path: path.to_proto(),
4145 skip: skip as u64,
4146 limit: limit.map(|l| l as u64),
4147 })
4148 .await?;
4149 Ok(git::repository::FileHistory {
4150 entries: response
4151 .entries
4152 .into_iter()
4153 .map(|entry| git::repository::FileHistoryEntry {
4154 sha: entry.sha.into(),
4155 subject: entry.subject.into(),
4156 message: entry.message.into(),
4157 commit_timestamp: entry.commit_timestamp,
4158 author_name: entry.author_name.into(),
4159 author_email: entry.author_email.into(),
4160 })
4161 .collect(),
4162 path: RepoPath::from_proto(&response.path)?,
4163 })
4164 }
4165 }
4166 })
4167 }
4168
4169 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4170 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4171 }
4172
4173 fn save_buffers<'a>(
4174 &self,
4175 entries: impl IntoIterator<Item = &'a RepoPath>,
4176 cx: &mut Context<Self>,
4177 ) -> Vec<Task<anyhow::Result<()>>> {
4178 let mut save_futures = Vec::new();
4179 if let Some(buffer_store) = self.buffer_store(cx) {
4180 buffer_store.update(cx, |buffer_store, cx| {
4181 for path in entries {
4182 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4183 continue;
4184 };
4185 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4186 && buffer
4187 .read(cx)
4188 .file()
4189 .is_some_and(|file| file.disk_state().exists())
4190 && buffer.read(cx).has_unsaved_edits()
4191 {
4192 save_futures.push(buffer_store.save_buffer(buffer, cx));
4193 }
4194 }
4195 })
4196 }
4197 save_futures
4198 }
4199
4200 pub fn stage_entries(
4201 &mut self,
4202 entries: Vec<RepoPath>,
4203 cx: &mut Context<Self>,
4204 ) -> Task<anyhow::Result<()>> {
4205 self.stage_or_unstage_entries(true, entries, cx)
4206 }
4207
4208 pub fn unstage_entries(
4209 &mut self,
4210 entries: Vec<RepoPath>,
4211 cx: &mut Context<Self>,
4212 ) -> Task<anyhow::Result<()>> {
4213 self.stage_or_unstage_entries(false, entries, cx)
4214 }
4215
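    /// Stages or unstages `entries`. Dirty buffers for those paths are saved
    /// first, open uncommitted diffs are updated optimistically (all hunks
    /// marked staged or unstaged), and the actual `git add`/`git reset` runs
    /// as a keyed `WriteIndex` job; if that job fails, the pending hunk state
    /// is rolled back via `clear_pending_hunks`.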
4216 fn stage_or_unstage_entries(
4217 &mut self,
4218 stage: bool,
4219 entries: Vec<RepoPath>,
4220 cx: &mut Context<Self>,
4221 ) -> Task<anyhow::Result<()>> {
4222 if entries.is_empty() {
4223 return Task::ready(Ok(()));
4224 }
4225 let Some(git_store) = self.git_store.upgrade() else {
4226 return Task::ready(Ok(()));
4227 };
4228 let id = self.id;
4229 let save_tasks = self.save_buffers(&entries, cx);
4230 let paths = entries
4231 .iter()
4232 .map(|p| p.as_unix_str())
4233 .collect::<Vec<_>>()
4234 .join(" ");
4235 let status = if stage {
4236 format!("git add {paths}")
4237 } else {
4238 format!("git reset {paths}")
4239 };
4240 let job_key = GitJobKey::WriteIndex(entries.clone());
4241
4242 self.spawn_job_with_tracking(
4243 entries.clone(),
4244 if stage {
4245 pending_op::GitStatus::Staged
4246 } else {
4247 pending_op::GitStatus::Unstaged
4248 },
4249 cx,
4250 async move |this, cx| {
4251 for save_task in save_tasks {
4252 save_task.await?;
4253 }
4254
4255 this.update(cx, |this, cx| {
4256 let weak_this = cx.weak_entity();
4257 this.send_keyed_job(
4258 Some(job_key),
4259 Some(status.into()),
4260 move |git_repo, mut cx| async move {
4261 let hunk_staging_operation_counts = weak_this
4262 .update(&mut cx, |this, cx| {
4263 let mut hunk_staging_operation_counts = HashMap::default();
4264 for path in &entries {
4265 let Some(project_path) =
4266 this.repo_path_to_project_path(path, cx)
4267 else {
4268 continue;
4269 };
4270 let Some(buffer) = git_store
4271 .read(cx)
4272 .buffer_store
4273 .read(cx)
4274 .get_by_path(&project_path)
4275 else {
4276 continue;
4277 };
4278 let Some(diff_state) = git_store
4279 .read(cx)
4280 .diffs
4281 .get(&buffer.read(cx).remote_id())
4282 .cloned()
4283 else {
4284 continue;
4285 };
4286 let Some(uncommitted_diff) =
4287 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4288 |uncommitted_diff| uncommitted_diff.upgrade(),
4289 )
4290 else {
4291 continue;
4292 };
4293 let buffer_snapshot = buffer.read(cx).text_snapshot();
4294 let file_exists = buffer
4295 .read(cx)
4296 .file()
4297 .is_some_and(|file| file.disk_state().exists());
4298 let hunk_staging_operation_count =
4299 diff_state.update(cx, |diff_state, cx| {
4300 uncommitted_diff.update(
4301 cx,
4302 |uncommitted_diff, cx| {
4303 uncommitted_diff
4304 .stage_or_unstage_all_hunks(
4305 stage,
4306 &buffer_snapshot,
4307 file_exists,
4308 cx,
4309 );
4310 },
4311 );
4312
4313 diff_state.hunk_staging_operation_count += 1;
4314 diff_state.hunk_staging_operation_count
4315 });
4316 hunk_staging_operation_counts.insert(
4317 diff_state.downgrade(),
4318 hunk_staging_operation_count,
4319 );
4320 }
4321 hunk_staging_operation_counts
4322 })
4323 .unwrap_or_default();
4324
4325 let result = match git_repo {
4326 RepositoryState::Local(LocalRepositoryState {
4327 backend,
4328 environment,
4329 ..
4330 }) => {
4331 if stage {
4332 backend.stage_paths(entries, environment.clone()).await
4333 } else {
4334 backend.unstage_paths(entries, environment.clone()).await
4335 }
4336 }
4337 RepositoryState::Remote(RemoteRepositoryState {
4338 project_id,
4339 client,
4340 }) => {
4341 if stage {
4342 client
4343 .request(proto::Stage {
4344 project_id: project_id.0,
4345 repository_id: id.to_proto(),
4346 paths: entries
4347 .into_iter()
4348 .map(|repo_path| repo_path.to_proto())
4349 .collect(),
4350 })
4351 .await
4352 .context("sending stage request")
4353 .map(|_| ())
4354 } else {
4355 client
4356 .request(proto::Unstage {
4357 project_id: project_id.0,
4358 repository_id: id.to_proto(),
4359 paths: entries
4360 .into_iter()
4361 .map(|repo_path| repo_path.to_proto())
4362 .collect(),
4363 })
4364 .await
4365 .context("sending unstage request")
4366 .map(|_| ())
4367 }
4368 }
4369 };
4370
4371 for (diff_state, hunk_staging_operation_count) in
4372 hunk_staging_operation_counts
4373 {
4374 diff_state
4375 .update(&mut cx, |diff_state, cx| {
4376 if result.is_ok() {
4377 diff_state.hunk_staging_operation_count_as_of_write =
4378 hunk_staging_operation_count;
4379 } else if let Some(uncommitted_diff) =
4380 &diff_state.uncommitted_diff
4381 {
4382 uncommitted_diff
4383 .update(cx, |uncommitted_diff, cx| {
4384 uncommitted_diff.clear_pending_hunks(cx);
4385 })
4386 .ok();
4387 }
4388 })
4389 .ok();
4390 }
4391
4392 result
4393 },
4394 )
4395 })?
4396 .await?
4397 },
4398 )
4399 }
4400
4401 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4402 let to_stage = self
4403 .cached_status()
4404 .filter_map(|entry| {
4405 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4406 if ops.staging() || ops.staged() {
4407 None
4408 } else {
4409 Some(entry.repo_path)
4410 }
4411 } else if entry.status.staging().is_fully_staged() {
4412 None
4413 } else {
4414 Some(entry.repo_path)
4415 }
4416 })
4417 .collect();
4418 self.stage_or_unstage_entries(true, to_stage, cx)
4419 }
4420
4421 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4422 let to_unstage = self
4423 .cached_status()
4424 .filter_map(|entry| {
4425 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4426 if !ops.staging() && !ops.staged() {
4427 None
4428 } else {
4429 Some(entry.repo_path)
4430 }
4431 } else if entry.status.staging().is_fully_unstaged() {
4432 None
4433 } else {
4434 Some(entry.repo_path)
4435 }
4436 })
4437 .collect();
4438 self.stage_or_unstage_entries(false, to_unstage, cx)
4439 }
4440
4441 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4442 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4443
4444 self.stash_entries(to_stash, cx)
4445 }
4446
4447 pub fn stash_entries(
4448 &mut self,
4449 entries: Vec<RepoPath>,
4450 cx: &mut Context<Self>,
4451 ) -> Task<anyhow::Result<()>> {
4452 let id = self.id;
4453
4454 cx.spawn(async move |this, cx| {
4455 this.update(cx, |this, _| {
4456 this.send_job(None, move |git_repo, _cx| async move {
4457 match git_repo {
4458 RepositoryState::Local(LocalRepositoryState {
4459 backend,
4460 environment,
4461 ..
4462 }) => backend.stash_paths(entries, environment).await,
4463 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4464 client
4465 .request(proto::Stash {
4466 project_id: project_id.0,
4467 repository_id: id.to_proto(),
4468 paths: entries
4469 .into_iter()
4470 .map(|repo_path| repo_path.to_proto())
4471 .collect(),
4472 })
4473 .await
4474 .context("sending stash request")?;
4475 Ok(())
4476 }
4477 }
4478 })
4479 })?
4480 .await??;
4481 Ok(())
4482 })
4483 }
4484
4485 pub fn stash_pop(
4486 &mut self,
4487 index: Option<usize>,
4488 cx: &mut Context<Self>,
4489 ) -> Task<anyhow::Result<()>> {
4490 let id = self.id;
4491 cx.spawn(async move |this, cx| {
4492 this.update(cx, |this, _| {
4493 this.send_job(None, move |git_repo, _cx| async move {
4494 match git_repo {
4495 RepositoryState::Local(LocalRepositoryState {
4496 backend,
4497 environment,
4498 ..
4499 }) => backend.stash_pop(index, environment).await,
4500 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4501 client
4502 .request(proto::StashPop {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 stash_index: index.map(|i| i as u64),
4506 })
4507 .await
4508 .context("sending stash pop request")?;
4509 Ok(())
4510 }
4511 }
4512 })
4513 })?
4514 .await??;
4515 Ok(())
4516 })
4517 }
4518
4519 pub fn stash_apply(
4520 &mut self,
4521 index: Option<usize>,
4522 cx: &mut Context<Self>,
4523 ) -> Task<anyhow::Result<()>> {
4524 let id = self.id;
4525 cx.spawn(async move |this, cx| {
4526 this.update(cx, |this, _| {
4527 this.send_job(None, move |git_repo, _cx| async move {
4528 match git_repo {
4529 RepositoryState::Local(LocalRepositoryState {
4530 backend,
4531 environment,
4532 ..
4533 }) => backend.stash_apply(index, environment).await,
4534 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4535 client
4536 .request(proto::StashApply {
4537 project_id: project_id.0,
4538 repository_id: id.to_proto(),
4539 stash_index: index.map(|i| i as u64),
4540 })
4541 .await
4542 .context("sending stash apply request")?;
4543 Ok(())
4544 }
4545 }
4546 })
4547 })?
4548 .await??;
4549 Ok(())
4550 })
4551 }
4552
4553 pub fn stash_drop(
4554 &mut self,
4555 index: Option<usize>,
4556 cx: &mut Context<Self>,
4557 ) -> oneshot::Receiver<anyhow::Result<()>> {
4558 let id = self.id;
4559 let updates_tx = self
4560 .git_store()
4561 .and_then(|git_store| match &git_store.read(cx).state {
4562 GitStoreState::Local { downstream, .. } => downstream
4563 .as_ref()
4564 .map(|downstream| downstream.updates_tx.clone()),
4565 _ => None,
4566 });
4567 let this = cx.weak_entity();
4568 self.send_job(None, move |git_repo, mut cx| async move {
4569 match git_repo {
4570 RepositoryState::Local(LocalRepositoryState {
4571 backend,
4572 environment,
4573 ..
4574 }) => {
4575 // TODO would be nice to not have to do this manually
4576 let result = backend.stash_drop(index, environment).await;
4577 if result.is_ok()
4578 && let Ok(stash_entries) = backend.stash_entries().await
4579 {
4580 let snapshot = this.update(&mut cx, |this, cx| {
4581 this.snapshot.stash_entries = stash_entries;
4582 cx.emit(RepositoryEvent::StashEntriesChanged);
4583 this.snapshot.clone()
4584 })?;
4585 if let Some(updates_tx) = updates_tx {
4586 updates_tx
4587 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4588 .ok();
4589 }
4590 }
4591
4592 result
4593 }
4594 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4595 client
4596 .request(proto::StashDrop {
4597 project_id: project_id.0,
4598 repository_id: id.to_proto(),
4599 stash_index: index.map(|i| i as u64),
4600 })
4601 .await
                        .context("sending stash drop request")?;
4603 Ok(())
4604 }
4605 }
4606 })
4607 }
4608
4609 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4610 let id = self.id;
4611 self.send_job(
4612 Some(format!("git hook {}", hook.as_str()).into()),
4613 move |git_repo, _cx| async move {
4614 match git_repo {
4615 RepositoryState::Local(LocalRepositoryState {
4616 backend,
4617 environment,
4618 ..
4619 }) => backend.run_hook(hook, environment.clone()).await,
4620 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4621 client
4622 .request(proto::RunGitHook {
4623 project_id: project_id.0,
4624 repository_id: id.to_proto(),
4625 hook: hook.to_proto(),
4626 })
4627 .await?;
4628
4629 Ok(())
4630 }
4631 }
4632 },
4633 )
4634 }
4635
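    /// Creates a commit with the given message and options. The pre-commit
    /// hook is kicked off first via `run_hook`, and the commit job waits for
    /// it to succeed before invoking the local backend or proxying a
    /// `proto::Commit` request (with an askpass id) to the host.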
4636 pub fn commit(
4637 &mut self,
4638 message: SharedString,
4639 name_and_email: Option<(SharedString, SharedString)>,
4640 options: CommitOptions,
4641 askpass: AskPassDelegate,
4642 cx: &mut App,
4643 ) -> oneshot::Receiver<Result<()>> {
4644 let id = self.id;
4645 let askpass_delegates = self.askpass_delegates.clone();
4646 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4647
4648 let rx = self.run_hook(RunHook::PreCommit, cx);
4649
4650 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4651 rx.await??;
4652
4653 match git_repo {
4654 RepositoryState::Local(LocalRepositoryState {
4655 backend,
4656 environment,
4657 ..
4658 }) => {
4659 backend
4660 .commit(message, name_and_email, options, askpass, environment)
4661 .await
4662 }
4663 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4664 askpass_delegates.lock().insert(askpass_id, askpass);
4665 let _defer = util::defer(|| {
4666 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4667 debug_assert!(askpass_delegate.is_some());
4668 });
4669 let (name, email) = name_and_email.unzip();
4670 client
4671 .request(proto::Commit {
4672 project_id: project_id.0,
4673 repository_id: id.to_proto(),
4674 message: String::from(message),
4675 name: name.map(String::from),
4676 email: email.map(String::from),
4677 options: Some(proto::commit::CommitOptions {
4678 amend: options.amend,
4679 signoff: options.signoff,
4680 }),
4681 askpass_id,
4682 })
4683 .await
4684 .context("sending commit request")?;
4685
4686 Ok(())
4687 }
4688 }
4689 })
4690 }
4691
4692 pub fn fetch(
4693 &mut self,
4694 fetch_options: FetchOptions,
4695 askpass: AskPassDelegate,
4696 _cx: &mut App,
4697 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4698 let askpass_delegates = self.askpass_delegates.clone();
4699 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4700 let id = self.id;
4701
4702 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4703 match git_repo {
4704 RepositoryState::Local(LocalRepositoryState {
4705 backend,
4706 environment,
4707 ..
4708 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4709 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4710 askpass_delegates.lock().insert(askpass_id, askpass);
4711 let _defer = util::defer(|| {
4712 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4713 debug_assert!(askpass_delegate.is_some());
4714 });
4715
4716 let response = client
4717 .request(proto::Fetch {
4718 project_id: project_id.0,
4719 repository_id: id.to_proto(),
4720 askpass_id,
4721 remote: fetch_options.to_proto(),
4722 })
4723 .await
4724 .context("sending fetch request")?;
4725
4726 Ok(RemoteCommandOutput {
4727 stdout: response.stdout,
4728 stderr: response.stderr,
4729 })
4730 }
4731 }
4732 })
4733 }
4734
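    /// Pushes `branch` to `remote`, mapping `PushOptions` onto
    /// `--set-upstream` / `--force-with-lease`. After a successful local push
    /// the branch list is re-read so the snapshot's head branch is refreshed,
    /// and the updated snapshot is forwarded downstream when an updates
    /// channel exists.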
4735 pub fn push(
4736 &mut self,
4737 branch: SharedString,
4738 remote: SharedString,
4739 options: Option<PushOptions>,
4740 askpass: AskPassDelegate,
4741 cx: &mut Context<Self>,
4742 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4743 let askpass_delegates = self.askpass_delegates.clone();
4744 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4745 let id = self.id;
4746
4747 let args = options
4748 .map(|option| match option {
4749 PushOptions::SetUpstream => " --set-upstream",
4750 PushOptions::Force => " --force-with-lease",
4751 })
4752 .unwrap_or("");
4753
4754 let updates_tx = self
4755 .git_store()
4756 .and_then(|git_store| match &git_store.read(cx).state {
4757 GitStoreState::Local { downstream, .. } => downstream
4758 .as_ref()
4759 .map(|downstream| downstream.updates_tx.clone()),
4760 _ => None,
4761 });
4762
4763 let this = cx.weak_entity();
4764 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4766 move |git_repo, mut cx| async move {
4767 match git_repo {
4768 RepositoryState::Local(LocalRepositoryState {
4769 backend,
4770 environment,
4771 ..
4772 }) => {
4773 let result = backend
4774 .push(
4775 branch.to_string(),
4776 remote.to_string(),
4777 options,
4778 askpass,
4779 environment.clone(),
4780 cx.clone(),
4781 )
4782 .await;
4783 // TODO would be nice to not have to do this manually
4784 if result.is_ok() {
4785 let branches = backend.branches().await?;
4786 let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after push is {branch:?}");
4788 let snapshot = this.update(&mut cx, |this, cx| {
4789 this.snapshot.branch = branch;
4790 cx.emit(RepositoryEvent::BranchChanged);
4791 this.snapshot.clone()
4792 })?;
4793 if let Some(updates_tx) = updates_tx {
4794 updates_tx
4795 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4796 .ok();
4797 }
4798 }
4799 result
4800 }
4801 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4802 askpass_delegates.lock().insert(askpass_id, askpass);
4803 let _defer = util::defer(|| {
4804 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4805 debug_assert!(askpass_delegate.is_some());
4806 });
4807 let response = client
4808 .request(proto::Push {
4809 project_id: project_id.0,
4810 repository_id: id.to_proto(),
4811 askpass_id,
4812 branch_name: branch.to_string(),
4813 remote_name: remote.to_string(),
4814 options: options.map(|options| match options {
4815 PushOptions::Force => proto::push::PushOptions::Force,
4816 PushOptions::SetUpstream => {
4817 proto::push::PushOptions::SetUpstream
4818 }
4819 }
4820 as i32),
4821 })
4822 .await
4823 .context("sending push request")?;
4824
4825 Ok(RemoteCommandOutput {
4826 stdout: response.stdout,
4827 stderr: response.stderr,
4828 })
4829 }
4830 }
4831 },
4832 )
4833 }
4834
4835 pub fn pull(
4836 &mut self,
4837 branch: Option<SharedString>,
4838 remote: SharedString,
4839 rebase: bool,
4840 askpass: AskPassDelegate,
4841 _cx: &mut App,
4842 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4843 let askpass_delegates = self.askpass_delegates.clone();
4844 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4845 let id = self.id;
4846
4847 let mut status = "git pull".to_string();
4848 if rebase {
4849 status.push_str(" --rebase");
4850 }
4851 status.push_str(&format!(" {}", remote));
4852 if let Some(b) = &branch {
4853 status.push_str(&format!(" {}", b));
4854 }
4855
4856 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4857 match git_repo {
4858 RepositoryState::Local(LocalRepositoryState {
4859 backend,
4860 environment,
4861 ..
4862 }) => {
4863 backend
4864 .pull(
4865 branch.as_ref().map(|b| b.to_string()),
4866 remote.to_string(),
4867 rebase,
4868 askpass,
4869 environment.clone(),
4870 cx,
4871 )
4872 .await
4873 }
4874 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4875 askpass_delegates.lock().insert(askpass_id, askpass);
4876 let _defer = util::defer(|| {
4877 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4878 debug_assert!(askpass_delegate.is_some());
4879 });
4880 let response = client
4881 .request(proto::Pull {
4882 project_id: project_id.0,
4883 repository_id: id.to_proto(),
4884 askpass_id,
4885 rebase,
4886 branch_name: branch.as_ref().map(|b| b.to_string()),
4887 remote_name: remote.to_string(),
4888 })
4889 .await
4890 .context("sending pull request")?;
4891
4892 Ok(RemoteCommandOutput {
4893 stdout: response.stdout,
4894 stderr: response.stderr,
4895 })
4896 }
4897 }
4898 })
4899 }
4900
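    /// Writes `content` as the new index text for `path`, serialized with
    /// other index writes for the same path via a `WriteIndex` job key.
    /// Locally, the file's executable bit is read from disk and passed along
    /// to the backend; when `hunk_staging_operation_count` is supplied, it is
    /// recorded on the buffer's diff state as the count in effect at the time
    /// of this write.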
4901 fn spawn_set_index_text_job(
4902 &mut self,
4903 path: RepoPath,
4904 content: Option<String>,
4905 hunk_staging_operation_count: Option<usize>,
4906 cx: &mut Context<Self>,
4907 ) -> oneshot::Receiver<anyhow::Result<()>> {
4908 let id = self.id;
4909 let this = cx.weak_entity();
4910 let git_store = self.git_store.clone();
4911 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4912 self.send_keyed_job(
4913 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4914 None,
4915 move |git_repo, mut cx| async move {
4916 log::debug!(
4917 "start updating index text for buffer {}",
4918 path.as_unix_str()
4919 );
4920
4921 match git_repo {
4922 RepositoryState::Local(LocalRepositoryState {
4923 fs,
4924 backend,
4925 environment,
4926 ..
4927 }) => {
4928 let executable = match fs.metadata(&abs_path).await {
4929 Ok(Some(meta)) => meta.is_executable,
4930 Ok(None) => false,
4931 Err(_err) => false,
4932 };
4933 backend
4934 .set_index_text(path.clone(), content, environment.clone(), executable)
4935 .await?;
4936 }
4937 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4938 client
4939 .request(proto::SetIndexText {
4940 project_id: project_id.0,
4941 repository_id: id.to_proto(),
4942 path: path.to_proto(),
4943 text: content,
4944 })
4945 .await?;
4946 }
4947 }
4948 log::debug!(
4949 "finish updating index text for buffer {}",
4950 path.as_unix_str()
4951 );
4952
4953 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4954 let project_path = this
4955 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4956 .ok()
4957 .flatten();
4958 git_store.update(&mut cx, |git_store, cx| {
4959 let buffer_id = git_store
4960 .buffer_store
4961 .read(cx)
4962 .get_by_path(&project_path?)?
4963 .read(cx)
4964 .remote_id();
4965 let diff_state = git_store.diffs.get(&buffer_id)?;
4966 diff_state.update(cx, |diff_state, _| {
4967 diff_state.hunk_staging_operation_count_as_of_write =
4968 hunk_staging_operation_count;
4969 });
4970 Some(())
4971 })?;
4972 }
4973 Ok(())
4974 },
4975 )
4976 }
4977
4978 pub fn create_remote(
4979 &mut self,
4980 remote_name: String,
4981 remote_url: String,
4982 ) -> oneshot::Receiver<Result<()>> {
4983 let id = self.id;
4984 self.send_job(
4985 Some(format!("git remote add {remote_name} {remote_url}").into()),
4986 move |repo, _cx| async move {
4987 match repo {
4988 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4989 backend.create_remote(remote_name, remote_url).await
4990 }
4991 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4992 client
4993 .request(proto::GitCreateRemote {
4994 project_id: project_id.0,
4995 repository_id: id.to_proto(),
4996 remote_name,
4997 remote_url,
4998 })
4999 .await?;
5000
5001 Ok(())
5002 }
5003 }
5004 },
5005 )
5006 }
5007
5008 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5009 let id = self.id;
5010 self.send_job(
            Some(format!("git remote remove {remote_name}").into()),
5012 move |repo, _cx| async move {
5013 match repo {
5014 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5015 backend.remove_remote(remote_name).await
5016 }
5017 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5018 client
5019 .request(proto::GitRemoveRemote {
5020 project_id: project_id.0,
5021 repository_id: id.to_proto(),
5022 remote_name,
5023 })
5024 .await?;
5025
5026 Ok(())
5027 }
5028 }
5029 },
5030 )
5031 }
5032
5033 pub fn get_remotes(
5034 &mut self,
5035 branch_name: Option<String>,
5036 is_push: bool,
5037 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5038 let id = self.id;
5039 self.send_job(None, move |repo, _cx| async move {
5040 match repo {
5041 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5042 let remote = if let Some(branch_name) = branch_name {
5043 if is_push {
5044 backend.get_push_remote(branch_name).await?
5045 } else {
5046 backend.get_branch_remote(branch_name).await?
5047 }
5048 } else {
5049 None
5050 };
5051
5052 match remote {
5053 Some(remote) => Ok(vec![remote]),
5054 None => backend.get_all_remotes().await,
5055 }
5056 }
5057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5058 let response = client
5059 .request(proto::GetRemotes {
5060 project_id: project_id.0,
5061 repository_id: id.to_proto(),
5062 branch_name,
5063 is_push,
5064 })
5065 .await?;
5066
5067 let remotes = response
5068 .remotes
5069 .into_iter()
5070 .map(|remotes| Remote {
5071 name: remotes.name.into(),
5072 })
5073 .collect();
5074
5075 Ok(remotes)
5076 }
5077 }
5078 })
5079 }
5080
5081 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5082 let id = self.id;
5083 self.send_job(None, move |repo, _| async move {
5084 match repo {
5085 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5086 backend.branches().await
5087 }
5088 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5089 let response = client
5090 .request(proto::GitGetBranches {
5091 project_id: project_id.0,
5092 repository_id: id.to_proto(),
5093 })
5094 .await?;
5095
5096 let branches = response
5097 .branches
5098 .into_iter()
5099 .map(|branch| proto_to_branch(&branch))
5100 .collect();
5101
5102 Ok(branches)
5103 }
5104 }
5105 })
5106 }
5107
5108 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5109 let id = self.id;
5110 self.send_job(None, move |repo, _| async move {
5111 match repo {
5112 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5113 backend.worktrees().await
5114 }
5115 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5116 let response = client
5117 .request(proto::GitGetWorktrees {
5118 project_id: project_id.0,
5119 repository_id: id.to_proto(),
5120 })
5121 .await?;
5122
5123 let worktrees = response
5124 .worktrees
5125 .into_iter()
5126 .map(|worktree| proto_to_worktree(&worktree))
5127 .collect();
5128
5129 Ok(worktrees)
5130 }
5131 }
5132 })
5133 }
5134
5135 pub fn create_worktree(
5136 &mut self,
5137 name: String,
5138 path: PathBuf,
5139 commit: Option<String>,
5140 ) -> oneshot::Receiver<Result<()>> {
5141 let id = self.id;
5142 self.send_job(
5143 Some("git worktree add".into()),
5144 move |repo, _cx| async move {
5145 match repo {
5146 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5147 backend.create_worktree(name, path, commit).await
5148 }
5149 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5150 client
5151 .request(proto::GitCreateWorktree {
5152 project_id: project_id.0,
5153 repository_id: id.to_proto(),
5154 name,
5155 directory: path.to_string_lossy().to_string(),
5156 commit,
5157 })
5158 .await?;
5159
5160 Ok(())
5161 }
5162 }
5163 },
5164 )
5165 }
5166
5167 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5168 let id = self.id;
5169 self.send_job(None, move |repo, _| async move {
5170 match repo {
5171 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5172 backend.default_branch().await
5173 }
5174 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5175 let response = client
5176 .request(proto::GetDefaultBranch {
5177 project_id: project_id.0,
5178 repository_id: id.to_proto(),
5179 })
5180 .await?;
5181
5182 anyhow::Ok(response.branch.map(SharedString::from))
5183 }
5184 }
5185 })
5186 }
5187
5188 pub fn diff_tree(
5189 &mut self,
5190 diff_type: DiffTreeType,
5191 _cx: &App,
5192 ) -> oneshot::Receiver<Result<TreeDiff>> {
5193 let repository_id = self.snapshot.id;
5194 self.send_job(None, move |repo, _cx| async move {
5195 match repo {
5196 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5197 backend.diff_tree(diff_type).await
5198 }
5199 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5200 let response = client
5201 .request(proto::GetTreeDiff {
5202 project_id: project_id.0,
5203 repository_id: repository_id.0,
5204 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5205 base: diff_type.base().to_string(),
5206 head: diff_type.head().to_string(),
5207 })
5208 .await?;
5209
5210 let entries = response
5211 .entries
5212 .into_iter()
5213 .filter_map(|entry| {
5214 let status = match entry.status() {
5215 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5216 proto::tree_diff_status::Status::Modified => {
5217 TreeDiffStatus::Modified {
5218 old: git::Oid::from_str(
5219 &entry.oid.context("missing oid").log_err()?,
5220 )
5221 .log_err()?,
5222 }
5223 }
5224 proto::tree_diff_status::Status::Deleted => {
5225 TreeDiffStatus::Deleted {
5226 old: git::Oid::from_str(
5227 &entry.oid.context("missing oid").log_err()?,
5228 )
5229 .log_err()?,
5230 }
5231 }
5232 };
5233 Some((
5234 RepoPath::from_rel_path(
5235 &RelPath::from_proto(&entry.path).log_err()?,
5236 ),
5237 status,
5238 ))
5239 })
5240 .collect();
5241
5242 Ok(TreeDiff { entries })
5243 }
5244 }
5245 })
5246 }
5247
5248 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5249 let id = self.id;
5250 self.send_job(None, move |repo, _cx| async move {
5251 match repo {
5252 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5253 backend.diff(diff_type).await
5254 }
5255 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5256 let response = client
5257 .request(proto::GitDiff {
5258 project_id: project_id.0,
5259 repository_id: id.to_proto(),
5260 diff_type: match diff_type {
5261 DiffType::HeadToIndex => {
5262 proto::git_diff::DiffType::HeadToIndex.into()
5263 }
5264 DiffType::HeadToWorktree => {
5265 proto::git_diff::DiffType::HeadToWorktree.into()
5266 }
5267 },
5268 })
5269 .await?;
5270
5271 Ok(response.diff)
5272 }
5273 }
5274 })
5275 }
5276
5277 pub fn create_branch(
5278 &mut self,
5279 branch_name: String,
5280 base_branch: Option<String>,
5281 ) -> oneshot::Receiver<Result<()>> {
5282 let id = self.id;
5283 let status_msg = if let Some(ref base) = base_branch {
5284 format!("git switch -c {branch_name} {base}").into()
5285 } else {
5286 format!("git switch -c {branch_name}").into()
5287 };
5288 self.send_job(Some(status_msg), move |repo, _cx| async move {
5289 match repo {
5290 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5291 backend.create_branch(branch_name, base_branch).await
5292 }
5293 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5294 client
5295 .request(proto::GitCreateBranch {
5296 project_id: project_id.0,
5297 repository_id: id.to_proto(),
5298 branch_name,
5299 })
5300 .await?;
5301
5302 Ok(())
5303 }
5304 }
5305 })
5306 }
5307
5308 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5309 let id = self.id;
5310 self.send_job(
5311 Some(format!("git switch {branch_name}").into()),
5312 move |repo, _cx| async move {
5313 match repo {
5314 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5315 backend.change_branch(branch_name).await
5316 }
5317 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5318 client
5319 .request(proto::GitChangeBranch {
5320 project_id: project_id.0,
5321 repository_id: id.to_proto(),
5322 branch_name,
5323 })
5324 .await?;
5325
5326 Ok(())
5327 }
5328 }
5329 },
5330 )
5331 }
5332
5333 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5334 let id = self.id;
5335 self.send_job(
5336 Some(format!("git branch -d {branch_name}").into()),
5337 move |repo, _cx| async move {
5338 match repo {
5339 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5340 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5341 client
5342 .request(proto::GitDeleteBranch {
5343 project_id: project_id.0,
5344 repository_id: id.to_proto(),
5345 branch_name,
5346 })
5347 .await?;
5348
5349 Ok(())
5350 }
5351 }
5352 },
5353 )
5354 }
5355
5356 pub fn rename_branch(
5357 &mut self,
5358 branch: String,
5359 new_name: String,
5360 ) -> oneshot::Receiver<Result<()>> {
5361 let id = self.id;
5362 self.send_job(
5363 Some(format!("git branch -m {branch} {new_name}").into()),
5364 move |repo, _cx| async move {
5365 match repo {
5366 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5367 backend.rename_branch(branch, new_name).await
5368 }
5369 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5370 client
5371 .request(proto::GitRenameBranch {
5372 project_id: project_id.0,
5373 repository_id: id.to_proto(),
5374 branch,
5375 new_name,
5376 })
5377 .await?;
5378
5379 Ok(())
5380 }
5381 }
5382 },
5383 )
5384 }
5385
5386 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5387 let id = self.id;
5388 self.send_job(None, move |repo, _cx| async move {
5389 match repo {
5390 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5391 backend.check_for_pushed_commit().await
5392 }
5393 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5394 let response = client
5395 .request(proto::CheckForPushedCommits {
5396 project_id: project_id.0,
5397 repository_id: id.to_proto(),
5398 })
5399 .await?;
5400
5401 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5402
5403 Ok(branches)
5404 }
5405 }
5406 })
5407 }
5408
5409 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5410 self.send_job(None, |repo, _cx| async move {
5411 match repo {
5412 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5413 backend.checkpoint().await
5414 }
5415 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5416 }
5417 })
5418 }
5419
5420 pub fn restore_checkpoint(
5421 &mut self,
5422 checkpoint: GitRepositoryCheckpoint,
5423 ) -> oneshot::Receiver<Result<()>> {
5424 self.send_job(None, move |repo, _cx| async move {
5425 match repo {
5426 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5427 backend.restore_checkpoint(checkpoint).await
5428 }
5429 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5430 }
5431 })
5432 }
5433
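    /// Applies a `proto::UpdateRepository` message from the host to this
    /// repository's snapshot (branch, head commit, merge state, stash, remote
    /// URLs, and per-path statuses), emitting change events only for the
    /// parts that actually differ, then clears completed pending ops.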
5434 pub(crate) fn apply_remote_update(
5435 &mut self,
5436 update: proto::UpdateRepository,
5437 cx: &mut Context<Self>,
5438 ) -> Result<()> {
5439 let conflicted_paths = TreeSet::from_ordered_entries(
5440 update
5441 .current_merge_conflicts
5442 .into_iter()
5443 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5444 );
5445 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5446 let new_head_commit = update
5447 .head_commit_details
5448 .as_ref()
5449 .map(proto_to_commit_details);
5450 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5451 cx.emit(RepositoryEvent::BranchChanged)
5452 }
5453 self.snapshot.branch = new_branch;
5454 self.snapshot.head_commit = new_head_commit;
5455
5456 self.snapshot.merge.conflicted_paths = conflicted_paths;
5457 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5458 let new_stash_entries = GitStash {
5459 entries: update
5460 .stash_entries
5461 .iter()
5462 .filter_map(|entry| proto_to_stash(entry).ok())
5463 .collect(),
5464 };
5465 if self.snapshot.stash_entries != new_stash_entries {
5466 cx.emit(RepositoryEvent::StashEntriesChanged)
5467 }
5468 self.snapshot.stash_entries = new_stash_entries;
5469 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5470 self.snapshot.remote_origin_url = update.remote_origin_url;
5471
5472 let edits = update
5473 .removed_statuses
5474 .into_iter()
5475 .filter_map(|path| {
5476 Some(sum_tree::Edit::Remove(PathKey(
5477 RelPath::from_proto(&path).log_err()?,
5478 )))
5479 })
5480 .chain(
5481 update
5482 .updated_statuses
5483 .into_iter()
5484 .filter_map(|updated_status| {
5485 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5486 }),
5487 )
5488 .collect::<Vec<_>>();
5489 if !edits.is_empty() {
5490 cx.emit(RepositoryEvent::StatusesChanged);
5491 }
5492 self.snapshot.statuses_by_path.edit(edits, ());
5493 if update.is_last_update {
5494 self.snapshot.scan_id = update.scan_id;
5495 }
5496 self.clear_pending_ops(cx);
5497 Ok(())
5498 }
5499
5500 pub fn compare_checkpoints(
5501 &mut self,
5502 left: GitRepositoryCheckpoint,
5503 right: GitRepositoryCheckpoint,
5504 ) -> oneshot::Receiver<Result<bool>> {
5505 self.send_job(None, move |repo, _cx| async move {
5506 match repo {
5507 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5508 backend.compare_checkpoints(left, right).await
5509 }
5510 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5511 }
5512 })
5513 }
5514
5515 pub fn diff_checkpoints(
5516 &mut self,
5517 base_checkpoint: GitRepositoryCheckpoint,
5518 target_checkpoint: GitRepositoryCheckpoint,
5519 ) -> oneshot::Receiver<Result<String>> {
5520 self.send_job(None, move |repo, _cx| async move {
5521 match repo {
5522 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5523 backend
5524 .diff_checkpoints(base_checkpoint, target_checkpoint)
5525 .await
5526 }
5527 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5528 }
5529 })
5530 }
5531
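    /// Drops pending ops that are no longer running, keeping only in-flight
    /// ones, and emits `RepositoryEvent::PendingOpsChanged` when the set
    /// shrinks.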
5532 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5533 let updated = SumTree::from_iter(
5534 self.pending_ops.iter().filter_map(|ops| {
5535 let inner_ops: Vec<PendingOp> =
5536 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5537 if inner_ops.is_empty() {
5538 None
5539 } else {
5540 Some(PendingOps {
5541 repo_path: ops.repo_path.clone(),
5542 ops: inner_ops,
5543 })
5544 }
5545 }),
5546 (),
5547 );
5548
5549 if updated != self.pending_ops {
5550 cx.emit(RepositoryEvent::PendingOpsChanged {
5551 pending_ops: self.pending_ops.clone(),
5552 })
5553 }
5554
5555 self.pending_ops = updated;
5556 }
5557
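    /// Schedules a full re-scan of the repository's git state as a keyed
    /// `ReloadGitState` job: the snapshot is recomputed from the backend,
    /// pending per-path status requests are cleared, the resulting events are
    /// emitted, and the new snapshot is sent downstream when an updates
    /// channel is provided. Local repositories only.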
5558 fn schedule_scan(
5559 &mut self,
5560 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5561 cx: &mut Context<Self>,
5562 ) {
5563 let this = cx.weak_entity();
5564 let _ = self.send_keyed_job(
5565 Some(GitJobKey::ReloadGitState),
5566 None,
5567 |state, mut cx| async move {
5568 log::debug!("run scheduled git status scan");
5569
5570 let Some(this) = this.upgrade() else {
5571 return Ok(());
5572 };
5573 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5574 bail!("not a local repository")
5575 };
5576 let (snapshot, events) = this
5577 .update(&mut cx, |this, _| {
5578 this.paths_needing_status_update.clear();
5579 compute_snapshot(
5580 this.id,
5581 this.work_directory_abs_path.clone(),
5582 this.snapshot.clone(),
5583 backend.clone(),
5584 )
5585 })?
5586 .await?;
5587 this.update(&mut cx, |this, cx| {
5588 this.snapshot = snapshot.clone();
5589 this.clear_pending_ops(cx);
5590 for event in events {
5591 cx.emit(event);
5592 }
5593 })?;
5594 if let Some(updates_tx) = updates_tx {
5595 updates_tx
5596 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5597 .ok();
5598 }
5599 Ok(())
5600 },
5601 );
5602 }
5603
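    /// Spawns the job loop for a local repository. Once the backend is ready
    /// (and any additional git hosting providers are registered), jobs run
    /// strictly one at a time in submission order; a keyed job popped from the
    /// queue is skipped when a newer job with the same key is still waiting,
    /// so bursts of identical keyed work collapse to the latest request.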
5604 fn spawn_local_git_worker(
5605 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5606 cx: &mut Context<Self>,
5607 ) -> mpsc::UnboundedSender<GitJob> {
5608 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5609
5610 cx.spawn(async move |_, cx| {
5611 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5612 if let Some(git_hosting_provider_registry) =
5613 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5614 {
5615 git_hosting_providers::register_additional_providers(
5616 git_hosting_provider_registry,
5617 state.backend.clone(),
5618 )
5619 .await;
5620 }
5621 let state = RepositoryState::Local(state);
5622 let mut jobs = VecDeque::new();
5623 loop {
5624 while let Ok(Some(next_job)) = job_rx.try_next() {
5625 jobs.push_back(next_job);
5626 }
5627
5628 if let Some(job) = jobs.pop_front() {
5629 if let Some(current_key) = &job.key
5630 && jobs
5631 .iter()
5632 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5633 {
5634 continue;
5635 }
5636 (job.job)(state.clone(), cx).await;
5637 } else if let Some(job) = job_rx.next().await {
5638 jobs.push_back(job);
5639 } else {
5640 break;
5641 }
5642 }
5643 anyhow::Ok(())
5644 })
5645 .detach_and_log_err(cx);
5646
5647 job_tx
5648 }
5649
5650 fn spawn_remote_git_worker(
5651 state: RemoteRepositoryState,
5652 cx: &mut Context<Self>,
5653 ) -> mpsc::UnboundedSender<GitJob> {
5654 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5655
5656 cx.spawn(async move |_, cx| {
5657 let state = RepositoryState::Remote(state);
5658 let mut jobs = VecDeque::new();
5659 loop {
5660 while let Ok(Some(next_job)) = job_rx.try_next() {
5661 jobs.push_back(next_job);
5662 }
5663
5664 if let Some(job) = jobs.pop_front() {
5665 if let Some(current_key) = &job.key
5666 && jobs
5667 .iter()
5668 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5669 {
5670 continue;
5671 }
5672 (job.job)(state.clone(), cx).await;
5673 } else if let Some(job) = job_rx.next().await {
5674 jobs.push_back(job);
5675 } else {
5676 break;
5677 }
5678 }
5679 anyhow::Ok(())
5680 })
5681 .detach_and_log_err(cx);
5682
5683 job_tx
5684 }
5685
5686 fn load_staged_text(
5687 &mut self,
5688 buffer_id: BufferId,
5689 repo_path: RepoPath,
5690 cx: &App,
5691 ) -> Task<Result<Option<String>>> {
5692 let rx = self.send_job(None, move |state, _| async move {
5693 match state {
5694 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5695 anyhow::Ok(backend.load_index_text(repo_path).await)
5696 }
5697 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5698 let response = client
5699 .request(proto::OpenUnstagedDiff {
5700 project_id: project_id.to_proto(),
5701 buffer_id: buffer_id.to_proto(),
5702 })
5703 .await?;
5704 Ok(response.staged_text)
5705 }
5706 }
5707 });
5708 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5709 }
5710
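    /// Loads both the HEAD and index texts for `repo_path` and folds them
    /// into a single `DiffBasesChange`: `SetBoth` when the two match,
    /// `SetEach` otherwise. For remote repositories the same distinction comes
    /// back as the `OpenUncommittedDiff` response mode.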
5711 fn load_committed_text(
5712 &mut self,
5713 buffer_id: BufferId,
5714 repo_path: RepoPath,
5715 cx: &App,
5716 ) -> Task<Result<DiffBasesChange>> {
5717 let rx = self.send_job(None, move |state, _| async move {
5718 match state {
5719 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5720 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5721 let staged_text = backend.load_index_text(repo_path).await;
5722 let diff_bases_change = if committed_text == staged_text {
5723 DiffBasesChange::SetBoth(committed_text)
5724 } else {
5725 DiffBasesChange::SetEach {
5726 index: staged_text,
5727 head: committed_text,
5728 }
5729 };
5730 anyhow::Ok(diff_bases_change)
5731 }
5732 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5733 use proto::open_uncommitted_diff_response::Mode;
5734
5735 let response = client
5736 .request(proto::OpenUncommittedDiff {
5737 project_id: project_id.to_proto(),
5738 buffer_id: buffer_id.to_proto(),
5739 })
5740 .await?;
5741 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5742 let bases = match mode {
5743 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5744 Mode::IndexAndHead => DiffBasesChange::SetEach {
5745 head: response.committed_text,
5746 index: response.staged_text,
5747 },
5748 };
5749 Ok(bases)
5750 }
5751 }
5752 });
5753
5754 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5755 }
5756
5757 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5758 let repository_id = self.snapshot.id;
5759 let rx = self.send_job(None, move |state, _| async move {
5760 match state {
5761 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5762 backend.load_blob_content(oid).await
5763 }
5764 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5765 let response = client
5766 .request(proto::GetBlobContent {
5767 project_id: project_id.to_proto(),
5768 repository_id: repository_id.0,
5769 oid: oid.to_string(),
5770 })
5771 .await?;
5772 Ok(response.content)
5773 }
5774 }
5775 });
5776 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5777 }
5778
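    /// Records that the given paths may have changed and schedules a keyed job
    /// that re-reads their git statuses, applies any differences to the
    /// snapshot, refreshes the stash entries, and forwards the updated
    /// snapshot downstream when a sender is provided.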
5779 fn paths_changed(
5780 &mut self,
5781 paths: Vec<RepoPath>,
5782 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5783 cx: &mut Context<Self>,
5784 ) {
5785 self.paths_needing_status_update.extend(paths);
5786
5787 let this = cx.weak_entity();
5788 let _ = self.send_keyed_job(
5789 Some(GitJobKey::RefreshStatuses),
5790 None,
5791 |state, mut cx| async move {
5792 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5793 (
5794 this.snapshot.clone(),
5795 mem::take(&mut this.paths_needing_status_update),
5796 )
5797 })?;
5798 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5799 bail!("not a local repository")
5800 };
5801
5802 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5803 if paths.is_empty() {
5804 return Ok(());
5805 }
5806 let statuses = backend.status(&paths).await?;
5807 let stash_entries = backend.stash_entries().await?;
5808
5809 let changed_path_statuses = cx
5810 .background_spawn(async move {
5811 let mut changed_path_statuses = Vec::new();
5812 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5813 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5814
5815 for (repo_path, status) in &*statuses.entries {
5816 changed_paths.remove(repo_path);
5817 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5818 && cursor.item().is_some_and(|entry| entry.status == *status)
5819 {
5820 continue;
5821 }
5822
5823 changed_path_statuses.push(Edit::Insert(StatusEntry {
5824 repo_path: repo_path.clone(),
5825 status: *status,
5826 }));
5827 }
5828 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5829 for path in changed_paths.into_iter() {
5830 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5831 changed_path_statuses
5832 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5833 }
5834 }
5835 changed_path_statuses
5836 })
5837 .await;
5838
5839 this.update(&mut cx, |this, cx| {
5840 if this.snapshot.stash_entries != stash_entries {
5841 cx.emit(RepositoryEvent::StashEntriesChanged);
5842 this.snapshot.stash_entries = stash_entries;
5843 }
5844
5845 if !changed_path_statuses.is_empty() {
5846 cx.emit(RepositoryEvent::StatusesChanged);
5847 this.snapshot
5848 .statuses_by_path
5849 .edit(changed_path_statuses, ());
5850 this.snapshot.scan_id += 1;
5851 }
5852
5853 if let Some(updates_tx) = updates_tx {
5854 updates_tx
5855 .unbounded_send(DownstreamUpdate::UpdateRepository(
5856 this.snapshot.clone(),
5857 ))
5858 .ok();
5859 }
5860 })
5861 },
5862 );
5863 }
5864
5865    /// The currently running git command, if any, and when it started.
5866 pub fn current_job(&self) -> Option<JobInfo> {
5867 self.active_jobs.values().next().cloned()
5868 }
5869
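    /// Enqueues a no-op job; the returned receiver resolves once every job
    /// queued before it has been processed.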
5870 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5871 self.send_job(None, |_, _| async {})
5872 }
5873
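    /// Runs `f` while recording a pending operation for each of the given
    /// paths, then marks those operations as finished, skipped (when the job
    /// was canceled), or errored depending on the outcome.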
5874 fn spawn_job_with_tracking<AsyncFn>(
5875 &mut self,
5876 paths: Vec<RepoPath>,
5877 git_status: pending_op::GitStatus,
5878 cx: &mut Context<Self>,
5879 f: AsyncFn,
5880 ) -> Task<Result<()>>
5881 where
5882 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5883 {
5884 let ids = self.new_pending_ops_for_paths(paths, git_status);
5885
5886 cx.spawn(async move |this, cx| {
5887 let (job_status, result) = match f(this.clone(), cx).await {
5888 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5889 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5890 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5891 };
5892
5893 this.update(cx, |this, _| {
5894 let mut edits = Vec::with_capacity(ids.len());
5895 for (id, entry) in ids {
5896 if let Some(mut ops) = this
5897 .pending_ops
5898 .get(&PathKey(entry.as_ref().clone()), ())
5899 .cloned()
5900 {
5901 if let Some(op) = ops.op_by_id_mut(id) {
5902 op.job_status = job_status;
5903 }
5904 edits.push(sum_tree::Edit::Insert(ops));
5905 }
5906 }
5907 this.pending_ops.edit(edits, ());
5908 })?;
5909
5910 result
5911 })
5912 }
5913
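    /// Registers a new `Running` pending operation for each of the given paths
    /// and returns the assigned operation ids alongside their paths.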
5914 fn new_pending_ops_for_paths(
5915 &mut self,
5916 paths: Vec<RepoPath>,
5917 git_status: pending_op::GitStatus,
5918 ) -> Vec<(PendingOpId, RepoPath)> {
5919 let mut edits = Vec::with_capacity(paths.len());
5920 let mut ids = Vec::with_capacity(paths.len());
5921 for path in paths {
5922 let mut ops = self
5923 .pending_ops
5924 .get(&PathKey(path.as_ref().clone()), ())
5925 .cloned()
5926 .unwrap_or_else(|| PendingOps::new(&path));
5927 let id = ops.max_id() + 1;
5928 ops.ops.push(PendingOp {
5929 id,
5930 git_status,
5931 job_status: pending_op::JobStatus::Running,
5932 });
5933 edits.push(sum_tree::Edit::Insert(ops));
5934 ids.push((id, path));
5935 }
5936 self.pending_ops.edit(edits, ());
5937 ids
5938 }
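
    /// Returns the URL of the `upstream` remote if one is configured, falling
    /// back to the `origin` remote.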
5939 pub fn default_remote_url(&self) -> Option<String> {
5940 self.remote_upstream_url
5941 .clone()
5942            .or_else(|| self.remote_origin_url.clone())
5943 }
5944}
5945
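/// Builds a permalink to the upstream hosting provider for a file that lives
/// inside a vendored crate in the Rust registry source directory, using the
/// crate's `.cargo_vcs_info.json` and `Cargo.toml` to locate the repository
/// and the exact commit it was packaged from.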
5946fn get_permalink_in_rust_registry_src(
5947 provider_registry: Arc<GitHostingProviderRegistry>,
5948 path: PathBuf,
5949 selection: Range<u32>,
5950) -> Result<url::Url> {
5951 #[derive(Deserialize)]
5952 struct CargoVcsGit {
5953 sha1: String,
5954 }
5955
5956 #[derive(Deserialize)]
5957 struct CargoVcsInfo {
5958 git: CargoVcsGit,
5959 path_in_vcs: String,
5960 }
5961
5962 #[derive(Deserialize)]
5963 struct CargoPackage {
5964 repository: String,
5965 }
5966
5967 #[derive(Deserialize)]
5968 struct CargoToml {
5969 package: CargoPackage,
5970 }
5971
5972 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5973 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5974 Some((dir, json))
5975 }) else {
5976 bail!("No .cargo_vcs_info.json found in parent directories")
5977 };
5978 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5979 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5980 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5981 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5982 .context("parsing package.repository field of manifest")?;
5983 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5984 let permalink = provider.build_permalink(
5985 remote,
5986 BuildPermalinkParams::new(
5987 &cargo_vcs_info.git.sha1,
5988 &RepoPath::from_rel_path(
5989 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5990 ),
5991 Some(selection),
5992 ),
5993 );
5994 Ok(permalink)
5995}
5996
5997fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5998 let Some(blame) = blame else {
5999 return proto::BlameBufferResponse {
6000 blame_response: None,
6001 };
6002 };
6003
6004 let entries = blame
6005 .entries
6006 .into_iter()
6007 .map(|entry| proto::BlameEntry {
6008 sha: entry.sha.as_bytes().into(),
6009 start_line: entry.range.start,
6010 end_line: entry.range.end,
6011 original_line_number: entry.original_line_number,
6012 author: entry.author,
6013 author_mail: entry.author_mail,
6014 author_time: entry.author_time,
6015 author_tz: entry.author_tz,
6016 committer: entry.committer_name,
6017 committer_mail: entry.committer_email,
6018 committer_time: entry.committer_time,
6019 committer_tz: entry.committer_tz,
6020 summary: entry.summary,
6021 previous: entry.previous,
6022 filename: entry.filename,
6023 })
6024 .collect::<Vec<_>>();
6025
6026 let messages = blame
6027 .messages
6028 .into_iter()
6029 .map(|(oid, message)| proto::CommitMessage {
6030 oid: oid.as_bytes().into(),
6031 message,
6032 })
6033 .collect::<Vec<_>>();
6034
6035 proto::BlameBufferResponse {
6036 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6037 }
6038}
6039
6040fn deserialize_blame_buffer_response(
6041 response: proto::BlameBufferResponse,
6042) -> Option<git::blame::Blame> {
6043 let response = response.blame_response?;
6044 let entries = response
6045 .entries
6046 .into_iter()
6047 .filter_map(|entry| {
6048 Some(git::blame::BlameEntry {
6049 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6050 range: entry.start_line..entry.end_line,
6051 original_line_number: entry.original_line_number,
6052 committer_name: entry.committer,
6053 committer_time: entry.committer_time,
6054 committer_tz: entry.committer_tz,
6055 committer_email: entry.committer_mail,
6056 author: entry.author,
6057 author_mail: entry.author_mail,
6058 author_time: entry.author_time,
6059 author_tz: entry.author_tz,
6060 summary: entry.summary,
6061 previous: entry.previous,
6062 filename: entry.filename,
6063 })
6064 })
6065 .collect::<Vec<_>>();
6066
6067 let messages = response
6068 .messages
6069 .into_iter()
6070 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6071 .collect::<HashMap<_, _>>();
6072
6073 Some(Blame { entries, messages })
6074}
6075
6076fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6077 proto::Branch {
6078 is_head: branch.is_head,
6079 ref_name: branch.ref_name.to_string(),
6080 unix_timestamp: branch
6081 .most_recent_commit
6082 .as_ref()
6083 .map(|commit| commit.commit_timestamp as u64),
6084 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6085 ref_name: upstream.ref_name.to_string(),
6086 tracking: upstream
6087 .tracking
6088 .status()
6089 .map(|upstream| proto::UpstreamTracking {
6090 ahead: upstream.ahead as u64,
6091 behind: upstream.behind as u64,
6092 }),
6093 }),
6094 most_recent_commit: branch
6095 .most_recent_commit
6096 .as_ref()
6097 .map(|commit| proto::CommitSummary {
6098 sha: commit.sha.to_string(),
6099 subject: commit.subject.to_string(),
6100 commit_timestamp: commit.commit_timestamp,
6101 author_name: commit.author_name.to_string(),
6102 }),
6103 }
6104}
6105
6106fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6107 proto::Worktree {
6108 path: worktree.path.to_string_lossy().to_string(),
6109 ref_name: worktree.ref_name.to_string(),
6110 sha: worktree.sha.to_string(),
6111 }
6112}
6113
6114fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6115 git::repository::Worktree {
6116 path: PathBuf::from(proto.path.clone()),
6117 ref_name: proto.ref_name.clone().into(),
6118 sha: proto.sha.clone().into(),
6119 }
6120}
6121
6122fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6123 git::repository::Branch {
6124 is_head: proto.is_head,
6125 ref_name: proto.ref_name.clone().into(),
6126 upstream: proto
6127 .upstream
6128 .as_ref()
6129 .map(|upstream| git::repository::Upstream {
6130 ref_name: upstream.ref_name.to_string().into(),
6131 tracking: upstream
6132 .tracking
6133 .as_ref()
6134 .map(|tracking| {
6135 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6136 ahead: tracking.ahead as u32,
6137 behind: tracking.behind as u32,
6138 })
6139 })
6140 .unwrap_or(git::repository::UpstreamTracking::Gone),
6141 }),
6142 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6143 git::repository::CommitSummary {
6144 sha: commit.sha.to_string().into(),
6145 subject: commit.subject.to_string().into(),
6146 commit_timestamp: commit.commit_timestamp,
6147 author_name: commit.author_name.to_string().into(),
6148 has_parent: true,
6149 }
6150 }),
6151 }
6152}
6153
6154fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6155 proto::GitCommitDetails {
6156 sha: commit.sha.to_string(),
6157 message: commit.message.to_string(),
6158 commit_timestamp: commit.commit_timestamp,
6159 author_email: commit.author_email.to_string(),
6160 author_name: commit.author_name.to_string(),
6161 }
6162}
6163
6164fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6165 CommitDetails {
6166 sha: proto.sha.clone().into(),
6167 message: proto.message.clone().into(),
6168 commit_timestamp: proto.commit_timestamp,
6169 author_email: proto.author_email.clone().into(),
6170 author_name: proto.author_name.clone().into(),
6171 }
6172}
6173
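/// Recomputes a repository snapshot from scratch (branch, head commit,
/// statuses, stash entries, merge details, and remote URLs) and returns it
/// together with the events that should be emitted for anything that changed
/// relative to `prev_snapshot`.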
6174async fn compute_snapshot(
6175 id: RepositoryId,
6176 work_directory_abs_path: Arc<Path>,
6177 prev_snapshot: RepositorySnapshot,
6178 backend: Arc<dyn GitRepository>,
6179) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6180 let mut events = Vec::new();
6181 let branches = backend.branches().await?;
6182 let branch = branches.into_iter().find(|branch| branch.is_head);
6183 let statuses = backend
6184 .status(&[RepoPath::from_rel_path(
6185 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6186 )])
6187 .await?;
6188 let stash_entries = backend.stash_entries().await?;
6189 let statuses_by_path = SumTree::from_iter(
6190 statuses
6191 .entries
6192 .iter()
6193 .map(|(repo_path, status)| StatusEntry {
6194 repo_path: repo_path.clone(),
6195 status: *status,
6196 }),
6197 (),
6198 );
6199 let (merge_details, merge_heads_changed) =
6200 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6201 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6202
6203 if merge_heads_changed {
6204 events.push(RepositoryEvent::MergeHeadsChanged);
6205 }
6206
6207 if statuses_by_path != prev_snapshot.statuses_by_path {
6208 events.push(RepositoryEvent::StatusesChanged)
6209 }
6210
6211    // Resolve the HEAD commit directly; this is needed when `branch` is `None`, e.g. in a detached HEAD state.
6212 let head_commit = match backend.head_sha().await {
6213 Some(head_sha) => backend.show(head_sha).await.log_err(),
6214 None => None,
6215 };
6216
6217 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6218 events.push(RepositoryEvent::BranchChanged);
6219 }
6220
6221 let remote_origin_url = backend.remote_url("origin").await;
6222 let remote_upstream_url = backend.remote_url("upstream").await;
6223
6224 let snapshot = RepositorySnapshot {
6225 id,
6226 statuses_by_path,
6227 work_directory_abs_path,
6228 path_style: prev_snapshot.path_style,
6229 scan_id: prev_snapshot.scan_id + 1,
6230 branch,
6231 head_commit,
6232 merge: merge_details,
6233 remote_origin_url,
6234 remote_upstream_url,
6235 stash_entries,
6236 };
6237
6238 Ok((snapshot, events))
6239}
6240
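/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no structured variant is present.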
6241fn status_from_proto(
6242 simple_status: i32,
6243 status: Option<proto::GitFileStatus>,
6244) -> anyhow::Result<FileStatus> {
6245 use proto::git_file_status::Variant;
6246
6247 let Some(variant) = status.and_then(|status| status.variant) else {
6248 let code = proto::GitStatus::from_i32(simple_status)
6249 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6250 let result = match code {
6251 proto::GitStatus::Added => TrackedStatus {
6252 worktree_status: StatusCode::Added,
6253 index_status: StatusCode::Unmodified,
6254 }
6255 .into(),
6256 proto::GitStatus::Modified => TrackedStatus {
6257 worktree_status: StatusCode::Modified,
6258 index_status: StatusCode::Unmodified,
6259 }
6260 .into(),
6261 proto::GitStatus::Conflict => UnmergedStatus {
6262 first_head: UnmergedStatusCode::Updated,
6263 second_head: UnmergedStatusCode::Updated,
6264 }
6265 .into(),
6266 proto::GitStatus::Deleted => TrackedStatus {
6267 worktree_status: StatusCode::Deleted,
6268 index_status: StatusCode::Unmodified,
6269 }
6270 .into(),
6271 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6272 };
6273 return Ok(result);
6274 };
6275
6276 let result = match variant {
6277 Variant::Untracked(_) => FileStatus::Untracked,
6278 Variant::Ignored(_) => FileStatus::Ignored,
6279 Variant::Unmerged(unmerged) => {
6280 let [first_head, second_head] =
6281 [unmerged.first_head, unmerged.second_head].map(|head| {
6282 let code = proto::GitStatus::from_i32(head)
6283 .with_context(|| format!("Invalid git status code: {head}"))?;
6284 let result = match code {
6285 proto::GitStatus::Added => UnmergedStatusCode::Added,
6286 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6287 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6288 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6289 };
6290 Ok(result)
6291 });
6292 let [first_head, second_head] = [first_head?, second_head?];
6293 UnmergedStatus {
6294 first_head,
6295 second_head,
6296 }
6297 .into()
6298 }
6299 Variant::Tracked(tracked) => {
6300 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6301 .map(|status| {
6302 let code = proto::GitStatus::from_i32(status)
6303 .with_context(|| format!("Invalid git status code: {status}"))?;
6304 let result = match code {
6305 proto::GitStatus::Modified => StatusCode::Modified,
6306 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6307 proto::GitStatus::Added => StatusCode::Added,
6308 proto::GitStatus::Deleted => StatusCode::Deleted,
6309 proto::GitStatus::Renamed => StatusCode::Renamed,
6310 proto::GitStatus::Copied => StatusCode::Copied,
6311 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6312 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6313 };
6314 Ok(result)
6315 });
6316 let [index_status, worktree_status] = [index_status?, worktree_status?];
6317 TrackedStatus {
6318 index_status,
6319 worktree_status,
6320 }
6321 .into()
6322 }
6323 };
6324 Ok(result)
6325}
6326
6327fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6328 use proto::git_file_status::{Tracked, Unmerged, Variant};
6329
6330 let variant = match status {
6331 FileStatus::Untracked => Variant::Untracked(Default::default()),
6332 FileStatus::Ignored => Variant::Ignored(Default::default()),
6333 FileStatus::Unmerged(UnmergedStatus {
6334 first_head,
6335 second_head,
6336 }) => Variant::Unmerged(Unmerged {
6337 first_head: unmerged_status_to_proto(first_head),
6338 second_head: unmerged_status_to_proto(second_head),
6339 }),
6340 FileStatus::Tracked(TrackedStatus {
6341 index_status,
6342 worktree_status,
6343 }) => Variant::Tracked(Tracked {
6344 index_status: tracked_status_to_proto(index_status),
6345 worktree_status: tracked_status_to_proto(worktree_status),
6346 }),
6347 };
6348 proto::GitFileStatus {
6349 variant: Some(variant),
6350 }
6351}
6352
6353fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6354 match code {
6355 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6356 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6357 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6358 }
6359}
6360
6361fn tracked_status_to_proto(code: StatusCode) -> i32 {
6362 match code {
6363 StatusCode::Added => proto::GitStatus::Added as _,
6364 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6365 StatusCode::Modified => proto::GitStatus::Modified as _,
6366 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6367 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6368 StatusCode::Copied => proto::GitStatus::Copied as _,
6369 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6370 }
6371}