1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
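/// Tracks the Git state of a project: the set of known repositories, the
/// per-buffer unstaged and uncommitted diffs, and conflict sets, for both
/// local projects and remote (collaboration) projects.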
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
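/// Git-related state for a single open buffer: weak handles to its unstaged
/// and uncommitted diffs and its conflict set, along with the base texts and
/// bookkeeping needed to recalculate them.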
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
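/// Describes a change to a buffer's diff base texts: the index text, the HEAD
/// text, both to the same value, or each to a separate value.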
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
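/// The kinds of diff tracked per buffer: unstaged (buffer vs. index) and
/// uncommitted (buffer vs. HEAD).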
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
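/// Whether this `GitStore` belongs to a local project, with direct access to
/// the filesystem, or to a remote project that proxies git requests to an
/// upstream client.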
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
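/// A checkpoint of every repository in the store, keyed by working-directory
/// path, that can later be restored or compared against another checkpoint.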
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
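/// An immutable, cloneable snapshot of a repository's state: path statuses,
/// branch, head commit, merge details, remotes, and stash entries.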
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
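/// A git repository tracked by the project. Wraps a `RepositorySnapshot` and
/// queues git operations as jobs to be run against the underlying repository
/// state, which may be local or remote.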
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
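/// Everything needed to run git operations locally: a filesystem handle, the
/// repository backend, and the environment captured from the repository's
/// working directory.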
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
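/// Events emitted by a `Repository` when parts of its snapshot change.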
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
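/// A queued git operation. The optional key identifies the kind of job (for
/// example, index writes for a particular set of paths) so that related jobs
/// can be recognized in the queue.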
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
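    /// Registers the RPC handlers for git-related messages on the given client.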
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
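    /// Begins sharing this store's repositories with a downstream client,
    /// sending initial snapshots and then streaming subsequent updates.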
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
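    /// Returns the unstaged diff (buffer vs. index) for the given buffer,
    /// loading the staged text and creating the diff if it is not already open.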
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
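    /// Builds a diff of the buffer against the blob content at the given
    /// commit (`None` diffs against an empty base), attaching the buffer's
    /// unstaged diff as the secondary diff.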
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 languages: Arc<LanguageRegistry>,
697 cx: &mut Context<Self>,
698 ) -> Task<Result<Entity<BufferDiff>>> {
699 cx.spawn(async move |this, cx| {
700 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
701 let content = match oid {
702 None => None,
703 Some(oid) => Some(
704 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
705 .await?,
706 ),
707 };
708 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
709
710 buffer_diff
711 .update(cx, |buffer_diff, cx| {
712 buffer_diff.set_base_text(
713 content.map(Arc::new),
714 buffer_snapshot.language().cloned(),
715 Some(languages.clone()),
716 buffer_snapshot.text,
717 cx,
718 )
719 })?
720 .await?;
721 let unstaged_diff = this
722 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
723 .await?;
724 buffer_diff.update(cx, |buffer_diff, _| {
725 buffer_diff.set_secondary_diff(unstaged_diff);
726 })?;
727
728 this.update(cx, |_, cx| {
729 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
730 .detach();
731 })?;
732
733 Ok(buffer_diff)
734 })
735 }
736
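    /// Returns the uncommitted diff (buffer vs. HEAD) for the given buffer,
    /// loading the committed text and creating the diff if it is not already open.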
737 pub fn open_uncommitted_diff(
738 &mut self,
739 buffer: Entity<Buffer>,
740 cx: &mut Context<Self>,
741 ) -> Task<Result<Entity<BufferDiff>>> {
742 let buffer_id = buffer.read(cx).remote_id();
743
744 if let Some(diff_state) = self.diffs.get(&buffer_id)
745 && let Some(uncommitted_diff) = diff_state
746 .read(cx)
747 .uncommitted_diff
748 .as_ref()
749 .and_then(|weak| weak.upgrade())
750 {
751 if let Some(task) =
752 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
753 {
754 return cx.background_executor().spawn(async move {
755 task.await;
756 Ok(uncommitted_diff)
757 });
758 }
759 return Task::ready(Ok(uncommitted_diff));
760 }
761
762 let Some((repo, repo_path)) =
763 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
764 else {
765 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
766 };
767
768 let task = self
769 .loading_diffs
770 .entry((buffer_id, DiffKind::Uncommitted))
771 .or_insert_with(|| {
772 let changes = repo.update(cx, |repo, cx| {
773 repo.load_committed_text(buffer_id, repo_path, cx)
774 });
775
776 // todo(lw): hot foreground spawn
777 cx.spawn(async move |this, cx| {
778 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
779 .await
780 .map_err(Arc::new)
781 })
782 .shared()
783 })
784 .clone();
785
786 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
787 }
788
789 async fn open_diff_internal(
790 this: WeakEntity<Self>,
791 kind: DiffKind,
792 texts: Result<DiffBasesChange>,
793 buffer_entity: Entity<Buffer>,
794 cx: &mut AsyncApp,
795 ) -> Result<Entity<BufferDiff>> {
796 let diff_bases_change = match texts {
797 Err(e) => {
798 this.update(cx, |this, cx| {
799 let buffer = buffer_entity.read(cx);
800 let buffer_id = buffer.remote_id();
801 this.loading_diffs.remove(&(buffer_id, kind));
802 })?;
803 return Err(e);
804 }
805 Ok(change) => change,
806 };
807
808 this.update(cx, |this, cx| {
809 let buffer = buffer_entity.read(cx);
810 let buffer_id = buffer.remote_id();
811 let language = buffer.language().cloned();
812 let language_registry = buffer.language_registry();
813 let text_snapshot = buffer.text_snapshot();
814 this.loading_diffs.remove(&(buffer_id, kind));
815
816 let git_store = cx.weak_entity();
817 let diff_state = this
818 .diffs
819 .entry(buffer_id)
820 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
821
822 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
823
824 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
825 diff_state.update(cx, |diff_state, cx| {
826 diff_state.language = language;
827 diff_state.language_registry = language_registry;
828
829 match kind {
830 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
831 DiffKind::Uncommitted => {
832 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
833 diff
834 } else {
835 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
836 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
837 unstaged_diff
838 };
839
840 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
841 diff_state.uncommitted_diff = Some(diff.downgrade())
842 }
843 }
844
845 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
846 let rx = diff_state.wait_for_recalculation();
847
848 anyhow::Ok(async move {
849 if let Some(rx) = rx {
850 rx.await;
851 }
852 Ok(diff)
853 })
854 })
855 })??
856 .await
857 }
858
859 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
860 let diff_state = self.diffs.get(&buffer_id)?;
861 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
862 }
863
864 pub fn get_uncommitted_diff(
865 &self,
866 buffer_id: BufferId,
867 cx: &App,
868 ) -> Option<Entity<BufferDiff>> {
869 let diff_state = self.diffs.get(&buffer_id)?;
870 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
871 }
872
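    /// Returns the conflict set for the given buffer, creating it and
    /// scheduling a reparse of conflict markers when necessary.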
873 pub fn open_conflict_set(
874 &mut self,
875 buffer: Entity<Buffer>,
876 cx: &mut Context<Self>,
877 ) -> Entity<ConflictSet> {
878 log::debug!("open conflict set");
879 let buffer_id = buffer.read(cx).remote_id();
880
881 if let Some(git_state) = self.diffs.get(&buffer_id)
882 && let Some(conflict_set) = git_state
883 .read(cx)
884 .conflict_set
885 .as_ref()
886 .and_then(|weak| weak.upgrade())
887 {
888 let conflict_set = conflict_set;
889 let buffer_snapshot = buffer.read(cx).text_snapshot();
890
891 git_state.update(cx, |state, cx| {
892 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
893 });
894
895 return conflict_set;
896 }
897
898 let is_unmerged = self
899 .repository_and_path_for_buffer_id(buffer_id, cx)
900 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
901 let git_store = cx.weak_entity();
902 let buffer_git_state = self
903 .diffs
904 .entry(buffer_id)
905 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
906 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
907
908 self._subscriptions
909 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
910 cx.emit(GitStoreEvent::ConflictsUpdated);
911 }));
912
913 buffer_git_state.update(cx, |state, cx| {
914 state.conflict_set = Some(conflict_set.downgrade());
915 let buffer_snapshot = buffer.read(cx).text_snapshot();
916 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
917 });
918
919 conflict_set
920 }
921
922 pub fn project_path_git_status(
923 &self,
924 project_path: &ProjectPath,
925 cx: &App,
926 ) -> Option<FileStatus> {
927 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
928 Some(repo.read(cx).status_for_path(&repo_path)?.status)
929 }
930
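    /// Creates a checkpoint of every repository in the store.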
931 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
932 let mut work_directory_abs_paths = Vec::new();
933 let mut checkpoints = Vec::new();
934 for repository in self.repositories.values() {
935 repository.update(cx, |repository, _| {
936 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
937 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
938 });
939 }
940
941 cx.background_executor().spawn(async move {
942 let checkpoints = future::try_join_all(checkpoints).await?;
943 Ok(GitStoreCheckpoint {
944 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
945 .into_iter()
946 .zip(checkpoints)
947 .collect(),
948 })
949 })
950 }
951
952 pub fn restore_checkpoint(
953 &self,
954 checkpoint: GitStoreCheckpoint,
955 cx: &mut App,
956 ) -> Task<Result<()>> {
957 let repositories_by_work_dir_abs_path = self
958 .repositories
959 .values()
960 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
961 .collect::<HashMap<_, _>>();
962
963 let mut tasks = Vec::new();
964 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
965 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
966 let restore = repository.update(cx, |repository, _| {
967 repository.restore_checkpoint(checkpoint)
968 });
969 tasks.push(async move { restore.await? });
970 }
971 }
972 cx.background_spawn(async move {
973 future::try_join_all(tasks).await?;
974 Ok(())
975 })
976 }
977
978 /// Compares two checkpoints, returning true if they are equal.
979 pub fn compare_checkpoints(
980 &self,
981 left: GitStoreCheckpoint,
982 mut right: GitStoreCheckpoint,
983 cx: &mut App,
984 ) -> Task<Result<bool>> {
985 let repositories_by_work_dir_abs_path = self
986 .repositories
987 .values()
988 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
989 .collect::<HashMap<_, _>>();
990
991 let mut tasks = Vec::new();
992 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
993 if let Some(right_checkpoint) = right
994 .checkpoints_by_work_dir_abs_path
995 .remove(&work_dir_abs_path)
996 {
997 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
998 {
999 let compare = repository.update(cx, |repository, _| {
1000 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1001 });
1002
1003 tasks.push(async move { compare.await? });
1004 }
1005 } else {
1006 return Task::ready(Ok(false));
1007 }
1008 }
1009 cx.background_spawn(async move {
1010 Ok(future::try_join_all(tasks)
1011 .await?
1012 .into_iter()
1013 .all(|result| result))
1014 })
1015 }
1016
1017 /// Blames a buffer.
1018 pub fn blame_buffer(
1019 &self,
1020 buffer: &Entity<Buffer>,
1021 version: Option<clock::Global>,
1022 cx: &mut Context<Self>,
1023 ) -> Task<Result<Option<Blame>>> {
1024 let buffer = buffer.read(cx);
1025 let Some((repo, repo_path)) =
1026 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1027 else {
1028 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1029 };
1030 let content = match &version {
1031 Some(version) => buffer.rope_for_version(version),
1032 None => buffer.as_rope().clone(),
1033 };
1034 let line_ending = buffer.line_ending();
1035 let version = version.unwrap_or(buffer.version());
1036 let buffer_id = buffer.remote_id();
1037
1038 let repo = repo.downgrade();
1039 cx.spawn(async move |_, cx| {
1040 let repository_state = repo
1041 .update(cx, |repo, _| repo.repository_state.clone())?
1042 .await
1043 .map_err(|err| anyhow::anyhow!(err))?;
1044 match repository_state {
1045 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1046 .blame(repo_path.clone(), content, line_ending)
1047 .await
1048 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1049 .map(Some),
1050 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1051 let response = client
1052 .request(proto::BlameBuffer {
1053 project_id: project_id.to_proto(),
1054 buffer_id: buffer_id.into(),
1055 version: serialize_version(&version),
1056 })
1057 .await?;
1058 Ok(deserialize_blame_buffer_response(response))
1059 }
1060 }
1061 })
1062 }
1063
1064 pub fn file_history(
1065 &self,
1066 repo: &Entity<Repository>,
1067 path: RepoPath,
1068 cx: &mut App,
1069 ) -> Task<Result<git::repository::FileHistory>> {
1070 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1071
1072 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1073 }
1074
1075 pub fn file_history_paginated(
1076 &self,
1077 repo: &Entity<Repository>,
1078 path: RepoPath,
1079 skip: usize,
1080 limit: Option<usize>,
1081 cx: &mut App,
1082 ) -> Task<Result<git::repository::FileHistory>> {
1083 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1084
1085 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1086 }
1087
1088 pub fn get_permalink_to_line(
1089 &self,
1090 buffer: &Entity<Buffer>,
1091 selection: Range<u32>,
1092 cx: &mut App,
1093 ) -> Task<Result<url::Url>> {
1094 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1095 return Task::ready(Err(anyhow!("buffer has no file")));
1096 };
1097
1098 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1099 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1100 cx,
1101 ) else {
1102 // If we're not in a Git repo, check whether this is a Rust source
1103 // file in the Cargo registry (presumably opened with go-to-definition
1104 // from a normal Rust file). If so, we can put together a permalink
1105 // using crate metadata.
1106 if buffer
1107 .read(cx)
1108 .language()
1109 .is_none_or(|lang| lang.name() != "Rust".into())
1110 {
1111 return Task::ready(Err(anyhow!("no permalink available")));
1112 }
1113 let file_path = file.worktree.read(cx).absolutize(&file.path);
1114 return cx.spawn(async move |cx| {
1115 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1116 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1117 .context("no permalink available")
1118 });
1119 };
1120
1121 let buffer_id = buffer.read(cx).remote_id();
1122 let branch = repo.read(cx).branch.clone();
1123 let remote = branch
1124 .as_ref()
1125 .and_then(|b| b.upstream.as_ref())
1126 .and_then(|b| b.remote_name())
1127 .unwrap_or("origin")
1128 .to_string();
1129
1130 let rx = repo.update(cx, |repo, _| {
1131 repo.send_job(None, move |state, cx| async move {
1132 match state {
1133 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1134 let origin_url = backend
1135 .remote_url(&remote)
1136 .await
1137 .with_context(|| format!("remote \"{remote}\" not found"))?;
1138
1139 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1140
1141 let provider_registry =
1142 cx.update(GitHostingProviderRegistry::default_global)?;
1143
1144 let (provider, remote) =
1145 parse_git_remote_url(provider_registry, &origin_url)
1146 .context("parsing Git remote URL")?;
1147
1148 Ok(provider.build_permalink(
1149 remote,
1150 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1151 ))
1152 }
1153 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1154 let response = client
1155 .request(proto::GetPermalinkToLine {
1156 project_id: project_id.to_proto(),
1157 buffer_id: buffer_id.into(),
1158 selection: Some(proto::Range {
1159 start: selection.start as u64,
1160 end: selection.end as u64,
1161 }),
1162 })
1163 .await?;
1164
1165 url::Url::parse(&response.permalink).context("failed to parse permalink")
1166 }
1167 }
1168 })
1169 });
1170 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1171 }
1172
1173 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1174 match &self.state {
1175 GitStoreState::Local {
1176 downstream: downstream_client,
1177 ..
1178 } => downstream_client
1179 .as_ref()
1180 .map(|state| (state.client.clone(), state.project_id)),
1181 GitStoreState::Remote {
1182 downstream: downstream_client,
1183 ..
1184 } => downstream_client.clone(),
1185 }
1186 }
1187
1188 fn upstream_client(&self) -> Option<AnyProtoClient> {
1189 match &self.state {
1190 GitStoreState::Local { .. } => None,
1191 GitStoreState::Remote {
1192 upstream_client, ..
1193 } => Some(upstream_client.clone()),
1194 }
1195 }
1196
1197 fn on_worktree_store_event(
1198 &mut self,
1199 worktree_store: Entity<WorktreeStore>,
1200 event: &WorktreeStoreEvent,
1201 cx: &mut Context<Self>,
1202 ) {
1203 let GitStoreState::Local {
1204 project_environment,
1205 downstream,
1206 next_repository_id,
1207 fs,
1208 } = &self.state
1209 else {
1210 return;
1211 };
1212
1213 match event {
1214 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1215 if let Some(worktree) = self
1216 .worktree_store
1217 .read(cx)
1218 .worktree_for_id(*worktree_id, cx)
1219 {
1220 let paths_by_git_repo =
1221 self.process_updated_entries(&worktree, updated_entries, cx);
1222 let downstream = downstream
1223 .as_ref()
1224 .map(|downstream| downstream.updates_tx.clone());
1225 cx.spawn(async move |_, cx| {
1226 let paths_by_git_repo = paths_by_git_repo.await;
1227 for (repo, paths) in paths_by_git_repo {
1228 repo.update(cx, |repo, cx| {
1229 repo.paths_changed(paths, downstream.clone(), cx);
1230 })
1231 .ok();
1232 }
1233 })
1234 .detach();
1235 }
1236 }
1237 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1238 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1239 else {
1240 return;
1241 };
1242 if !worktree.read(cx).is_visible() {
1243 log::debug!(
1244 "not adding repositories for local worktree {:?} because it's not visible",
1245 worktree.read(cx).abs_path()
1246 );
1247 return;
1248 }
1249 self.update_repositories_from_worktree(
1250 *worktree_id,
1251 project_environment.clone(),
1252 next_repository_id.clone(),
1253 downstream
1254 .as_ref()
1255 .map(|downstream| downstream.updates_tx.clone()),
1256 changed_repos.clone(),
1257 fs.clone(),
1258 cx,
1259 );
1260 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1261 }
1262 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1263 let repos_without_worktree: Vec<RepositoryId> = self
1264 .worktree_ids
1265 .iter_mut()
1266 .filter_map(|(repo_id, worktree_ids)| {
1267 worktree_ids.remove(worktree_id);
1268 if worktree_ids.is_empty() {
1269 Some(*repo_id)
1270 } else {
1271 None
1272 }
1273 })
1274 .collect();
1275 let is_active_repo_removed = repos_without_worktree
1276 .iter()
1277 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1278
1279 for repo_id in repos_without_worktree {
1280 self.repositories.remove(&repo_id);
1281 self.worktree_ids.remove(&repo_id);
1282 if let Some(updates_tx) =
1283 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1284 {
1285 updates_tx
1286 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1287 .ok();
1288 }
1289 }
1290
1291 if is_active_repo_removed {
1292 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1293 self.active_repo_id = Some(repo_id);
1294 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1295 } else {
1296 self.active_repo_id = None;
1297 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1298 }
1299 }
1300 }
1301 _ => {}
1302 }
1303 }
1304 fn on_repository_event(
1305 &mut self,
1306 repo: Entity<Repository>,
1307 event: &RepositoryEvent,
1308 cx: &mut Context<Self>,
1309 ) {
1310 let id = repo.read(cx).id;
1311 let repo_snapshot = repo.read(cx).snapshot.clone();
1312 for (buffer_id, diff) in self.diffs.iter() {
1313 if let Some((buffer_repo, repo_path)) =
1314 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1315 && buffer_repo == repo
1316 {
1317 diff.update(cx, |diff, cx| {
1318 if let Some(conflict_set) = &diff.conflict_set {
1319 let conflict_status_changed =
1320 conflict_set.update(cx, |conflict_set, cx| {
1321 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1322 conflict_set.set_has_conflict(has_conflict, cx)
1323 })?;
1324 if conflict_status_changed {
1325 let buffer_store = self.buffer_store.read(cx);
1326 if let Some(buffer) = buffer_store.get(*buffer_id) {
1327 let _ = diff
1328 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1329 }
1330 }
1331 }
1332 anyhow::Ok(())
1333 })
1334 .ok();
1335 }
1336 }
1337 cx.emit(GitStoreEvent::RepositoryUpdated(
1338 id,
1339 event.clone(),
1340 self.active_repo_id == Some(id),
1341 ))
1342 }
1343
1344 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1345 cx.emit(GitStoreEvent::JobsUpdated)
1346 }
1347
    /// Updates our list of repositories and schedules git scans in response to
    /// a notification from a worktree.
1349 fn update_repositories_from_worktree(
1350 &mut self,
1351 worktree_id: WorktreeId,
1352 project_environment: Entity<ProjectEnvironment>,
1353 next_repository_id: Arc<AtomicU64>,
1354 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1355 updated_git_repositories: UpdatedGitRepositoriesSet,
1356 fs: Arc<dyn Fs>,
1357 cx: &mut Context<Self>,
1358 ) {
1359 let mut removed_ids = Vec::new();
1360 for update in updated_git_repositories.iter() {
1361 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1362 let existing_work_directory_abs_path =
1363 repo.read(cx).work_directory_abs_path.clone();
1364 Some(&existing_work_directory_abs_path)
1365 == update.old_work_directory_abs_path.as_ref()
1366 || Some(&existing_work_directory_abs_path)
1367 == update.new_work_directory_abs_path.as_ref()
1368 }) {
1369 let repo_id = *id;
1370 if let Some(new_work_directory_abs_path) =
1371 update.new_work_directory_abs_path.clone()
1372 {
1373 self.worktree_ids
1374 .entry(repo_id)
1375 .or_insert_with(HashSet::new)
1376 .insert(worktree_id);
1377 existing.update(cx, |existing, cx| {
1378 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1379 existing.schedule_scan(updates_tx.clone(), cx);
1380 });
1381 } else {
1382 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1383 worktree_ids.remove(&worktree_id);
1384 if worktree_ids.is_empty() {
1385 removed_ids.push(repo_id);
1386 }
1387 }
1388 }
1389 } else if let UpdatedGitRepository {
1390 new_work_directory_abs_path: Some(work_directory_abs_path),
1391 dot_git_abs_path: Some(dot_git_abs_path),
1392 repository_dir_abs_path: Some(_repository_dir_abs_path),
1393 common_dir_abs_path: Some(_common_dir_abs_path),
1394 ..
1395 } = update
1396 {
1397 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1398 let git_store = cx.weak_entity();
1399 let repo = cx.new(|cx| {
1400 let mut repo = Repository::local(
1401 id,
1402 work_directory_abs_path.clone(),
1403 dot_git_abs_path.clone(),
1404 project_environment.downgrade(),
1405 fs.clone(),
1406 git_store,
1407 cx,
1408 );
1409 if let Some(updates_tx) = updates_tx.as_ref() {
1410 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1411 updates_tx
1412 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1413 .ok();
1414 }
1415 repo.schedule_scan(updates_tx.clone(), cx);
1416 repo
1417 });
1418 self._subscriptions
1419 .push(cx.subscribe(&repo, Self::on_repository_event));
1420 self._subscriptions
1421 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1422 self.repositories.insert(id, repo);
1423 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1424 cx.emit(GitStoreEvent::RepositoryAdded);
1425 self.active_repo_id.get_or_insert_with(|| {
1426 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1427 id
1428 });
1429 }
1430 }
1431
1432 for id in removed_ids {
1433 if self.active_repo_id == Some(id) {
1434 self.active_repo_id = None;
1435 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1436 }
1437 self.repositories.remove(&id);
1438 if let Some(updates_tx) = updates_tx.as_ref() {
1439 updates_tx
1440 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1441 .ok();
1442 }
1443 }
1444 }
1445
1446 fn on_buffer_store_event(
1447 &mut self,
1448 _: Entity<BufferStore>,
1449 event: &BufferStoreEvent,
1450 cx: &mut Context<Self>,
1451 ) {
1452 match event {
1453 BufferStoreEvent::BufferAdded(buffer) => {
1454 cx.subscribe(buffer, |this, buffer, event, cx| {
1455 if let BufferEvent::LanguageChanged(_) = event {
1456 let buffer_id = buffer.read(cx).remote_id();
1457 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1458 diff_state.update(cx, |diff_state, cx| {
1459 diff_state.buffer_language_changed(buffer, cx);
1460 });
1461 }
1462 }
1463 })
1464 .detach();
1465 }
1466 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1467 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1468 diffs.remove(buffer_id);
1469 }
1470 }
1471 BufferStoreEvent::BufferDropped(buffer_id) => {
1472 self.diffs.remove(buffer_id);
1473 for diffs in self.shared_diffs.values_mut() {
1474 diffs.remove(buffer_id);
1475 }
1476 }
1477 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1482 let buffer_id = buffer.read(cx).remote_id();
1483 let diff_state = self.diffs.get(&buffer_id);
1484 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1485
1486 if let Some(diff_state) = diff_state
1487 && let Some((repo, repo_path)) = repo
1488 {
1489 let buffer = buffer.clone();
1490 let diff_state = diff_state.clone();
1491
1492 cx.spawn(async move |_git_store, cx| {
1493 async {
1494 let diff_bases_change = repo
1495 .update(cx, |repo, cx| {
1496 repo.load_committed_text(buffer_id, repo_path, cx)
1497 })?
1498 .await?;
1499
1500 diff_state.update(cx, |diff_state, cx| {
1501 let buffer_snapshot = buffer.read(cx).text_snapshot();
1502 diff_state.diff_bases_changed(
1503 buffer_snapshot,
1504 Some(diff_bases_change),
1505 cx,
1506 );
1507 })
1508 }
1509 .await
1510 .log_err();
1511 })
1512 .detach();
1513 }
1514 }
1515 _ => {}
1516 }
1517 }
1518
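    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all of that work has finished.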
1519 pub fn recalculate_buffer_diffs(
1520 &mut self,
1521 buffers: Vec<Entity<Buffer>>,
1522 cx: &mut Context<Self>,
1523 ) -> impl Future<Output = ()> + use<> {
1524 let mut futures = Vec::new();
1525 for buffer in buffers {
1526 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1527 let buffer = buffer.read(cx).text_snapshot();
1528 diff_state.update(cx, |diff_state, cx| {
1529 diff_state.recalculate_diffs(buffer.clone(), cx);
1530 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1531 });
1532 futures.push(diff_state.update(cx, |diff_state, cx| {
1533 diff_state
1534 .reparse_conflict_markers(buffer, cx)
1535 .map(|_| {})
1536 .boxed()
1537 }));
1538 }
1539 }
1540 async move {
1541 futures::future::join_all(futures).await;
1542 }
1543 }
1544
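    /// When hunks are staged or unstaged in a buffer diff, kicks off a job to
    /// write the new index text for that path, clearing the pending hunks and
    /// emitting an `IndexWriteError` if the write fails.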
1545 fn on_buffer_diff_event(
1546 &mut self,
1547 diff: Entity<buffer_diff::BufferDiff>,
1548 event: &BufferDiffEvent,
1549 cx: &mut Context<Self>,
1550 ) {
1551 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1552 let buffer_id = diff.read(cx).buffer_id;
1553 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1554 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1555 diff_state.hunk_staging_operation_count += 1;
1556 diff_state.hunk_staging_operation_count
1557 });
1558 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1559 let recv = repo.update(cx, |repo, cx| {
1560 log::debug!("hunks changed for {}", path.as_unix_str());
1561 repo.spawn_set_index_text_job(
1562 path,
1563 new_index_text.as_ref().map(|rope| rope.to_string()),
1564 Some(hunk_staging_operation_count),
1565 cx,
1566 )
1567 });
1568 let diff = diff.downgrade();
1569 cx.spawn(async move |this, cx| {
1570 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1571 diff.update(cx, |diff, cx| {
1572 diff.clear_pending_hunks(cx);
1573 })
1574 .ok();
1575 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1576 .ok();
1577 }
1578 })
1579 .detach();
1580 }
1581 }
1582 }
1583 }
1584
1585 fn local_worktree_git_repos_changed(
1586 &mut self,
1587 worktree: Entity<Worktree>,
1588 changed_repos: &UpdatedGitRepositoriesSet,
1589 cx: &mut Context<Self>,
1590 ) {
1591 log::debug!("local worktree repos changed");
1592 debug_assert!(worktree.read(cx).is_local());
1593
1594 for repository in self.repositories.values() {
1595 repository.update(cx, |repository, cx| {
1596 let repo_abs_path = &repository.work_directory_abs_path;
1597 if changed_repos.iter().any(|update| {
1598 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1600 }) {
1601 repository.reload_buffer_diff_bases(cx);
1602 }
1603 });
1604 }
1605 }
1606
1607 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1608 &self.repositories
1609 }
1610
1611 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1612 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1613 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1614 Some(status.status)
1615 }
1616
1617 pub fn repository_and_path_for_buffer_id(
1618 &self,
1619 buffer_id: BufferId,
1620 cx: &App,
1621 ) -> Option<(Entity<Repository>, RepoPath)> {
1622 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1623 let project_path = buffer.read(cx).project_path(cx)?;
1624 self.repository_and_path_for_project_path(&project_path, cx)
1625 }
1626
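    /// Returns the repository containing the given project path, along with
    /// the path relative to that repository's working directory. When nested
    /// repositories contain the path, the innermost one is returned.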
1627 pub fn repository_and_path_for_project_path(
1628 &self,
1629 path: &ProjectPath,
1630 cx: &App,
1631 ) -> Option<(Entity<Repository>, RepoPath)> {
1632 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1633 self.repositories
1634 .values()
1635 .filter_map(|repo| {
1636 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1637 Some((repo.clone(), repo_path))
1638 })
1639 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1640 }
1641
1642 pub fn git_init(
1643 &self,
1644 path: Arc<Path>,
1645 fallback_branch_name: String,
1646 cx: &App,
1647 ) -> Task<Result<()>> {
1648 match &self.state {
1649 GitStoreState::Local { fs, .. } => {
1650 let fs = fs.clone();
1651 cx.background_executor()
1652 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1653 }
1654 GitStoreState::Remote {
1655 upstream_client,
1656 upstream_project_id: project_id,
1657 ..
1658 } => {
1659 let client = upstream_client.clone();
1660 let project_id = *project_id;
1661 cx.background_executor().spawn(async move {
1662 client
1663 .request(proto::GitInit {
                            project_id,
1665 abs_path: path.to_string_lossy().into_owned(),
1666 fallback_branch_name,
1667 })
1668 .await?;
1669 Ok(())
1670 })
1671 }
1672 }
1673 }
1674
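    /// Stages or unstages every hunk of the uncommitted diffs for the given
    /// project paths, so that open buffers reflect the pending operation.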
1675 fn mark_entries_pending_by_project_paths(
1676 &mut self,
1677 project_paths: &[ProjectPath],
1678 stage: bool,
1679 cx: &mut Context<Self>,
1680 ) {
1681 let buffer_store = &self.buffer_store;
1682
1683 for project_path in project_paths {
1684 let Some(buffer) = buffer_store.read(cx).get_by_path(project_path) else {
1685 continue;
1686 };
1687
1688 let buffer_id = buffer.read(cx).remote_id();
1689 let Some(diff_state) = self.diffs.get(&buffer_id) else {
1690 continue;
1691 };
1692
1693 diff_state.update(cx, |diff_state, cx| {
1694 let Some(uncommitted_diff) = diff_state.uncommitted_diff() else {
1695 return;
1696 };
1697
1698 let buffer_snapshot = buffer.read(cx).text_snapshot();
1699 let file_exists = buffer
1700 .read(cx)
1701 .file()
1702 .is_some_and(|file| file.disk_state().exists());
1703
1704 let all_hunks: Vec<_> = uncommitted_diff
1705 .read(cx)
1706 .hunks_intersecting_range(
1707 text::Anchor::MIN..text::Anchor::MAX,
1708 &buffer_snapshot,
1709 cx,
1710 )
1711 .collect();
1712
1713 if !all_hunks.is_empty() {
1714 uncommitted_diff.update(cx, |diff, cx| {
1715 diff.stage_or_unstage_hunks(
1716 stage,
1717 &all_hunks,
1718 &buffer_snapshot,
1719 file_exists,
1720 cx,
1721 );
1722 });
1723 }
1724 });
1725 }
1726 }
1727
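    /// Clones `repo` into `path`, either directly through the filesystem for
    /// local projects or via the upstream client for remote projects (not
    /// supported for collab guests).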
1728 pub fn git_clone(
1729 &self,
1730 repo: String,
1731 path: impl Into<Arc<std::path::Path>>,
1732 cx: &App,
1733 ) -> Task<Result<()>> {
1734 let path = path.into();
1735 match &self.state {
1736 GitStoreState::Local { fs, .. } => {
1737 let fs = fs.clone();
1738 cx.background_executor()
1739 .spawn(async move { fs.git_clone(&repo, &path).await })
1740 }
1741 GitStoreState::Remote {
1742 upstream_client,
1743 upstream_project_id,
1744 ..
1745 } => {
1746 if upstream_client.is_via_collab() {
1747 return Task::ready(Err(anyhow!(
1748 "Git Clone isn't supported for project guests"
1749 )));
1750 }
1751 let request = upstream_client.request(proto::GitClone {
1752 project_id: *upstream_project_id,
1753 abs_path: path.to_string_lossy().into_owned(),
1754 remote_repo: repo,
1755 });
1756
1757 cx.background_spawn(async move {
1758 let result = request.await?;
1759
1760 match result.success {
1761 true => Ok(()),
1762 false => Err(anyhow!("Git Clone failed")),
1763 }
1764 })
1765 }
1766 }
1767 }
1768
1769 async fn handle_update_repository(
1770 this: Entity<Self>,
1771 envelope: TypedEnvelope<proto::UpdateRepository>,
1772 mut cx: AsyncApp,
1773 ) -> Result<()> {
1774 this.update(&mut cx, |this, cx| {
1775 let path_style = this.worktree_store.read(cx).path_style();
1776 let mut update = envelope.payload;
1777
1778 let id = RepositoryId::from_proto(update.id);
1779 let client = this.upstream_client().context("no upstream client")?;
1780
1781 let mut repo_subscription = None;
1782 let repo = this.repositories.entry(id).or_insert_with(|| {
1783 let git_store = cx.weak_entity();
1784 let repo = cx.new(|cx| {
1785 Repository::remote(
1786 id,
1787 Path::new(&update.abs_path).into(),
1788 path_style,
1789 ProjectId(update.project_id),
1790 client,
1791 git_store,
1792 cx,
1793 )
1794 });
1795 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1796 cx.emit(GitStoreEvent::RepositoryAdded);
1797 repo
1798 });
1799 this._subscriptions.extend(repo_subscription);
1800
1801 repo.update(cx, {
1802 let update = update.clone();
1803 |repo, cx| repo.apply_remote_update(update, cx)
1804 })?;
1805
1806 this.active_repo_id.get_or_insert_with(|| {
1807 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1808 id
1809 });
1810
1811 if let Some((client, project_id)) = this.downstream_client() {
1812 update.project_id = project_id.to_proto();
1813 client.send(update).log_err();
1814 }
1815 Ok(())
1816 })?
1817 }
1818
1819 async fn handle_remove_repository(
1820 this: Entity<Self>,
1821 envelope: TypedEnvelope<proto::RemoveRepository>,
1822 mut cx: AsyncApp,
1823 ) -> Result<()> {
1824 this.update(&mut cx, |this, cx| {
1825 let mut update = envelope.payload;
1826 let id = RepositoryId::from_proto(update.id);
1827 this.repositories.remove(&id);
1828 if let Some((client, project_id)) = this.downstream_client() {
1829 update.project_id = project_id.to_proto();
1830 client.send(update).log_err();
1831 }
1832 if this.active_repo_id == Some(id) {
1833 this.active_repo_id = None;
1834 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1835 }
1836 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1837 })
1838 }
1839
1840 async fn handle_git_init(
1841 this: Entity<Self>,
1842 envelope: TypedEnvelope<proto::GitInit>,
1843 cx: AsyncApp,
1844 ) -> Result<proto::Ack> {
1845 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1846 let name = envelope.payload.fallback_branch_name;
1847 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1848 .await?;
1849
1850 Ok(proto::Ack {})
1851 }
1852
1853 async fn handle_git_clone(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::GitClone>,
1856 cx: AsyncApp,
1857 ) -> Result<proto::GitCloneResponse> {
1858 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1859 let repo_name = envelope.payload.remote_repo;
1860 let result = cx
1861 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1862 .await;
1863
1864 Ok(proto::GitCloneResponse {
1865 success: result.is_ok(),
1866 })
1867 }
1868
1869 async fn handle_fetch(
1870 this: Entity<Self>,
1871 envelope: TypedEnvelope<proto::Fetch>,
1872 mut cx: AsyncApp,
1873 ) -> Result<proto::RemoteMessageResponse> {
1874 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1875 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1876 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1877 let askpass_id = envelope.payload.askpass_id;
1878
1879 let askpass = make_remote_delegate(
1880 this,
1881 envelope.payload.project_id,
1882 repository_id,
1883 askpass_id,
1884 &mut cx,
1885 );
1886
1887 let remote_output = repository_handle
1888 .update(&mut cx, |repository_handle, cx| {
1889 repository_handle.fetch(fetch_options, askpass, cx)
1890 })?
1891 .await??;
1892
1893 Ok(proto::RemoteMessageResponse {
1894 stdout: remote_output.stdout,
1895 stderr: remote_output.stderr,
1896 })
1897 }
1898
1899 async fn handle_push(
1900 this: Entity<Self>,
1901 envelope: TypedEnvelope<proto::Push>,
1902 mut cx: AsyncApp,
1903 ) -> Result<proto::RemoteMessageResponse> {
1904 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1905 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1906
1907 let askpass_id = envelope.payload.askpass_id;
1908 let askpass = make_remote_delegate(
1909 this,
1910 envelope.payload.project_id,
1911 repository_id,
1912 askpass_id,
1913 &mut cx,
1914 );
1915
1916 let options = envelope
1917 .payload
1918 .options
1919 .as_ref()
1920 .map(|_| match envelope.payload.options() {
1921 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1922 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1923 });
1924
1925 let branch_name = envelope.payload.branch_name.into();
1926 let remote_name = envelope.payload.remote_name.into();
1927
1928 let remote_output = repository_handle
1929 .update(&mut cx, |repository_handle, cx| {
1930 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1931 })?
1932 .await??;
1933 Ok(proto::RemoteMessageResponse {
1934 stdout: remote_output.stdout,
1935 stderr: remote_output.stderr,
1936 })
1937 }
1938
1939 async fn handle_pull(
1940 this: Entity<Self>,
1941 envelope: TypedEnvelope<proto::Pull>,
1942 mut cx: AsyncApp,
1943 ) -> Result<proto::RemoteMessageResponse> {
1944 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1945 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1946 let askpass_id = envelope.payload.askpass_id;
1947 let askpass = make_remote_delegate(
1948 this,
1949 envelope.payload.project_id,
1950 repository_id,
1951 askpass_id,
1952 &mut cx,
1953 );
1954
1955 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1956 let remote_name = envelope.payload.remote_name.into();
1957 let rebase = envelope.payload.rebase;
1958
1959 let remote_message = repository_handle
1960 .update(&mut cx, |repository_handle, cx| {
1961 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1962 })?
1963 .await??;
1964
1965 Ok(proto::RemoteMessageResponse {
1966 stdout: remote_message.stdout,
1967 stderr: remote_message.stderr,
1968 })
1969 }
1970
1971 async fn handle_stage(
1972 this: Entity<Self>,
1973 envelope: TypedEnvelope<proto::Stage>,
1974 mut cx: AsyncApp,
1975 ) -> Result<proto::Ack> {
1976 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1977 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1978
1979 let entries = envelope
1980 .payload
1981 .paths
1982 .into_iter()
1983 .map(|path| RepoPath::new(&path))
1984 .collect::<Result<Vec<_>>>()?;
1985
1986 repository_handle
1987 .update(&mut cx, |repository_handle, cx| {
1988 repository_handle.stage_entries(entries, cx)
1989 })?
1990 .await?;
1991 Ok(proto::Ack {})
1992 }
1993
1994 async fn handle_unstage(
1995 this: Entity<Self>,
1996 envelope: TypedEnvelope<proto::Unstage>,
1997 mut cx: AsyncApp,
1998 ) -> Result<proto::Ack> {
1999 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2000 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2001
2002 let entries = envelope
2003 .payload
2004 .paths
2005 .into_iter()
2006 .map(|path| RepoPath::new(&path))
2007 .collect::<Result<Vec<_>>>()?;
2008
2009 repository_handle
2010 .update(&mut cx, |repository_handle, cx| {
2011 repository_handle.unstage_entries(entries, cx)
2012 })?
2013 .await?;
2014
2015 Ok(proto::Ack {})
2016 }
2017
2018 async fn handle_stash(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::Stash>,
2021 mut cx: AsyncApp,
2022 ) -> Result<proto::Ack> {
2023 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2024 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2025
2026 let entries = envelope
2027 .payload
2028 .paths
2029 .into_iter()
2030 .map(|path| RepoPath::new(&path))
2031 .collect::<Result<Vec<_>>>()?;
2032
2033 repository_handle
2034 .update(&mut cx, |repository_handle, cx| {
2035 repository_handle.stash_entries(entries, cx)
2036 })?
2037 .await?;
2038
2039 Ok(proto::Ack {})
2040 }
2041
2042 async fn handle_stash_pop(
2043 this: Entity<Self>,
2044 envelope: TypedEnvelope<proto::StashPop>,
2045 mut cx: AsyncApp,
2046 ) -> Result<proto::Ack> {
2047 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2048 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2049 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2050
2051 repository_handle
2052 .update(&mut cx, |repository_handle, cx| {
2053 repository_handle.stash_pop(stash_index, cx)
2054 })?
2055 .await?;
2056
2057 Ok(proto::Ack {})
2058 }
2059
2060 async fn handle_stash_apply(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::StashApply>,
2063 mut cx: AsyncApp,
2064 ) -> Result<proto::Ack> {
2065 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2066 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2067 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2068
2069 repository_handle
2070 .update(&mut cx, |repository_handle, cx| {
2071 repository_handle.stash_apply(stash_index, cx)
2072 })?
2073 .await?;
2074
2075 Ok(proto::Ack {})
2076 }
2077
2078 async fn handle_stash_drop(
2079 this: Entity<Self>,
2080 envelope: TypedEnvelope<proto::StashDrop>,
2081 mut cx: AsyncApp,
2082 ) -> Result<proto::Ack> {
2083 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2084 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2085 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2086
2087 repository_handle
2088 .update(&mut cx, |repository_handle, cx| {
2089 repository_handle.stash_drop(stash_index, cx)
2090 })?
2091 .await??;
2092
2093 Ok(proto::Ack {})
2094 }
2095
2096 async fn handle_set_index_text(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::SetIndexText>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::Ack> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2104
2105 repository_handle
2106 .update(&mut cx, |repository_handle, cx| {
2107 repository_handle.spawn_set_index_text_job(
2108 repo_path,
2109 envelope.payload.text,
2110 None,
2111 cx,
2112 )
2113 })?
2114 .await??;
2115 Ok(proto::Ack {})
2116 }
2117
2118 async fn handle_run_hook(
2119 this: Entity<Self>,
2120 envelope: TypedEnvelope<proto::RunGitHook>,
2121 mut cx: AsyncApp,
2122 ) -> Result<proto::Ack> {
2123 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2124 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2125 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2126 repository_handle
2127 .update(&mut cx, |repository_handle, cx| {
2128 repository_handle.run_hook(hook, cx)
2129 })?
2130 .await??;
2131 Ok(proto::Ack {})
2132 }
2133
2134 async fn handle_commit(
2135 this: Entity<Self>,
2136 envelope: TypedEnvelope<proto::Commit>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::Ack> {
2139 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2140 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2141 let askpass_id = envelope.payload.askpass_id;
2142
2143 let askpass = make_remote_delegate(
2144 this,
2145 envelope.payload.project_id,
2146 repository_id,
2147 askpass_id,
2148 &mut cx,
2149 );
2150
2151 let message = SharedString::from(envelope.payload.message);
2152 let name = envelope.payload.name.map(SharedString::from);
2153 let email = envelope.payload.email.map(SharedString::from);
2154 let options = envelope.payload.options.unwrap_or_default();
2155
2156 repository_handle
2157 .update(&mut cx, |repository_handle, cx| {
2158 repository_handle.commit(
2159 message,
2160 name.zip(email),
2161 CommitOptions {
2162 amend: options.amend,
2163 signoff: options.signoff,
2164 },
2165 askpass,
2166 cx,
2167 )
2168 })?
2169 .await??;
2170 Ok(proto::Ack {})
2171 }
2172
2173 async fn handle_get_remotes(
2174 this: Entity<Self>,
2175 envelope: TypedEnvelope<proto::GetRemotes>,
2176 mut cx: AsyncApp,
2177 ) -> Result<proto::GetRemotesResponse> {
2178 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2179 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2180
2181 let branch_name = envelope.payload.branch_name;
2182 let is_push = envelope.payload.is_push;
2183
2184 let remotes = repository_handle
2185 .update(&mut cx, |repository_handle, _| {
2186 repository_handle.get_remotes(branch_name, is_push)
2187 })?
2188 .await??;
2189
2190 Ok(proto::GetRemotesResponse {
2191 remotes: remotes
2192 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2195 })
2196 .collect::<Vec<_>>(),
2197 })
2198 }
2199
2200 async fn handle_get_worktrees(
2201 this: Entity<Self>,
2202 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2203 mut cx: AsyncApp,
2204 ) -> Result<proto::GitWorktreesResponse> {
2205 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2206 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2207
2208 let worktrees = repository_handle
2209 .update(&mut cx, |repository_handle, _| {
2210 repository_handle.worktrees()
2211 })?
2212 .await??;
2213
2214 Ok(proto::GitWorktreesResponse {
2215 worktrees: worktrees
2216 .into_iter()
2217 .map(|worktree| worktree_to_proto(&worktree))
2218 .collect::<Vec<_>>(),
2219 })
2220 }
2221
2222 async fn handle_create_worktree(
2223 this: Entity<Self>,
2224 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::Ack> {
2227 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2228 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2229 let directory = PathBuf::from(envelope.payload.directory);
2230 let name = envelope.payload.name;
2231 let commit = envelope.payload.commit;
2232
2233 repository_handle
2234 .update(&mut cx, |repository_handle, _| {
2235 repository_handle.create_worktree(name, directory, commit)
2236 })?
2237 .await??;
2238
2239 Ok(proto::Ack {})
2240 }
2241
2242 async fn handle_get_branches(
2243 this: Entity<Self>,
2244 envelope: TypedEnvelope<proto::GitGetBranches>,
2245 mut cx: AsyncApp,
2246 ) -> Result<proto::GitBranchesResponse> {
2247 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2248 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2249
2250 let branches = repository_handle
2251 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2252 .await??;
2253
2254 Ok(proto::GitBranchesResponse {
2255 branches: branches
2256 .into_iter()
2257 .map(|branch| branch_to_proto(&branch))
2258 .collect::<Vec<_>>(),
2259 })
2260 }
2261 async fn handle_get_default_branch(
2262 this: Entity<Self>,
2263 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2264 mut cx: AsyncApp,
2265 ) -> Result<proto::GetDefaultBranchResponse> {
2266 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2267 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2268
2269 let branch = repository_handle
2270 .update(&mut cx, |repository_handle, _| {
2271 repository_handle.default_branch()
2272 })?
2273 .await??
2274 .map(Into::into);
2275
2276 Ok(proto::GetDefaultBranchResponse { branch })
2277 }
2278 async fn handle_create_branch(
2279 this: Entity<Self>,
2280 envelope: TypedEnvelope<proto::GitCreateBranch>,
2281 mut cx: AsyncApp,
2282 ) -> Result<proto::Ack> {
2283 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2284 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2285 let branch_name = envelope.payload.branch_name;
2286
2287 repository_handle
2288 .update(&mut cx, |repository_handle, _| {
2289 repository_handle.create_branch(branch_name, None)
2290 })?
2291 .await??;
2292
2293 Ok(proto::Ack {})
2294 }
2295
2296 async fn handle_change_branch(
2297 this: Entity<Self>,
2298 envelope: TypedEnvelope<proto::GitChangeBranch>,
2299 mut cx: AsyncApp,
2300 ) -> Result<proto::Ack> {
2301 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2302 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2303 let branch_name = envelope.payload.branch_name;
2304
2305 repository_handle
2306 .update(&mut cx, |repository_handle, _| {
2307 repository_handle.change_branch(branch_name)
2308 })?
2309 .await??;
2310
2311 Ok(proto::Ack {})
2312 }
2313
2314 async fn handle_rename_branch(
2315 this: Entity<Self>,
2316 envelope: TypedEnvelope<proto::GitRenameBranch>,
2317 mut cx: AsyncApp,
2318 ) -> Result<proto::Ack> {
2319 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2320 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2321 let branch = envelope.payload.branch;
2322 let new_name = envelope.payload.new_name;
2323
2324 repository_handle
2325 .update(&mut cx, |repository_handle, _| {
2326 repository_handle.rename_branch(branch, new_name)
2327 })?
2328 .await??;
2329
2330 Ok(proto::Ack {})
2331 }
2332
2333 async fn handle_create_remote(
2334 this: Entity<Self>,
2335 envelope: TypedEnvelope<proto::GitCreateRemote>,
2336 mut cx: AsyncApp,
2337 ) -> Result<proto::Ack> {
2338 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2339 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2340 let remote_name = envelope.payload.remote_name;
2341 let remote_url = envelope.payload.remote_url;
2342
2343 repository_handle
2344 .update(&mut cx, |repository_handle, _| {
2345 repository_handle.create_remote(remote_name, remote_url)
2346 })?
2347 .await??;
2348
2349 Ok(proto::Ack {})
2350 }
2351
2352 async fn handle_delete_branch(
2353 this: Entity<Self>,
2354 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2355 mut cx: AsyncApp,
2356 ) -> Result<proto::Ack> {
2357 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2358 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2359 let branch_name = envelope.payload.branch_name;
2360
2361 repository_handle
2362 .update(&mut cx, |repository_handle, _| {
2363 repository_handle.delete_branch(branch_name)
2364 })?
2365 .await??;
2366
2367 Ok(proto::Ack {})
2368 }
2369
2370 async fn handle_remove_remote(
2371 this: Entity<Self>,
2372 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2373 mut cx: AsyncApp,
2374 ) -> Result<proto::Ack> {
2375 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2376 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2377 let remote_name = envelope.payload.remote_name;
2378
2379 repository_handle
2380 .update(&mut cx, |repository_handle, _| {
2381 repository_handle.remove_remote(remote_name)
2382 })?
2383 .await??;
2384
2385 Ok(proto::Ack {})
2386 }
2387
2388 async fn handle_show(
2389 this: Entity<Self>,
2390 envelope: TypedEnvelope<proto::GitShow>,
2391 mut cx: AsyncApp,
2392 ) -> Result<proto::GitCommitDetails> {
2393 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2394 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2395
2396 let commit = repository_handle
2397 .update(&mut cx, |repository_handle, _| {
2398 repository_handle.show(envelope.payload.commit)
2399 })?
2400 .await??;
2401 Ok(proto::GitCommitDetails {
2402 sha: commit.sha.into(),
2403 message: commit.message.into(),
2404 commit_timestamp: commit.commit_timestamp,
2405 author_email: commit.author_email.into(),
2406 author_name: commit.author_name.into(),
2407 })
2408 }
2409
2410 async fn handle_load_commit_diff(
2411 this: Entity<Self>,
2412 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2413 mut cx: AsyncApp,
2414 ) -> Result<proto::LoadCommitDiffResponse> {
2415 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2416 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2417
2418 let commit_diff = repository_handle
2419 .update(&mut cx, |repository_handle, _| {
2420 repository_handle.load_commit_diff(envelope.payload.commit)
2421 })?
2422 .await??;
2423 Ok(proto::LoadCommitDiffResponse {
2424 files: commit_diff
2425 .files
2426 .into_iter()
2427 .map(|file| proto::CommitFile {
2428 path: file.path.to_proto(),
2429 old_text: file.old_text,
2430 new_text: file.new_text,
2431 })
2432 .collect(),
2433 })
2434 }
2435
2436 async fn handle_file_history(
2437 this: Entity<Self>,
2438 envelope: TypedEnvelope<proto::GitFileHistory>,
2439 mut cx: AsyncApp,
2440 ) -> Result<proto::GitFileHistoryResponse> {
2441 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2442 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2443 let path = RepoPath::from_proto(&envelope.payload.path)?;
2444 let skip = envelope.payload.skip as usize;
2445 let limit = envelope.payload.limit.map(|l| l as usize);
2446
2447 let file_history = repository_handle
2448 .update(&mut cx, |repository_handle, _| {
2449 repository_handle.file_history_paginated(path, skip, limit)
2450 })?
2451 .await??;
2452
2453 Ok(proto::GitFileHistoryResponse {
2454 entries: file_history
2455 .entries
2456 .into_iter()
2457 .map(|entry| proto::FileHistoryEntry {
2458 sha: entry.sha.to_string(),
2459 subject: entry.subject.to_string(),
2460 message: entry.message.to_string(),
2461 commit_timestamp: entry.commit_timestamp,
2462 author_name: entry.author_name.to_string(),
2463 author_email: entry.author_email.to_string(),
2464 })
2465 .collect(),
2466 path: file_history.path.to_proto(),
2467 })
2468 }
2469
2470 async fn handle_reset(
2471 this: Entity<Self>,
2472 envelope: TypedEnvelope<proto::GitReset>,
2473 mut cx: AsyncApp,
2474 ) -> Result<proto::Ack> {
2475 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2476 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2477
2478 let mode = match envelope.payload.mode() {
2479 git_reset::ResetMode::Soft => ResetMode::Soft,
2480 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2481 };
2482
2483 repository_handle
2484 .update(&mut cx, |repository_handle, cx| {
2485 repository_handle.reset(envelope.payload.commit, mode, cx)
2486 })?
2487 .await??;
2488 Ok(proto::Ack {})
2489 }
2490
2491 async fn handle_checkout_files(
2492 this: Entity<Self>,
2493 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2494 mut cx: AsyncApp,
2495 ) -> Result<proto::Ack> {
2496 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2497 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2498 let paths = envelope
2499 .payload
2500 .paths
2501 .iter()
2502 .map(|s| RepoPath::from_proto(s))
2503 .collect::<Result<Vec<_>>>()?;
2504
2505 repository_handle
2506 .update(&mut cx, |repository_handle, cx| {
2507 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2508 })?
2509 .await?;
2510 Ok(proto::Ack {})
2511 }
2512
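    /// Opens the repository's commit message buffer and replicates it to the
    /// requesting peer via the buffer store.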
2513 async fn handle_open_commit_message_buffer(
2514 this: Entity<Self>,
2515 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2516 mut cx: AsyncApp,
2517 ) -> Result<proto::OpenBufferResponse> {
2518 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2519 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2520 let buffer = repository
2521 .update(&mut cx, |repository, cx| {
2522 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2523 })?
2524 .await?;
2525
2526 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2527 this.update(&mut cx, |this, cx| {
2528 this.buffer_store.update(cx, |buffer_store, cx| {
2529 buffer_store
2530 .create_buffer_for_peer(
2531 &buffer,
2532 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2533 cx,
2534 )
2535 .detach_and_log_err(cx);
2536 })
2537 })?;
2538
2539 Ok(proto::OpenBufferResponse {
2540 buffer_id: buffer_id.to_proto(),
2541 })
2542 }
2543
2544 async fn handle_askpass(
2545 this: Entity<Self>,
2546 envelope: TypedEnvelope<proto::AskPassRequest>,
2547 mut cx: AsyncApp,
2548 ) -> Result<proto::AskPassResponse> {
2549 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2550 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2551
2552 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2553 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2554 debug_panic!("no askpass found");
2555 anyhow::bail!("no askpass found");
2556 };
2557
2558 let response = askpass
2559 .ask_password(envelope.payload.prompt)
2560 .await
2561 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2562
2563 delegates
2564 .lock()
2565 .insert(envelope.payload.askpass_id, askpass);
2566
        // Note: despite the marker type below, the askpass password is sent back unencrypted over the protocol here.
2568 Ok(proto::AskPassResponse {
2569 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2570 })
2571 }
2572
2573 async fn handle_check_for_pushed_commits(
2574 this: Entity<Self>,
2575 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2576 mut cx: AsyncApp,
2577 ) -> Result<proto::CheckForPushedCommitsResponse> {
2578 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2579 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2580
2581 let branches = repository_handle
2582 .update(&mut cx, |repository_handle, _| {
2583 repository_handle.check_for_pushed_commits()
2584 })?
2585 .await??;
2586 Ok(proto::CheckForPushedCommitsResponse {
2587 pushed_to: branches
2588 .into_iter()
2589 .map(|commit| commit.to_string())
2590 .collect(),
2591 })
2592 }
2593
2594 async fn handle_git_diff(
2595 this: Entity<Self>,
2596 envelope: TypedEnvelope<proto::GitDiff>,
2597 mut cx: AsyncApp,
2598 ) -> Result<proto::GitDiffResponse> {
2599 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2600 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2601 let diff_type = match envelope.payload.diff_type() {
2602 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2603 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2604 };
2605
2606 let mut diff = repository_handle
2607 .update(&mut cx, |repository_handle, cx| {
2608 repository_handle.diff(diff_type, cx)
2609 })?
2610 .await??;
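        // Cap the response size: anything beyond the first megabyte of the diff is dropped.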
2611 const ONE_MB: usize = 1_000_000;
2612 if diff.len() > ONE_MB {
2613 diff = diff.chars().take(ONE_MB).collect()
2614 }
2615
2616 Ok(proto::GitDiffResponse { diff })
2617 }
2618
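    /// Computes a tree diff between two revisions, either against their merge
    /// base or directly from `base` to `head`, serializing each entry's status
    /// and, for modified or deleted entries, the old blob oid.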
2619 async fn handle_tree_diff(
2620 this: Entity<Self>,
2621 request: TypedEnvelope<proto::GetTreeDiff>,
2622 mut cx: AsyncApp,
2623 ) -> Result<proto::GetTreeDiffResponse> {
2624 let repository_id = RepositoryId(request.payload.repository_id);
2625 let diff_type = if request.payload.is_merge {
2626 DiffTreeType::MergeBase {
2627 base: request.payload.base.into(),
2628 head: request.payload.head.into(),
2629 }
2630 } else {
2631 DiffTreeType::Since {
2632 base: request.payload.base.into(),
2633 head: request.payload.head.into(),
2634 }
2635 };
2636
2637 let diff = this
2638 .update(&mut cx, |this, cx| {
2639 let repository = this.repositories().get(&repository_id)?;
2640 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2641 })?
2642 .context("missing repository")?
2643 .await??;
2644
2645 Ok(proto::GetTreeDiffResponse {
2646 entries: diff
2647 .entries
2648 .into_iter()
2649 .map(|(path, status)| proto::TreeDiffStatus {
2650 path: path.as_ref().to_proto(),
2651 status: match status {
2652 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2653 TreeDiffStatus::Modified { .. } => {
2654 proto::tree_diff_status::Status::Modified.into()
2655 }
2656 TreeDiffStatus::Deleted { .. } => {
2657 proto::tree_diff_status::Status::Deleted.into()
2658 }
2659 },
2660 oid: match status {
2661 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2662 Some(old.to_string())
2663 }
2664 TreeDiffStatus::Added => None,
2665 },
2666 })
2667 .collect(),
2668 })
2669 }
2670
2671 async fn handle_get_blob_content(
2672 this: Entity<Self>,
2673 request: TypedEnvelope<proto::GetBlobContent>,
2674 mut cx: AsyncApp,
2675 ) -> Result<proto::GetBlobContentResponse> {
2676 let oid = git::Oid::from_str(&request.payload.oid)?;
2677 let repository_id = RepositoryId(request.payload.repository_id);
2678 let content = this
2679 .update(&mut cx, |this, cx| {
2680 let repository = this.repositories().get(&repository_id)?;
2681 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2682 })?
2683 .context("missing repository")?
2684 .await?;
2685 Ok(proto::GetBlobContentResponse { content })
2686 }
2687
2688 async fn handle_open_unstaged_diff(
2689 this: Entity<Self>,
2690 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2691 mut cx: AsyncApp,
2692 ) -> Result<proto::OpenUnstagedDiffResponse> {
2693 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2694 let diff = this
2695 .update(&mut cx, |this, cx| {
2696 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2697 Some(this.open_unstaged_diff(buffer, cx))
2698 })?
2699 .context("missing buffer")?
2700 .await?;
2701 this.update(&mut cx, |this, _| {
2702 let shared_diffs = this
2703 .shared_diffs
2704 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2705 .or_default();
2706 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2707 })?;
2708 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2709 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2710 }
2711
2712 async fn handle_open_uncommitted_diff(
2713 this: Entity<Self>,
2714 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2715 mut cx: AsyncApp,
2716 ) -> Result<proto::OpenUncommittedDiffResponse> {
2717 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2718 let diff = this
2719 .update(&mut cx, |this, cx| {
2720 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2721 Some(this.open_uncommitted_diff(buffer, cx))
2722 })?
2723 .context("missing buffer")?
2724 .await?;
2725 this.update(&mut cx, |this, _| {
2726 let shared_diffs = this
2727 .shared_diffs
2728 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2729 .or_default();
2730 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2731 })?;
2732 diff.read_with(&cx, |diff, cx| {
2733 use proto::open_uncommitted_diff_response::Mode;
2734
2735 let unstaged_diff = diff.secondary_diff();
2736 let index_snapshot = unstaged_diff.and_then(|diff| {
2737 let diff = diff.read(cx);
2738 diff.base_text_exists().then(|| diff.base_text())
2739 });
2740
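            // Decide which base texts to send: when the index snapshot is the same
            // buffer as the committed snapshot, the peer can reuse the committed text
            // rather than receiving the staged text separately.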
2741 let mode;
2742 let staged_text;
2743 let committed_text;
2744 if diff.base_text_exists() {
2745 let committed_snapshot = diff.base_text();
2746 committed_text = Some(committed_snapshot.text());
2747 if let Some(index_text) = index_snapshot {
2748 if index_text.remote_id() == committed_snapshot.remote_id() {
2749 mode = Mode::IndexMatchesHead;
2750 staged_text = None;
2751 } else {
2752 mode = Mode::IndexAndHead;
2753 staged_text = Some(index_text.text());
2754 }
2755 } else {
2756 mode = Mode::IndexAndHead;
2757 staged_text = None;
2758 }
2759 } else {
2760 mode = Mode::IndexAndHead;
2761 committed_text = None;
2762 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2763 }
2764
2765 proto::OpenUncommittedDiffResponse {
2766 committed_text,
2767 staged_text,
2768 mode: mode.into(),
2769 }
2770 })
2771 }
2772
2773 async fn handle_update_diff_bases(
2774 this: Entity<Self>,
2775 request: TypedEnvelope<proto::UpdateDiffBases>,
2776 mut cx: AsyncApp,
2777 ) -> Result<()> {
2778 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2779 this.update(&mut cx, |this, cx| {
2780 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2781 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2782 {
2783 let buffer = buffer.read(cx).text_snapshot();
2784 diff_state.update(cx, |diff_state, cx| {
2785 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2786 })
2787 }
2788 })
2789 }
2790
2791 async fn handle_blame_buffer(
2792 this: Entity<Self>,
2793 envelope: TypedEnvelope<proto::BlameBuffer>,
2794 mut cx: AsyncApp,
2795 ) -> Result<proto::BlameBufferResponse> {
2796 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2797 let version = deserialize_version(&envelope.payload.version);
2798 let buffer = this.read_with(&cx, |this, cx| {
2799 this.buffer_store.read(cx).get_existing(buffer_id)
2800 })??;
2801 buffer
2802 .update(&mut cx, |buffer, _| {
2803 buffer.wait_for_version(version.clone())
2804 })?
2805 .await?;
2806 let blame = this
2807 .update(&mut cx, |this, cx| {
2808 this.blame_buffer(&buffer, Some(version), cx)
2809 })?
2810 .await?;
2811 Ok(serialize_blame_buffer_response(blame))
2812 }
2813
2814 async fn handle_get_permalink_to_line(
2815 this: Entity<Self>,
2816 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2817 mut cx: AsyncApp,
2818 ) -> Result<proto::GetPermalinkToLineResponse> {
2819 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2820 // let version = deserialize_version(&envelope.payload.version);
2821 let selection = {
2822 let proto_selection = envelope
2823 .payload
2824 .selection
2825 .context("no selection to get permalink for defined")?;
2826 proto_selection.start as u32..proto_selection.end as u32
2827 };
2828 let buffer = this.read_with(&cx, |this, cx| {
2829 this.buffer_store.read(cx).get_existing(buffer_id)
2830 })??;
2831 let permalink = this
2832 .update(&mut cx, |this, cx| {
2833 this.get_permalink_to_line(&buffer, selection, cx)
2834 })?
2835 .await?;
2836 Ok(proto::GetPermalinkToLineResponse {
2837 permalink: permalink.to_string(),
2838 })
2839 }
2840
2841 fn repository_for_request(
2842 this: &Entity<Self>,
2843 id: RepositoryId,
2844 cx: &mut AsyncApp,
2845 ) -> Result<Entity<Repository>> {
2846 this.read_with(cx, |this, _| {
2847 this.repositories
2848 .get(&id)
2849 .context("missing repository handle")
2850 .cloned()
2851 })?
2852 }
2853
2854 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2855 self.repositories
2856 .iter()
2857 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2858 .collect()
2859 }
2860
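    /// Maps a batch of updated worktree entries to repository-relative paths,
    /// grouping them by repository and assigning each path to the innermost
    /// repository that contains it.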
2861 fn process_updated_entries(
2862 &self,
2863 worktree: &Entity<Worktree>,
2864 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2865 cx: &mut App,
2866 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2867 let path_style = worktree.read(cx).path_style();
2868 let mut repo_paths = self
2869 .repositories
2870 .values()
2871 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2872 .collect::<Vec<_>>();
2873 let mut entries: Vec<_> = updated_entries
2874 .iter()
2875 .map(|(path, _, _)| path.clone())
2876 .collect();
2877 entries.sort();
2878 let worktree = worktree.read(cx);
2879
2880 let entries = entries
2881 .into_iter()
2882 .map(|path| worktree.absolutize(&path))
2883 .collect::<Arc<[_]>>();
2884
2885 let executor = cx.background_executor().clone();
2886 cx.background_executor().spawn(async move {
2887 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2888 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2889 let mut tasks = FuturesOrdered::new();
2890 for (repo_path, repo) in repo_paths.into_iter().rev() {
2891 let entries = entries.clone();
2892 let task = executor.spawn(async move {
2893 // Find all repository paths that belong to this repo
2894 let mut ix = entries.partition_point(|path| path < &*repo_path);
2895 if ix == entries.len() {
2896 return None;
2897 };
2898
2899 let mut paths = Vec::new();
                    // All paths prefixed by a given repo constitute a contiguous range.
2901 while let Some(path) = entries.get(ix)
2902 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2903 &repo_path, path, path_style,
2904 )
2905 {
2906 paths.push((repo_path, ix));
2907 ix += 1;
2908 }
2909 if paths.is_empty() {
2910 None
2911 } else {
2912 Some((repo, paths))
2913 }
2914 });
2915 tasks.push_back(task);
2916 }
2917
2918 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2919 let mut path_was_used = vec![false; entries.len()];
2920 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned over repositories in reverse-sorted order, so more-specific (deeper) paths are seen first.
            // We always want to assign a path to its innermost repository.
2923 for t in tasks {
2924 let Some((repo, paths)) = t else {
2925 continue;
2926 };
2927 let entry = paths_by_git_repo.entry(repo).or_default();
2928 for (repo_path, ix) in paths {
2929 if path_was_used[ix] {
2930 continue;
2931 }
2932 path_was_used[ix] = true;
2933 entry.push(repo_path);
2934 }
2935 }
2936
2937 paths_by_git_repo
2938 })
2939 }
2940}
2941
2942impl BufferGitState {
2943 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2944 Self {
2945 unstaged_diff: Default::default(),
2946 uncommitted_diff: Default::default(),
2947 recalculate_diff_task: Default::default(),
2948 language: Default::default(),
2949 language_registry: Default::default(),
2950 recalculating_tx: postage::watch::channel_with(false).0,
2951 hunk_staging_operation_count: 0,
2952 hunk_staging_operation_count_as_of_write: 0,
2953 head_text: Default::default(),
2954 index_text: Default::default(),
2955 head_changed: Default::default(),
2956 index_changed: Default::default(),
2957 language_changed: Default::default(),
2958 conflict_updated_futures: Default::default(),
2959 conflict_set: Default::default(),
2960 reparse_conflict_markers_task: Default::default(),
2961 }
2962 }
2963
2964 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2965 self.language = buffer.read(cx).language().cloned();
2966 self.language_changed = true;
2967 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2968 }
2969
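    /// Re-parses conflict markers for the buffer on a background task and
    /// updates the associated `ConflictSet`; the returned receiver completes
    /// once the new snapshot has been applied (or is dropped if there is
    /// nothing to reparse).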
2970 fn reparse_conflict_markers(
2971 &mut self,
2972 buffer: text::BufferSnapshot,
2973 cx: &mut Context<Self>,
2974 ) -> oneshot::Receiver<()> {
2975 let (tx, rx) = oneshot::channel();
2976
2977 let Some(conflict_set) = self
2978 .conflict_set
2979 .as_ref()
2980 .and_then(|conflict_set| conflict_set.upgrade())
2981 else {
2982 return rx;
2983 };
2984
2985 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2986 if conflict_set.has_conflict {
2987 Some(conflict_set.snapshot())
2988 } else {
2989 None
2990 }
2991 });
2992
2993 if let Some(old_snapshot) = old_snapshot {
2994 self.conflict_updated_futures.push(tx);
2995 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2996 let (snapshot, changed_range) = cx
2997 .background_spawn(async move {
2998 let new_snapshot = ConflictSet::parse(&buffer);
2999 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
3000 (new_snapshot, changed_range)
3001 })
3002 .await;
3003 this.update(cx, |this, cx| {
3004 if let Some(conflict_set) = &this.conflict_set {
3005 conflict_set
3006 .update(cx, |conflict_set, cx| {
3007 conflict_set.set_snapshot(snapshot, changed_range, cx);
3008 })
3009 .ok();
3010 }
3011 let futures = std::mem::take(&mut this.conflict_updated_futures);
3012 for tx in futures {
3013 tx.send(()).ok();
3014 }
3015 })
3016 }))
3017 }
3018
3019 rx
3020 }
3021
3022 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3023 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3024 }
3025
3026 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3027 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3028 }
3029
3030 fn handle_base_texts_updated(
3031 &mut self,
3032 buffer: text::BufferSnapshot,
3033 message: proto::UpdateDiffBases,
3034 cx: &mut Context<Self>,
3035 ) {
3036 use proto::update_diff_bases::Mode;
3037
3038 let Some(mode) = Mode::from_i32(message.mode) else {
3039 return;
3040 };
3041
3042 let diff_bases_change = match mode {
3043 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3044 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3045 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3046 Mode::IndexAndHead => DiffBasesChange::SetEach {
3047 index: message.staged_text,
3048 head: message.committed_text,
3049 },
3050 };
3051
3052 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3053 }
3054
3055 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3056 if *self.recalculating_tx.borrow() {
3057 let mut rx = self.recalculating_tx.subscribe();
3058 Some(async move {
3059 loop {
3060 let is_recalculating = rx.recv().await;
3061 if is_recalculating != Some(true) {
3062 break;
3063 }
3064 }
3065 })
3066 } else {
3067 None
3068 }
3069 }
3070
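    /// Applies a change to the cached index and/or HEAD base texts, normalizing
    /// line endings, and then kicks off a diff recalculation.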
3071 fn diff_bases_changed(
3072 &mut self,
3073 buffer: text::BufferSnapshot,
3074 diff_bases_change: Option<DiffBasesChange>,
3075 cx: &mut Context<Self>,
3076 ) {
3077 match diff_bases_change {
3078 Some(DiffBasesChange::SetIndex(index)) => {
3079 self.index_text = index.map(|mut index| {
3080 text::LineEnding::normalize(&mut index);
3081 Arc::new(index)
3082 });
3083 self.index_changed = true;
3084 }
3085 Some(DiffBasesChange::SetHead(head)) => {
3086 self.head_text = head.map(|mut head| {
3087 text::LineEnding::normalize(&mut head);
3088 Arc::new(head)
3089 });
3090 self.head_changed = true;
3091 }
3092 Some(DiffBasesChange::SetBoth(text)) => {
3093 let text = text.map(|mut text| {
3094 text::LineEnding::normalize(&mut text);
3095 Arc::new(text)
3096 });
3097 self.head_text = text.clone();
3098 self.index_text = text;
3099 self.head_changed = true;
3100 self.index_changed = true;
3101 }
3102 Some(DiffBasesChange::SetEach { index, head }) => {
3103 self.index_text = index.map(|mut index| {
3104 text::LineEnding::normalize(&mut index);
3105 Arc::new(index)
3106 });
3107 self.index_changed = true;
3108 self.head_text = head.map(|mut head| {
3109 text::LineEnding::normalize(&mut head);
3110 Arc::new(head)
3111 });
3112 self.head_changed = true;
3113 }
3114 None => {}
3115 }
3116
3117 self.recalculate_diffs(buffer, cx)
3118 }
3119
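    /// Recomputes the unstaged and uncommitted diffs for the buffer on a
    /// background task. When the index matches HEAD, the unstaged diff is
    /// reused as the uncommitted diff, and the attempt is abandoned if new
    /// hunk staging operations arrive while it is in flight.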
3120 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3121 *self.recalculating_tx.borrow_mut() = true;
3122
3123 let language = self.language.clone();
3124 let language_registry = self.language_registry.clone();
3125 let unstaged_diff = self.unstaged_diff();
3126 let uncommitted_diff = self.uncommitted_diff();
3127 let head = self.head_text.clone();
3128 let index = self.index_text.clone();
3129 let index_changed = self.index_changed;
3130 let head_changed = self.head_changed;
3131 let language_changed = self.language_changed;
3132 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3133 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3134 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3135 (None, None) => true,
3136 _ => false,
3137 };
3138 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3139 log::debug!(
3140 "start recalculating diffs for buffer {}",
3141 buffer.remote_id()
3142 );
3143
3144 let mut new_unstaged_diff = None;
3145 if let Some(unstaged_diff) = &unstaged_diff {
3146 new_unstaged_diff = Some(
3147 BufferDiff::update_diff(
3148 unstaged_diff.clone(),
3149 buffer.clone(),
3150 index,
3151 index_changed,
3152 language_changed,
3153 language.clone(),
3154 language_registry.clone(),
3155 cx,
3156 )
3157 .await?,
3158 );
3159 }
3160
3161 // Dropping BufferDiff can be expensive, so yield back to the event loop
3162 // for a bit
3163 yield_now().await;
3164
3165 let mut new_uncommitted_diff = None;
3166 if let Some(uncommitted_diff) = &uncommitted_diff {
3167 new_uncommitted_diff = if index_matches_head {
3168 new_unstaged_diff.clone()
3169 } else {
3170 Some(
3171 BufferDiff::update_diff(
3172 uncommitted_diff.clone(),
3173 buffer.clone(),
3174 head,
3175 head_changed,
3176 language_changed,
3177 language.clone(),
3178 language_registry.clone(),
3179 cx,
3180 )
3181 .await?,
3182 )
3183 }
3184 }
3185
3186 // Dropping BufferDiff can be expensive, so yield back to the event loop
3187 // for a bit
3188 yield_now().await;
3189
3190 let cancel = this.update(cx, |this, _| {
3191 // This checks whether all pending stage/unstage operations
3192 // have quiesced (i.e. both the corresponding write and the
3193 // read of that write have completed). If not, then we cancel
3194 // this recalculation attempt to avoid invalidating pending
3195 // state too quickly; another recalculation will come along
3196 // later and clear the pending state once the state of the index has settled.
3197 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3198 *this.recalculating_tx.borrow_mut() = false;
3199 true
3200 } else {
3201 false
3202 }
3203 })?;
3204 if cancel {
3205 log::debug!(
3206 concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
3209 ),
3210 buffer.remote_id()
3211 );
3212 return Ok(());
3213 }
3214
3215 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3216 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3217 {
3218 unstaged_diff.update(cx, |diff, cx| {
3219 if language_changed {
3220 diff.language_changed(cx);
3221 }
3222 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3223 })?
3224 } else {
3225 None
3226 };
3227
3228 yield_now().await;
3229
3230 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3231 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3232 {
3233 uncommitted_diff.update(cx, |diff, cx| {
3234 if language_changed {
3235 diff.language_changed(cx);
3236 }
3237 diff.set_snapshot_with_secondary(
3238 new_uncommitted_diff,
3239 &buffer,
3240 unstaged_changed_range,
3241 true,
3242 cx,
3243 );
3244 })?;
3245 }
3246
3247 log::debug!(
3248 "finished recalculating diffs for buffer {}",
3249 buffer.remote_id()
3250 );
3251
3252 if let Some(this) = this.upgrade() {
3253 this.update(cx, |this, _| {
3254 this.index_changed = false;
3255 this.head_changed = false;
3256 this.language_changed = false;
3257 *this.recalculating_tx.borrow_mut() = false;
3258 })?;
3259 }
3260
3261 Ok(())
3262 }));
3263 }
3264}
3265
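/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream
/// client over RPC and relays the response back to the requesting git command.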
3266fn make_remote_delegate(
3267 this: Entity<GitStore>,
3268 project_id: u64,
3269 repository_id: RepositoryId,
3270 askpass_id: u64,
3271 cx: &mut AsyncApp,
3272) -> AskPassDelegate {
3273 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3274 this.update(cx, |this, cx| {
3275 let Some((client, _)) = this.downstream_client() else {
3276 return;
3277 };
3278 let response = client.request(proto::AskPassRequest {
3279 project_id,
3280 repository_id: repository_id.to_proto(),
3281 askpass_id,
3282 prompt,
3283 });
3284 cx.spawn(async move |_, _| {
3285 let mut response = response.await?.response;
3286 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3287 .ok();
3288 response.zeroize();
3289 anyhow::Ok(())
3290 })
3291 .detach_and_log_err(cx);
3292 })
3293 .log_err();
3294 })
3295}
3296
3297impl RepositoryId {
3298 pub fn to_proto(self) -> u64 {
3299 self.0
3300 }
3301
3302 pub fn from_proto(id: u64) -> Self {
3303 RepositoryId(id)
3304 }
3305}
3306
3307impl RepositorySnapshot {
3308 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3309 Self {
3310 id,
3311 statuses_by_path: Default::default(),
3312 work_directory_abs_path,
3313 branch: None,
3314 head_commit: None,
3315 scan_id: 0,
3316 merge: Default::default(),
3317 remote_origin_url: None,
3318 remote_upstream_url: None,
3319 stash_entries: Default::default(),
3320 path_style,
3321 }
3322 }
3323
3324 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3325 proto::UpdateRepository {
3326 branch_summary: self.branch.as_ref().map(branch_to_proto),
3327 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3328 updated_statuses: self
3329 .statuses_by_path
3330 .iter()
3331 .map(|entry| entry.to_proto())
3332 .collect(),
3333 removed_statuses: Default::default(),
3334 current_merge_conflicts: self
3335 .merge
3336 .conflicted_paths
3337 .iter()
3338 .map(|repo_path| repo_path.to_proto())
3339 .collect(),
3340 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3341 project_id,
3342 id: self.id.to_proto(),
3343 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3344 entry_ids: vec![self.id.to_proto()],
3345 scan_id: self.scan_id,
3346 is_last_update: true,
3347 stash_entries: self
3348 .stash_entries
3349 .entries
3350 .iter()
3351 .map(stash_to_proto)
3352 .collect(),
3353 remote_upstream_url: self.remote_upstream_url.clone(),
3354 remote_origin_url: self.remote_origin_url.clone(),
3355 }
3356 }
3357
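    /// Produces a delta `UpdateRepository` message by walking the old and new
    /// status trees in lockstep and recording only added, changed, and removed
    /// status entries.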
3358 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3359 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3360 let mut removed_statuses: Vec<String> = Vec::new();
3361
3362 let mut new_statuses = self.statuses_by_path.iter().peekable();
3363 let mut old_statuses = old.statuses_by_path.iter().peekable();
3364
3365 let mut current_new_entry = new_statuses.next();
3366 let mut current_old_entry = old_statuses.next();
3367 loop {
3368 match (current_new_entry, current_old_entry) {
3369 (Some(new_entry), Some(old_entry)) => {
3370 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3371 Ordering::Less => {
3372 updated_statuses.push(new_entry.to_proto());
3373 current_new_entry = new_statuses.next();
3374 }
3375 Ordering::Equal => {
3376 if new_entry.status != old_entry.status {
3377 updated_statuses.push(new_entry.to_proto());
3378 }
3379 current_old_entry = old_statuses.next();
3380 current_new_entry = new_statuses.next();
3381 }
3382 Ordering::Greater => {
3383 removed_statuses.push(old_entry.repo_path.to_proto());
3384 current_old_entry = old_statuses.next();
3385 }
3386 }
3387 }
3388 (None, Some(old_entry)) => {
3389 removed_statuses.push(old_entry.repo_path.to_proto());
3390 current_old_entry = old_statuses.next();
3391 }
3392 (Some(new_entry), None) => {
3393 updated_statuses.push(new_entry.to_proto());
3394 current_new_entry = new_statuses.next();
3395 }
3396 (None, None) => break,
3397 }
3398 }
3399
3400 proto::UpdateRepository {
3401 branch_summary: self.branch.as_ref().map(branch_to_proto),
3402 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3403 updated_statuses,
3404 removed_statuses,
3405 current_merge_conflicts: self
3406 .merge
3407 .conflicted_paths
3408 .iter()
3409 .map(|path| path.to_proto())
3410 .collect(),
3411 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3412 project_id,
3413 id: self.id.to_proto(),
3414 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3415 entry_ids: vec![],
3416 scan_id: self.scan_id,
3417 is_last_update: true,
3418 stash_entries: self
3419 .stash_entries
3420 .entries
3421 .iter()
3422 .map(stash_to_proto)
3423 .collect(),
3424 remote_upstream_url: self.remote_upstream_url.clone(),
3425 remote_origin_url: self.remote_origin_url.clone(),
3426 }
3427 }
3428
3429 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3430 self.statuses_by_path.iter().cloned()
3431 }
3432
3433 pub fn status_summary(&self) -> GitSummary {
3434 self.statuses_by_path.summary().item_summary
3435 }
3436
3437 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3438 self.statuses_by_path
3439 .get(&PathKey(path.as_ref().clone()), ())
3440 .cloned()
3441 }
3442
3443 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3444 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3445 }
3446
3447 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3448 self.path_style
3449 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3450 .unwrap()
3451 .into()
3452 }
3453
3454 #[inline]
3455 fn abs_path_to_repo_path_inner(
3456 work_directory_abs_path: &Path,
3457 abs_path: &Path,
3458 path_style: PathStyle,
3459 ) -> Option<RepoPath> {
3460 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3461 Some(RepoPath::from_rel_path(&rel_path))
3462 }
3463
3464 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3465 self.merge.conflicted_paths.contains(repo_path)
3466 }
3467
3468 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3469 let had_conflict_on_last_merge_head_change =
3470 self.merge.conflicted_paths.contains(repo_path);
3471 let has_conflict_currently = self
3472 .status_for_path(repo_path)
3473 .is_some_and(|entry| entry.status.is_conflicted());
3474 had_conflict_on_last_merge_head_change || has_conflict_currently
3475 }
3476
3477 /// This is the name that will be displayed in the repository selector for this repository.
3478 pub fn display_name(&self) -> SharedString {
3479 self.work_directory_abs_path
3480 .file_name()
3481 .unwrap_or_default()
3482 .to_string_lossy()
3483 .to_string()
3484 .into()
3485 }
3486}
3487
3488pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3489 proto::StashEntry {
3490 oid: entry.oid.as_bytes().to_vec(),
3491 message: entry.message.clone(),
3492 branch: entry.branch.clone(),
3493 index: entry.index as u64,
3494 timestamp: entry.timestamp,
3495 }
3496}
3497
3498pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3499 Ok(StashEntry {
3500 oid: Oid::from_bytes(&entry.oid)?,
3501 message: entry.message.clone(),
3502 index: entry.index as usize,
3503 branch: entry.branch.clone(),
3504 timestamp: entry.timestamp,
3505 })
3506}
3507
3508impl MergeDetails {
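    /// Loads the current merge state (merge message, in-progress operation
    /// heads, and conflicted paths) from the repository backend, returning the
    /// new details along with whether the merge heads changed.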
3509 async fn load(
3510 backend: &Arc<dyn GitRepository>,
3511 status: &SumTree<StatusEntry>,
3512 prev_snapshot: &RepositorySnapshot,
3513 ) -> Result<(MergeDetails, bool)> {
3514 log::debug!("load merge details");
3515 let message = backend.merge_message().await;
3516 let heads = backend
3517 .revparse_batch(vec![
3518 "MERGE_HEAD".into(),
3519 "CHERRY_PICK_HEAD".into(),
3520 "REBASE_HEAD".into(),
3521 "REVERT_HEAD".into(),
3522 "APPLY_HEAD".into(),
3523 ])
3524 .await
3525 .log_err()
3526 .unwrap_or_default()
3527 .into_iter()
3528 .map(|opt| opt.map(SharedString::from))
3529 .collect::<Vec<_>>();
3530 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3531 let conflicted_paths = if merge_heads_changed {
3532 let current_conflicted_paths = TreeSet::from_ordered_entries(
3533 status
3534 .iter()
3535 .filter(|entry| entry.status.is_conflicted())
3536 .map(|entry| entry.repo_path.clone()),
3537 );
3538
            // A scan can run while a lengthy merge that will eventually produce conflicts
            // is still in progress, before those conflicts are reported by `git status`.
            // Since, for the moment, we only care about the merge heads state for the
            // purpose of tracking conflicts, don't update this state until we actually
            // see some conflicts.
3544 if heads.iter().any(Option::is_some)
3545 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3546 && current_conflicted_paths.is_empty()
3547 {
3548 log::debug!("not updating merge heads because no conflicts found");
3549 return Ok((
3550 MergeDetails {
3551 message: message.map(SharedString::from),
3552 ..prev_snapshot.merge.clone()
3553 },
3554 false,
3555 ));
3556 }
3557
3558 current_conflicted_paths
3559 } else {
3560 prev_snapshot.merge.conflicted_paths.clone()
3561 };
3562 let details = MergeDetails {
3563 conflicted_paths,
3564 message: message.map(SharedString::from),
3565 heads,
3566 };
3567 Ok((details, merge_heads_changed))
3568 }
3569}
3570
3571impl Repository {
3572 pub fn snapshot(&self) -> RepositorySnapshot {
3573 self.snapshot.clone()
3574 }
3575
3576 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3577 self.pending_ops.iter().cloned()
3578 }
3579
3580 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3581 self.pending_ops.summary().clone()
3582 }
3583
3584 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3585 self.pending_ops
3586 .get(&PathKey(path.as_ref().clone()), ())
3587 .cloned()
3588 }
3589
3590 fn local(
3591 id: RepositoryId,
3592 work_directory_abs_path: Arc<Path>,
3593 dot_git_abs_path: Arc<Path>,
3594 project_environment: WeakEntity<ProjectEnvironment>,
3595 fs: Arc<dyn Fs>,
3596 git_store: WeakEntity<GitStore>,
3597 cx: &mut Context<Self>,
3598 ) -> Self {
3599 let snapshot =
3600 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3601 let state = cx
3602 .spawn(async move |_, cx| {
3603 LocalRepositoryState::new(
3604 work_directory_abs_path,
3605 dot_git_abs_path,
3606 project_environment,
3607 fs,
3608 cx,
3609 )
3610 .await
3611 .map_err(|err| err.to_string())
3612 })
3613 .shared();
3614 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3615 let state = cx
3616 .spawn(async move |_, _| {
3617 let state = state.await?;
3618 Ok(RepositoryState::Local(state))
3619 })
3620 .shared();
3621
3622 Repository {
3623 this: cx.weak_entity(),
3624 git_store,
3625 snapshot,
3626 pending_ops: Default::default(),
3627 repository_state: state,
3628 commit_message_buffer: None,
3629 askpass_delegates: Default::default(),
3630 paths_needing_status_update: Default::default(),
3631 latest_askpass_id: 0,
3632 job_sender,
3633 job_id: 0,
3634 active_jobs: Default::default(),
3635 }
3636 }
3637
3638 fn remote(
3639 id: RepositoryId,
3640 work_directory_abs_path: Arc<Path>,
3641 path_style: PathStyle,
3642 project_id: ProjectId,
3643 client: AnyProtoClient,
3644 git_store: WeakEntity<GitStore>,
3645 cx: &mut Context<Self>,
3646 ) -> Self {
3647 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3648 let repository_state = RemoteRepositoryState { project_id, client };
3649 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3650 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3651 Self {
3652 this: cx.weak_entity(),
3653 snapshot,
3654 commit_message_buffer: None,
3655 git_store,
3656 pending_ops: Default::default(),
3657 paths_needing_status_update: Default::default(),
3658 job_sender,
3659 repository_state,
3660 askpass_delegates: Default::default(),
3661 latest_askpass_id: 0,
3662 active_jobs: Default::default(),
3663 job_id: 0,
3664 }
3665 }
3666
3667 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3668 self.git_store.upgrade()
3669 }
3670
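    /// Reloads the index and HEAD base texts for every open buffer that belongs
    /// to this repository, computes the resulting diff-base changes on a
    /// background task, and applies them to each buffer's git state.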
3671 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3672 let this = cx.weak_entity();
3673 let git_store = self.git_store.clone();
3674 let _ = self.send_keyed_job(
3675 Some(GitJobKey::ReloadBufferDiffBases),
3676 None,
3677 |state, mut cx| async move {
3678 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3679 log::error!("tried to recompute diffs for a non-local repository");
3680 return Ok(());
3681 };
3682
3683 let Some(this) = this.upgrade() else {
3684 return Ok(());
3685 };
3686
3687 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3688 git_store.update(cx, |git_store, cx| {
3689 git_store
3690 .diffs
3691 .iter()
3692 .filter_map(|(buffer_id, diff_state)| {
3693 let buffer_store = git_store.buffer_store.read(cx);
3694 let buffer = buffer_store.get(*buffer_id)?;
3695 let file = File::from_dyn(buffer.read(cx).file())?;
3696 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3697 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3698 log::debug!(
3699 "start reload diff bases for repo path {}",
3700 repo_path.as_unix_str()
3701 );
3702 diff_state.update(cx, |diff_state, _| {
3703 let has_unstaged_diff = diff_state
3704 .unstaged_diff
3705 .as_ref()
3706 .is_some_and(|diff| diff.is_upgradable());
3707 let has_uncommitted_diff = diff_state
3708 .uncommitted_diff
3709 .as_ref()
3710 .is_some_and(|set| set.is_upgradable());
3711
3712 Some((
3713 buffer,
3714 repo_path,
3715 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3716 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3717 ))
3718 })
3719 })
3720 .collect::<Vec<_>>()
3721 })
3722 })??;
3723
3724 let buffer_diff_base_changes = cx
3725 .background_spawn(async move {
3726 let mut changes = Vec::new();
3727 for (buffer, repo_path, current_index_text, current_head_text) in
3728 &repo_diff_state_updates
3729 {
3730 let index_text = if current_index_text.is_some() {
3731 backend.load_index_text(repo_path.clone()).await
3732 } else {
3733 None
3734 };
3735 let head_text = if current_head_text.is_some() {
3736 backend.load_committed_text(repo_path.clone()).await
3737 } else {
3738 None
3739 };
3740
3741 let change =
3742 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3743 (Some(current_index), Some(current_head)) => {
3744 let index_changed =
3745 index_text.as_ref() != current_index.as_deref();
3746 let head_changed =
3747 head_text.as_ref() != current_head.as_deref();
3748 if index_changed && head_changed {
3749 if index_text == head_text {
3750 Some(DiffBasesChange::SetBoth(head_text))
3751 } else {
3752 Some(DiffBasesChange::SetEach {
3753 index: index_text,
3754 head: head_text,
3755 })
3756 }
3757 } else if index_changed {
3758 Some(DiffBasesChange::SetIndex(index_text))
3759 } else if head_changed {
3760 Some(DiffBasesChange::SetHead(head_text))
3761 } else {
3762 None
3763 }
3764 }
3765 (Some(current_index), None) => {
3766 let index_changed =
3767 index_text.as_ref() != current_index.as_deref();
3768 index_changed
3769 .then_some(DiffBasesChange::SetIndex(index_text))
3770 }
3771 (None, Some(current_head)) => {
3772 let head_changed =
3773 head_text.as_ref() != current_head.as_deref();
3774 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3775 }
3776 (None, None) => None,
3777 };
3778
3779 changes.push((buffer.clone(), change))
3780 }
3781 changes
3782 })
3783 .await;
3784
3785 git_store.update(&mut cx, |git_store, cx| {
3786 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3787 let buffer_snapshot = buffer.read(cx).text_snapshot();
3788 let buffer_id = buffer_snapshot.remote_id();
3789 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3790 continue;
3791 };
3792
3793 let downstream_client = git_store.downstream_client();
3794 diff_state.update(cx, |diff_state, cx| {
3795 use proto::update_diff_bases::Mode;
3796
3797 if let Some((diff_bases_change, (client, project_id))) =
3798 diff_bases_change.clone().zip(downstream_client)
3799 {
3800 let (staged_text, committed_text, mode) = match diff_bases_change {
3801 DiffBasesChange::SetIndex(index) => {
3802 (index, None, Mode::IndexOnly)
3803 }
3804 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3805 DiffBasesChange::SetEach { index, head } => {
3806 (index, head, Mode::IndexAndHead)
3807 }
3808 DiffBasesChange::SetBoth(text) => {
3809 (None, text, Mode::IndexMatchesHead)
3810 }
3811 };
3812 client
3813 .send(proto::UpdateDiffBases {
3814 project_id: project_id.to_proto(),
3815 buffer_id: buffer_id.to_proto(),
3816 staged_text,
3817 committed_text,
3818 mode: mode as i32,
3819 })
3820 .log_err();
3821 }
3822
3823 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3824 });
3825 }
3826 })
3827 },
3828 );
3829 }
3830
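/// Enqueues a job on this repository's git worker and returns a receiver for its result.
/// When `status` is provided, it is shown as the active job message while the job runs.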
3831 pub fn send_job<F, Fut, R>(
3832 &mut self,
3833 status: Option<SharedString>,
3834 job: F,
3835 ) -> oneshot::Receiver<R>
3836 where
3837 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3838 Fut: Future<Output = R> + 'static,
3839 R: Send + 'static,
3840 {
3841 self.send_keyed_job(None, status, job)
3842 }
3843
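/// Like `send_job`, but keyed: if a newer job with the same key is already queued behind
/// this one, this job is skipped so only the most recent keyed job runs.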
3844 fn send_keyed_job<F, Fut, R>(
3845 &mut self,
3846 key: Option<GitJobKey>,
3847 status: Option<SharedString>,
3848 job: F,
3849 ) -> oneshot::Receiver<R>
3850 where
3851 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3852 Fut: Future<Output = R> + 'static,
3853 R: Send + 'static,
3854 {
3855 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3856 let job_id = post_inc(&mut self.job_id);
3857 let this = self.this.clone();
3858 self.job_sender
3859 .unbounded_send(GitJob {
3860 key,
3861 job: Box::new(move |state, cx: &mut AsyncApp| {
3862 let job = job(state, cx.clone());
3863 cx.spawn(async move |cx| {
3864 if let Some(s) = status.clone() {
3865 this.update(cx, |this, cx| {
3866 this.active_jobs.insert(
3867 job_id,
3868 JobInfo {
3869 start: Instant::now(),
3870 message: s.clone(),
3871 },
3872 );
3873
3874 cx.notify();
3875 })
3876 .ok();
3877 }
3878 let result = job.await;
3879
3880 this.update(cx, |this, cx| {
3881 this.active_jobs.remove(&job_id);
3882 cx.notify();
3883 })
3884 .ok();
3885
3886 result_tx.send(result).ok();
3887 })
3888 }),
3889 })
3890 .ok();
3891 result_rx
3892 }
3893
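/// Marks this repository as the project's active repository, emitting
/// `GitStoreEvent::ActiveRepositoryChanged`.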
3894 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3895 let Some(git_store) = self.git_store.upgrade() else {
3896 return;
3897 };
3898 let entity = cx.entity();
3899 git_store.update(cx, |git_store, cx| {
3900 let Some((&id, _)) = git_store
3901 .repositories
3902 .iter()
3903 .find(|(_, handle)| *handle == &entity)
3904 else {
3905 return;
3906 };
3907 git_store.active_repo_id = Some(id);
3908 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3909 });
3910 }
3911
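/// Returns the file statuses from the most recently computed snapshot without querying
/// the underlying git repository.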
3912 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3913 self.snapshot.status()
3914 }
3915
3916 pub fn cached_stash(&self) -> GitStash {
3917 self.snapshot.stash_entries.clone()
3918 }
3919
3920 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3921 let git_store = self.git_store.upgrade()?;
3922 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3923 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3924 let abs_path = SanitizedPath::new(&abs_path);
3925 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3926 Some(ProjectPath {
3927 worktree_id: worktree.read(cx).id(),
3928 path: relative_path,
3929 })
3930 }
3931
3932 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3933 let git_store = self.git_store.upgrade()?;
3934 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3935 let abs_path = worktree_store.absolutize(path, cx)?;
3936 self.snapshot.abs_path_to_repo_path(&abs_path)
3937 }
3938
3939 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3940 other
3941 .read(cx)
3942 .snapshot
3943 .work_directory_abs_path
3944 .starts_with(&self.snapshot.work_directory_abs_path)
3945 }
3946
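/// Returns the commit message buffer for this repository, creating it on first use
/// (locally, or by requesting it from the remote host).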
3947 pub fn open_commit_buffer(
3948 &mut self,
3949 languages: Option<Arc<LanguageRegistry>>,
3950 buffer_store: Entity<BufferStore>,
3951 cx: &mut Context<Self>,
3952 ) -> Task<Result<Entity<Buffer>>> {
3953 let id = self.id;
3954 if let Some(buffer) = self.commit_message_buffer.clone() {
3955 return Task::ready(Ok(buffer));
3956 }
3957 let this = cx.weak_entity();
3958
3959 let rx = self.send_job(None, move |state, mut cx| async move {
3960 let Some(this) = this.upgrade() else {
3961 bail!("git store was dropped");
3962 };
3963 match state {
3964 RepositoryState::Local(..) => {
3965 this.update(&mut cx, |_, cx| {
3966 Self::open_local_commit_buffer(languages, buffer_store, cx)
3967 })?
3968 .await
3969 }
3970 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3971 let request = client.request(proto::OpenCommitMessageBuffer {
3972 project_id: project_id.0,
3973 repository_id: id.to_proto(),
3974 });
3975 let response = request.await.context("requesting to open commit buffer")?;
3976 let buffer_id = BufferId::new(response.buffer_id)?;
3977 let buffer = buffer_store
3978 .update(&mut cx, |buffer_store, cx| {
3979 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3980 })?
3981 .await?;
3982 if let Some(language_registry) = languages {
3983 let git_commit_language =
3984 language_registry.language_for_name("Git Commit").await?;
3985 buffer.update(&mut cx, |buffer, cx| {
3986 buffer.set_language(Some(git_commit_language), cx);
3987 })?;
3988 }
3989 this.update(&mut cx, |this, _| {
3990 this.commit_message_buffer = Some(buffer.clone());
3991 })?;
3992 Ok(buffer)
3993 }
3994 }
3995 });
3996
3997 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3998 }
3999
4000 fn open_local_commit_buffer(
4001 language_registry: Option<Arc<LanguageRegistry>>,
4002 buffer_store: Entity<BufferStore>,
4003 cx: &mut Context<Self>,
4004 ) -> Task<Result<Entity<Buffer>>> {
4005 cx.spawn(async move |repository, cx| {
4006 let buffer = buffer_store
4007 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
4008 .await?;
4009
4010 if let Some(language_registry) = language_registry {
4011 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
4012 buffer.update(cx, |buffer, cx| {
4013 buffer.set_language(Some(git_commit_language), cx);
4014 })?;
4015 }
4016
4017 repository.update(cx, |repository, _| {
4018 repository.commit_message_buffer = Some(buffer.clone());
4019 })?;
4020 Ok(buffer)
4021 })
4022 }
4023
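/// Restores the given paths to their contents at `commit`, tracking each path as a
/// pending revert operation while the checkout runs.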
4024 pub fn checkout_files(
4025 &mut self,
4026 commit: &str,
4027 paths: Vec<RepoPath>,
4028 cx: &mut Context<Self>,
4029 ) -> Task<Result<()>> {
4030 let commit = commit.to_string();
4031 let id = self.id;
4032
4033 self.spawn_job_with_tracking(
4034 paths.clone(),
4035 pending_op::GitStatus::Reverted,
4036 cx,
4037 async move |this, cx| {
4038 this.update(cx, |this, _cx| {
4039 this.send_job(
4040 Some(format!("git checkout {}", commit).into()),
4041 move |git_repo, _| async move {
4042 match git_repo {
4043 RepositoryState::Local(LocalRepositoryState {
4044 backend,
4045 environment,
4046 ..
4047 }) => {
4048 backend
4049 .checkout_files(commit, paths, environment.clone())
4050 .await
4051 }
4052 RepositoryState::Remote(RemoteRepositoryState {
4053 project_id,
4054 client,
4055 }) => {
4056 client
4057 .request(proto::GitCheckoutFiles {
4058 project_id: project_id.0,
4059 repository_id: id.to_proto(),
4060 commit,
4061 paths: paths
4062 .into_iter()
4063 .map(|p| p.to_proto())
4064 .collect(),
4065 })
4066 .await?;
4067
4068 Ok(())
4069 }
4070 }
4071 },
4072 )
4073 })?
4074 .await?
4075 },
4076 )
4077 }
4078
4079 pub fn reset(
4080 &mut self,
4081 commit: String,
4082 reset_mode: ResetMode,
4083 _cx: &mut App,
4084 ) -> oneshot::Receiver<Result<()>> {
4085 let id = self.id;
4086
4087 self.send_job(None, move |git_repo, _| async move {
4088 match git_repo {
4089 RepositoryState::Local(LocalRepositoryState {
4090 backend,
4091 environment,
4092 ..
4093 }) => backend.reset(commit, reset_mode, environment).await,
4094 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4095 client
4096 .request(proto::GitReset {
4097 project_id: project_id.0,
4098 repository_id: id.to_proto(),
4099 commit,
4100 mode: match reset_mode {
4101 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4102 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4103 },
4104 })
4105 .await?;
4106
4107 Ok(())
4108 }
4109 }
4110 })
4111 }
4112
4113 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4114 let id = self.id;
4115 self.send_job(None, move |git_repo, _cx| async move {
4116 match git_repo {
4117 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4118 backend.show(commit).await
4119 }
4120 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4121 let resp = client
4122 .request(proto::GitShow {
4123 project_id: project_id.0,
4124 repository_id: id.to_proto(),
4125 commit,
4126 })
4127 .await?;
4128
4129 Ok(CommitDetails {
4130 sha: resp.sha.into(),
4131 message: resp.message.into(),
4132 commit_timestamp: resp.commit_timestamp,
4133 author_email: resp.author_email.into(),
4134 author_name: resp.author_name.into(),
4135 })
4136 }
4137 }
4138 })
4139 }
4140
4141 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4142 let id = self.id;
4143 self.send_job(None, move |git_repo, cx| async move {
4144 match git_repo {
4145 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4146 backend.load_commit(commit, cx).await
4147 }
4148 RepositoryState::Remote(RemoteRepositoryState {
4149 client, project_id, ..
4150 }) => {
4151 let response = client
4152 .request(proto::LoadCommitDiff {
4153 project_id: project_id.0,
4154 repository_id: id.to_proto(),
4155 commit,
4156 })
4157 .await?;
4158 Ok(CommitDiff {
4159 files: response
4160 .files
4161 .into_iter()
4162 .map(|file| {
4163 Ok(CommitFile {
4164 path: RepoPath::from_proto(&file.path)?,
4165 old_text: file.old_text,
4166 new_text: file.new_text,
4167 })
4168 })
4169 .collect::<Result<Vec<_>>>()?,
4170 })
4171 }
4172 }
4173 })
4174 }
4175
4176 pub fn file_history(
4177 &mut self,
4178 path: RepoPath,
4179 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4180 self.file_history_paginated(path, 0, None)
4181 }
4182
4183 pub fn file_history_paginated(
4184 &mut self,
4185 path: RepoPath,
4186 skip: usize,
4187 limit: Option<usize>,
4188 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4189 let id = self.id;
4190 self.send_job(None, move |git_repo, _cx| async move {
4191 match git_repo {
4192 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4193 backend.file_history_paginated(path, skip, limit).await
4194 }
4195 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4196 let response = client
4197 .request(proto::GitFileHistory {
4198 project_id: project_id.0,
4199 repository_id: id.to_proto(),
4200 path: path.to_proto(),
4201 skip: skip as u64,
4202 limit: limit.map(|l| l as u64),
4203 })
4204 .await?;
4205 Ok(git::repository::FileHistory {
4206 entries: response
4207 .entries
4208 .into_iter()
4209 .map(|entry| git::repository::FileHistoryEntry {
4210 sha: entry.sha.into(),
4211 subject: entry.subject.into(),
4212 message: entry.message.into(),
4213 commit_timestamp: entry.commit_timestamp,
4214 author_name: entry.author_name.into(),
4215 author_email: entry.author_email.into(),
4216 })
4217 .collect(),
4218 path: RepoPath::from_proto(&response.path)?,
4219 })
4220 }
4221 }
4222 })
4223 }
4224
4225 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4226 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4227 }
4228
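/// Saves any open buffers for the given paths that still exist on disk and have unsaved
/// edits, so subsequent git operations see their latest contents.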
4229 fn save_buffers<'a>(
4230 &self,
4231 entries: impl IntoIterator<Item = &'a RepoPath>,
4232 cx: &mut Context<Self>,
4233 ) -> Vec<Task<anyhow::Result<()>>> {
4234 let mut save_futures = Vec::new();
4235 if let Some(buffer_store) = self.buffer_store(cx) {
4236 buffer_store.update(cx, |buffer_store, cx| {
4237 for path in entries {
4238 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4239 continue;
4240 };
4241 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4242 && buffer
4243 .read(cx)
4244 .file()
4245 .is_some_and(|file| file.disk_state().exists())
4246 && buffer.read(cx).has_unsaved_edits()
4247 {
4248 save_futures.push(buffer_store.save_buffer(buffer, cx));
4249 }
4250 }
4251 })
4252 }
4253 save_futures
4254 }
4255
4256 fn mark_entries_pending_for_stage(
4257 &self,
4258 entries: &[RepoPath],
4259 stage: bool,
4260 cx: &mut Context<Self>,
4261 ) {
4262 let Some(git_store) = self.git_store() else {
4263 return;
4264 };
4265
4266 let mut project_paths = Vec::new();
4267 for repo_path in entries {
4268 if let Some(project_path) = self.repo_path_to_project_path(repo_path, cx) {
4269 project_paths.push(project_path);
4270 }
4271 }
4272
4273 git_store.update(cx, move |git_store, cx| {
4274 git_store.mark_entries_pending_by_project_paths(&project_paths, stage, cx);
4275 });
4276 }
4277
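/// Stages the given paths, saving any dirty buffers first. Each path is tracked as a
/// pending staging operation until the job completes.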
4278 pub fn stage_entries(
4279 &mut self,
4280 entries: Vec<RepoPath>,
4281 cx: &mut Context<Self>,
4282 ) -> Task<anyhow::Result<()>> {
4283 if entries.is_empty() {
4284 return Task::ready(Ok(()));
4285 }
4286
4287 self.mark_entries_pending_for_stage(&entries, true, cx);
4288
4289 let id = self.id;
4290 let save_tasks = self.save_buffers(&entries, cx);
4291 let paths = entries
4292 .iter()
4293 .map(|p| p.as_unix_str())
4294 .collect::<Vec<_>>()
4295 .join(" ");
4296 let status = format!("git add {paths}");
4297 let job_key = GitJobKey::WriteIndex(entries.clone());
4298
4299 self.spawn_job_with_tracking(
4300 entries.clone(),
4301 pending_op::GitStatus::Staged,
4302 cx,
4303 async move |this, cx| {
4304 for save_task in save_tasks {
4305 save_task.await?;
4306 }
4307
4308 this.update(cx, |this, _| {
4309 this.send_keyed_job(
4310 Some(job_key),
4311 Some(status.into()),
4312 move |git_repo, _cx| async move {
4313 match git_repo {
4314 RepositoryState::Local(LocalRepositoryState {
4315 backend,
4316 environment,
4317 ..
4318 }) => backend.stage_paths(entries, environment.clone()).await,
4319 RepositoryState::Remote(RemoteRepositoryState {
4320 project_id,
4321 client,
4322 }) => {
4323 client
4324 .request(proto::Stage {
4325 project_id: project_id.0,
4326 repository_id: id.to_proto(),
4327 paths: entries
4328 .into_iter()
4329 .map(|repo_path| repo_path.to_proto())
4330 .collect(),
4331 })
4332 .await
4333 .context("sending stage request")?;
4334
4335 Ok(())
4336 }
4337 }
4338 },
4339 )
4340 })?
4341 .await?
4342 },
4343 )
4344 }
4345
4346 pub fn unstage_entries(
4347 &mut self,
4348 entries: Vec<RepoPath>,
4349 cx: &mut Context<Self>,
4350 ) -> Task<anyhow::Result<()>> {
4351 if entries.is_empty() {
4352 return Task::ready(Ok(()));
4353 }
4354
4355 self.mark_entries_pending_for_stage(&entries, false, cx);
4356
4357 let id = self.id;
4358 let save_tasks = self.save_buffers(&entries, cx);
4359 let paths = entries
4360 .iter()
4361 .map(|p| p.as_unix_str())
4362 .collect::<Vec<_>>()
4363 .join(" ");
4364 let status = format!("git reset {paths}");
4365 let job_key = GitJobKey::WriteIndex(entries.clone());
4366
4367 self.spawn_job_with_tracking(
4368 entries.clone(),
4369 pending_op::GitStatus::Unstaged,
4370 cx,
4371 async move |this, cx| {
4372 for save_task in save_tasks {
4373 save_task.await?;
4374 }
4375
4376 this.update(cx, |this, _| {
4377 this.send_keyed_job(
4378 Some(job_key),
4379 Some(status.into()),
4380 move |git_repo, _cx| async move {
4381 match git_repo {
4382 RepositoryState::Local(LocalRepositoryState {
4383 backend,
4384 environment,
4385 ..
4386 }) => backend.unstage_paths(entries, environment).await,
4387 RepositoryState::Remote(RemoteRepositoryState {
4388 project_id,
4389 client,
4390 }) => {
4391 client
4392 .request(proto::Unstage {
4393 project_id: project_id.0,
4394 repository_id: id.to_proto(),
4395 paths: entries
4396 .into_iter()
4397 .map(|repo_path| repo_path.to_proto())
4398 .collect(),
4399 })
4400 .await
4401 .context("sending unstage request")?;
4402
4403 Ok(())
4404 }
4405 }
4406 },
4407 )
4408 })?
4409 .await?
4410 },
4411 )
4412 }
4413
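/// Stages every status entry that is not already fully staged and has no pending staging
/// operation.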
4414 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4415 let to_stage = self
4416 .cached_status()
4417 .filter_map(|entry| {
4418 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4419 if ops.staging() || ops.staged() {
4420 None
4421 } else {
4422 Some(entry.repo_path)
4423 }
4424 } else if entry.status.staging().is_fully_staged() {
4425 None
4426 } else {
4427 Some(entry.repo_path)
4428 }
4429 })
4430 .collect();
4431 self.stage_entries(to_stage, cx)
4432 }
4433
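/// Unstages every status entry that is staged (fully or partially) or has a pending
/// staging operation.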
4434 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4435 let to_unstage = self
4436 .cached_status()
4437 .filter_map(|entry| {
4438 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4439 if !ops.staging() && !ops.staged() {
4440 None
4441 } else {
4442 Some(entry.repo_path)
4443 }
4444 } else if entry.status.staging().is_fully_unstaged() {
4445 None
4446 } else {
4447 Some(entry.repo_path)
4448 }
4449 })
4450 .collect();
4451 self.unstage_entries(to_unstage, cx)
4452 }
4453
4454 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4455 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4456
4457 self.stash_entries(to_stash, cx)
4458 }
4459
4460 pub fn stash_entries(
4461 &mut self,
4462 entries: Vec<RepoPath>,
4463 cx: &mut Context<Self>,
4464 ) -> Task<anyhow::Result<()>> {
4465 let id = self.id;
4466
4467 cx.spawn(async move |this, cx| {
4468 this.update(cx, |this, _| {
4469 this.send_job(None, move |git_repo, _cx| async move {
4470 match git_repo {
4471 RepositoryState::Local(LocalRepositoryState {
4472 backend,
4473 environment,
4474 ..
4475 }) => backend.stash_paths(entries, environment).await,
4476 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4477 client
4478 .request(proto::Stash {
4479 project_id: project_id.0,
4480 repository_id: id.to_proto(),
4481 paths: entries
4482 .into_iter()
4483 .map(|repo_path| repo_path.to_proto())
4484 .collect(),
4485 })
4486 .await
4487 .context("sending stash request")?;
4488 Ok(())
4489 }
4490 }
4491 })
4492 })?
4493 .await??;
4494 Ok(())
4495 })
4496 }
4497
4498 pub fn stash_pop(
4499 &mut self,
4500 index: Option<usize>,
4501 cx: &mut Context<Self>,
4502 ) -> Task<anyhow::Result<()>> {
4503 let id = self.id;
4504 cx.spawn(async move |this, cx| {
4505 this.update(cx, |this, _| {
4506 this.send_job(None, move |git_repo, _cx| async move {
4507 match git_repo {
4508 RepositoryState::Local(LocalRepositoryState {
4509 backend,
4510 environment,
4511 ..
4512 }) => backend.stash_pop(index, environment).await,
4513 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4514 client
4515 .request(proto::StashPop {
4516 project_id: project_id.0,
4517 repository_id: id.to_proto(),
4518 stash_index: index.map(|i| i as u64),
4519 })
4520 .await
4521 .context("sending stash pop request")?;
4522 Ok(())
4523 }
4524 }
4525 })
4526 })?
4527 .await??;
4528 Ok(())
4529 })
4530 }
4531
4532 pub fn stash_apply(
4533 &mut self,
4534 index: Option<usize>,
4535 cx: &mut Context<Self>,
4536 ) -> Task<anyhow::Result<()>> {
4537 let id = self.id;
4538 cx.spawn(async move |this, cx| {
4539 this.update(cx, |this, _| {
4540 this.send_job(None, move |git_repo, _cx| async move {
4541 match git_repo {
4542 RepositoryState::Local(LocalRepositoryState {
4543 backend,
4544 environment,
4545 ..
4546 }) => backend.stash_apply(index, environment).await,
4547 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4548 client
4549 .request(proto::StashApply {
4550 project_id: project_id.0,
4551 repository_id: id.to_proto(),
4552 stash_index: index.map(|i| i as u64),
4553 })
4554 .await
4555 .context("sending stash apply request")?;
4556 Ok(())
4557 }
4558 }
4559 })
4560 })?
4561 .await??;
4562 Ok(())
4563 })
4564 }
4565
4566 pub fn stash_drop(
4567 &mut self,
4568 index: Option<usize>,
4569 cx: &mut Context<Self>,
4570 ) -> oneshot::Receiver<anyhow::Result<()>> {
4571 let id = self.id;
4572 let updates_tx = self
4573 .git_store()
4574 .and_then(|git_store| match &git_store.read(cx).state {
4575 GitStoreState::Local { downstream, .. } => downstream
4576 .as_ref()
4577 .map(|downstream| downstream.updates_tx.clone()),
4578 _ => None,
4579 });
4580 let this = cx.weak_entity();
4581 self.send_job(None, move |git_repo, mut cx| async move {
4582 match git_repo {
4583 RepositoryState::Local(LocalRepositoryState {
4584 backend,
4585 environment,
4586 ..
4587 }) => {
4588 // TODO would be nice to not have to do this manually
4589 let result = backend.stash_drop(index, environment).await;
4590 if result.is_ok()
4591 && let Ok(stash_entries) = backend.stash_entries().await
4592 {
4593 let snapshot = this.update(&mut cx, |this, cx| {
4594 this.snapshot.stash_entries = stash_entries;
4595 cx.emit(RepositoryEvent::StashEntriesChanged);
4596 this.snapshot.clone()
4597 })?;
4598 if let Some(updates_tx) = updates_tx {
4599 updates_tx
4600 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4601 .ok();
4602 }
4603 }
4604
4605 result
4606 }
4607 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4608 client
4609 .request(proto::StashDrop {
4610 project_id: project_id.0,
4611 repository_id: id.to_proto(),
4612 stash_index: index.map(|i| i as u64),
4613 })
4614 .await
4615 .context("sending stash drop request")?;
4616 Ok(())
4617 }
4618 }
4619 })
4620 }
4621
4622 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4623 let id = self.id;
4624 self.send_job(
4625 Some(format!("git hook {}", hook.as_str()).into()),
4626 move |git_repo, _cx| async move {
4627 match git_repo {
4628 RepositoryState::Local(LocalRepositoryState {
4629 backend,
4630 environment,
4631 ..
4632 }) => backend.run_hook(hook, environment.clone()).await,
4633 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4634 client
4635 .request(proto::RunGitHook {
4636 project_id: project_id.0,
4637 repository_id: id.to_proto(),
4638 hook: hook.to_proto(),
4639 })
4640 .await?;
4641
4642 Ok(())
4643 }
4644 }
4645 },
4646 )
4647 }
4648
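/// Runs the pre-commit hook, then creates a commit with the given message, author
/// override, and options. `askpass` is used to answer any prompts raised while committing.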
4649 pub fn commit(
4650 &mut self,
4651 message: SharedString,
4652 name_and_email: Option<(SharedString, SharedString)>,
4653 options: CommitOptions,
4654 askpass: AskPassDelegate,
4655 cx: &mut App,
4656 ) -> oneshot::Receiver<Result<()>> {
4657 let id = self.id;
4658 let askpass_delegates = self.askpass_delegates.clone();
4659 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4660
4661 let rx = self.run_hook(RunHook::PreCommit, cx);
4662
4663 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4664 rx.await??;
4665
4666 match git_repo {
4667 RepositoryState::Local(LocalRepositoryState {
4668 backend,
4669 environment,
4670 ..
4671 }) => {
4672 backend
4673 .commit(message, name_and_email, options, askpass, environment)
4674 .await
4675 }
4676 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4677 askpass_delegates.lock().insert(askpass_id, askpass);
4678 let _defer = util::defer(|| {
4679 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4680 debug_assert!(askpass_delegate.is_some());
4681 });
4682 let (name, email) = name_and_email.unzip();
4683 client
4684 .request(proto::Commit {
4685 project_id: project_id.0,
4686 repository_id: id.to_proto(),
4687 message: String::from(message),
4688 name: name.map(String::from),
4689 email: email.map(String::from),
4690 options: Some(proto::commit::CommitOptions {
4691 amend: options.amend,
4692 signoff: options.signoff,
4693 }),
4694 askpass_id,
4695 })
4696 .await
4697 .context("sending commit request")?;
4698
4699 Ok(())
4700 }
4701 }
4702 })
4703 }
4704
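/// Fetches from the remote described by `fetch_options`, surfacing the operation as a
/// `git fetch` job and returning the command output.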
4705 pub fn fetch(
4706 &mut self,
4707 fetch_options: FetchOptions,
4708 askpass: AskPassDelegate,
4709 _cx: &mut App,
4710 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4711 let askpass_delegates = self.askpass_delegates.clone();
4712 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4713 let id = self.id;
4714
4715 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4716 match git_repo {
4717 RepositoryState::Local(LocalRepositoryState {
4718 backend,
4719 environment,
4720 ..
4721 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4722 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4723 askpass_delegates.lock().insert(askpass_id, askpass);
4724 let _defer = util::defer(|| {
4725 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4726 debug_assert!(askpass_delegate.is_some());
4727 });
4728
4729 let response = client
4730 .request(proto::Fetch {
4731 project_id: project_id.0,
4732 repository_id: id.to_proto(),
4733 askpass_id,
4734 remote: fetch_options.to_proto(),
4735 })
4736 .await
4737 .context("sending fetch request")?;
4738
4739 Ok(RemoteCommandOutput {
4740 stdout: response.stdout,
4741 stderr: response.stderr,
4742 })
4743 }
4744 }
4745 })
4746 }
4747
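/// Pushes `branch` to `remote` with the given options. On a local repository, the cached
/// head branch is refreshed afterwards so upstream tracking state stays current.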
4748 pub fn push(
4749 &mut self,
4750 branch: SharedString,
4751 remote: SharedString,
4752 options: Option<PushOptions>,
4753 askpass: AskPassDelegate,
4754 cx: &mut Context<Self>,
4755 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4756 let askpass_delegates = self.askpass_delegates.clone();
4757 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4758 let id = self.id;
4759
4760 let args = options
4761 .map(|option| match option {
4762 PushOptions::SetUpstream => " --set-upstream",
4763 PushOptions::Force => " --force-with-lease",
4764 })
4765 .unwrap_or("");
4766
4767 let updates_tx = self
4768 .git_store()
4769 .and_then(|git_store| match &git_store.read(cx).state {
4770 GitStoreState::Local { downstream, .. } => downstream
4771 .as_ref()
4772 .map(|downstream| downstream.updates_tx.clone()),
4773 _ => None,
4774 });
4775
4776 let this = cx.weak_entity();
4777 self.send_job(
4778 Some(format!("git push{} {} {}", args, remote, branch).into()),
4779 move |git_repo, mut cx| async move {
4780 match git_repo {
4781 RepositoryState::Local(LocalRepositoryState {
4782 backend,
4783 environment,
4784 ..
4785 }) => {
4786 let result = backend
4787 .push(
4788 branch.to_string(),
4789 remote.to_string(),
4790 options,
4791 askpass,
4792 environment.clone(),
4793 cx.clone(),
4794 )
4795 .await;
4796 // TODO would be nice to not have to do this manually
4797 if result.is_ok() {
4798 let branches = backend.branches().await?;
4799 let branch = branches.into_iter().find(|branch| branch.is_head);
4800 log::info!("head branch after scan is {branch:?}");
4801 let snapshot = this.update(&mut cx, |this, cx| {
4802 this.snapshot.branch = branch;
4803 cx.emit(RepositoryEvent::BranchChanged);
4804 this.snapshot.clone()
4805 })?;
4806 if let Some(updates_tx) = updates_tx {
4807 updates_tx
4808 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4809 .ok();
4810 }
4811 }
4812 result
4813 }
4814 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4815 askpass_delegates.lock().insert(askpass_id, askpass);
4816 let _defer = util::defer(|| {
4817 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4818 debug_assert!(askpass_delegate.is_some());
4819 });
4820 let response = client
4821 .request(proto::Push {
4822 project_id: project_id.0,
4823 repository_id: id.to_proto(),
4824 askpass_id,
4825 branch_name: branch.to_string(),
4826 remote_name: remote.to_string(),
4827 options: options.map(|options| match options {
4828 PushOptions::Force => proto::push::PushOptions::Force,
4829 PushOptions::SetUpstream => {
4830 proto::push::PushOptions::SetUpstream
4831 }
4832 }
4833 as i32),
4834 })
4835 .await
4836 .context("sending push request")?;
4837
4838 Ok(RemoteCommandOutput {
4839 stdout: response.stdout,
4840 stderr: response.stderr,
4841 })
4842 }
4843 }
4844 },
4845 )
4846 }
4847
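/// Pulls `branch` (or the current branch when `None`) from `remote`, optionally rebasing
/// instead of merging.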
4848 pub fn pull(
4849 &mut self,
4850 branch: Option<SharedString>,
4851 remote: SharedString,
4852 rebase: bool,
4853 askpass: AskPassDelegate,
4854 _cx: &mut App,
4855 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4856 let askpass_delegates = self.askpass_delegates.clone();
4857 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4858 let id = self.id;
4859
4860 let mut status = "git pull".to_string();
4861 if rebase {
4862 status.push_str(" --rebase");
4863 }
4864 status.push_str(&format!(" {}", remote));
4865 if let Some(b) = &branch {
4866 status.push_str(&format!(" {}", b));
4867 }
4868
4869 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4870 match git_repo {
4871 RepositoryState::Local(LocalRepositoryState {
4872 backend,
4873 environment,
4874 ..
4875 }) => {
4876 backend
4877 .pull(
4878 branch.as_ref().map(|b| b.to_string()),
4879 remote.to_string(),
4880 rebase,
4881 askpass,
4882 environment.clone(),
4883 cx,
4884 )
4885 .await
4886 }
4887 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4888 askpass_delegates.lock().insert(askpass_id, askpass);
4889 let _defer = util::defer(|| {
4890 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4891 debug_assert!(askpass_delegate.is_some());
4892 });
4893 let response = client
4894 .request(proto::Pull {
4895 project_id: project_id.0,
4896 repository_id: id.to_proto(),
4897 askpass_id,
4898 rebase,
4899 branch_name: branch.as_ref().map(|b| b.to_string()),
4900 remote_name: remote.to_string(),
4901 })
4902 .await
4903 .context("sending pull request")?;
4904
4905 Ok(RemoteCommandOutput {
4906 stdout: response.stdout,
4907 stderr: response.stderr,
4908 })
4909 }
4910 }
4911 })
4912 }
4913
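/// Writes the given content into the index entry for `path`. Writes to the same path share
/// a job key, so only the most recent queued write runs.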
4914 fn spawn_set_index_text_job(
4915 &mut self,
4916 path: RepoPath,
4917 content: Option<String>,
4918 hunk_staging_operation_count: Option<usize>,
4919 cx: &mut Context<Self>,
4920 ) -> oneshot::Receiver<anyhow::Result<()>> {
4921 let id = self.id;
4922 let this = cx.weak_entity();
4923 let git_store = self.git_store.clone();
4924 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4925 self.send_keyed_job(
4926 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4927 None,
4928 move |git_repo, mut cx| async move {
4929 log::debug!(
4930 "start updating index text for buffer {}",
4931 path.as_unix_str()
4932 );
4933
4934 match git_repo {
4935 RepositoryState::Local(LocalRepositoryState {
4936 fs,
4937 backend,
4938 environment,
4939 ..
4940 }) => {
4941 let executable = match fs.metadata(&abs_path).await {
4942 Ok(Some(meta)) => meta.is_executable,
4943 Ok(None) => false,
4944 Err(_err) => false,
4945 };
4946 backend
4947 .set_index_text(path.clone(), content, environment.clone(), executable)
4948 .await?;
4949 }
4950 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4951 client
4952 .request(proto::SetIndexText {
4953 project_id: project_id.0,
4954 repository_id: id.to_proto(),
4955 path: path.to_proto(),
4956 text: content,
4957 })
4958 .await?;
4959 }
4960 }
4961 log::debug!(
4962 "finish updating index text for buffer {}",
4963 path.as_unix_str()
4964 );
4965
4966 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4967 let project_path = this
4968 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4969 .ok()
4970 .flatten();
4971 git_store.update(&mut cx, |git_store, cx| {
4972 let buffer_id = git_store
4973 .buffer_store
4974 .read(cx)
4975 .get_by_path(&project_path?)?
4976 .read(cx)
4977 .remote_id();
4978 let diff_state = git_store.diffs.get(&buffer_id)?;
4979 diff_state.update(cx, |diff_state, _| {
4980 diff_state.hunk_staging_operation_count_as_of_write =
4981 hunk_staging_operation_count;
4982 });
4983 Some(())
4984 })?;
4985 }
4986 Ok(())
4987 },
4988 )
4989 }
4990
4991 pub fn create_remote(
4992 &mut self,
4993 remote_name: String,
4994 remote_url: String,
4995 ) -> oneshot::Receiver<Result<()>> {
4996 let id = self.id;
4997 self.send_job(
4998 Some(format!("git remote add {remote_name} {remote_url}").into()),
4999 move |repo, _cx| async move {
5000 match repo {
5001 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5002 backend.create_remote(remote_name, remote_url).await
5003 }
5004 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5005 client
5006 .request(proto::GitCreateRemote {
5007 project_id: project_id.0,
5008 repository_id: id.to_proto(),
5009 remote_name,
5010 remote_url,
5011 })
5012 .await?;
5013
5014 Ok(())
5015 }
5016 }
5017 },
5018 )
5019 }
5020
5021 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5022 let id = self.id;
5023 self.send_job(
5024 Some(format!("git remote remove {remote_name}").into()),
5025 move |repo, _cx| async move {
5026 match repo {
5027 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5028 backend.remove_remote(remote_name).await
5029 }
5030 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5031 client
5032 .request(proto::GitRemoveRemote {
5033 project_id: project_id.0,
5034 repository_id: id.to_proto(),
5035 remote_name,
5036 })
5037 .await?;
5038
5039 Ok(())
5040 }
5041 }
5042 },
5043 )
5044 }
5045
5046 pub fn get_remotes(
5047 &mut self,
5048 branch_name: Option<String>,
5049 is_push: bool,
5050 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5051 let id = self.id;
5052 self.send_job(None, move |repo, _cx| async move {
5053 match repo {
5054 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5055 let remote = if let Some(branch_name) = branch_name {
5056 if is_push {
5057 backend.get_push_remote(branch_name).await?
5058 } else {
5059 backend.get_branch_remote(branch_name).await?
5060 }
5061 } else {
5062 None
5063 };
5064
5065 match remote {
5066 Some(remote) => Ok(vec![remote]),
5067 None => backend.get_all_remotes().await,
5068 }
5069 }
5070 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5071 let response = client
5072 .request(proto::GetRemotes {
5073 project_id: project_id.0,
5074 repository_id: id.to_proto(),
5075 branch_name,
5076 is_push,
5077 })
5078 .await?;
5079
5080 let remotes = response
5081 .remotes
5082 .into_iter()
5083 .map(|remotes| Remote {
5084 name: remotes.name.into(),
5085 })
5086 .collect();
5087
5088 Ok(remotes)
5089 }
5090 }
5091 })
5092 }
5093
5094 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5095 let id = self.id;
5096 self.send_job(None, move |repo, _| async move {
5097 match repo {
5098 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5099 backend.branches().await
5100 }
5101 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5102 let response = client
5103 .request(proto::GitGetBranches {
5104 project_id: project_id.0,
5105 repository_id: id.to_proto(),
5106 })
5107 .await?;
5108
5109 let branches = response
5110 .branches
5111 .into_iter()
5112 .map(|branch| proto_to_branch(&branch))
5113 .collect();
5114
5115 Ok(branches)
5116 }
5117 }
5118 })
5119 }
5120
5121 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5122 let id = self.id;
5123 self.send_job(None, move |repo, _| async move {
5124 match repo {
5125 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5126 backend.worktrees().await
5127 }
5128 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5129 let response = client
5130 .request(proto::GitGetWorktrees {
5131 project_id: project_id.0,
5132 repository_id: id.to_proto(),
5133 })
5134 .await?;
5135
5136 let worktrees = response
5137 .worktrees
5138 .into_iter()
5139 .map(|worktree| proto_to_worktree(&worktree))
5140 .collect();
5141
5142 Ok(worktrees)
5143 }
5144 }
5145 })
5146 }
5147
5148 pub fn create_worktree(
5149 &mut self,
5150 name: String,
5151 path: PathBuf,
5152 commit: Option<String>,
5153 ) -> oneshot::Receiver<Result<()>> {
5154 let id = self.id;
5155 self.send_job(
5156 Some("git worktree add".into()),
5157 move |repo, _cx| async move {
5158 match repo {
5159 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5160 backend.create_worktree(name, path, commit).await
5161 }
5162 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5163 client
5164 .request(proto::GitCreateWorktree {
5165 project_id: project_id.0,
5166 repository_id: id.to_proto(),
5167 name,
5168 directory: path.to_string_lossy().to_string(),
5169 commit,
5170 })
5171 .await?;
5172
5173 Ok(())
5174 }
5175 }
5176 },
5177 )
5178 }
5179
5180 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5181 let id = self.id;
5182 self.send_job(None, move |repo, _| async move {
5183 match repo {
5184 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5185 backend.default_branch().await
5186 }
5187 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5188 let response = client
5189 .request(proto::GetDefaultBranch {
5190 project_id: project_id.0,
5191 repository_id: id.to_proto(),
5192 })
5193 .await?;
5194
5195 anyhow::Ok(response.branch.map(SharedString::from))
5196 }
5197 }
5198 })
5199 }
5200
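/// Computes a tree-level diff for the given diff type, returning the added, modified, and
/// deleted paths.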
5201 pub fn diff_tree(
5202 &mut self,
5203 diff_type: DiffTreeType,
5204 _cx: &App,
5205 ) -> oneshot::Receiver<Result<TreeDiff>> {
5206 let repository_id = self.snapshot.id;
5207 self.send_job(None, move |repo, _cx| async move {
5208 match repo {
5209 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5210 backend.diff_tree(diff_type).await
5211 }
5212 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5213 let response = client
5214 .request(proto::GetTreeDiff {
5215 project_id: project_id.0,
5216 repository_id: repository_id.0,
5217 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5218 base: diff_type.base().to_string(),
5219 head: diff_type.head().to_string(),
5220 })
5221 .await?;
5222
5223 let entries = response
5224 .entries
5225 .into_iter()
5226 .filter_map(|entry| {
5227 let status = match entry.status() {
5228 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5229 proto::tree_diff_status::Status::Modified => {
5230 TreeDiffStatus::Modified {
5231 old: git::Oid::from_str(
5232 &entry.oid.context("missing oid").log_err()?,
5233 )
5234 .log_err()?,
5235 }
5236 }
5237 proto::tree_diff_status::Status::Deleted => {
5238 TreeDiffStatus::Deleted {
5239 old: git::Oid::from_str(
5240 &entry.oid.context("missing oid").log_err()?,
5241 )
5242 .log_err()?,
5243 }
5244 }
5245 };
5246 Some((
5247 RepoPath::from_rel_path(
5248 &RelPath::from_proto(&entry.path).log_err()?,
5249 ),
5250 status,
5251 ))
5252 })
5253 .collect();
5254
5255 Ok(TreeDiff { entries })
5256 }
5257 }
5258 })
5259 }
5260
5261 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5262 let id = self.id;
5263 self.send_job(None, move |repo, _cx| async move {
5264 match repo {
5265 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5266 backend.diff(diff_type).await
5267 }
5268 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5269 let response = client
5270 .request(proto::GitDiff {
5271 project_id: project_id.0,
5272 repository_id: id.to_proto(),
5273 diff_type: match diff_type {
5274 DiffType::HeadToIndex => {
5275 proto::git_diff::DiffType::HeadToIndex.into()
5276 }
5277 DiffType::HeadToWorktree => {
5278 proto::git_diff::DiffType::HeadToWorktree.into()
5279 }
5280 },
5281 })
5282 .await?;
5283
5284 Ok(response.diff)
5285 }
5286 }
5287 })
5288 }
5289
5290 pub fn create_branch(
5291 &mut self,
5292 branch_name: String,
5293 base_branch: Option<String>,
5294 ) -> oneshot::Receiver<Result<()>> {
5295 let id = self.id;
5296 let status_msg = if let Some(ref base) = base_branch {
5297 format!("git switch -c {branch_name} {base}").into()
5298 } else {
5299 format!("git switch -c {branch_name}").into()
5300 };
5301 self.send_job(Some(status_msg), move |repo, _cx| async move {
5302 match repo {
5303 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5304 backend.create_branch(branch_name, base_branch).await
5305 }
5306 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5307 client
5308 .request(proto::GitCreateBranch {
5309 project_id: project_id.0,
5310 repository_id: id.to_proto(),
5311 branch_name,
5312 })
5313 .await?;
5314
5315 Ok(())
5316 }
5317 }
5318 })
5319 }
5320
5321 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5322 let id = self.id;
5323 self.send_job(
5324 Some(format!("git switch {branch_name}").into()),
5325 move |repo, _cx| async move {
5326 match repo {
5327 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5328 backend.change_branch(branch_name).await
5329 }
5330 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5331 client
5332 .request(proto::GitChangeBranch {
5333 project_id: project_id.0,
5334 repository_id: id.to_proto(),
5335 branch_name,
5336 })
5337 .await?;
5338
5339 Ok(())
5340 }
5341 }
5342 },
5343 )
5344 }
5345
5346 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5347 let id = self.id;
5348 self.send_job(
5349 Some(format!("git branch -d {branch_name}").into()),
5350 move |repo, _cx| async move {
5351 match repo {
5352 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5353 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5354 client
5355 .request(proto::GitDeleteBranch {
5356 project_id: project_id.0,
5357 repository_id: id.to_proto(),
5358 branch_name,
5359 })
5360 .await?;
5361
5362 Ok(())
5363 }
5364 }
5365 },
5366 )
5367 }
5368
5369 pub fn rename_branch(
5370 &mut self,
5371 branch: String,
5372 new_name: String,
5373 ) -> oneshot::Receiver<Result<()>> {
5374 let id = self.id;
5375 self.send_job(
5376 Some(format!("git branch -m {branch} {new_name}").into()),
5377 move |repo, _cx| async move {
5378 match repo {
5379 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5380 backend.rename_branch(branch, new_name).await
5381 }
5382 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5383 client
5384 .request(proto::GitRenameBranch {
5385 project_id: project_id.0,
5386 repository_id: id.to_proto(),
5387 branch,
5388 new_name,
5389 })
5390 .await?;
5391
5392 Ok(())
5393 }
5394 }
5395 },
5396 )
5397 }
5398
5399 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5400 let id = self.id;
5401 self.send_job(None, move |repo, _cx| async move {
5402 match repo {
5403 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5404 backend.check_for_pushed_commit().await
5405 }
5406 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5407 let response = client
5408 .request(proto::CheckForPushedCommits {
5409 project_id: project_id.0,
5410 repository_id: id.to_proto(),
5411 })
5412 .await?;
5413
5414 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5415
5416 Ok(branches)
5417 }
5418 }
5419 })
5420 }
5421
5422 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5423 self.send_job(None, |repo, _cx| async move {
5424 match repo {
5425 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5426 backend.checkpoint().await
5427 }
5428 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5429 }
5430 })
5431 }
5432
5433 pub fn restore_checkpoint(
5434 &mut self,
5435 checkpoint: GitRepositoryCheckpoint,
5436 ) -> oneshot::Receiver<Result<()>> {
5437 self.send_job(None, move |repo, _cx| async move {
5438 match repo {
5439 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5440 backend.restore_checkpoint(checkpoint).await
5441 }
5442 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5443 }
5444 })
5445 }
5446
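/// Applies a repository update received from the host, replacing branch, merge, stash, and
/// status state and emitting events for whatever changed.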
5447 pub(crate) fn apply_remote_update(
5448 &mut self,
5449 update: proto::UpdateRepository,
5450 cx: &mut Context<Self>,
5451 ) -> Result<()> {
5452 let conflicted_paths = TreeSet::from_ordered_entries(
5453 update
5454 .current_merge_conflicts
5455 .into_iter()
5456 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5457 );
5458 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5459 let new_head_commit = update
5460 .head_commit_details
5461 .as_ref()
5462 .map(proto_to_commit_details);
5463 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5464 cx.emit(RepositoryEvent::BranchChanged)
5465 }
5466 self.snapshot.branch = new_branch;
5467 self.snapshot.head_commit = new_head_commit;
5468
5469 self.snapshot.merge.conflicted_paths = conflicted_paths;
5470 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5471 let new_stash_entries = GitStash {
5472 entries: update
5473 .stash_entries
5474 .iter()
5475 .filter_map(|entry| proto_to_stash(entry).ok())
5476 .collect(),
5477 };
5478 if self.snapshot.stash_entries != new_stash_entries {
5479 cx.emit(RepositoryEvent::StashEntriesChanged)
5480 }
5481 self.snapshot.stash_entries = new_stash_entries;
5482 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5483 self.snapshot.remote_origin_url = update.remote_origin_url;
5484
5485 let edits = update
5486 .removed_statuses
5487 .into_iter()
5488 .filter_map(|path| {
5489 Some(sum_tree::Edit::Remove(PathKey(
5490 RelPath::from_proto(&path).log_err()?,
5491 )))
5492 })
5493 .chain(
5494 update
5495 .updated_statuses
5496 .into_iter()
5497 .filter_map(|updated_status| {
5498 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5499 }),
5500 )
5501 .collect::<Vec<_>>();
5502 if !edits.is_empty() {
5503 cx.emit(RepositoryEvent::StatusesChanged);
5504 }
5505 self.snapshot.statuses_by_path.edit(edits, ());
5506 if update.is_last_update {
5507 self.snapshot.scan_id = update.scan_id;
5508 }
5509 self.clear_pending_ops(cx);
5510 Ok(())
5511 }
5512
5513 pub fn compare_checkpoints(
5514 &mut self,
5515 left: GitRepositoryCheckpoint,
5516 right: GitRepositoryCheckpoint,
5517 ) -> oneshot::Receiver<Result<bool>> {
5518 self.send_job(None, move |repo, _cx| async move {
5519 match repo {
5520 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5521 backend.compare_checkpoints(left, right).await
5522 }
5523 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5524 }
5525 })
5526 }
5527
5528 pub fn diff_checkpoints(
5529 &mut self,
5530 base_checkpoint: GitRepositoryCheckpoint,
5531 target_checkpoint: GitRepositoryCheckpoint,
5532 ) -> oneshot::Receiver<Result<String>> {
5533 self.send_job(None, move |repo, _cx| async move {
5534 match repo {
5535 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5536 backend
5537 .diff_checkpoints(base_checkpoint, target_checkpoint)
5538 .await
5539 }
5540 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5541 }
5542 })
5543 }
5544
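/// Drops pending operations that have finished, keeping only those still running, and emits
/// `PendingOpsChanged` if the set changed.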
5545 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5546 let updated = SumTree::from_iter(
5547 self.pending_ops.iter().filter_map(|ops| {
5548 let inner_ops: Vec<PendingOp> =
5549 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5550 if inner_ops.is_empty() {
5551 None
5552 } else {
5553 Some(PendingOps {
5554 repo_path: ops.repo_path.clone(),
5555 ops: inner_ops,
5556 })
5557 }
5558 }),
5559 (),
5560 );
5561
5562 if updated != self.pending_ops {
5563 cx.emit(RepositoryEvent::PendingOpsChanged {
5564 pending_ops: self.pending_ops.clone(),
5565 })
5566 }
5567
5568 self.pending_ops = updated;
5569 }
5570
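/// Schedules a full status scan of the repository; repeated requests are coalesced via the
/// `ReloadGitState` job key, and the resulting snapshot is forwarded downstream when a
/// sender is provided.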
5571 fn schedule_scan(
5572 &mut self,
5573 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5574 cx: &mut Context<Self>,
5575 ) {
5576 let this = cx.weak_entity();
5577 let _ = self.send_keyed_job(
5578 Some(GitJobKey::ReloadGitState),
5579 None,
5580 |state, mut cx| async move {
5581 log::debug!("run scheduled git status scan");
5582
5583 let Some(this) = this.upgrade() else {
5584 return Ok(());
5585 };
5586 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5587 bail!("not a local repository")
5588 };
5589 let (snapshot, events) = this
5590 .update(&mut cx, |this, _| {
5591 this.paths_needing_status_update.clear();
5592 compute_snapshot(
5593 this.id,
5594 this.work_directory_abs_path.clone(),
5595 this.snapshot.clone(),
5596 backend.clone(),
5597 )
5598 })?
5599 .await?;
5600 this.update(&mut cx, |this, cx| {
5601 this.snapshot = snapshot.clone();
5602 this.clear_pending_ops(cx);
5603 for event in events {
5604 cx.emit(event);
5605 }
5606 })?;
5607 if let Some(updates_tx) = updates_tx {
5608 updates_tx
5609 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5610 .ok();
5611 }
5612 Ok(())
5613 },
5614 );
5615 }
5616
5617 fn spawn_local_git_worker(
5618 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5619 cx: &mut Context<Self>,
5620 ) -> mpsc::UnboundedSender<GitJob> {
5621 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5622
5623 cx.spawn(async move |_, cx| {
5624 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5625 if let Some(git_hosting_provider_registry) =
5626 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5627 {
5628 git_hosting_providers::register_additional_providers(
5629 git_hosting_provider_registry,
5630 state.backend.clone(),
5631 )
5632 .await;
5633 }
5634 let state = RepositoryState::Local(state);
5635 let mut jobs = VecDeque::new();
5636 loop {
5637 while let Ok(Some(next_job)) = job_rx.try_next() {
5638 jobs.push_back(next_job);
5639 }
5640
5641 if let Some(job) = jobs.pop_front() {
5642 if let Some(current_key) = &job.key
5643 && jobs
5644 .iter()
5645 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5646 {
5647 continue;
5648 }
5649 (job.job)(state.clone(), cx).await;
5650 } else if let Some(job) = job_rx.next().await {
5651 jobs.push_back(job);
5652 } else {
5653 break;
5654 }
5655 }
5656 anyhow::Ok(())
5657 })
5658 .detach_and_log_err(cx);
5659
5660 job_tx
5661 }
5662
5663 fn spawn_remote_git_worker(
5664 state: RemoteRepositoryState,
5665 cx: &mut Context<Self>,
5666 ) -> mpsc::UnboundedSender<GitJob> {
5667 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5668
5669 cx.spawn(async move |_, cx| {
5670 let state = RepositoryState::Remote(state);
5671 let mut jobs = VecDeque::new();
5672 loop {
5673 while let Ok(Some(next_job)) = job_rx.try_next() {
5674 jobs.push_back(next_job);
5675 }
5676
5677 if let Some(job) = jobs.pop_front() {
5678 if let Some(current_key) = &job.key
5679 && jobs
5680 .iter()
5681 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5682 {
5683 continue;
5684 }
5685 (job.job)(state.clone(), cx).await;
5686 } else if let Some(job) = job_rx.next().await {
5687 jobs.push_back(job);
5688 } else {
5689 break;
5690 }
5691 }
5692 anyhow::Ok(())
5693 })
5694 .detach_and_log_err(cx);
5695
5696 job_tx
5697 }
5698
5699 fn load_staged_text(
5700 &mut self,
5701 buffer_id: BufferId,
5702 repo_path: RepoPath,
5703 cx: &App,
5704 ) -> Task<Result<Option<String>>> {
5705 let rx = self.send_job(None, move |state, _| async move {
5706 match state {
5707 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5708 anyhow::Ok(backend.load_index_text(repo_path).await)
5709 }
5710 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5711 let response = client
5712 .request(proto::OpenUnstagedDiff {
5713 project_id: project_id.to_proto(),
5714 buffer_id: buffer_id.to_proto(),
5715 })
5716 .await?;
5717 Ok(response.staged_text)
5718 }
5719 }
5720 });
5721 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5722 }
5723
5724 fn load_committed_text(
5725 &mut self,
5726 buffer_id: BufferId,
5727 repo_path: RepoPath,
5728 cx: &App,
5729 ) -> Task<Result<DiffBasesChange>> {
5730 let rx = self.send_job(None, move |state, _| async move {
5731 match state {
5732 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5733 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5734 let staged_text = backend.load_index_text(repo_path).await;
5735 let diff_bases_change = if committed_text == staged_text {
5736 DiffBasesChange::SetBoth(committed_text)
5737 } else {
5738 DiffBasesChange::SetEach {
5739 index: staged_text,
5740 head: committed_text,
5741 }
5742 };
5743 anyhow::Ok(diff_bases_change)
5744 }
5745 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5746 use proto::open_uncommitted_diff_response::Mode;
5747
5748 let response = client
5749 .request(proto::OpenUncommittedDiff {
5750 project_id: project_id.to_proto(),
5751 buffer_id: buffer_id.to_proto(),
5752 })
5753 .await?;
5754 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5755 let bases = match mode {
5756 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5757 Mode::IndexAndHead => DiffBasesChange::SetEach {
5758 head: response.committed_text,
5759 index: response.staged_text,
5760 },
5761 };
5762 Ok(bases)
5763 }
5764 }
5765 });
5766
5767 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5768 }
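
/// Loads the contents of a blob by object id, either from the local backend or from the
/// remote host.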
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

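    /// Schedules a refresh of the git statuses for the given paths, diffing
    /// the fresh results against the previous snapshot, emitting the
    /// corresponding [`RepositoryEvent`]s, and forwarding the updated snapshot
    /// downstream when an `updates_tx` is provided.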
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                if paths.is_empty() {
                    return Ok(());
                }
                let statuses = backend.status(&paths).await?;
                let stash_entries = backend.stash_entries().await?;

                // Compare the fresh statuses with the previous snapshot on a
                // background thread, producing inserts for entries whose status
                // changed and removals for paths that no longer have a status.
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }

    /// Returns the currently running git command and when it started, if any.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }

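    /// Enqueues a no-op job and returns a receiver that resolves once it has
    /// run, i.e. after all previously queued jobs have been processed.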
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }

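    /// Runs the given closure while tracking it as a pending operation on each
    /// of the given paths, recording whether the job finished, was skipped
    /// (cancelled), or failed.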
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }

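    /// Registers a new `Running` pending operation for each of the given paths
    /// and returns the id assigned for each path.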
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
}

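/// Builds a permalink to a file inside a Cargo registry source checkout by
/// combining the crate's `.cargo_vcs_info.json` with the `package.repository`
/// field of its `Cargo.toml`.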
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}

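/// Converts an optional blame result into its protobuf representation.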
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
    }
}

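/// Reconstructs a [`Blame`] from its protobuf representation, skipping entries
/// and commit messages whose object ids fail to parse.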
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame { entries, messages })
}

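/// Converts a [`Branch`], including its upstream tracking information, into
/// its protobuf representation.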
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}

fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree.ref_name.to_string(),
        sha: worktree.sha.to_string(),
    }
}

fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

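/// Recomputes a full repository snapshot (branch, HEAD commit, statuses, stash
/// entries, merge details, and remote URLs) and returns it along with the
/// [`RepositoryEvent`]s describing what changed relative to `prev_snapshot`.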
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    // Load the HEAD commit directly; this also covers the detached-HEAD case,
    // where `branch` is `None`.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

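/// Decodes a git file status from its protobuf representation, falling back to
/// the flat `simple_status` code when no detailed variant is present.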
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

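/// Encodes a [`FileStatus`] into its protobuf representation.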
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}