1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
127 /// text do disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
700 let content = match oid {
701 None => None,
702 Some(oid) => Some(
703 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
704 .await?,
705 ),
706 };
707 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));
708
709 buffer_diff
710 .update(cx, |buffer_diff, cx| {
711 buffer_diff.set_base_text(
712 content.map(|s| s.as_str().into()),
713 buffer_snapshot.language().cloned(),
714 buffer_snapshot.text,
715 cx,
716 )
717 })
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 });
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language_changed = true;
825 diff_state.language = language;
826 diff_state.language_registry = language_registry;
827
828 match kind {
829 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
830 DiffKind::Uncommitted => {
831 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
832 diff
833 } else {
834 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
835 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
836 unstaged_diff
837 };
838
839 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
840 diff_state.uncommitted_diff = Some(diff.downgrade())
841 }
842 }
843
844 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
845 let rx = diff_state.wait_for_recalculation();
846
847 anyhow::Ok(async move {
848 if let Some(rx) = rx {
849 rx.await;
850 }
851 Ok(diff)
852 })
853 })
854 })??
855 .await
856 }
857
858 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
859 let diff_state = self.diffs.get(&buffer_id)?;
860 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
861 }
862
863 pub fn get_uncommitted_diff(
864 &self,
865 buffer_id: BufferId,
866 cx: &App,
867 ) -> Option<Entity<BufferDiff>> {
868 let diff_state = self.diffs.get(&buffer_id)?;
869 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
870 }
871
872 pub fn open_conflict_set(
873 &mut self,
874 buffer: Entity<Buffer>,
875 cx: &mut Context<Self>,
876 ) -> Entity<ConflictSet> {
877 log::debug!("open conflict set");
878 let buffer_id = buffer.read(cx).remote_id();
879
880 if let Some(git_state) = self.diffs.get(&buffer_id)
881 && let Some(conflict_set) = git_state
882 .read(cx)
883 .conflict_set
884 .as_ref()
885 .and_then(|weak| weak.upgrade())
886 {
887 let conflict_set = conflict_set;
888 let buffer_snapshot = buffer.read(cx).text_snapshot();
889
890 git_state.update(cx, |state, cx| {
891 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
892 });
893
894 return conflict_set;
895 }
896
897 let is_unmerged = self
898 .repository_and_path_for_buffer_id(buffer_id, cx)
899 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
900 let git_store = cx.weak_entity();
901 let buffer_git_state = self
902 .diffs
903 .entry(buffer_id)
904 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
905 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
906
907 self._subscriptions
908 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
909 cx.emit(GitStoreEvent::ConflictsUpdated);
910 }));
911
912 buffer_git_state.update(cx, |state, cx| {
913 state.conflict_set = Some(conflict_set.downgrade());
914 let buffer_snapshot = buffer.read(cx).text_snapshot();
915 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
916 });
917
918 conflict_set
919 }
920
921 pub fn project_path_git_status(
922 &self,
923 project_path: &ProjectPath,
924 cx: &App,
925 ) -> Option<FileStatus> {
926 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
927 Some(repo.read(cx).status_for_path(&repo_path)?.status)
928 }
929
930 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
931 let mut work_directory_abs_paths = Vec::new();
932 let mut checkpoints = Vec::new();
933 for repository in self.repositories.values() {
934 repository.update(cx, |repository, _| {
935 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
936 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
937 });
938 }
939
940 cx.background_executor().spawn(async move {
941 let checkpoints = future::try_join_all(checkpoints).await?;
942 Ok(GitStoreCheckpoint {
943 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
944 .into_iter()
945 .zip(checkpoints)
946 .collect(),
947 })
948 })
949 }
950
951 pub fn restore_checkpoint(
952 &self,
953 checkpoint: GitStoreCheckpoint,
954 cx: &mut App,
955 ) -> Task<Result<()>> {
956 let repositories_by_work_dir_abs_path = self
957 .repositories
958 .values()
959 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
960 .collect::<HashMap<_, _>>();
961
962 let mut tasks = Vec::new();
963 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
964 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
965 let restore = repository.update(cx, |repository, _| {
966 repository.restore_checkpoint(checkpoint)
967 });
968 tasks.push(async move { restore.await? });
969 }
970 }
971 cx.background_spawn(async move {
972 future::try_join_all(tasks).await?;
973 Ok(())
974 })
975 }
976
977 /// Compares two checkpoints, returning true if they are equal.
978 pub fn compare_checkpoints(
979 &self,
980 left: GitStoreCheckpoint,
981 mut right: GitStoreCheckpoint,
982 cx: &mut App,
983 ) -> Task<Result<bool>> {
984 let repositories_by_work_dir_abs_path = self
985 .repositories
986 .values()
987 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
988 .collect::<HashMap<_, _>>();
989
990 let mut tasks = Vec::new();
991 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
992 if let Some(right_checkpoint) = right
993 .checkpoints_by_work_dir_abs_path
994 .remove(&work_dir_abs_path)
995 {
996 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
997 {
998 let compare = repository.update(cx, |repository, _| {
999 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1000 });
1001
1002 tasks.push(async move { compare.await? });
1003 }
1004 } else {
1005 return Task::ready(Ok(false));
1006 }
1007 }
1008 cx.background_spawn(async move {
1009 Ok(future::try_join_all(tasks)
1010 .await?
1011 .into_iter()
1012 .all(|result| result))
1013 })
1014 }
1015
1016 /// Blames a buffer.
1017 pub fn blame_buffer(
1018 &self,
1019 buffer: &Entity<Buffer>,
1020 version: Option<clock::Global>,
1021 cx: &mut Context<Self>,
1022 ) -> Task<Result<Option<Blame>>> {
1023 let buffer = buffer.read(cx);
1024 let Some((repo, repo_path)) =
1025 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1026 else {
1027 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1028 };
1029 let content = match &version {
1030 Some(version) => buffer.rope_for_version(version),
1031 None => buffer.as_rope().clone(),
1032 };
1033 let line_ending = buffer.line_ending();
1034 let version = version.unwrap_or(buffer.version());
1035 let buffer_id = buffer.remote_id();
1036
1037 let repo = repo.downgrade();
1038 cx.spawn(async move |_, cx| {
1039 let repository_state = repo
1040 .update(cx, |repo, _| repo.repository_state.clone())?
1041 .await
1042 .map_err(|err| anyhow::anyhow!(err))?;
1043 match repository_state {
1044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1045 .blame(repo_path.clone(), content, line_ending)
1046 .await
1047 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1048 .map(Some),
1049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1050 let response = client
1051 .request(proto::BlameBuffer {
1052 project_id: project_id.to_proto(),
1053 buffer_id: buffer_id.into(),
1054 version: serialize_version(&version),
1055 })
1056 .await?;
1057 Ok(deserialize_blame_buffer_response(response))
1058 }
1059 }
1060 })
1061 }
1062
1063 pub fn file_history(
1064 &self,
1065 repo: &Entity<Repository>,
1066 path: RepoPath,
1067 cx: &mut App,
1068 ) -> Task<Result<git::repository::FileHistory>> {
1069 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1070
1071 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1072 }
1073
1074 pub fn file_history_paginated(
1075 &self,
1076 repo: &Entity<Repository>,
1077 path: RepoPath,
1078 skip: usize,
1079 limit: Option<usize>,
1080 cx: &mut App,
1081 ) -> Task<Result<git::repository::FileHistory>> {
1082 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1083
1084 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1085 }
1086
1087 pub fn get_permalink_to_line(
1088 &self,
1089 buffer: &Entity<Buffer>,
1090 selection: Range<u32>,
1091 cx: &mut App,
1092 ) -> Task<Result<url::Url>> {
1093 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1094 return Task::ready(Err(anyhow!("buffer has no file")));
1095 };
1096
1097 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1098 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1099 cx,
1100 ) else {
1101 // If we're not in a Git repo, check whether this is a Rust source
1102 // file in the Cargo registry (presumably opened with go-to-definition
1103 // from a normal Rust file). If so, we can put together a permalink
1104 // using crate metadata.
1105 if buffer
1106 .read(cx)
1107 .language()
1108 .is_none_or(|lang| lang.name() != "Rust".into())
1109 {
1110 return Task::ready(Err(anyhow!("no permalink available")));
1111 }
1112 let file_path = file.worktree.read(cx).absolutize(&file.path);
1113 return cx.spawn(async move |cx| {
1114 let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
1115 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1116 .context("no permalink available")
1117 });
1118 };
1119
1120 let buffer_id = buffer.read(cx).remote_id();
1121 let branch = repo.read(cx).branch.clone();
1122 let remote = branch
1123 .as_ref()
1124 .and_then(|b| b.upstream.as_ref())
1125 .and_then(|b| b.remote_name())
1126 .unwrap_or("origin")
1127 .to_string();
1128
1129 let rx = repo.update(cx, |repo, _| {
1130 repo.send_job(None, move |state, cx| async move {
1131 match state {
1132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1133 let origin_url = backend
1134 .remote_url(&remote)
1135 .await
1136 .with_context(|| format!("remote \"{remote}\" not found"))?;
1137
1138 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1139
1140 let provider_registry =
1141 cx.update(GitHostingProviderRegistry::default_global);
1142
1143 let (provider, remote) =
1144 parse_git_remote_url(provider_registry, &origin_url)
1145 .context("parsing Git remote URL")?;
1146
1147 Ok(provider.build_permalink(
1148 remote,
1149 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1150 ))
1151 }
1152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1153 let response = client
1154 .request(proto::GetPermalinkToLine {
1155 project_id: project_id.to_proto(),
1156 buffer_id: buffer_id.into(),
1157 selection: Some(proto::Range {
1158 start: selection.start as u64,
1159 end: selection.end as u64,
1160 }),
1161 })
1162 .await?;
1163
1164 url::Url::parse(&response.permalink).context("failed to parse permalink")
1165 }
1166 }
1167 })
1168 });
1169 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1170 }
1171
1172 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1173 match &self.state {
1174 GitStoreState::Local {
1175 downstream: downstream_client,
1176 ..
1177 } => downstream_client
1178 .as_ref()
1179 .map(|state| (state.client.clone(), state.project_id)),
1180 GitStoreState::Remote {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client.clone(),
1184 }
1185 }
1186
1187 fn upstream_client(&self) -> Option<AnyProtoClient> {
1188 match &self.state {
1189 GitStoreState::Local { .. } => None,
1190 GitStoreState::Remote {
1191 upstream_client, ..
1192 } => Some(upstream_client.clone()),
1193 }
1194 }
1195
1196 fn on_worktree_store_event(
1197 &mut self,
1198 worktree_store: Entity<WorktreeStore>,
1199 event: &WorktreeStoreEvent,
1200 cx: &mut Context<Self>,
1201 ) {
1202 let GitStoreState::Local {
1203 project_environment,
1204 downstream,
1205 next_repository_id,
1206 fs,
1207 } = &self.state
1208 else {
1209 return;
1210 };
1211
1212 match event {
1213 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1214 if let Some(worktree) = self
1215 .worktree_store
1216 .read(cx)
1217 .worktree_for_id(*worktree_id, cx)
1218 {
1219 let paths_by_git_repo =
1220 self.process_updated_entries(&worktree, updated_entries, cx);
1221 let downstream = downstream
1222 .as_ref()
1223 .map(|downstream| downstream.updates_tx.clone());
1224 cx.spawn(async move |_, cx| {
1225 let paths_by_git_repo = paths_by_git_repo.await;
1226 for (repo, paths) in paths_by_git_repo {
1227 repo.update(cx, |repo, cx| {
1228 repo.paths_changed(paths, downstream.clone(), cx);
1229 });
1230 }
1231 })
1232 .detach();
1233 }
1234 }
1235 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1236 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1237 else {
1238 return;
1239 };
1240 if !worktree.read(cx).is_visible() {
1241 log::debug!(
1242 "not adding repositories for local worktree {:?} because it's not visible",
1243 worktree.read(cx).abs_path()
1244 );
1245 return;
1246 }
1247 self.update_repositories_from_worktree(
1248 *worktree_id,
1249 project_environment.clone(),
1250 next_repository_id.clone(),
1251 downstream
1252 .as_ref()
1253 .map(|downstream| downstream.updates_tx.clone()),
1254 changed_repos.clone(),
1255 fs.clone(),
1256 cx,
1257 );
1258 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1259 }
1260 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1261 let repos_without_worktree: Vec<RepositoryId> = self
1262 .worktree_ids
1263 .iter_mut()
1264 .filter_map(|(repo_id, worktree_ids)| {
1265 worktree_ids.remove(worktree_id);
1266 if worktree_ids.is_empty() {
1267 Some(*repo_id)
1268 } else {
1269 None
1270 }
1271 })
1272 .collect();
1273 let is_active_repo_removed = repos_without_worktree
1274 .iter()
1275 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1276
1277 for repo_id in repos_without_worktree {
1278 self.repositories.remove(&repo_id);
1279 self.worktree_ids.remove(&repo_id);
1280 if let Some(updates_tx) =
1281 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1282 {
1283 updates_tx
1284 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1285 .ok();
1286 }
1287 }
1288
1289 if is_active_repo_removed {
1290 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1291 self.active_repo_id = Some(repo_id);
1292 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1293 } else {
1294 self.active_repo_id = None;
1295 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1296 }
1297 }
1298 }
1299 _ => {}
1300 }
1301 }
1302 fn on_repository_event(
1303 &mut self,
1304 repo: Entity<Repository>,
1305 event: &RepositoryEvent,
1306 cx: &mut Context<Self>,
1307 ) {
1308 let id = repo.read(cx).id;
1309 let repo_snapshot = repo.read(cx).snapshot.clone();
1310 for (buffer_id, diff) in self.diffs.iter() {
1311 if let Some((buffer_repo, repo_path)) =
1312 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1313 && buffer_repo == repo
1314 {
1315 diff.update(cx, |diff, cx| {
1316 if let Some(conflict_set) = &diff.conflict_set {
1317 let conflict_status_changed =
1318 conflict_set.update(cx, |conflict_set, cx| {
1319 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1320 conflict_set.set_has_conflict(has_conflict, cx)
1321 })?;
1322 if conflict_status_changed {
1323 let buffer_store = self.buffer_store.read(cx);
1324 if let Some(buffer) = buffer_store.get(*buffer_id) {
1325 let _ = diff
1326 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1327 }
1328 }
1329 }
1330 anyhow::Ok(())
1331 })
1332 .ok();
1333 }
1334 }
1335 cx.emit(GitStoreEvent::RepositoryUpdated(
1336 id,
1337 event.clone(),
1338 self.active_repo_id == Some(id),
1339 ))
1340 }
1341
1342 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1343 cx.emit(GitStoreEvent::JobsUpdated)
1344 }
1345
1346 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1347 fn update_repositories_from_worktree(
1348 &mut self,
1349 worktree_id: WorktreeId,
1350 project_environment: Entity<ProjectEnvironment>,
1351 next_repository_id: Arc<AtomicU64>,
1352 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1353 updated_git_repositories: UpdatedGitRepositoriesSet,
1354 fs: Arc<dyn Fs>,
1355 cx: &mut Context<Self>,
1356 ) {
1357 let mut removed_ids = Vec::new();
1358 for update in updated_git_repositories.iter() {
1359 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1360 let existing_work_directory_abs_path =
1361 repo.read(cx).work_directory_abs_path.clone();
1362 Some(&existing_work_directory_abs_path)
1363 == update.old_work_directory_abs_path.as_ref()
1364 || Some(&existing_work_directory_abs_path)
1365 == update.new_work_directory_abs_path.as_ref()
1366 }) {
1367 let repo_id = *id;
1368 if let Some(new_work_directory_abs_path) =
1369 update.new_work_directory_abs_path.clone()
1370 {
1371 self.worktree_ids
1372 .entry(repo_id)
1373 .or_insert_with(HashSet::new)
1374 .insert(worktree_id);
1375 existing.update(cx, |existing, cx| {
1376 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1377 existing.schedule_scan(updates_tx.clone(), cx);
1378 });
1379 } else {
1380 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1381 worktree_ids.remove(&worktree_id);
1382 if worktree_ids.is_empty() {
1383 removed_ids.push(repo_id);
1384 }
1385 }
1386 }
1387 } else if let UpdatedGitRepository {
1388 new_work_directory_abs_path: Some(work_directory_abs_path),
1389 dot_git_abs_path: Some(dot_git_abs_path),
1390 repository_dir_abs_path: Some(_repository_dir_abs_path),
1391 common_dir_abs_path: Some(_common_dir_abs_path),
1392 ..
1393 } = update
1394 {
1395 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1396 let git_store = cx.weak_entity();
1397 let repo = cx.new(|cx| {
1398 let mut repo = Repository::local(
1399 id,
1400 work_directory_abs_path.clone(),
1401 dot_git_abs_path.clone(),
1402 project_environment.downgrade(),
1403 fs.clone(),
1404 git_store,
1405 cx,
1406 );
1407 if let Some(updates_tx) = updates_tx.as_ref() {
1408 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1409 updates_tx
1410 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1411 .ok();
1412 }
1413 repo.schedule_scan(updates_tx.clone(), cx);
1414 repo
1415 });
1416 self._subscriptions
1417 .push(cx.subscribe(&repo, Self::on_repository_event));
1418 self._subscriptions
1419 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1420 self.repositories.insert(id, repo);
1421 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1422 cx.emit(GitStoreEvent::RepositoryAdded);
1423 self.active_repo_id.get_or_insert_with(|| {
1424 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1425 id
1426 });
1427 }
1428 }
1429
1430 for id in removed_ids {
1431 if self.active_repo_id == Some(id) {
1432 self.active_repo_id = None;
1433 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1434 }
1435 self.repositories.remove(&id);
1436 if let Some(updates_tx) = updates_tx.as_ref() {
1437 updates_tx
1438 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1439 .ok();
1440 }
1441 }
1442 }
1443
1444 fn on_buffer_store_event(
1445 &mut self,
1446 _: Entity<BufferStore>,
1447 event: &BufferStoreEvent,
1448 cx: &mut Context<Self>,
1449 ) {
1450 match event {
1451 BufferStoreEvent::BufferAdded(buffer) => {
1452 cx.subscribe(buffer, |this, buffer, event, cx| {
1453 if let BufferEvent::LanguageChanged(_) = event {
1454 let buffer_id = buffer.read(cx).remote_id();
1455 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1456 diff_state.update(cx, |diff_state, cx| {
1457 diff_state.buffer_language_changed(buffer, cx);
1458 });
1459 }
1460 }
1461 })
1462 .detach();
1463 }
1464 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1465 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1466 diffs.remove(buffer_id);
1467 }
1468 }
1469 BufferStoreEvent::BufferDropped(buffer_id) => {
1470 self.diffs.remove(buffer_id);
1471 for diffs in self.shared_diffs.values_mut() {
1472 diffs.remove(buffer_id);
1473 }
1474 }
1475 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1476 // Whenever a buffer's file path changes, it's possible that the
1477 // new path is actually a path that is being tracked by a git
1478 // repository. In that case, we'll want to update the buffer's
1479 // `BufferDiffState`, in case it already has one.
1480 let buffer_id = buffer.read(cx).remote_id();
1481 let diff_state = self.diffs.get(&buffer_id);
1482 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1483
1484 if let Some(diff_state) = diff_state
1485 && let Some((repo, repo_path)) = repo
1486 {
1487 let buffer = buffer.clone();
1488 let diff_state = diff_state.clone();
1489
1490 cx.spawn(async move |_git_store, cx| {
1491 async {
1492 let diff_bases_change = repo
1493 .update(cx, |repo, cx| {
1494 repo.load_committed_text(buffer_id, repo_path, cx)
1495 })
1496 .await?;
1497
1498 diff_state.update(cx, |diff_state, cx| {
1499 let buffer_snapshot = buffer.read(cx).text_snapshot();
1500 diff_state.diff_bases_changed(
1501 buffer_snapshot,
1502 Some(diff_bases_change),
1503 cx,
1504 );
1505 });
1506 anyhow::Ok(())
1507 }
1508 .await
1509 .log_err();
1510 })
1511 .detach();
1512 }
1513 }
1514 _ => {}
1515 }
1516 }
1517
1518 pub fn recalculate_buffer_diffs(
1519 &mut self,
1520 buffers: Vec<Entity<Buffer>>,
1521 cx: &mut Context<Self>,
1522 ) -> impl Future<Output = ()> + use<> {
1523 let mut futures = Vec::new();
1524 for buffer in buffers {
1525 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1526 let buffer = buffer.read(cx).text_snapshot();
1527 diff_state.update(cx, |diff_state, cx| {
1528 diff_state.recalculate_diffs(buffer.clone(), cx);
1529 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1530 });
1531 futures.push(diff_state.update(cx, |diff_state, cx| {
1532 diff_state
1533 .reparse_conflict_markers(buffer, cx)
1534 .map(|_| {})
1535 .boxed()
1536 }));
1537 }
1538 }
1539 async move {
1540 futures::future::join_all(futures).await;
1541 }
1542 }
1543
1544 fn on_buffer_diff_event(
1545 &mut self,
1546 diff: Entity<buffer_diff::BufferDiff>,
1547 event: &BufferDiffEvent,
1548 cx: &mut Context<Self>,
1549 ) {
1550 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1551 let buffer_id = diff.read(cx).buffer_id;
1552 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1553 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1554 diff_state.hunk_staging_operation_count += 1;
1555 diff_state.hunk_staging_operation_count
1556 });
1557 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1558 let recv = repo.update(cx, |repo, cx| {
1559 log::debug!("hunks changed for {}", path.as_unix_str());
1560 repo.spawn_set_index_text_job(
1561 path,
1562 new_index_text.as_ref().map(|rope| rope.to_string()),
1563 Some(hunk_staging_operation_count),
1564 cx,
1565 )
1566 });
1567 let diff = diff.downgrade();
1568 cx.spawn(async move |this, cx| {
1569 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1570 diff.update(cx, |diff, cx| {
1571 diff.clear_pending_hunks(cx);
1572 })
1573 .ok();
1574 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1575 .ok();
1576 }
1577 })
1578 .detach();
1579 }
1580 }
1581 }
1582 }
1583
1584 fn local_worktree_git_repos_changed(
1585 &mut self,
1586 worktree: Entity<Worktree>,
1587 changed_repos: &UpdatedGitRepositoriesSet,
1588 cx: &mut Context<Self>,
1589 ) {
1590 log::debug!("local worktree repos changed");
1591 debug_assert!(worktree.read(cx).is_local());
1592
1593 for repository in self.repositories.values() {
1594 repository.update(cx, |repository, cx| {
1595 let repo_abs_path = &repository.work_directory_abs_path;
1596 if changed_repos.iter().any(|update| {
1597 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1598 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 }) {
1600 repository.reload_buffer_diff_bases(cx);
1601 }
1602 });
1603 }
1604 }
1605
1606 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1607 &self.repositories
1608 }
1609
1610 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1611 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1612 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1613 Some(status.status)
1614 }
1615
1616 pub fn repository_and_path_for_buffer_id(
1617 &self,
1618 buffer_id: BufferId,
1619 cx: &App,
1620 ) -> Option<(Entity<Repository>, RepoPath)> {
1621 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1622 let project_path = buffer.read(cx).project_path(cx)?;
1623 self.repository_and_path_for_project_path(&project_path, cx)
1624 }
1625
1626 pub fn repository_and_path_for_project_path(
1627 &self,
1628 path: &ProjectPath,
1629 cx: &App,
1630 ) -> Option<(Entity<Repository>, RepoPath)> {
1631 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1632 self.repositories
1633 .values()
1634 .filter_map(|repo| {
1635 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1636 Some((repo.clone(), repo_path))
1637 })
1638 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1639 }
1640
1641 pub fn git_init(
1642 &self,
1643 path: Arc<Path>,
1644 fallback_branch_name: String,
1645 cx: &App,
1646 ) -> Task<Result<()>> {
1647 match &self.state {
1648 GitStoreState::Local { fs, .. } => {
1649 let fs = fs.clone();
1650 cx.background_executor()
1651 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1652 }
1653 GitStoreState::Remote {
1654 upstream_client,
1655 upstream_project_id: project_id,
1656 ..
1657 } => {
1658 let client = upstream_client.clone();
1659 let project_id = *project_id;
1660 cx.background_executor().spawn(async move {
1661 client
1662 .request(proto::GitInit {
1663 project_id: project_id,
1664 abs_path: path.to_string_lossy().into_owned(),
1665 fallback_branch_name,
1666 })
1667 .await?;
1668 Ok(())
1669 })
1670 }
1671 }
1672 }
1673
1674 pub fn git_clone(
1675 &self,
1676 repo: String,
1677 path: impl Into<Arc<std::path::Path>>,
1678 cx: &App,
1679 ) -> Task<Result<()>> {
1680 let path = path.into();
1681 match &self.state {
1682 GitStoreState::Local { fs, .. } => {
1683 let fs = fs.clone();
1684 cx.background_executor()
1685 .spawn(async move { fs.git_clone(&repo, &path).await })
1686 }
1687 GitStoreState::Remote {
1688 upstream_client,
1689 upstream_project_id,
1690 ..
1691 } => {
1692 if upstream_client.is_via_collab() {
1693 return Task::ready(Err(anyhow!(
1694 "Git Clone isn't supported for project guests"
1695 )));
1696 }
1697 let request = upstream_client.request(proto::GitClone {
1698 project_id: *upstream_project_id,
1699 abs_path: path.to_string_lossy().into_owned(),
1700 remote_repo: repo,
1701 });
1702
1703 cx.background_spawn(async move {
1704 let result = request.await?;
1705
1706 match result.success {
1707 true => Ok(()),
1708 false => Err(anyhow!("Git Clone failed")),
1709 }
1710 })
1711 }
1712 }
1713 }
1714
1715 async fn handle_update_repository(
1716 this: Entity<Self>,
1717 envelope: TypedEnvelope<proto::UpdateRepository>,
1718 mut cx: AsyncApp,
1719 ) -> Result<()> {
1720 this.update(&mut cx, |this, cx| {
1721 let path_style = this.worktree_store.read(cx).path_style();
1722 let mut update = envelope.payload;
1723
1724 let id = RepositoryId::from_proto(update.id);
1725 let client = this.upstream_client().context("no upstream client")?;
1726
1727 let mut repo_subscription = None;
1728 let repo = this.repositories.entry(id).or_insert_with(|| {
1729 let git_store = cx.weak_entity();
1730 let repo = cx.new(|cx| {
1731 Repository::remote(
1732 id,
1733 Path::new(&update.abs_path).into(),
1734 path_style,
1735 ProjectId(update.project_id),
1736 client,
1737 git_store,
1738 cx,
1739 )
1740 });
1741 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1742 cx.emit(GitStoreEvent::RepositoryAdded);
1743 repo
1744 });
1745 this._subscriptions.extend(repo_subscription);
1746
1747 repo.update(cx, {
1748 let update = update.clone();
1749 |repo, cx| repo.apply_remote_update(update, cx)
1750 })?;
1751
1752 this.active_repo_id.get_or_insert_with(|| {
1753 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1754 id
1755 });
1756
1757 if let Some((client, project_id)) = this.downstream_client() {
1758 update.project_id = project_id.to_proto();
1759 client.send(update).log_err();
1760 }
1761 Ok(())
1762 })
1763 }
1764
1765 async fn handle_remove_repository(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::RemoveRepository>,
1768 mut cx: AsyncApp,
1769 ) -> Result<()> {
1770 this.update(&mut cx, |this, cx| {
1771 let mut update = envelope.payload;
1772 let id = RepositoryId::from_proto(update.id);
1773 this.repositories.remove(&id);
1774 if let Some((client, project_id)) = this.downstream_client() {
1775 update.project_id = project_id.to_proto();
1776 client.send(update).log_err();
1777 }
1778 if this.active_repo_id == Some(id) {
1779 this.active_repo_id = None;
1780 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1781 }
1782 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1783 });
1784 Ok(())
1785 }
1786
1787 async fn handle_git_init(
1788 this: Entity<Self>,
1789 envelope: TypedEnvelope<proto::GitInit>,
1790 cx: AsyncApp,
1791 ) -> Result<proto::Ack> {
1792 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1793 let name = envelope.payload.fallback_branch_name;
1794 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1795 .await?;
1796
1797 Ok(proto::Ack {})
1798 }
1799
1800 async fn handle_git_clone(
1801 this: Entity<Self>,
1802 envelope: TypedEnvelope<proto::GitClone>,
1803 cx: AsyncApp,
1804 ) -> Result<proto::GitCloneResponse> {
1805 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1806 let repo_name = envelope.payload.remote_repo;
1807 let result = cx
1808 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
1809 .await;
1810
1811 Ok(proto::GitCloneResponse {
1812 success: result.is_ok(),
1813 })
1814 }
1815
1816 async fn handle_fetch(
1817 this: Entity<Self>,
1818 envelope: TypedEnvelope<proto::Fetch>,
1819 mut cx: AsyncApp,
1820 ) -> Result<proto::RemoteMessageResponse> {
1821 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1822 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1823 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1824 let askpass_id = envelope.payload.askpass_id;
1825
1826 let askpass = make_remote_delegate(
1827 this,
1828 envelope.payload.project_id,
1829 repository_id,
1830 askpass_id,
1831 &mut cx,
1832 );
1833
1834 let remote_output = repository_handle
1835 .update(&mut cx, |repository_handle, cx| {
1836 repository_handle.fetch(fetch_options, askpass, cx)
1837 })
1838 .await??;
1839
1840 Ok(proto::RemoteMessageResponse {
1841 stdout: remote_output.stdout,
1842 stderr: remote_output.stderr,
1843 })
1844 }
1845
1846 async fn handle_push(
1847 this: Entity<Self>,
1848 envelope: TypedEnvelope<proto::Push>,
1849 mut cx: AsyncApp,
1850 ) -> Result<proto::RemoteMessageResponse> {
1851 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1852 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1853
1854 let askpass_id = envelope.payload.askpass_id;
1855 let askpass = make_remote_delegate(
1856 this,
1857 envelope.payload.project_id,
1858 repository_id,
1859 askpass_id,
1860 &mut cx,
1861 );
1862
1863 let options = envelope
1864 .payload
1865 .options
1866 .as_ref()
1867 .map(|_| match envelope.payload.options() {
1868 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1869 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1870 });
1871
1872 let branch_name = envelope.payload.branch_name.into();
1873 let remote_branch_name = envelope.payload.remote_branch_name.into();
1874 let remote_name = envelope.payload.remote_name.into();
1875
1876 let remote_output = repository_handle
1877 .update(&mut cx, |repository_handle, cx| {
1878 repository_handle.push(
1879 branch_name,
1880 remote_branch_name,
1881 remote_name,
1882 options,
1883 askpass,
1884 cx,
1885 )
1886 })
1887 .await??;
1888 Ok(proto::RemoteMessageResponse {
1889 stdout: remote_output.stdout,
1890 stderr: remote_output.stderr,
1891 })
1892 }
1893
1894 async fn handle_pull(
1895 this: Entity<Self>,
1896 envelope: TypedEnvelope<proto::Pull>,
1897 mut cx: AsyncApp,
1898 ) -> Result<proto::RemoteMessageResponse> {
1899 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1900 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1901 let askpass_id = envelope.payload.askpass_id;
1902 let askpass = make_remote_delegate(
1903 this,
1904 envelope.payload.project_id,
1905 repository_id,
1906 askpass_id,
1907 &mut cx,
1908 );
1909
1910 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1911 let remote_name = envelope.payload.remote_name.into();
1912 let rebase = envelope.payload.rebase;
1913
1914 let remote_message = repository_handle
1915 .update(&mut cx, |repository_handle, cx| {
1916 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1917 })
1918 .await??;
1919
1920 Ok(proto::RemoteMessageResponse {
1921 stdout: remote_message.stdout,
1922 stderr: remote_message.stderr,
1923 })
1924 }
1925
1926 async fn handle_stage(
1927 this: Entity<Self>,
1928 envelope: TypedEnvelope<proto::Stage>,
1929 mut cx: AsyncApp,
1930 ) -> Result<proto::Ack> {
1931 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1932 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1933
1934 let entries = envelope
1935 .payload
1936 .paths
1937 .into_iter()
1938 .map(|path| RepoPath::new(&path))
1939 .collect::<Result<Vec<_>>>()?;
1940
1941 repository_handle
1942 .update(&mut cx, |repository_handle, cx| {
1943 repository_handle.stage_entries(entries, cx)
1944 })
1945 .await?;
1946 Ok(proto::Ack {})
1947 }
1948
1949 async fn handle_unstage(
1950 this: Entity<Self>,
1951 envelope: TypedEnvelope<proto::Unstage>,
1952 mut cx: AsyncApp,
1953 ) -> Result<proto::Ack> {
1954 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1955 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1956
1957 let entries = envelope
1958 .payload
1959 .paths
1960 .into_iter()
1961 .map(|path| RepoPath::new(&path))
1962 .collect::<Result<Vec<_>>>()?;
1963
1964 repository_handle
1965 .update(&mut cx, |repository_handle, cx| {
1966 repository_handle.unstage_entries(entries, cx)
1967 })
1968 .await?;
1969
1970 Ok(proto::Ack {})
1971 }
1972
1973 async fn handle_stash(
1974 this: Entity<Self>,
1975 envelope: TypedEnvelope<proto::Stash>,
1976 mut cx: AsyncApp,
1977 ) -> Result<proto::Ack> {
1978 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1979 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1980
1981 let entries = envelope
1982 .payload
1983 .paths
1984 .into_iter()
1985 .map(|path| RepoPath::new(&path))
1986 .collect::<Result<Vec<_>>>()?;
1987
1988 repository_handle
1989 .update(&mut cx, |repository_handle, cx| {
1990 repository_handle.stash_entries(entries, cx)
1991 })
1992 .await?;
1993
1994 Ok(proto::Ack {})
1995 }
1996
1997 async fn handle_stash_pop(
1998 this: Entity<Self>,
1999 envelope: TypedEnvelope<proto::StashPop>,
2000 mut cx: AsyncApp,
2001 ) -> Result<proto::Ack> {
2002 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2003 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2004 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2005
2006 repository_handle
2007 .update(&mut cx, |repository_handle, cx| {
2008 repository_handle.stash_pop(stash_index, cx)
2009 })
2010 .await?;
2011
2012 Ok(proto::Ack {})
2013 }
2014
2015 async fn handle_stash_apply(
2016 this: Entity<Self>,
2017 envelope: TypedEnvelope<proto::StashApply>,
2018 mut cx: AsyncApp,
2019 ) -> Result<proto::Ack> {
2020 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2021 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2022 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2023
2024 repository_handle
2025 .update(&mut cx, |repository_handle, cx| {
2026 repository_handle.stash_apply(stash_index, cx)
2027 })
2028 .await?;
2029
2030 Ok(proto::Ack {})
2031 }
2032
2033 async fn handle_stash_drop(
2034 this: Entity<Self>,
2035 envelope: TypedEnvelope<proto::StashDrop>,
2036 mut cx: AsyncApp,
2037 ) -> Result<proto::Ack> {
2038 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2039 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2040 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2041
2042 repository_handle
2043 .update(&mut cx, |repository_handle, cx| {
2044 repository_handle.stash_drop(stash_index, cx)
2045 })
2046 .await??;
2047
2048 Ok(proto::Ack {})
2049 }
2050
2051 async fn handle_set_index_text(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::SetIndexText>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::Ack> {
2056 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2057 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2058 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2059
2060 repository_handle
2061 .update(&mut cx, |repository_handle, cx| {
2062 repository_handle.spawn_set_index_text_job(
2063 repo_path,
2064 envelope.payload.text,
2065 None,
2066 cx,
2067 )
2068 })
2069 .await??;
2070 Ok(proto::Ack {})
2071 }
2072
2073 async fn handle_run_hook(
2074 this: Entity<Self>,
2075 envelope: TypedEnvelope<proto::RunGitHook>,
2076 mut cx: AsyncApp,
2077 ) -> Result<proto::Ack> {
2078 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2079 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2080 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2081 repository_handle
2082 .update(&mut cx, |repository_handle, cx| {
2083 repository_handle.run_hook(hook, cx)
2084 })
2085 .await??;
2086 Ok(proto::Ack {})
2087 }
2088
2089 async fn handle_commit(
2090 this: Entity<Self>,
2091 envelope: TypedEnvelope<proto::Commit>,
2092 mut cx: AsyncApp,
2093 ) -> Result<proto::Ack> {
2094 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2095 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2096 let askpass_id = envelope.payload.askpass_id;
2097
2098 let askpass = make_remote_delegate(
2099 this,
2100 envelope.payload.project_id,
2101 repository_id,
2102 askpass_id,
2103 &mut cx,
2104 );
2105
2106 let message = SharedString::from(envelope.payload.message);
2107 let name = envelope.payload.name.map(SharedString::from);
2108 let email = envelope.payload.email.map(SharedString::from);
2109 let options = envelope.payload.options.unwrap_or_default();
2110
2111 repository_handle
2112 .update(&mut cx, |repository_handle, cx| {
2113 repository_handle.commit(
2114 message,
2115 name.zip(email),
2116 CommitOptions {
2117 amend: options.amend,
2118 signoff: options.signoff,
2119 },
2120 askpass,
2121 cx,
2122 )
2123 })
2124 .await??;
2125 Ok(proto::Ack {})
2126 }
2127
2128 async fn handle_get_remotes(
2129 this: Entity<Self>,
2130 envelope: TypedEnvelope<proto::GetRemotes>,
2131 mut cx: AsyncApp,
2132 ) -> Result<proto::GetRemotesResponse> {
2133 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2134 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2135
2136 let branch_name = envelope.payload.branch_name;
2137 let is_push = envelope.payload.is_push;
2138
2139 let remotes = repository_handle
2140 .update(&mut cx, |repository_handle, _| {
2141 repository_handle.get_remotes(branch_name, is_push)
2142 })
2143 .await??;
2144
2145 Ok(proto::GetRemotesResponse {
2146 remotes: remotes
2147 .into_iter()
2148 .map(|remotes| proto::get_remotes_response::Remote {
2149 name: remotes.name.to_string(),
2150 })
2151 .collect::<Vec<_>>(),
2152 })
2153 }
2154
2155 async fn handle_get_worktrees(
2156 this: Entity<Self>,
2157 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2158 mut cx: AsyncApp,
2159 ) -> Result<proto::GitWorktreesResponse> {
2160 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2161 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2162
2163 let worktrees = repository_handle
2164 .update(&mut cx, |repository_handle, _| {
2165 repository_handle.worktrees()
2166 })
2167 .await??;
2168
2169 Ok(proto::GitWorktreesResponse {
2170 worktrees: worktrees
2171 .into_iter()
2172 .map(|worktree| worktree_to_proto(&worktree))
2173 .collect::<Vec<_>>(),
2174 })
2175 }
2176
2177 async fn handle_create_worktree(
2178 this: Entity<Self>,
2179 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2180 mut cx: AsyncApp,
2181 ) -> Result<proto::Ack> {
2182 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2183 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2184 let directory = PathBuf::from(envelope.payload.directory);
2185 let name = envelope.payload.name;
2186 let commit = envelope.payload.commit;
2187
2188 repository_handle
2189 .update(&mut cx, |repository_handle, _| {
2190 repository_handle.create_worktree(name, directory, commit)
2191 })
2192 .await??;
2193
2194 Ok(proto::Ack {})
2195 }
2196
2197 async fn handle_get_branches(
2198 this: Entity<Self>,
2199 envelope: TypedEnvelope<proto::GitGetBranches>,
2200 mut cx: AsyncApp,
2201 ) -> Result<proto::GitBranchesResponse> {
2202 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2203 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2204
2205 let branches = repository_handle
2206 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2207 .await??;
2208
2209 Ok(proto::GitBranchesResponse {
2210 branches: branches
2211 .into_iter()
2212 .map(|branch| branch_to_proto(&branch))
2213 .collect::<Vec<_>>(),
2214 })
2215 }
2216 async fn handle_get_default_branch(
2217 this: Entity<Self>,
2218 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2219 mut cx: AsyncApp,
2220 ) -> Result<proto::GetDefaultBranchResponse> {
2221 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2222 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2223
2224 let branch = repository_handle
2225 .update(&mut cx, |repository_handle, _| {
2226 repository_handle.default_branch()
2227 })
2228 .await??
2229 .map(Into::into);
2230
2231 Ok(proto::GetDefaultBranchResponse { branch })
2232 }
2233 async fn handle_create_branch(
2234 this: Entity<Self>,
2235 envelope: TypedEnvelope<proto::GitCreateBranch>,
2236 mut cx: AsyncApp,
2237 ) -> Result<proto::Ack> {
2238 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2239 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2240 let branch_name = envelope.payload.branch_name;
2241
2242 repository_handle
2243 .update(&mut cx, |repository_handle, _| {
2244 repository_handle.create_branch(branch_name, None)
2245 })
2246 .await??;
2247
2248 Ok(proto::Ack {})
2249 }
2250
2251 async fn handle_change_branch(
2252 this: Entity<Self>,
2253 envelope: TypedEnvelope<proto::GitChangeBranch>,
2254 mut cx: AsyncApp,
2255 ) -> Result<proto::Ack> {
2256 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2257 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2258 let branch_name = envelope.payload.branch_name;
2259
2260 repository_handle
2261 .update(&mut cx, |repository_handle, _| {
2262 repository_handle.change_branch(branch_name)
2263 })
2264 .await??;
2265
2266 Ok(proto::Ack {})
2267 }
2268
2269 async fn handle_rename_branch(
2270 this: Entity<Self>,
2271 envelope: TypedEnvelope<proto::GitRenameBranch>,
2272 mut cx: AsyncApp,
2273 ) -> Result<proto::Ack> {
2274 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2275 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2276 let branch = envelope.payload.branch;
2277 let new_name = envelope.payload.new_name;
2278
2279 repository_handle
2280 .update(&mut cx, |repository_handle, _| {
2281 repository_handle.rename_branch(branch, new_name)
2282 })
2283 .await??;
2284
2285 Ok(proto::Ack {})
2286 }
2287
2288 async fn handle_create_remote(
2289 this: Entity<Self>,
2290 envelope: TypedEnvelope<proto::GitCreateRemote>,
2291 mut cx: AsyncApp,
2292 ) -> Result<proto::Ack> {
2293 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2294 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2295 let remote_name = envelope.payload.remote_name;
2296 let remote_url = envelope.payload.remote_url;
2297
2298 repository_handle
2299 .update(&mut cx, |repository_handle, _| {
2300 repository_handle.create_remote(remote_name, remote_url)
2301 })
2302 .await??;
2303
2304 Ok(proto::Ack {})
2305 }
2306
2307 async fn handle_delete_branch(
2308 this: Entity<Self>,
2309 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2310 mut cx: AsyncApp,
2311 ) -> Result<proto::Ack> {
2312 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2313 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2314 let branch_name = envelope.payload.branch_name;
2315
2316 repository_handle
2317 .update(&mut cx, |repository_handle, _| {
2318 repository_handle.delete_branch(branch_name)
2319 })
2320 .await??;
2321
2322 Ok(proto::Ack {})
2323 }
2324
2325 async fn handle_remove_remote(
2326 this: Entity<Self>,
2327 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2328 mut cx: AsyncApp,
2329 ) -> Result<proto::Ack> {
2330 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2331 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2332 let remote_name = envelope.payload.remote_name;
2333
2334 repository_handle
2335 .update(&mut cx, |repository_handle, _| {
2336 repository_handle.remove_remote(remote_name)
2337 })
2338 .await??;
2339
2340 Ok(proto::Ack {})
2341 }
2342
2343 async fn handle_show(
2344 this: Entity<Self>,
2345 envelope: TypedEnvelope<proto::GitShow>,
2346 mut cx: AsyncApp,
2347 ) -> Result<proto::GitCommitDetails> {
2348 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2349 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2350
2351 let commit = repository_handle
2352 .update(&mut cx, |repository_handle, _| {
2353 repository_handle.show(envelope.payload.commit)
2354 })
2355 .await??;
2356 Ok(proto::GitCommitDetails {
2357 sha: commit.sha.into(),
2358 message: commit.message.into(),
2359 commit_timestamp: commit.commit_timestamp,
2360 author_email: commit.author_email.into(),
2361 author_name: commit.author_name.into(),
2362 })
2363 }
2364
2365 async fn handle_load_commit_diff(
2366 this: Entity<Self>,
2367 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2368 mut cx: AsyncApp,
2369 ) -> Result<proto::LoadCommitDiffResponse> {
2370 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2371 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2372
2373 let commit_diff = repository_handle
2374 .update(&mut cx, |repository_handle, _| {
2375 repository_handle.load_commit_diff(envelope.payload.commit)
2376 })
2377 .await??;
2378 Ok(proto::LoadCommitDiffResponse {
2379 files: commit_diff
2380 .files
2381 .into_iter()
2382 .map(|file| proto::CommitFile {
2383 path: file.path.to_proto(),
2384 old_text: file.old_text,
2385 new_text: file.new_text,
2386 })
2387 .collect(),
2388 })
2389 }
2390
2391 async fn handle_file_history(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::GitFileHistory>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::GitFileHistoryResponse> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398 let path = RepoPath::from_proto(&envelope.payload.path)?;
2399 let skip = envelope.payload.skip as usize;
2400 let limit = envelope.payload.limit.map(|l| l as usize);
2401
2402 let file_history = repository_handle
2403 .update(&mut cx, |repository_handle, _| {
2404 repository_handle.file_history_paginated(path, skip, limit)
2405 })
2406 .await??;
2407
2408 Ok(proto::GitFileHistoryResponse {
2409 entries: file_history
2410 .entries
2411 .into_iter()
2412 .map(|entry| proto::FileHistoryEntry {
2413 sha: entry.sha.to_string(),
2414 subject: entry.subject.to_string(),
2415 message: entry.message.to_string(),
2416 commit_timestamp: entry.commit_timestamp,
2417 author_name: entry.author_name.to_string(),
2418 author_email: entry.author_email.to_string(),
2419 })
2420 .collect(),
2421 path: file_history.path.to_proto(),
2422 })
2423 }
2424
2425 async fn handle_reset(
2426 this: Entity<Self>,
2427 envelope: TypedEnvelope<proto::GitReset>,
2428 mut cx: AsyncApp,
2429 ) -> Result<proto::Ack> {
2430 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2431 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2432
2433 let mode = match envelope.payload.mode() {
2434 git_reset::ResetMode::Soft => ResetMode::Soft,
2435 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2436 };
2437
2438 repository_handle
2439 .update(&mut cx, |repository_handle, cx| {
2440 repository_handle.reset(envelope.payload.commit, mode, cx)
2441 })
2442 .await??;
2443 Ok(proto::Ack {})
2444 }
2445
2446 async fn handle_checkout_files(
2447 this: Entity<Self>,
2448 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2449 mut cx: AsyncApp,
2450 ) -> Result<proto::Ack> {
2451 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2452 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2453 let paths = envelope
2454 .payload
2455 .paths
2456 .iter()
2457 .map(|s| RepoPath::from_proto(s))
2458 .collect::<Result<Vec<_>>>()?;
2459
2460 repository_handle
2461 .update(&mut cx, |repository_handle, cx| {
2462 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2463 })
2464 .await?;
2465 Ok(proto::Ack {})
2466 }
2467
2468 async fn handle_open_commit_message_buffer(
2469 this: Entity<Self>,
2470 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2471 mut cx: AsyncApp,
2472 ) -> Result<proto::OpenBufferResponse> {
2473 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2474 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2475 let buffer = repository
2476 .update(&mut cx, |repository, cx| {
2477 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2478 })
2479 .await?;
2480
2481 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2482 this.update(&mut cx, |this, cx| {
2483 this.buffer_store.update(cx, |buffer_store, cx| {
2484 buffer_store
2485 .create_buffer_for_peer(
2486 &buffer,
2487 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2488 cx,
2489 )
2490 .detach_and_log_err(cx);
2491 })
2492 });
2493
2494 Ok(proto::OpenBufferResponse {
2495 buffer_id: buffer_id.to_proto(),
2496 })
2497 }
2498
2499 async fn handle_askpass(
2500 this: Entity<Self>,
2501 envelope: TypedEnvelope<proto::AskPassRequest>,
2502 mut cx: AsyncApp,
2503 ) -> Result<proto::AskPassResponse> {
2504 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2505 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2506
2507 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2508 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2509 debug_panic!("no askpass found");
2510 anyhow::bail!("no askpass found");
2511 };
2512
2513 let response = askpass
2514 .ask_password(envelope.payload.prompt)
2515 .await
2516 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2517
2518 delegates
2519 .lock()
2520 .insert(envelope.payload.askpass_id, askpass);
2521
2522 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2523 Ok(proto::AskPassResponse {
2524 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2525 })
2526 }
2527
2528 async fn handle_check_for_pushed_commits(
2529 this: Entity<Self>,
2530 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2531 mut cx: AsyncApp,
2532 ) -> Result<proto::CheckForPushedCommitsResponse> {
2533 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2534 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2535
2536 let branches = repository_handle
2537 .update(&mut cx, |repository_handle, _| {
2538 repository_handle.check_for_pushed_commits()
2539 })
2540 .await??;
2541 Ok(proto::CheckForPushedCommitsResponse {
2542 pushed_to: branches
2543 .into_iter()
2544 .map(|commit| commit.to_string())
2545 .collect(),
2546 })
2547 }
2548
2549 async fn handle_git_diff(
2550 this: Entity<Self>,
2551 envelope: TypedEnvelope<proto::GitDiff>,
2552 mut cx: AsyncApp,
2553 ) -> Result<proto::GitDiffResponse> {
2554 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2555 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2556 let diff_type = match envelope.payload.diff_type() {
2557 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2558 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2559 };
2560
2561 let mut diff = repository_handle
2562 .update(&mut cx, |repository_handle, cx| {
2563 repository_handle.diff(diff_type, cx)
2564 })
2565 .await??;
2566 const ONE_MB: usize = 1_000_000;
2567 if diff.len() > ONE_MB {
2568 diff = diff.chars().take(ONE_MB).collect()
2569 }
2570
2571 Ok(proto::GitDiffResponse { diff })
2572 }
2573
2574 async fn handle_tree_diff(
2575 this: Entity<Self>,
2576 request: TypedEnvelope<proto::GetTreeDiff>,
2577 mut cx: AsyncApp,
2578 ) -> Result<proto::GetTreeDiffResponse> {
2579 let repository_id = RepositoryId(request.payload.repository_id);
2580 let diff_type = if request.payload.is_merge {
2581 DiffTreeType::MergeBase {
2582 base: request.payload.base.into(),
2583 head: request.payload.head.into(),
2584 }
2585 } else {
2586 DiffTreeType::Since {
2587 base: request.payload.base.into(),
2588 head: request.payload.head.into(),
2589 }
2590 };
2591
2592 let diff = this
2593 .update(&mut cx, |this, cx| {
2594 let repository = this.repositories().get(&repository_id)?;
2595 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2596 })
2597 .context("missing repository")?
2598 .await??;
2599
2600 Ok(proto::GetTreeDiffResponse {
2601 entries: diff
2602 .entries
2603 .into_iter()
2604 .map(|(path, status)| proto::TreeDiffStatus {
2605 path: path.as_ref().to_proto(),
2606 status: match status {
2607 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2608 TreeDiffStatus::Modified { .. } => {
2609 proto::tree_diff_status::Status::Modified.into()
2610 }
2611 TreeDiffStatus::Deleted { .. } => {
2612 proto::tree_diff_status::Status::Deleted.into()
2613 }
2614 },
2615 oid: match status {
2616 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2617 Some(old.to_string())
2618 }
2619 TreeDiffStatus::Added => None,
2620 },
2621 })
2622 .collect(),
2623 })
2624 }
2625
2626 async fn handle_get_blob_content(
2627 this: Entity<Self>,
2628 request: TypedEnvelope<proto::GetBlobContent>,
2629 mut cx: AsyncApp,
2630 ) -> Result<proto::GetBlobContentResponse> {
2631 let oid = git::Oid::from_str(&request.payload.oid)?;
2632 let repository_id = RepositoryId(request.payload.repository_id);
2633 let content = this
2634 .update(&mut cx, |this, cx| {
2635 let repository = this.repositories().get(&repository_id)?;
2636 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2637 })
2638 .context("missing repository")?
2639 .await?;
2640 Ok(proto::GetBlobContentResponse { content })
2641 }
2642
2643 async fn handle_open_unstaged_diff(
2644 this: Entity<Self>,
2645 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2646 mut cx: AsyncApp,
2647 ) -> Result<proto::OpenUnstagedDiffResponse> {
2648 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2649 let diff = this
2650 .update(&mut cx, |this, cx| {
2651 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2652 Some(this.open_unstaged_diff(buffer, cx))
2653 })
2654 .context("missing buffer")?
2655 .await?;
2656 this.update(&mut cx, |this, _| {
2657 let shared_diffs = this
2658 .shared_diffs
2659 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2660 .or_default();
2661 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2662 });
2663 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2664 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2665 }
2666
2667 async fn handle_open_uncommitted_diff(
2668 this: Entity<Self>,
2669 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2670 mut cx: AsyncApp,
2671 ) -> Result<proto::OpenUncommittedDiffResponse> {
2672 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2673 let diff = this
2674 .update(&mut cx, |this, cx| {
2675 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2676 Some(this.open_uncommitted_diff(buffer, cx))
2677 })
2678 .context("missing buffer")?
2679 .await?;
2680 this.update(&mut cx, |this, _| {
2681 let shared_diffs = this
2682 .shared_diffs
2683 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2684 .or_default();
2685 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2686 });
2687 Ok(diff.read_with(&cx, |diff, cx| {
2688 use proto::open_uncommitted_diff_response::Mode;
2689
2690 let unstaged_diff = diff.secondary_diff();
2691 let index_snapshot = unstaged_diff.and_then(|diff| {
2692 let diff = diff.read(cx);
2693 diff.base_text_exists().then(|| diff.base_text(cx))
2694 });
2695
2696 let mode;
2697 let staged_text;
2698 let committed_text;
2699 if diff.base_text_exists() {
2700 let committed_snapshot = diff.base_text(cx);
2701 committed_text = Some(committed_snapshot.text());
2702 if let Some(index_text) = index_snapshot {
2703 if index_text.remote_id() == committed_snapshot.remote_id() {
2704 mode = Mode::IndexMatchesHead;
2705 staged_text = None;
2706 } else {
2707 mode = Mode::IndexAndHead;
2708 staged_text = Some(index_text.text());
2709 }
2710 } else {
2711 mode = Mode::IndexAndHead;
2712 staged_text = None;
2713 }
2714 } else {
2715 mode = Mode::IndexAndHead;
2716 committed_text = None;
2717 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2718 }
2719
2720 proto::OpenUncommittedDiffResponse {
2721 committed_text,
2722 staged_text,
2723 mode: mode.into(),
2724 }
2725 }))
2726 }
2727
2728 async fn handle_update_diff_bases(
2729 this: Entity<Self>,
2730 request: TypedEnvelope<proto::UpdateDiffBases>,
2731 mut cx: AsyncApp,
2732 ) -> Result<()> {
2733 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2734 this.update(&mut cx, |this, cx| {
2735 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2736 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2737 {
2738 let buffer = buffer.read(cx).text_snapshot();
2739 diff_state.update(cx, |diff_state, cx| {
2740 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2741 })
2742 }
2743 });
2744 Ok(())
2745 }
2746
2747 async fn handle_blame_buffer(
2748 this: Entity<Self>,
2749 envelope: TypedEnvelope<proto::BlameBuffer>,
2750 mut cx: AsyncApp,
2751 ) -> Result<proto::BlameBufferResponse> {
2752 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2753 let version = deserialize_version(&envelope.payload.version);
2754 let buffer = this.read_with(&cx, |this, cx| {
2755 this.buffer_store.read(cx).get_existing(buffer_id)
2756 })?;
2757 buffer
2758 .update(&mut cx, |buffer, _| {
2759 buffer.wait_for_version(version.clone())
2760 })
2761 .await?;
2762 let blame = this
2763 .update(&mut cx, |this, cx| {
2764 this.blame_buffer(&buffer, Some(version), cx)
2765 })
2766 .await?;
2767 Ok(serialize_blame_buffer_response(blame))
2768 }
2769
2770 async fn handle_get_permalink_to_line(
2771 this: Entity<Self>,
2772 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2773 mut cx: AsyncApp,
2774 ) -> Result<proto::GetPermalinkToLineResponse> {
2775 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2776 // let version = deserialize_version(&envelope.payload.version);
2777 let selection = {
2778 let proto_selection = envelope
2779 .payload
2780 .selection
2781 .context("no selection to get permalink for defined")?;
2782 proto_selection.start as u32..proto_selection.end as u32
2783 };
2784 let buffer = this.read_with(&cx, |this, cx| {
2785 this.buffer_store.read(cx).get_existing(buffer_id)
2786 })?;
2787 let permalink = this
2788 .update(&mut cx, |this, cx| {
2789 this.get_permalink_to_line(&buffer, selection, cx)
2790 })
2791 .await?;
2792 Ok(proto::GetPermalinkToLineResponse {
2793 permalink: permalink.to_string(),
2794 })
2795 }
2796
2797 fn repository_for_request(
2798 this: &Entity<Self>,
2799 id: RepositoryId,
2800 cx: &mut AsyncApp,
2801 ) -> Result<Entity<Repository>> {
2802 this.read_with(cx, |this, _| {
2803 this.repositories
2804 .get(&id)
2805 .context("missing repository handle")
2806 .cloned()
2807 })
2808 }
2809
2810 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2811 self.repositories
2812 .iter()
2813 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2814 .collect()
2815 }
2816
2817 fn process_updated_entries(
2818 &self,
2819 worktree: &Entity<Worktree>,
2820 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2821 cx: &mut App,
2822 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2823 let path_style = worktree.read(cx).path_style();
2824 let mut repo_paths = self
2825 .repositories
2826 .values()
2827 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2828 .collect::<Vec<_>>();
2829 let mut entries: Vec<_> = updated_entries
2830 .iter()
2831 .map(|(path, _, _)| path.clone())
2832 .collect();
2833 entries.sort();
2834 let worktree = worktree.read(cx);
2835
2836 let entries = entries
2837 .into_iter()
2838 .map(|path| worktree.absolutize(&path))
2839 .collect::<Arc<[_]>>();
2840
2841 let executor = cx.background_executor().clone();
2842 cx.background_executor().spawn(async move {
2843 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2844 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2845 let mut tasks = FuturesOrdered::new();
2846 for (repo_path, repo) in repo_paths.into_iter().rev() {
2847 let entries = entries.clone();
2848 let task = executor.spawn(async move {
2849 // Find all repository paths that belong to this repo
2850 let mut ix = entries.partition_point(|path| path < &*repo_path);
2851 if ix == entries.len() {
2852 return None;
2853 };
2854
2855 let mut paths = Vec::new();
2856 // All paths prefixed by a given repo will constitute a continuous range.
2857 while let Some(path) = entries.get(ix)
2858 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2859 &repo_path, path, path_style,
2860 )
2861 {
2862 paths.push((repo_path, ix));
2863 ix += 1;
2864 }
2865 if paths.is_empty() {
2866 None
2867 } else {
2868 Some((repo, paths))
2869 }
2870 });
2871 tasks.push_back(task);
2872 }
2873
2874 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2875 let mut path_was_used = vec![false; entries.len()];
2876 let tasks = tasks.collect::<Vec<_>>().await;
2877 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2878 // We always want to assign a path to it's innermost repository.
2879 for t in tasks {
2880 let Some((repo, paths)) = t else {
2881 continue;
2882 };
2883 let entry = paths_by_git_repo.entry(repo).or_default();
2884 for (repo_path, ix) in paths {
2885 if path_was_used[ix] {
2886 continue;
2887 }
2888 path_was_used[ix] = true;
2889 entry.push(repo_path);
2890 }
2891 }
2892
2893 paths_by_git_repo
2894 })
2895 }
2896}
2897
2898impl BufferGitState {
2899 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2900 Self {
2901 unstaged_diff: Default::default(),
2902 uncommitted_diff: Default::default(),
2903 recalculate_diff_task: Default::default(),
2904 language: Default::default(),
2905 language_registry: Default::default(),
2906 recalculating_tx: postage::watch::channel_with(false).0,
2907 hunk_staging_operation_count: 0,
2908 hunk_staging_operation_count_as_of_write: 0,
2909 head_text: Default::default(),
2910 index_text: Default::default(),
2911 head_changed: Default::default(),
2912 index_changed: Default::default(),
2913 language_changed: Default::default(),
2914 conflict_updated_futures: Default::default(),
2915 conflict_set: Default::default(),
2916 reparse_conflict_markers_task: Default::default(),
2917 }
2918 }
2919
2920 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2921 self.language = buffer.read(cx).language().cloned();
2922 self.language_changed = true;
2923 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2924 }
2925
2926 fn reparse_conflict_markers(
2927 &mut self,
2928 buffer: text::BufferSnapshot,
2929 cx: &mut Context<Self>,
2930 ) -> oneshot::Receiver<()> {
2931 let (tx, rx) = oneshot::channel();
2932
2933 let Some(conflict_set) = self
2934 .conflict_set
2935 .as_ref()
2936 .and_then(|conflict_set| conflict_set.upgrade())
2937 else {
2938 return rx;
2939 };
2940
2941 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2942 if conflict_set.has_conflict {
2943 Some(conflict_set.snapshot())
2944 } else {
2945 None
2946 }
2947 });
2948
2949 if let Some(old_snapshot) = old_snapshot {
2950 self.conflict_updated_futures.push(tx);
2951 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2952 let (snapshot, changed_range) = cx
2953 .background_spawn(async move {
2954 let new_snapshot = ConflictSet::parse(&buffer);
2955 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2956 (new_snapshot, changed_range)
2957 })
2958 .await;
2959 this.update(cx, |this, cx| {
2960 if let Some(conflict_set) = &this.conflict_set {
2961 conflict_set
2962 .update(cx, |conflict_set, cx| {
2963 conflict_set.set_snapshot(snapshot, changed_range, cx);
2964 })
2965 .ok();
2966 }
2967 let futures = std::mem::take(&mut this.conflict_updated_futures);
2968 for tx in futures {
2969 tx.send(()).ok();
2970 }
2971 })
2972 }))
2973 }
2974
2975 rx
2976 }
2977
2978 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2979 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2980 }
2981
2982 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2983 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2984 }
2985
2986 fn handle_base_texts_updated(
2987 &mut self,
2988 buffer: text::BufferSnapshot,
2989 message: proto::UpdateDiffBases,
2990 cx: &mut Context<Self>,
2991 ) {
2992 use proto::update_diff_bases::Mode;
2993
2994 let Some(mode) = Mode::from_i32(message.mode) else {
2995 return;
2996 };
2997
2998 let diff_bases_change = match mode {
2999 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3000 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3001 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3002 Mode::IndexAndHead => DiffBasesChange::SetEach {
3003 index: message.staged_text,
3004 head: message.committed_text,
3005 },
3006 };
3007
3008 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3009 }
3010
3011 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3012 if *self.recalculating_tx.borrow() {
3013 let mut rx = self.recalculating_tx.subscribe();
3014 Some(async move {
3015 loop {
3016 let is_recalculating = rx.recv().await;
3017 if is_recalculating != Some(true) {
3018 break;
3019 }
3020 }
3021 })
3022 } else {
3023 None
3024 }
3025 }
3026
3027 fn diff_bases_changed(
3028 &mut self,
3029 buffer: text::BufferSnapshot,
3030 diff_bases_change: Option<DiffBasesChange>,
3031 cx: &mut Context<Self>,
3032 ) {
3033 match diff_bases_change {
3034 Some(DiffBasesChange::SetIndex(index)) => {
3035 self.index_text = index.map(|mut index| {
3036 text::LineEnding::normalize(&mut index);
3037 Arc::from(index.as_str())
3038 });
3039 self.index_changed = true;
3040 }
3041 Some(DiffBasesChange::SetHead(head)) => {
3042 self.head_text = head.map(|mut head| {
3043 text::LineEnding::normalize(&mut head);
3044 Arc::from(head.as_str())
3045 });
3046 self.head_changed = true;
3047 }
3048 Some(DiffBasesChange::SetBoth(text)) => {
3049 let text = text.map(|mut text| {
3050 text::LineEnding::normalize(&mut text);
3051 Arc::from(text.as_str())
3052 });
3053 self.head_text = text.clone();
3054 self.index_text = text;
3055 self.head_changed = true;
3056 self.index_changed = true;
3057 }
3058 Some(DiffBasesChange::SetEach { index, head }) => {
3059 self.index_text = index.map(|mut index| {
3060 text::LineEnding::normalize(&mut index);
3061 Arc::from(index.as_str())
3062 });
3063 self.index_changed = true;
3064 self.head_text = head.map(|mut head| {
3065 text::LineEnding::normalize(&mut head);
3066 Arc::from(head.as_str())
3067 });
3068 self.head_changed = true;
3069 }
3070 None => {}
3071 }
3072
3073 self.recalculate_diffs(buffer, cx)
3074 }
3075
3076 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3077 *self.recalculating_tx.borrow_mut() = true;
3078
3079 let language = self.language.clone();
3080 let language_registry = self.language_registry.clone();
3081 let unstaged_diff = self.unstaged_diff();
3082 let uncommitted_diff = self.uncommitted_diff();
3083 let head = self.head_text.clone();
3084 let index = self.index_text.clone();
3085 let index_changed = self.index_changed;
3086 let head_changed = self.head_changed;
3087 let language_changed = self.language_changed;
3088 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3089 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3090 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3091 (None, None) => true,
3092 _ => false,
3093 };
3094 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3095 log::debug!(
3096 "start recalculating diffs for buffer {}",
3097 buffer.remote_id()
3098 );
3099
3100 let mut new_unstaged_diff = None;
3101 if let Some(unstaged_diff) = &unstaged_diff {
3102 new_unstaged_diff = Some(
3103 cx.update(|cx| {
3104 unstaged_diff.read(cx).update_diff(
3105 buffer.clone(),
3106 index,
3107 index_changed,
3108 language.clone(),
3109 cx,
3110 )
3111 })
3112 .await,
3113 );
3114 }
3115
3116 // Dropping BufferDiff can be expensive, so yield back to the event loop
3117 // for a bit
3118 yield_now().await;
3119
3120 let mut new_uncommitted_diff = None;
3121 if let Some(uncommitted_diff) = &uncommitted_diff {
3122 new_uncommitted_diff = if index_matches_head {
3123 new_unstaged_diff.clone()
3124 } else {
3125 Some(
3126 cx.update(|cx| {
3127 uncommitted_diff.read(cx).update_diff(
3128 buffer.clone(),
3129 head,
3130 head_changed,
3131 language.clone(),
3132 cx,
3133 )
3134 })
3135 .await,
3136 )
3137 }
3138 }
3139
3140 // Dropping BufferDiff can be expensive, so yield back to the event loop
3141 // for a bit
3142 yield_now().await;
3143
3144 let cancel = this.update(cx, |this, _| {
3145 // This checks whether all pending stage/unstage operations
3146 // have quiesced (i.e. both the corresponding write and the
3147 // read of that write have completed). If not, then we cancel
3148 // this recalculation attempt to avoid invalidating pending
3149 // state too quickly; another recalculation will come along
3150 // later and clear the pending state once the state of the index has settled.
3151 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3152 *this.recalculating_tx.borrow_mut() = false;
3153 true
3154 } else {
3155 false
3156 }
3157 })?;
3158 if cancel {
3159 log::debug!(
3160 concat!(
3161 "aborting recalculating diffs for buffer {}",
3162 "due to subsequent hunk operations",
3163 ),
3164 buffer.remote_id()
3165 );
3166 return Ok(());
3167 }
3168
3169 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3170 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3171 {
3172 let task = unstaged_diff.update(cx, |diff, cx| {
3173 if language_changed {
3174 diff.language_changed(language.clone(), language_registry.clone(), cx);
3175 }
3176 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3177 });
3178 Some(task.await)
3179 } else {
3180 None
3181 };
3182
3183 yield_now().await;
3184
3185 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3186 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3187 {
3188 uncommitted_diff
3189 .update(cx, |diff, cx| {
3190 if language_changed {
3191 diff.language_changed(language, language_registry, cx);
3192 }
3193 diff.set_snapshot_with_secondary(
3194 new_uncommitted_diff,
3195 &buffer,
3196 unstaged_changed_range.flatten(),
3197 true,
3198 cx,
3199 )
3200 })
3201 .await;
3202 }
3203
3204 log::debug!(
3205 "finished recalculating diffs for buffer {}",
3206 buffer.remote_id()
3207 );
3208
3209 if let Some(this) = this.upgrade() {
3210 this.update(cx, |this, _| {
3211 this.index_changed = false;
3212 this.head_changed = false;
3213 this.language_changed = false;
3214 *this.recalculating_tx.borrow_mut() = false;
3215 });
3216 }
3217
3218 Ok(())
3219 }));
3220 }
3221}
3222
3223fn make_remote_delegate(
3224 this: Entity<GitStore>,
3225 project_id: u64,
3226 repository_id: RepositoryId,
3227 askpass_id: u64,
3228 cx: &mut AsyncApp,
3229) -> AskPassDelegate {
3230 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3231 this.update(cx, |this, cx| {
3232 let Some((client, _)) = this.downstream_client() else {
3233 return;
3234 };
3235 let response = client.request(proto::AskPassRequest {
3236 project_id,
3237 repository_id: repository_id.to_proto(),
3238 askpass_id,
3239 prompt,
3240 });
3241 cx.spawn(async move |_, _| {
3242 let mut response = response.await?.response;
3243 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3244 .ok();
3245 response.zeroize();
3246 anyhow::Ok(())
3247 })
3248 .detach_and_log_err(cx);
3249 });
3250 })
3251}
3252
3253impl RepositoryId {
3254 pub fn to_proto(self) -> u64 {
3255 self.0
3256 }
3257
3258 pub fn from_proto(id: u64) -> Self {
3259 RepositoryId(id)
3260 }
3261}
3262
3263impl RepositorySnapshot {
3264 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3265 Self {
3266 id,
3267 statuses_by_path: Default::default(),
3268 work_directory_abs_path,
3269 branch: None,
3270 head_commit: None,
3271 scan_id: 0,
3272 merge: Default::default(),
3273 remote_origin_url: None,
3274 remote_upstream_url: None,
3275 stash_entries: Default::default(),
3276 path_style,
3277 }
3278 }
3279
3280 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3281 proto::UpdateRepository {
3282 branch_summary: self.branch.as_ref().map(branch_to_proto),
3283 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3284 updated_statuses: self
3285 .statuses_by_path
3286 .iter()
3287 .map(|entry| entry.to_proto())
3288 .collect(),
3289 removed_statuses: Default::default(),
3290 current_merge_conflicts: self
3291 .merge
3292 .conflicted_paths
3293 .iter()
3294 .map(|repo_path| repo_path.to_proto())
3295 .collect(),
3296 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3297 project_id,
3298 id: self.id.to_proto(),
3299 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3300 entry_ids: vec![self.id.to_proto()],
3301 scan_id: self.scan_id,
3302 is_last_update: true,
3303 stash_entries: self
3304 .stash_entries
3305 .entries
3306 .iter()
3307 .map(stash_to_proto)
3308 .collect(),
3309 remote_upstream_url: self.remote_upstream_url.clone(),
3310 remote_origin_url: self.remote_origin_url.clone(),
3311 }
3312 }
3313
3314 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3315 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3316 let mut removed_statuses: Vec<String> = Vec::new();
3317
3318 let mut new_statuses = self.statuses_by_path.iter().peekable();
3319 let mut old_statuses = old.statuses_by_path.iter().peekable();
3320
3321 let mut current_new_entry = new_statuses.next();
3322 let mut current_old_entry = old_statuses.next();
3323 loop {
3324 match (current_new_entry, current_old_entry) {
3325 (Some(new_entry), Some(old_entry)) => {
3326 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3327 Ordering::Less => {
3328 updated_statuses.push(new_entry.to_proto());
3329 current_new_entry = new_statuses.next();
3330 }
3331 Ordering::Equal => {
3332 if new_entry.status != old_entry.status {
3333 updated_statuses.push(new_entry.to_proto());
3334 }
3335 current_old_entry = old_statuses.next();
3336 current_new_entry = new_statuses.next();
3337 }
3338 Ordering::Greater => {
3339 removed_statuses.push(old_entry.repo_path.to_proto());
3340 current_old_entry = old_statuses.next();
3341 }
3342 }
3343 }
3344 (None, Some(old_entry)) => {
3345 removed_statuses.push(old_entry.repo_path.to_proto());
3346 current_old_entry = old_statuses.next();
3347 }
3348 (Some(new_entry), None) => {
3349 updated_statuses.push(new_entry.to_proto());
3350 current_new_entry = new_statuses.next();
3351 }
3352 (None, None) => break,
3353 }
3354 }
3355
3356 proto::UpdateRepository {
3357 branch_summary: self.branch.as_ref().map(branch_to_proto),
3358 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3359 updated_statuses,
3360 removed_statuses,
3361 current_merge_conflicts: self
3362 .merge
3363 .conflicted_paths
3364 .iter()
3365 .map(|path| path.to_proto())
3366 .collect(),
3367 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3368 project_id,
3369 id: self.id.to_proto(),
3370 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3371 entry_ids: vec![],
3372 scan_id: self.scan_id,
3373 is_last_update: true,
3374 stash_entries: self
3375 .stash_entries
3376 .entries
3377 .iter()
3378 .map(stash_to_proto)
3379 .collect(),
3380 remote_upstream_url: self.remote_upstream_url.clone(),
3381 remote_origin_url: self.remote_origin_url.clone(),
3382 }
3383 }
3384
3385 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3386 self.statuses_by_path.iter().cloned()
3387 }
3388
3389 pub fn status_summary(&self) -> GitSummary {
3390 self.statuses_by_path.summary().item_summary
3391 }
3392
3393 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3394 self.statuses_by_path
3395 .get(&PathKey(path.as_ref().clone()), ())
3396 .cloned()
3397 }
3398
3399 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3400 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3401 }
3402
3403 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3404 self.path_style
3405 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3406 .unwrap()
3407 .into()
3408 }
3409
3410 #[inline]
3411 fn abs_path_to_repo_path_inner(
3412 work_directory_abs_path: &Path,
3413 abs_path: &Path,
3414 path_style: PathStyle,
3415 ) -> Option<RepoPath> {
3416 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3417 Some(RepoPath::from_rel_path(&rel_path))
3418 }
3419
3420 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3421 self.merge.conflicted_paths.contains(repo_path)
3422 }
3423
3424 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3425 let had_conflict_on_last_merge_head_change =
3426 self.merge.conflicted_paths.contains(repo_path);
3427 let has_conflict_currently = self
3428 .status_for_path(repo_path)
3429 .is_some_and(|entry| entry.status.is_conflicted());
3430 had_conflict_on_last_merge_head_change || has_conflict_currently
3431 }
3432
3433 /// This is the name that will be displayed in the repository selector for this repository.
3434 pub fn display_name(&self) -> SharedString {
3435 self.work_directory_abs_path
3436 .file_name()
3437 .unwrap_or_default()
3438 .to_string_lossy()
3439 .to_string()
3440 .into()
3441 }
3442}
3443
3444pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3445 proto::StashEntry {
3446 oid: entry.oid.as_bytes().to_vec(),
3447 message: entry.message.clone(),
3448 branch: entry.branch.clone(),
3449 index: entry.index as u64,
3450 timestamp: entry.timestamp,
3451 }
3452}
3453
3454pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3455 Ok(StashEntry {
3456 oid: Oid::from_bytes(&entry.oid)?,
3457 message: entry.message.clone(),
3458 index: entry.index as usize,
3459 branch: entry.branch.clone(),
3460 timestamp: entry.timestamp,
3461 })
3462}
3463
3464impl MergeDetails {
3465 async fn load(
3466 backend: &Arc<dyn GitRepository>,
3467 status: &SumTree<StatusEntry>,
3468 prev_snapshot: &RepositorySnapshot,
3469 ) -> Result<(MergeDetails, bool)> {
3470 log::debug!("load merge details");
3471 let message = backend.merge_message().await;
3472 let heads = backend
3473 .revparse_batch(vec![
3474 "MERGE_HEAD".into(),
3475 "CHERRY_PICK_HEAD".into(),
3476 "REBASE_HEAD".into(),
3477 "REVERT_HEAD".into(),
3478 "APPLY_HEAD".into(),
3479 ])
3480 .await
3481 .log_err()
3482 .unwrap_or_default()
3483 .into_iter()
3484 .map(|opt| opt.map(SharedString::from))
3485 .collect::<Vec<_>>();
3486 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3487 let conflicted_paths = if merge_heads_changed {
3488 let current_conflicted_paths = TreeSet::from_ordered_entries(
3489 status
3490 .iter()
3491 .filter(|entry| entry.status.is_conflicted())
3492 .map(|entry| entry.repo_path.clone()),
3493 );
3494
3495 // It can happen that we run a scan while a lengthy merge is in progress
3496 // that will eventually result in conflicts, but before those conflicts
3497 // are reported by `git status`. Since for the moment we only care about
3498 // the merge heads state for the purposes of tracking conflicts, don't update
3499 // this state until we see some conflicts.
3500 if heads.iter().any(Option::is_some)
3501 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3502 && current_conflicted_paths.is_empty()
3503 {
3504 log::debug!("not updating merge heads because no conflicts found");
3505 return Ok((
3506 MergeDetails {
3507 message: message.map(SharedString::from),
3508 ..prev_snapshot.merge.clone()
3509 },
3510 false,
3511 ));
3512 }
3513
3514 current_conflicted_paths
3515 } else {
3516 prev_snapshot.merge.conflicted_paths.clone()
3517 };
3518 let details = MergeDetails {
3519 conflicted_paths,
3520 message: message.map(SharedString::from),
3521 heads,
3522 };
3523 Ok((details, merge_heads_changed))
3524 }
3525}
3526
3527impl Repository {
3528 pub fn snapshot(&self) -> RepositorySnapshot {
3529 self.snapshot.clone()
3530 }
3531
3532 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3533 self.pending_ops.iter().cloned()
3534 }
3535
3536 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3537 self.pending_ops.summary().clone()
3538 }
3539
3540 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3541 self.pending_ops
3542 .get(&PathKey(path.as_ref().clone()), ())
3543 .cloned()
3544 }
3545
3546 fn local(
3547 id: RepositoryId,
3548 work_directory_abs_path: Arc<Path>,
3549 dot_git_abs_path: Arc<Path>,
3550 project_environment: WeakEntity<ProjectEnvironment>,
3551 fs: Arc<dyn Fs>,
3552 git_store: WeakEntity<GitStore>,
3553 cx: &mut Context<Self>,
3554 ) -> Self {
3555 let snapshot =
3556 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3557 let state = cx
3558 .spawn(async move |_, cx| {
3559 LocalRepositoryState::new(
3560 work_directory_abs_path,
3561 dot_git_abs_path,
3562 project_environment,
3563 fs,
3564 cx,
3565 )
3566 .await
3567 .map_err(|err| err.to_string())
3568 })
3569 .shared();
3570 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3571 let state = cx
3572 .spawn(async move |_, _| {
3573 let state = state.await?;
3574 Ok(RepositoryState::Local(state))
3575 })
3576 .shared();
3577
3578 Repository {
3579 this: cx.weak_entity(),
3580 git_store,
3581 snapshot,
3582 pending_ops: Default::default(),
3583 repository_state: state,
3584 commit_message_buffer: None,
3585 askpass_delegates: Default::default(),
3586 paths_needing_status_update: Default::default(),
3587 latest_askpass_id: 0,
3588 job_sender,
3589 job_id: 0,
3590 active_jobs: Default::default(),
3591 }
3592 }
3593
3594 fn remote(
3595 id: RepositoryId,
3596 work_directory_abs_path: Arc<Path>,
3597 path_style: PathStyle,
3598 project_id: ProjectId,
3599 client: AnyProtoClient,
3600 git_store: WeakEntity<GitStore>,
3601 cx: &mut Context<Self>,
3602 ) -> Self {
3603 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3604 let repository_state = RemoteRepositoryState { project_id, client };
3605 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3606 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3607 Self {
3608 this: cx.weak_entity(),
3609 snapshot,
3610 commit_message_buffer: None,
3611 git_store,
3612 pending_ops: Default::default(),
3613 paths_needing_status_update: Default::default(),
3614 job_sender,
3615 repository_state,
3616 askpass_delegates: Default::default(),
3617 latest_askpass_id: 0,
3618 active_jobs: Default::default(),
3619 job_id: 0,
3620 }
3621 }
3622
3623 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3624 self.git_store.upgrade()
3625 }
3626
3627 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3628 let this = cx.weak_entity();
3629 let git_store = self.git_store.clone();
3630 let _ = self.send_keyed_job(
3631 Some(GitJobKey::ReloadBufferDiffBases),
3632 None,
3633 |state, mut cx| async move {
3634 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3635 log::error!("tried to recompute diffs for a non-local repository");
3636 return Ok(());
3637 };
3638
3639 let Some(this) = this.upgrade() else {
3640 return Ok(());
3641 };
3642
3643 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3644 git_store.update(cx, |git_store, cx| {
3645 git_store
3646 .diffs
3647 .iter()
3648 .filter_map(|(buffer_id, diff_state)| {
3649 let buffer_store = git_store.buffer_store.read(cx);
3650 let buffer = buffer_store.get(*buffer_id)?;
3651 let file = File::from_dyn(buffer.read(cx).file())?;
3652 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3653 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3654 log::debug!(
3655 "start reload diff bases for repo path {}",
3656 repo_path.as_unix_str()
3657 );
3658 diff_state.update(cx, |diff_state, _| {
3659 let has_unstaged_diff = diff_state
3660 .unstaged_diff
3661 .as_ref()
3662 .is_some_and(|diff| diff.is_upgradable());
3663 let has_uncommitted_diff = diff_state
3664 .uncommitted_diff
3665 .as_ref()
3666 .is_some_and(|set| set.is_upgradable());
3667
3668 Some((
3669 buffer,
3670 repo_path,
3671 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3672 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3673 ))
3674 })
3675 })
3676 .collect::<Vec<_>>()
3677 })
3678 })?;
3679
3680 let buffer_diff_base_changes = cx
3681 .background_spawn(async move {
3682 let mut changes = Vec::new();
3683 for (buffer, repo_path, current_index_text, current_head_text) in
3684 &repo_diff_state_updates
3685 {
3686 let index_text = if current_index_text.is_some() {
3687 backend.load_index_text(repo_path.clone()).await
3688 } else {
3689 None
3690 };
3691 let head_text = if current_head_text.is_some() {
3692 backend.load_committed_text(repo_path.clone()).await
3693 } else {
3694 None
3695 };
3696
3697 let change =
3698 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3699 (Some(current_index), Some(current_head)) => {
3700 let index_changed =
3701 index_text.as_deref() != current_index.as_deref();
3702 let head_changed =
3703 head_text.as_deref() != current_head.as_deref();
3704 if index_changed && head_changed {
3705 if index_text == head_text {
3706 Some(DiffBasesChange::SetBoth(head_text))
3707 } else {
3708 Some(DiffBasesChange::SetEach {
3709 index: index_text,
3710 head: head_text,
3711 })
3712 }
3713 } else if index_changed {
3714 Some(DiffBasesChange::SetIndex(index_text))
3715 } else if head_changed {
3716 Some(DiffBasesChange::SetHead(head_text))
3717 } else {
3718 None
3719 }
3720 }
3721 (Some(current_index), None) => {
3722 let index_changed =
3723 index_text.as_deref() != current_index.as_deref();
3724 index_changed
3725 .then_some(DiffBasesChange::SetIndex(index_text))
3726 }
3727 (None, Some(current_head)) => {
3728 let head_changed =
3729 head_text.as_deref() != current_head.as_deref();
3730 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3731 }
3732 (None, None) => None,
3733 };
3734
3735 changes.push((buffer.clone(), change))
3736 }
3737 changes
3738 })
3739 .await;
3740
3741 git_store.update(&mut cx, |git_store, cx| {
3742 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3743 let buffer_snapshot = buffer.read(cx).text_snapshot();
3744 let buffer_id = buffer_snapshot.remote_id();
3745 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3746 continue;
3747 };
3748
3749 let downstream_client = git_store.downstream_client();
3750 diff_state.update(cx, |diff_state, cx| {
3751 use proto::update_diff_bases::Mode;
3752
3753 if let Some((diff_bases_change, (client, project_id))) =
3754 diff_bases_change.clone().zip(downstream_client)
3755 {
3756 let (staged_text, committed_text, mode) = match diff_bases_change {
3757 DiffBasesChange::SetIndex(index) => {
3758 (index, None, Mode::IndexOnly)
3759 }
3760 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3761 DiffBasesChange::SetEach { index, head } => {
3762 (index, head, Mode::IndexAndHead)
3763 }
3764 DiffBasesChange::SetBoth(text) => {
3765 (None, text, Mode::IndexMatchesHead)
3766 }
3767 };
3768 client
3769 .send(proto::UpdateDiffBases {
3770 project_id: project_id.to_proto(),
3771 buffer_id: buffer_id.to_proto(),
3772 staged_text,
3773 committed_text,
3774 mode: mode as i32,
3775 })
3776 .log_err();
3777 }
3778
3779 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3780 });
3781 }
3782 })
3783 },
3784 );
3785 }
3786
3787 pub fn send_job<F, Fut, R>(
3788 &mut self,
3789 status: Option<SharedString>,
3790 job: F,
3791 ) -> oneshot::Receiver<R>
3792 where
3793 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3794 Fut: Future<Output = R> + 'static,
3795 R: Send + 'static,
3796 {
3797 self.send_keyed_job(None, status, job)
3798 }
3799
3800 fn send_keyed_job<F, Fut, R>(
3801 &mut self,
3802 key: Option<GitJobKey>,
3803 status: Option<SharedString>,
3804 job: F,
3805 ) -> oneshot::Receiver<R>
3806 where
3807 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3808 Fut: Future<Output = R> + 'static,
3809 R: Send + 'static,
3810 {
3811 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3812 let job_id = post_inc(&mut self.job_id);
3813 let this = self.this.clone();
3814 self.job_sender
3815 .unbounded_send(GitJob {
3816 key,
3817 job: Box::new(move |state, cx: &mut AsyncApp| {
3818 let job = job(state, cx.clone());
3819 cx.spawn(async move |cx| {
3820 if let Some(s) = status.clone() {
3821 this.update(cx, |this, cx| {
3822 this.active_jobs.insert(
3823 job_id,
3824 JobInfo {
3825 start: Instant::now(),
3826 message: s.clone(),
3827 },
3828 );
3829
3830 cx.notify();
3831 })
3832 .ok();
3833 }
3834 let result = job.await;
3835
3836 this.update(cx, |this, cx| {
3837 this.active_jobs.remove(&job_id);
3838 cx.notify();
3839 })
3840 .ok();
3841
3842 result_tx.send(result).ok();
3843 })
3844 }),
3845 })
3846 .ok();
3847 result_rx
3848 }
3849
3850 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3851 let Some(git_store) = self.git_store.upgrade() else {
3852 return;
3853 };
3854 let entity = cx.entity();
3855 git_store.update(cx, |git_store, cx| {
3856 let Some((&id, _)) = git_store
3857 .repositories
3858 .iter()
3859 .find(|(_, handle)| *handle == &entity)
3860 else {
3861 return;
3862 };
3863 git_store.active_repo_id = Some(id);
3864 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3865 });
3866 }
3867
3868 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3869 self.snapshot.status()
3870 }
3871
3872 pub fn cached_stash(&self) -> GitStash {
3873 self.snapshot.stash_entries.clone()
3874 }
3875
3876 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3877 let git_store = self.git_store.upgrade()?;
3878 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3879 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3880 let abs_path = SanitizedPath::new(&abs_path);
3881 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3882 Some(ProjectPath {
3883 worktree_id: worktree.read(cx).id(),
3884 path: relative_path,
3885 })
3886 }
3887
3888 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3889 let git_store = self.git_store.upgrade()?;
3890 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3891 let abs_path = worktree_store.absolutize(path, cx)?;
3892 self.snapshot.abs_path_to_repo_path(&abs_path)
3893 }
3894
3895 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3896 other
3897 .read(cx)
3898 .snapshot
3899 .work_directory_abs_path
3900 .starts_with(&self.snapshot.work_directory_abs_path)
3901 }
3902
3903 pub fn open_commit_buffer(
3904 &mut self,
3905 languages: Option<Arc<LanguageRegistry>>,
3906 buffer_store: Entity<BufferStore>,
3907 cx: &mut Context<Self>,
3908 ) -> Task<Result<Entity<Buffer>>> {
3909 let id = self.id;
3910 if let Some(buffer) = self.commit_message_buffer.clone() {
3911 return Task::ready(Ok(buffer));
3912 }
3913 let this = cx.weak_entity();
3914
3915 let rx = self.send_job(None, move |state, mut cx| async move {
3916 let Some(this) = this.upgrade() else {
3917 bail!("git store was dropped");
3918 };
3919 match state {
3920 RepositoryState::Local(..) => {
3921 this.update(&mut cx, |_, cx| {
3922 Self::open_local_commit_buffer(languages, buffer_store, cx)
3923 })
3924 .await
3925 }
3926 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3927 let request = client.request(proto::OpenCommitMessageBuffer {
3928 project_id: project_id.0,
3929 repository_id: id.to_proto(),
3930 });
3931 let response = request.await.context("requesting to open commit buffer")?;
3932 let buffer_id = BufferId::new(response.buffer_id)?;
3933 let buffer = buffer_store
3934 .update(&mut cx, |buffer_store, cx| {
3935 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3936 })
3937 .await?;
3938 if let Some(language_registry) = languages {
3939 let git_commit_language =
3940 language_registry.language_for_name("Git Commit").await?;
3941 buffer.update(&mut cx, |buffer, cx| {
3942 buffer.set_language(Some(git_commit_language), cx);
3943 });
3944 }
3945 this.update(&mut cx, |this, _| {
3946 this.commit_message_buffer = Some(buffer.clone());
3947 });
3948 Ok(buffer)
3949 }
3950 }
3951 });
3952
3953 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3954 }
3955
3956 fn open_local_commit_buffer(
3957 language_registry: Option<Arc<LanguageRegistry>>,
3958 buffer_store: Entity<BufferStore>,
3959 cx: &mut Context<Self>,
3960 ) -> Task<Result<Entity<Buffer>>> {
3961 cx.spawn(async move |repository, cx| {
3962 let buffer = buffer_store
3963 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))
3964 .await?;
3965
3966 if let Some(language_registry) = language_registry {
3967 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3968 buffer.update(cx, |buffer, cx| {
3969 buffer.set_language(Some(git_commit_language), cx);
3970 });
3971 }
3972
3973 repository.update(cx, |repository, _| {
3974 repository.commit_message_buffer = Some(buffer.clone());
3975 })?;
3976 Ok(buffer)
3977 })
3978 }
3979
3980 pub fn checkout_files(
3981 &mut self,
3982 commit: &str,
3983 paths: Vec<RepoPath>,
3984 cx: &mut Context<Self>,
3985 ) -> Task<Result<()>> {
3986 let commit = commit.to_string();
3987 let id = self.id;
3988
3989 self.spawn_job_with_tracking(
3990 paths.clone(),
3991 pending_op::GitStatus::Reverted,
3992 cx,
3993 async move |this, cx| {
3994 this.update(cx, |this, _cx| {
3995 this.send_job(
3996 Some(format!("git checkout {}", commit).into()),
3997 move |git_repo, _| async move {
3998 match git_repo {
3999 RepositoryState::Local(LocalRepositoryState {
4000 backend,
4001 environment,
4002 ..
4003 }) => {
4004 backend
4005 .checkout_files(commit, paths, environment.clone())
4006 .await
4007 }
4008 RepositoryState::Remote(RemoteRepositoryState {
4009 project_id,
4010 client,
4011 }) => {
4012 client
4013 .request(proto::GitCheckoutFiles {
4014 project_id: project_id.0,
4015 repository_id: id.to_proto(),
4016 commit,
4017 paths: paths
4018 .into_iter()
4019 .map(|p| p.to_proto())
4020 .collect(),
4021 })
4022 .await?;
4023
4024 Ok(())
4025 }
4026 }
4027 },
4028 )
4029 })?
4030 .await?
4031 },
4032 )
4033 }
4034
4035 pub fn reset(
4036 &mut self,
4037 commit: String,
4038 reset_mode: ResetMode,
4039 _cx: &mut App,
4040 ) -> oneshot::Receiver<Result<()>> {
4041 let id = self.id;
4042
4043 self.send_job(None, move |git_repo, _| async move {
4044 match git_repo {
4045 RepositoryState::Local(LocalRepositoryState {
4046 backend,
4047 environment,
4048 ..
4049 }) => backend.reset(commit, reset_mode, environment).await,
4050 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4051 client
4052 .request(proto::GitReset {
4053 project_id: project_id.0,
4054 repository_id: id.to_proto(),
4055 commit,
4056 mode: match reset_mode {
4057 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4058 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4059 },
4060 })
4061 .await?;
4062
4063 Ok(())
4064 }
4065 }
4066 })
4067 }
4068
4069 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4070 let id = self.id;
4071 self.send_job(None, move |git_repo, _cx| async move {
4072 match git_repo {
4073 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4074 backend.show(commit).await
4075 }
4076 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4077 let resp = client
4078 .request(proto::GitShow {
4079 project_id: project_id.0,
4080 repository_id: id.to_proto(),
4081 commit,
4082 })
4083 .await?;
4084
4085 Ok(CommitDetails {
4086 sha: resp.sha.into(),
4087 message: resp.message.into(),
4088 commit_timestamp: resp.commit_timestamp,
4089 author_email: resp.author_email.into(),
4090 author_name: resp.author_name.into(),
4091 })
4092 }
4093 }
4094 })
4095 }
4096
4097 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4098 let id = self.id;
4099 self.send_job(None, move |git_repo, cx| async move {
4100 match git_repo {
4101 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4102 backend.load_commit(commit, cx).await
4103 }
4104 RepositoryState::Remote(RemoteRepositoryState {
4105 client, project_id, ..
4106 }) => {
4107 let response = client
4108 .request(proto::LoadCommitDiff {
4109 project_id: project_id.0,
4110 repository_id: id.to_proto(),
4111 commit,
4112 })
4113 .await?;
4114 Ok(CommitDiff {
4115 files: response
4116 .files
4117 .into_iter()
4118 .map(|file| {
4119 Ok(CommitFile {
4120 path: RepoPath::from_proto(&file.path)?,
4121 old_text: file.old_text,
4122 new_text: file.new_text,
4123 })
4124 })
4125 .collect::<Result<Vec<_>>>()?,
4126 })
4127 }
4128 }
4129 })
4130 }
4131
4132 pub fn file_history(
4133 &mut self,
4134 path: RepoPath,
4135 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4136 self.file_history_paginated(path, 0, None)
4137 }
4138
4139 pub fn file_history_paginated(
4140 &mut self,
4141 path: RepoPath,
4142 skip: usize,
4143 limit: Option<usize>,
4144 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4145 let id = self.id;
4146 self.send_job(None, move |git_repo, _cx| async move {
4147 match git_repo {
4148 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4149 backend.file_history_paginated(path, skip, limit).await
4150 }
4151 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4152 let response = client
4153 .request(proto::GitFileHistory {
4154 project_id: project_id.0,
4155 repository_id: id.to_proto(),
4156 path: path.to_proto(),
4157 skip: skip as u64,
4158 limit: limit.map(|l| l as u64),
4159 })
4160 .await?;
4161 Ok(git::repository::FileHistory {
4162 entries: response
4163 .entries
4164 .into_iter()
4165 .map(|entry| git::repository::FileHistoryEntry {
4166 sha: entry.sha.into(),
4167 subject: entry.subject.into(),
4168 message: entry.message.into(),
4169 commit_timestamp: entry.commit_timestamp,
4170 author_name: entry.author_name.into(),
4171 author_email: entry.author_email.into(),
4172 })
4173 .collect(),
4174 path: RepoPath::from_proto(&response.path)?,
4175 })
4176 }
4177 }
4178 })
4179 }
4180
4181 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4182 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4183 }
4184
4185 fn save_buffers<'a>(
4186 &self,
4187 entries: impl IntoIterator<Item = &'a RepoPath>,
4188 cx: &mut Context<Self>,
4189 ) -> Vec<Task<anyhow::Result<()>>> {
4190 let mut save_futures = Vec::new();
4191 if let Some(buffer_store) = self.buffer_store(cx) {
4192 buffer_store.update(cx, |buffer_store, cx| {
4193 for path in entries {
4194 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4195 continue;
4196 };
4197 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4198 && buffer
4199 .read(cx)
4200 .file()
4201 .is_some_and(|file| file.disk_state().exists())
4202 && buffer.read(cx).has_unsaved_edits()
4203 {
4204 save_futures.push(buffer_store.save_buffer(buffer, cx));
4205 }
4206 }
4207 })
4208 }
4209 save_futures
4210 }
4211
4212 pub fn stage_entries(
4213 &mut self,
4214 entries: Vec<RepoPath>,
4215 cx: &mut Context<Self>,
4216 ) -> Task<anyhow::Result<()>> {
4217 self.stage_or_unstage_entries(true, entries, cx)
4218 }
4219
4220 pub fn unstage_entries(
4221 &mut self,
4222 entries: Vec<RepoPath>,
4223 cx: &mut Context<Self>,
4224 ) -> Task<anyhow::Result<()>> {
4225 self.stage_or_unstage_entries(false, entries, cx)
4226 }
4227
4228 fn stage_or_unstage_entries(
4229 &mut self,
4230 stage: bool,
4231 entries: Vec<RepoPath>,
4232 cx: &mut Context<Self>,
4233 ) -> Task<anyhow::Result<()>> {
4234 if entries.is_empty() {
4235 return Task::ready(Ok(()));
4236 }
4237 let Some(git_store) = self.git_store.upgrade() else {
4238 return Task::ready(Ok(()));
4239 };
4240 let id = self.id;
4241 let save_tasks = self.save_buffers(&entries, cx);
4242 let paths = entries
4243 .iter()
4244 .map(|p| p.as_unix_str())
4245 .collect::<Vec<_>>()
4246 .join(" ");
4247 let status = if stage {
4248 format!("git add {paths}")
4249 } else {
4250 format!("git reset {paths}")
4251 };
4252 let job_key = GitJobKey::WriteIndex(entries.clone());
4253
4254 self.spawn_job_with_tracking(
4255 entries.clone(),
4256 if stage {
4257 pending_op::GitStatus::Staged
4258 } else {
4259 pending_op::GitStatus::Unstaged
4260 },
4261 cx,
4262 async move |this, cx| {
4263 for save_task in save_tasks {
4264 save_task.await?;
4265 }
4266
4267 this.update(cx, |this, cx| {
4268 let weak_this = cx.weak_entity();
4269 this.send_keyed_job(
4270 Some(job_key),
4271 Some(status.into()),
4272 move |git_repo, mut cx| async move {
4273 let hunk_staging_operation_counts = weak_this
4274 .update(&mut cx, |this, cx| {
4275 let mut hunk_staging_operation_counts = HashMap::default();
4276 for path in &entries {
4277 let Some(project_path) =
4278 this.repo_path_to_project_path(path, cx)
4279 else {
4280 continue;
4281 };
4282 let Some(buffer) = git_store
4283 .read(cx)
4284 .buffer_store
4285 .read(cx)
4286 .get_by_path(&project_path)
4287 else {
4288 continue;
4289 };
4290 let Some(diff_state) = git_store
4291 .read(cx)
4292 .diffs
4293 .get(&buffer.read(cx).remote_id())
4294 .cloned()
4295 else {
4296 continue;
4297 };
4298 let Some(uncommitted_diff) =
4299 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4300 |uncommitted_diff| uncommitted_diff.upgrade(),
4301 )
4302 else {
4303 continue;
4304 };
4305 let buffer_snapshot = buffer.read(cx).text_snapshot();
4306 let file_exists = buffer
4307 .read(cx)
4308 .file()
4309 .is_some_and(|file| file.disk_state().exists());
4310 let hunk_staging_operation_count =
4311 diff_state.update(cx, |diff_state, cx| {
4312 uncommitted_diff.update(
4313 cx,
4314 |uncommitted_diff, cx| {
4315 uncommitted_diff
4316 .stage_or_unstage_all_hunks(
4317 stage,
4318 &buffer_snapshot,
4319 file_exists,
4320 cx,
4321 );
4322 },
4323 );
4324
4325 diff_state.hunk_staging_operation_count += 1;
4326 diff_state.hunk_staging_operation_count
4327 });
4328 hunk_staging_operation_counts.insert(
4329 diff_state.downgrade(),
4330 hunk_staging_operation_count,
4331 );
4332 }
4333 hunk_staging_operation_counts
4334 })
4335 .unwrap_or_default();
4336
4337 let result = match git_repo {
4338 RepositoryState::Local(LocalRepositoryState {
4339 backend,
4340 environment,
4341 ..
4342 }) => {
4343 if stage {
4344 backend.stage_paths(entries, environment.clone()).await
4345 } else {
4346 backend.unstage_paths(entries, environment.clone()).await
4347 }
4348 }
4349 RepositoryState::Remote(RemoteRepositoryState {
4350 project_id,
4351 client,
4352 }) => {
4353 if stage {
4354 client
4355 .request(proto::Stage {
4356 project_id: project_id.0,
4357 repository_id: id.to_proto(),
4358 paths: entries
4359 .into_iter()
4360 .map(|repo_path| repo_path.to_proto())
4361 .collect(),
4362 })
4363 .await
4364 .context("sending stage request")
4365 .map(|_| ())
4366 } else {
4367 client
4368 .request(proto::Unstage {
4369 project_id: project_id.0,
4370 repository_id: id.to_proto(),
4371 paths: entries
4372 .into_iter()
4373 .map(|repo_path| repo_path.to_proto())
4374 .collect(),
4375 })
4376 .await
4377 .context("sending unstage request")
4378 .map(|_| ())
4379 }
4380 }
4381 };
4382
4383 for (diff_state, hunk_staging_operation_count) in
4384 hunk_staging_operation_counts
4385 {
4386 diff_state
4387 .update(&mut cx, |diff_state, cx| {
4388 if result.is_ok() {
4389 diff_state.hunk_staging_operation_count_as_of_write =
4390 hunk_staging_operation_count;
4391 } else if let Some(uncommitted_diff) =
4392 &diff_state.uncommitted_diff
4393 {
4394 uncommitted_diff
4395 .update(cx, |uncommitted_diff, cx| {
4396 uncommitted_diff.clear_pending_hunks(cx);
4397 })
4398 .ok();
4399 }
4400 })
4401 .ok();
4402 }
4403
4404 result
4405 },
4406 )
4407 })?
4408 .await?
4409 },
4410 )
4411 }
4412
4413 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4414 let to_stage = self
4415 .cached_status()
4416 .filter_map(|entry| {
4417 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4418 if ops.staging() || ops.staged() {
4419 None
4420 } else {
4421 Some(entry.repo_path)
4422 }
4423 } else if entry.status.staging().is_fully_staged() {
4424 None
4425 } else {
4426 Some(entry.repo_path)
4427 }
4428 })
4429 .collect();
4430 self.stage_or_unstage_entries(true, to_stage, cx)
4431 }
4432
4433 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4434 let to_unstage = self
4435 .cached_status()
4436 .filter_map(|entry| {
4437 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4438 if !ops.staging() && !ops.staged() {
4439 None
4440 } else {
4441 Some(entry.repo_path)
4442 }
4443 } else if entry.status.staging().is_fully_unstaged() {
4444 None
4445 } else {
4446 Some(entry.repo_path)
4447 }
4448 })
4449 .collect();
4450 self.stage_or_unstage_entries(false, to_unstage, cx)
4451 }
4452
4453 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4454 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4455
4456 self.stash_entries(to_stash, cx)
4457 }
4458
4459 pub fn stash_entries(
4460 &mut self,
4461 entries: Vec<RepoPath>,
4462 cx: &mut Context<Self>,
4463 ) -> Task<anyhow::Result<()>> {
4464 let id = self.id;
4465
4466 cx.spawn(async move |this, cx| {
4467 this.update(cx, |this, _| {
4468 this.send_job(None, move |git_repo, _cx| async move {
4469 match git_repo {
4470 RepositoryState::Local(LocalRepositoryState {
4471 backend,
4472 environment,
4473 ..
4474 }) => backend.stash_paths(entries, environment).await,
4475 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4476 client
4477 .request(proto::Stash {
4478 project_id: project_id.0,
4479 repository_id: id.to_proto(),
4480 paths: entries
4481 .into_iter()
4482 .map(|repo_path| repo_path.to_proto())
4483 .collect(),
4484 })
4485 .await
4486 .context("sending stash request")?;
4487 Ok(())
4488 }
4489 }
4490 })
4491 })?
4492 .await??;
4493 Ok(())
4494 })
4495 }
4496
4497 pub fn stash_pop(
4498 &mut self,
4499 index: Option<usize>,
4500 cx: &mut Context<Self>,
4501 ) -> Task<anyhow::Result<()>> {
4502 let id = self.id;
4503 cx.spawn(async move |this, cx| {
4504 this.update(cx, |this, _| {
4505 this.send_job(None, move |git_repo, _cx| async move {
4506 match git_repo {
4507 RepositoryState::Local(LocalRepositoryState {
4508 backend,
4509 environment,
4510 ..
4511 }) => backend.stash_pop(index, environment).await,
4512 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4513 client
4514 .request(proto::StashPop {
4515 project_id: project_id.0,
4516 repository_id: id.to_proto(),
4517 stash_index: index.map(|i| i as u64),
4518 })
4519 .await
4520 .context("sending stash pop request")?;
4521 Ok(())
4522 }
4523 }
4524 })
4525 })?
4526 .await??;
4527 Ok(())
4528 })
4529 }
4530
4531 pub fn stash_apply(
4532 &mut self,
4533 index: Option<usize>,
4534 cx: &mut Context<Self>,
4535 ) -> Task<anyhow::Result<()>> {
4536 let id = self.id;
4537 cx.spawn(async move |this, cx| {
4538 this.update(cx, |this, _| {
4539 this.send_job(None, move |git_repo, _cx| async move {
4540 match git_repo {
4541 RepositoryState::Local(LocalRepositoryState {
4542 backend,
4543 environment,
4544 ..
4545 }) => backend.stash_apply(index, environment).await,
4546 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4547 client
4548 .request(proto::StashApply {
4549 project_id: project_id.0,
4550 repository_id: id.to_proto(),
4551 stash_index: index.map(|i| i as u64),
4552 })
4553 .await
4554 .context("sending stash apply request")?;
4555 Ok(())
4556 }
4557 }
4558 })
4559 })?
4560 .await??;
4561 Ok(())
4562 })
4563 }
4564
4565 pub fn stash_drop(
4566 &mut self,
4567 index: Option<usize>,
4568 cx: &mut Context<Self>,
4569 ) -> oneshot::Receiver<anyhow::Result<()>> {
4570 let id = self.id;
4571 let updates_tx = self
4572 .git_store()
4573 .and_then(|git_store| match &git_store.read(cx).state {
4574 GitStoreState::Local { downstream, .. } => downstream
4575 .as_ref()
4576 .map(|downstream| downstream.updates_tx.clone()),
4577 _ => None,
4578 });
4579 let this = cx.weak_entity();
4580 self.send_job(None, move |git_repo, mut cx| async move {
4581 match git_repo {
4582 RepositoryState::Local(LocalRepositoryState {
4583 backend,
4584 environment,
4585 ..
4586 }) => {
4587 // TODO would be nice to not have to do this manually
4588 let result = backend.stash_drop(index, environment).await;
4589 if result.is_ok()
4590 && let Ok(stash_entries) = backend.stash_entries().await
4591 {
4592 let snapshot = this.update(&mut cx, |this, cx| {
4593 this.snapshot.stash_entries = stash_entries;
4594 cx.emit(RepositoryEvent::StashEntriesChanged);
4595 this.snapshot.clone()
4596 })?;
4597 if let Some(updates_tx) = updates_tx {
4598 updates_tx
4599 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4600 .ok();
4601 }
4602 }
4603
4604 result
4605 }
4606 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4607 client
4608 .request(proto::StashDrop {
4609 project_id: project_id.0,
4610 repository_id: id.to_proto(),
4611 stash_index: index.map(|i| i as u64),
4612 })
4613 .await
4614 .context("sending stash pop request")?;
4615 Ok(())
4616 }
4617 }
4618 })
4619 }
4620
4621 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4622 let id = self.id;
4623 self.send_job(
4624 Some(format!("git hook {}", hook.as_str()).into()),
4625 move |git_repo, _cx| async move {
4626 match git_repo {
4627 RepositoryState::Local(LocalRepositoryState {
4628 backend,
4629 environment,
4630 ..
4631 }) => backend.run_hook(hook, environment.clone()).await,
4632 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4633 client
4634 .request(proto::RunGitHook {
4635 project_id: project_id.0,
4636 repository_id: id.to_proto(),
4637 hook: hook.to_proto(),
4638 })
4639 .await?;
4640
4641 Ok(())
4642 }
4643 }
4644 },
4645 )
4646 }
4647
4648 pub fn commit(
4649 &mut self,
4650 message: SharedString,
4651 name_and_email: Option<(SharedString, SharedString)>,
4652 options: CommitOptions,
4653 askpass: AskPassDelegate,
4654 cx: &mut App,
4655 ) -> oneshot::Receiver<Result<()>> {
4656 let id = self.id;
4657 let askpass_delegates = self.askpass_delegates.clone();
4658 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4659
4660 let rx = self.run_hook(RunHook::PreCommit, cx);
4661
4662 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4663 rx.await??;
4664
4665 match git_repo {
4666 RepositoryState::Local(LocalRepositoryState {
4667 backend,
4668 environment,
4669 ..
4670 }) => {
4671 backend
4672 .commit(message, name_and_email, options, askpass, environment)
4673 .await
4674 }
4675 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4676 askpass_delegates.lock().insert(askpass_id, askpass);
4677 let _defer = util::defer(|| {
4678 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4679 debug_assert!(askpass_delegate.is_some());
4680 });
4681 let (name, email) = name_and_email.unzip();
4682 client
4683 .request(proto::Commit {
4684 project_id: project_id.0,
4685 repository_id: id.to_proto(),
4686 message: String::from(message),
4687 name: name.map(String::from),
4688 email: email.map(String::from),
4689 options: Some(proto::commit::CommitOptions {
4690 amend: options.amend,
4691 signoff: options.signoff,
4692 }),
4693 askpass_id,
4694 })
4695 .await
4696 .context("sending commit request")?;
4697
4698 Ok(())
4699 }
4700 }
4701 })
4702 }
4703
4704 pub fn fetch(
4705 &mut self,
4706 fetch_options: FetchOptions,
4707 askpass: AskPassDelegate,
4708 _cx: &mut App,
4709 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4710 let askpass_delegates = self.askpass_delegates.clone();
4711 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4712 let id = self.id;
4713
4714 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4715 match git_repo {
4716 RepositoryState::Local(LocalRepositoryState {
4717 backend,
4718 environment,
4719 ..
4720 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4721 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4722 askpass_delegates.lock().insert(askpass_id, askpass);
4723 let _defer = util::defer(|| {
4724 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4725 debug_assert!(askpass_delegate.is_some());
4726 });
4727
4728 let response = client
4729 .request(proto::Fetch {
4730 project_id: project_id.0,
4731 repository_id: id.to_proto(),
4732 askpass_id,
4733 remote: fetch_options.to_proto(),
4734 })
4735 .await
4736 .context("sending fetch request")?;
4737
4738 Ok(RemoteCommandOutput {
4739 stdout: response.stdout,
4740 stderr: response.stderr,
4741 })
4742 }
4743 }
4744 })
4745 }
4746
4747 pub fn push(
4748 &mut self,
4749 branch: SharedString,
4750 remote_branch: SharedString,
4751 remote: SharedString,
4752 options: Option<PushOptions>,
4753 askpass: AskPassDelegate,
4754 cx: &mut Context<Self>,
4755 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4756 let askpass_delegates = self.askpass_delegates.clone();
4757 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4758 let id = self.id;
4759
4760 let args = options
4761 .map(|option| match option {
4762 PushOptions::SetUpstream => " --set-upstream",
4763 PushOptions::Force => " --force-with-lease",
4764 })
4765 .unwrap_or("");
4766
4767 let updates_tx = self
4768 .git_store()
4769 .and_then(|git_store| match &git_store.read(cx).state {
4770 GitStoreState::Local { downstream, .. } => downstream
4771 .as_ref()
4772 .map(|downstream| downstream.updates_tx.clone()),
4773 _ => None,
4774 });
4775
4776 let this = cx.weak_entity();
4777 self.send_job(
4778 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
4779 move |git_repo, mut cx| async move {
4780 match git_repo {
4781 RepositoryState::Local(LocalRepositoryState {
4782 backend,
4783 environment,
4784 ..
4785 }) => {
4786 let result = backend
4787 .push(
4788 branch.to_string(),
4789 remote_branch.to_string(),
4790 remote.to_string(),
4791 options,
4792 askpass,
4793 environment.clone(),
4794 cx.clone(),
4795 )
4796 .await;
4797 // TODO would be nice to not have to do this manually
4798 if result.is_ok() {
4799 let branches = backend.branches().await?;
4800 let branch = branches.into_iter().find(|branch| branch.is_head);
4801 log::info!("head branch after scan is {branch:?}");
4802 let snapshot = this.update(&mut cx, |this, cx| {
4803 this.snapshot.branch = branch;
4804 cx.emit(RepositoryEvent::BranchChanged);
4805 this.snapshot.clone()
4806 })?;
4807 if let Some(updates_tx) = updates_tx {
4808 updates_tx
4809 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4810 .ok();
4811 }
4812 }
4813 result
4814 }
4815 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4816 askpass_delegates.lock().insert(askpass_id, askpass);
4817 let _defer = util::defer(|| {
4818 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4819 debug_assert!(askpass_delegate.is_some());
4820 });
4821 let response = client
4822 .request(proto::Push {
4823 project_id: project_id.0,
4824 repository_id: id.to_proto(),
4825 askpass_id,
4826 branch_name: branch.to_string(),
4827 remote_branch_name: remote_branch.to_string(),
4828 remote_name: remote.to_string(),
4829 options: options.map(|options| match options {
4830 PushOptions::Force => proto::push::PushOptions::Force,
4831 PushOptions::SetUpstream => {
4832 proto::push::PushOptions::SetUpstream
4833 }
4834 }
4835 as i32),
4836 })
4837 .await
4838 .context("sending push request")?;
4839
4840 Ok(RemoteCommandOutput {
4841 stdout: response.stdout,
4842 stderr: response.stderr,
4843 })
4844 }
4845 }
4846 },
4847 )
4848 }
4849
4850 pub fn pull(
4851 &mut self,
4852 branch: Option<SharedString>,
4853 remote: SharedString,
4854 rebase: bool,
4855 askpass: AskPassDelegate,
4856 _cx: &mut App,
4857 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4858 let askpass_delegates = self.askpass_delegates.clone();
4859 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4860 let id = self.id;
4861
4862 let mut status = "git pull".to_string();
4863 if rebase {
4864 status.push_str(" --rebase");
4865 }
4866 status.push_str(&format!(" {}", remote));
4867 if let Some(b) = &branch {
4868 status.push_str(&format!(" {}", b));
4869 }
4870
4871 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4872 match git_repo {
4873 RepositoryState::Local(LocalRepositoryState {
4874 backend,
4875 environment,
4876 ..
4877 }) => {
4878 backend
4879 .pull(
4880 branch.as_ref().map(|b| b.to_string()),
4881 remote.to_string(),
4882 rebase,
4883 askpass,
4884 environment.clone(),
4885 cx,
4886 )
4887 .await
4888 }
4889 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4890 askpass_delegates.lock().insert(askpass_id, askpass);
4891 let _defer = util::defer(|| {
4892 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4893 debug_assert!(askpass_delegate.is_some());
4894 });
4895 let response = client
4896 .request(proto::Pull {
4897 project_id: project_id.0,
4898 repository_id: id.to_proto(),
4899 askpass_id,
4900 rebase,
4901 branch_name: branch.as_ref().map(|b| b.to_string()),
4902 remote_name: remote.to_string(),
4903 })
4904 .await
4905 .context("sending pull request")?;
4906
4907 Ok(RemoteCommandOutput {
4908 stdout: response.stdout,
4909 stderr: response.stderr,
4910 })
4911 }
4912 }
4913 })
4914 }
4915
4916 fn spawn_set_index_text_job(
4917 &mut self,
4918 path: RepoPath,
4919 content: Option<String>,
4920 hunk_staging_operation_count: Option<usize>,
4921 cx: &mut Context<Self>,
4922 ) -> oneshot::Receiver<anyhow::Result<()>> {
4923 let id = self.id;
4924 let this = cx.weak_entity();
4925 let git_store = self.git_store.clone();
4926 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4927 self.send_keyed_job(
4928 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4929 None,
4930 move |git_repo, mut cx| async move {
4931 log::debug!(
4932 "start updating index text for buffer {}",
4933 path.as_unix_str()
4934 );
4935
4936 match git_repo {
4937 RepositoryState::Local(LocalRepositoryState {
4938 fs,
4939 backend,
4940 environment,
4941 ..
4942 }) => {
4943 let executable = match fs.metadata(&abs_path).await {
4944 Ok(Some(meta)) => meta.is_executable,
4945 Ok(None) => false,
4946 Err(_err) => false,
4947 };
4948 backend
4949 .set_index_text(path.clone(), content, environment.clone(), executable)
4950 .await?;
4951 }
4952 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4953 client
4954 .request(proto::SetIndexText {
4955 project_id: project_id.0,
4956 repository_id: id.to_proto(),
4957 path: path.to_proto(),
4958 text: content,
4959 })
4960 .await?;
4961 }
4962 }
4963 log::debug!(
4964 "finish updating index text for buffer {}",
4965 path.as_unix_str()
4966 );
4967
4968 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4969 let project_path = this
4970 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4971 .ok()
4972 .flatten();
4973 git_store
4974 .update(&mut cx, |git_store, cx| {
4975 let buffer_id = git_store
4976 .buffer_store
4977 .read(cx)
4978 .get_by_path(&project_path?)?
4979 .read(cx)
4980 .remote_id();
4981 let diff_state = git_store.diffs.get(&buffer_id)?;
4982 diff_state.update(cx, |diff_state, _| {
4983 diff_state.hunk_staging_operation_count_as_of_write =
4984 hunk_staging_operation_count;
4985 });
4986 Some(())
4987 })
4988 .context("Git store dropped")?;
4989 }
4990 Ok(())
4991 },
4992 )
4993 }
4994
4995 pub fn create_remote(
4996 &mut self,
4997 remote_name: String,
4998 remote_url: String,
4999 ) -> oneshot::Receiver<Result<()>> {
5000 let id = self.id;
5001 self.send_job(
5002 Some(format!("git remote add {remote_name} {remote_url}").into()),
5003 move |repo, _cx| async move {
5004 match repo {
5005 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5006 backend.create_remote(remote_name, remote_url).await
5007 }
5008 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5009 client
5010 .request(proto::GitCreateRemote {
5011 project_id: project_id.0,
5012 repository_id: id.to_proto(),
5013 remote_name,
5014 remote_url,
5015 })
5016 .await?;
5017
5018 Ok(())
5019 }
5020 }
5021 },
5022 )
5023 }
5024
5025 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5026 let id = self.id;
5027 self.send_job(
5028 Some(format!("git remove remote {remote_name}").into()),
5029 move |repo, _cx| async move {
5030 match repo {
5031 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5032 backend.remove_remote(remote_name).await
5033 }
5034 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5035 client
5036 .request(proto::GitRemoveRemote {
5037 project_id: project_id.0,
5038 repository_id: id.to_proto(),
5039 remote_name,
5040 })
5041 .await?;
5042
5043 Ok(())
5044 }
5045 }
5046 },
5047 )
5048 }
5049
5050 pub fn get_remotes(
5051 &mut self,
5052 branch_name: Option<String>,
5053 is_push: bool,
5054 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5055 let id = self.id;
5056 self.send_job(None, move |repo, _cx| async move {
5057 match repo {
5058 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5059 let remote = if let Some(branch_name) = branch_name {
5060 if is_push {
5061 backend.get_push_remote(branch_name).await?
5062 } else {
5063 backend.get_branch_remote(branch_name).await?
5064 }
5065 } else {
5066 None
5067 };
5068
5069 match remote {
5070 Some(remote) => Ok(vec![remote]),
5071 None => backend.get_all_remotes().await,
5072 }
5073 }
5074 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5075 let response = client
5076 .request(proto::GetRemotes {
5077 project_id: project_id.0,
5078 repository_id: id.to_proto(),
5079 branch_name,
5080 is_push,
5081 })
5082 .await?;
5083
5084 let remotes = response
5085 .remotes
5086 .into_iter()
5087 .map(|remotes| Remote {
5088 name: remotes.name.into(),
5089 })
5090 .collect();
5091
5092 Ok(remotes)
5093 }
5094 }
5095 })
5096 }
5097
5098 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5099 let id = self.id;
5100 self.send_job(None, move |repo, _| async move {
5101 match repo {
5102 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5103 backend.branches().await
5104 }
5105 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5106 let response = client
5107 .request(proto::GitGetBranches {
5108 project_id: project_id.0,
5109 repository_id: id.to_proto(),
5110 })
5111 .await?;
5112
5113 let branches = response
5114 .branches
5115 .into_iter()
5116 .map(|branch| proto_to_branch(&branch))
5117 .collect();
5118
5119 Ok(branches)
5120 }
5121 }
5122 })
5123 }
5124
5125 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5126 let id = self.id;
5127 self.send_job(None, move |repo, _| async move {
5128 match repo {
5129 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5130 backend.worktrees().await
5131 }
5132 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5133 let response = client
5134 .request(proto::GitGetWorktrees {
5135 project_id: project_id.0,
5136 repository_id: id.to_proto(),
5137 })
5138 .await?;
5139
5140 let worktrees = response
5141 .worktrees
5142 .into_iter()
5143 .map(|worktree| proto_to_worktree(&worktree))
5144 .collect();
5145
5146 Ok(worktrees)
5147 }
5148 }
5149 })
5150 }
5151
5152 pub fn create_worktree(
5153 &mut self,
5154 name: String,
5155 path: PathBuf,
5156 commit: Option<String>,
5157 ) -> oneshot::Receiver<Result<()>> {
5158 let id = self.id;
5159 self.send_job(
5160 Some("git worktree add".into()),
5161 move |repo, _cx| async move {
5162 match repo {
5163 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5164 backend.create_worktree(name, path, commit).await
5165 }
5166 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5167 client
5168 .request(proto::GitCreateWorktree {
5169 project_id: project_id.0,
5170 repository_id: id.to_proto(),
5171 name,
5172 directory: path.to_string_lossy().to_string(),
5173 commit,
5174 })
5175 .await?;
5176
5177 Ok(())
5178 }
5179 }
5180 },
5181 )
5182 }
5183
5184 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5185 let id = self.id;
5186 self.send_job(None, move |repo, _| async move {
5187 match repo {
5188 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5189 backend.default_branch().await
5190 }
5191 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5192 let response = client
5193 .request(proto::GetDefaultBranch {
5194 project_id: project_id.0,
5195 repository_id: id.to_proto(),
5196 })
5197 .await?;
5198
5199 anyhow::Ok(response.branch.map(SharedString::from))
5200 }
5201 }
5202 })
5203 }
5204
5205 pub fn diff_tree(
5206 &mut self,
5207 diff_type: DiffTreeType,
5208 _cx: &App,
5209 ) -> oneshot::Receiver<Result<TreeDiff>> {
5210 let repository_id = self.snapshot.id;
5211 self.send_job(None, move |repo, _cx| async move {
5212 match repo {
5213 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5214 backend.diff_tree(diff_type).await
5215 }
5216 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5217 let response = client
5218 .request(proto::GetTreeDiff {
5219 project_id: project_id.0,
5220 repository_id: repository_id.0,
5221 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5222 base: diff_type.base().to_string(),
5223 head: diff_type.head().to_string(),
5224 })
5225 .await?;
5226
5227 let entries = response
5228 .entries
5229 .into_iter()
5230 .filter_map(|entry| {
5231 let status = match entry.status() {
5232 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5233 proto::tree_diff_status::Status::Modified => {
5234 TreeDiffStatus::Modified {
5235 old: git::Oid::from_str(
5236 &entry.oid.context("missing oid").log_err()?,
5237 )
5238 .log_err()?,
5239 }
5240 }
5241 proto::tree_diff_status::Status::Deleted => {
5242 TreeDiffStatus::Deleted {
5243 old: git::Oid::from_str(
5244 &entry.oid.context("missing oid").log_err()?,
5245 )
5246 .log_err()?,
5247 }
5248 }
5249 };
5250 Some((
5251 RepoPath::from_rel_path(
5252 &RelPath::from_proto(&entry.path).log_err()?,
5253 ),
5254 status,
5255 ))
5256 })
5257 .collect();
5258
5259 Ok(TreeDiff { entries })
5260 }
5261 }
5262 })
5263 }
5264
5265 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5266 let id = self.id;
5267 self.send_job(None, move |repo, _cx| async move {
5268 match repo {
5269 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5270 backend.diff(diff_type).await
5271 }
5272 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5273 let response = client
5274 .request(proto::GitDiff {
5275 project_id: project_id.0,
5276 repository_id: id.to_proto(),
5277 diff_type: match diff_type {
5278 DiffType::HeadToIndex => {
5279 proto::git_diff::DiffType::HeadToIndex.into()
5280 }
5281 DiffType::HeadToWorktree => {
5282 proto::git_diff::DiffType::HeadToWorktree.into()
5283 }
5284 },
5285 })
5286 .await?;
5287
5288 Ok(response.diff)
5289 }
5290 }
5291 })
5292 }
5293
5294 pub fn create_branch(
5295 &mut self,
5296 branch_name: String,
5297 base_branch: Option<String>,
5298 ) -> oneshot::Receiver<Result<()>> {
5299 let id = self.id;
5300 let status_msg = if let Some(ref base) = base_branch {
5301 format!("git switch -c {branch_name} {base}").into()
5302 } else {
5303 format!("git switch -c {branch_name}").into()
5304 };
5305 self.send_job(Some(status_msg), move |repo, _cx| async move {
5306 match repo {
5307 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5308 backend.create_branch(branch_name, base_branch).await
5309 }
5310 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5311 client
5312 .request(proto::GitCreateBranch {
5313 project_id: project_id.0,
5314 repository_id: id.to_proto(),
5315 branch_name,
5316 })
5317 .await?;
5318
5319 Ok(())
5320 }
5321 }
5322 })
5323 }
5324
5325 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5326 let id = self.id;
5327 self.send_job(
5328 Some(format!("git switch {branch_name}").into()),
5329 move |repo, _cx| async move {
5330 match repo {
5331 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5332 backend.change_branch(branch_name).await
5333 }
5334 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5335 client
5336 .request(proto::GitChangeBranch {
5337 project_id: project_id.0,
5338 repository_id: id.to_proto(),
5339 branch_name,
5340 })
5341 .await?;
5342
5343 Ok(())
5344 }
5345 }
5346 },
5347 )
5348 }
5349
5350 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5351 let id = self.id;
5352 self.send_job(
5353 Some(format!("git branch -d {branch_name}").into()),
5354 move |repo, _cx| async move {
5355 match repo {
5356 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5357 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5358 client
5359 .request(proto::GitDeleteBranch {
5360 project_id: project_id.0,
5361 repository_id: id.to_proto(),
5362 branch_name,
5363 })
5364 .await?;
5365
5366 Ok(())
5367 }
5368 }
5369 },
5370 )
5371 }
5372
5373 pub fn rename_branch(
5374 &mut self,
5375 branch: String,
5376 new_name: String,
5377 ) -> oneshot::Receiver<Result<()>> {
5378 let id = self.id;
5379 self.send_job(
5380 Some(format!("git branch -m {branch} {new_name}").into()),
5381 move |repo, _cx| async move {
5382 match repo {
5383 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5384 backend.rename_branch(branch, new_name).await
5385 }
5386 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5387 client
5388 .request(proto::GitRenameBranch {
5389 project_id: project_id.0,
5390 repository_id: id.to_proto(),
5391 branch,
5392 new_name,
5393 })
5394 .await?;
5395
5396 Ok(())
5397 }
5398 }
5399 },
5400 )
5401 }
5402
5403 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5404 let id = self.id;
5405 self.send_job(None, move |repo, _cx| async move {
5406 match repo {
5407 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5408 backend.check_for_pushed_commit().await
5409 }
5410 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5411 let response = client
5412 .request(proto::CheckForPushedCommits {
5413 project_id: project_id.0,
5414 repository_id: id.to_proto(),
5415 })
5416 .await?;
5417
5418 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5419
5420 Ok(branches)
5421 }
5422 }
5423 })
5424 }
5425
5426 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5427 self.send_job(None, |repo, _cx| async move {
5428 match repo {
5429 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5430 backend.checkpoint().await
5431 }
5432 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5433 }
5434 })
5435 }
5436
5437 pub fn restore_checkpoint(
5438 &mut self,
5439 checkpoint: GitRepositoryCheckpoint,
5440 ) -> oneshot::Receiver<Result<()>> {
5441 self.send_job(None, move |repo, _cx| async move {
5442 match repo {
5443 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5444 backend.restore_checkpoint(checkpoint).await
5445 }
5446 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5447 }
5448 })
5449 }
5450
5451 pub(crate) fn apply_remote_update(
5452 &mut self,
5453 update: proto::UpdateRepository,
5454 cx: &mut Context<Self>,
5455 ) -> Result<()> {
5456 let conflicted_paths = TreeSet::from_ordered_entries(
5457 update
5458 .current_merge_conflicts
5459 .into_iter()
5460 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5461 );
5462 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5463 let new_head_commit = update
5464 .head_commit_details
5465 .as_ref()
5466 .map(proto_to_commit_details);
5467 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5468 cx.emit(RepositoryEvent::BranchChanged)
5469 }
5470 self.snapshot.branch = new_branch;
5471 self.snapshot.head_commit = new_head_commit;
5472
5473 self.snapshot.merge.conflicted_paths = conflicted_paths;
5474 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5475 let new_stash_entries = GitStash {
5476 entries: update
5477 .stash_entries
5478 .iter()
5479 .filter_map(|entry| proto_to_stash(entry).ok())
5480 .collect(),
5481 };
5482 if self.snapshot.stash_entries != new_stash_entries {
5483 cx.emit(RepositoryEvent::StashEntriesChanged)
5484 }
5485 self.snapshot.stash_entries = new_stash_entries;
5486 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5487 self.snapshot.remote_origin_url = update.remote_origin_url;
5488
5489 let edits = update
5490 .removed_statuses
5491 .into_iter()
5492 .filter_map(|path| {
5493 Some(sum_tree::Edit::Remove(PathKey(
5494 RelPath::from_proto(&path).log_err()?,
5495 )))
5496 })
5497 .chain(
5498 update
5499 .updated_statuses
5500 .into_iter()
5501 .filter_map(|updated_status| {
5502 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5503 }),
5504 )
5505 .collect::<Vec<_>>();
5506 if !edits.is_empty() {
5507 cx.emit(RepositoryEvent::StatusesChanged);
5508 }
5509 self.snapshot.statuses_by_path.edit(edits, ());
5510 if update.is_last_update {
5511 self.snapshot.scan_id = update.scan_id;
5512 }
5513 self.clear_pending_ops(cx);
5514 Ok(())
5515 }
5516
5517 pub fn compare_checkpoints(
5518 &mut self,
5519 left: GitRepositoryCheckpoint,
5520 right: GitRepositoryCheckpoint,
5521 ) -> oneshot::Receiver<Result<bool>> {
5522 self.send_job(None, move |repo, _cx| async move {
5523 match repo {
5524 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5525 backend.compare_checkpoints(left, right).await
5526 }
5527 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5528 }
5529 })
5530 }
5531
5532 pub fn diff_checkpoints(
5533 &mut self,
5534 base_checkpoint: GitRepositoryCheckpoint,
5535 target_checkpoint: GitRepositoryCheckpoint,
5536 ) -> oneshot::Receiver<Result<String>> {
5537 self.send_job(None, move |repo, _cx| async move {
5538 match repo {
5539 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5540 backend
5541 .diff_checkpoints(base_checkpoint, target_checkpoint)
5542 .await
5543 }
5544 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5545 }
5546 })
5547 }
5548
5549 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5550 let updated = SumTree::from_iter(
5551 self.pending_ops.iter().filter_map(|ops| {
5552 let inner_ops: Vec<PendingOp> =
5553 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5554 if inner_ops.is_empty() {
5555 None
5556 } else {
5557 Some(PendingOps {
5558 repo_path: ops.repo_path.clone(),
5559 ops: inner_ops,
5560 })
5561 }
5562 }),
5563 (),
5564 );
5565
5566 if updated != self.pending_ops {
5567 cx.emit(RepositoryEvent::PendingOpsChanged {
5568 pending_ops: self.pending_ops.clone(),
5569 })
5570 }
5571
5572 self.pending_ops = updated;
5573 }
5574
5575 fn schedule_scan(
5576 &mut self,
5577 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5578 cx: &mut Context<Self>,
5579 ) {
5580 let this = cx.weak_entity();
5581 let _ = self.send_keyed_job(
5582 Some(GitJobKey::ReloadGitState),
5583 None,
5584 |state, mut cx| async move {
5585 log::debug!("run scheduled git status scan");
5586
5587 let Some(this) = this.upgrade() else {
5588 return Ok(());
5589 };
5590 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5591 bail!("not a local repository")
5592 };
5593 let (snapshot, events) = this
5594 .update(&mut cx, |this, _| {
5595 this.paths_needing_status_update.clear();
5596 compute_snapshot(
5597 this.id,
5598 this.work_directory_abs_path.clone(),
5599 this.snapshot.clone(),
5600 backend.clone(),
5601 )
5602 })
5603 .await?;
5604 this.update(&mut cx, |this, cx| {
5605 this.snapshot = snapshot.clone();
5606 this.clear_pending_ops(cx);
5607 for event in events {
5608 cx.emit(event);
5609 }
5610 });
5611 if let Some(updates_tx) = updates_tx {
5612 updates_tx
5613 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5614 .ok();
5615 }
5616 Ok(())
5617 },
5618 );
5619 }
5620
5621 fn spawn_local_git_worker(
5622 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5623 cx: &mut Context<Self>,
5624 ) -> mpsc::UnboundedSender<GitJob> {
5625 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5626
5627 cx.spawn(async move |_, cx| {
5628 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5629 if let Some(git_hosting_provider_registry) =
5630 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
5631 {
5632 git_hosting_providers::register_additional_providers(
5633 git_hosting_provider_registry,
5634 state.backend.clone(),
5635 )
5636 .await;
5637 }
5638 let state = RepositoryState::Local(state);
5639 let mut jobs = VecDeque::new();
5640 loop {
5641 while let Ok(Some(next_job)) = job_rx.try_next() {
5642 jobs.push_back(next_job);
5643 }
5644
5645 if let Some(job) = jobs.pop_front() {
5646 if let Some(current_key) = &job.key
5647 && jobs
5648 .iter()
5649 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5650 {
5651 continue;
5652 }
5653 (job.job)(state.clone(), cx).await;
5654 } else if let Some(job) = job_rx.next().await {
5655 jobs.push_back(job);
5656 } else {
5657 break;
5658 }
5659 }
5660 anyhow::Ok(())
5661 })
5662 .detach_and_log_err(cx);
5663
5664 job_tx
5665 }
5666
5667 fn spawn_remote_git_worker(
5668 state: RemoteRepositoryState,
5669 cx: &mut Context<Self>,
5670 ) -> mpsc::UnboundedSender<GitJob> {
5671 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5672
5673 cx.spawn(async move |_, cx| {
5674 let state = RepositoryState::Remote(state);
5675 let mut jobs = VecDeque::new();
5676 loop {
5677 while let Ok(Some(next_job)) = job_rx.try_next() {
5678 jobs.push_back(next_job);
5679 }
5680
5681 if let Some(job) = jobs.pop_front() {
5682 if let Some(current_key) = &job.key
5683 && jobs
5684 .iter()
5685 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5686 {
5687 continue;
5688 }
5689 (job.job)(state.clone(), cx).await;
5690 } else if let Some(job) = job_rx.next().await {
5691 jobs.push_back(job);
5692 } else {
5693 break;
5694 }
5695 }
5696 anyhow::Ok(())
5697 })
5698 .detach_and_log_err(cx);
5699
5700 job_tx
5701 }
5702
5703 fn load_staged_text(
5704 &mut self,
5705 buffer_id: BufferId,
5706 repo_path: RepoPath,
5707 cx: &App,
5708 ) -> Task<Result<Option<String>>> {
5709 let rx = self.send_job(None, move |state, _| async move {
5710 match state {
5711 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5712 anyhow::Ok(backend.load_index_text(repo_path).await)
5713 }
5714 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5715 let response = client
5716 .request(proto::OpenUnstagedDiff {
5717 project_id: project_id.to_proto(),
5718 buffer_id: buffer_id.to_proto(),
5719 })
5720 .await?;
5721 Ok(response.staged_text)
5722 }
5723 }
5724 });
5725 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5726 }
5727
5728 fn load_committed_text(
5729 &mut self,
5730 buffer_id: BufferId,
5731 repo_path: RepoPath,
5732 cx: &App,
5733 ) -> Task<Result<DiffBasesChange>> {
5734 let rx = self.send_job(None, move |state, _| async move {
5735 match state {
5736 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5737 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5738 let staged_text = backend.load_index_text(repo_path).await;
5739 let diff_bases_change = if committed_text == staged_text {
5740 DiffBasesChange::SetBoth(committed_text)
5741 } else {
5742 DiffBasesChange::SetEach {
5743 index: staged_text,
5744 head: committed_text,
5745 }
5746 };
5747 anyhow::Ok(diff_bases_change)
5748 }
5749 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5750 use proto::open_uncommitted_diff_response::Mode;
5751
5752 let response = client
5753 .request(proto::OpenUncommittedDiff {
5754 project_id: project_id.to_proto(),
5755 buffer_id: buffer_id.to_proto(),
5756 })
5757 .await?;
5758 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5759 let bases = match mode {
5760 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5761 Mode::IndexAndHead => DiffBasesChange::SetEach {
5762 head: response.committed_text,
5763 index: response.staged_text,
5764 },
5765 };
5766 Ok(bases)
5767 }
5768 }
5769 });
5770
5771 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5772 }
5773
5774 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5775 let repository_id = self.snapshot.id;
5776 let rx = self.send_job(None, move |state, _| async move {
5777 match state {
5778 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5779 backend.load_blob_content(oid).await
5780 }
5781 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5782 let response = client
5783 .request(proto::GetBlobContent {
5784 project_id: project_id.to_proto(),
5785 repository_id: repository_id.0,
5786 oid: oid.to_string(),
5787 })
5788 .await?;
5789 Ok(response.content)
5790 }
5791 }
5792 });
5793 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5794 }
5795
5796 fn paths_changed(
5797 &mut self,
5798 paths: Vec<RepoPath>,
5799 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5800 cx: &mut Context<Self>,
5801 ) {
5802 self.paths_needing_status_update.extend(paths);
5803
5804 let this = cx.weak_entity();
5805 let _ = self.send_keyed_job(
5806 Some(GitJobKey::RefreshStatuses),
5807 None,
5808 |state, mut cx| async move {
5809 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5810 (
5811 this.snapshot.clone(),
5812 mem::take(&mut this.paths_needing_status_update),
5813 )
5814 })?;
5815 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5816 bail!("not a local repository")
5817 };
5818
5819 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5820 if paths.is_empty() {
5821 return Ok(());
5822 }
5823 let statuses = backend.status(&paths).await?;
5824 let stash_entries = backend.stash_entries().await?;
5825
5826 let changed_path_statuses = cx
5827 .background_spawn(async move {
5828 let mut changed_path_statuses = Vec::new();
5829 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5830 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5831
5832 for (repo_path, status) in &*statuses.entries {
5833 changed_paths.remove(repo_path);
5834 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5835 && cursor.item().is_some_and(|entry| entry.status == *status)
5836 {
5837 continue;
5838 }
5839
5840 changed_path_statuses.push(Edit::Insert(StatusEntry {
5841 repo_path: repo_path.clone(),
5842 status: *status,
5843 }));
5844 }
5845 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5846 for path in changed_paths.into_iter() {
5847 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5848 changed_path_statuses
5849 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5850 }
5851 }
5852 changed_path_statuses
5853 })
5854 .await;
5855
5856 this.update(&mut cx, |this, cx| {
5857 if this.snapshot.stash_entries != stash_entries {
5858 cx.emit(RepositoryEvent::StashEntriesChanged);
5859 this.snapshot.stash_entries = stash_entries;
5860 }
5861
5862 if !changed_path_statuses.is_empty() {
5863 cx.emit(RepositoryEvent::StatusesChanged);
5864 this.snapshot
5865 .statuses_by_path
5866 .edit(changed_path_statuses, ());
5867 this.snapshot.scan_id += 1;
5868 }
5869
5870 if let Some(updates_tx) = updates_tx {
5871 updates_tx
5872 .unbounded_send(DownstreamUpdate::UpdateRepository(
5873 this.snapshot.clone(),
5874 ))
5875 .ok();
5876 }
5877 })
5878 },
5879 );
5880 }
5881
5882 /// currently running git command and when it started
5883 pub fn current_job(&self) -> Option<JobInfo> {
5884 self.active_jobs.values().next().cloned()
5885 }
5886
5887 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5888 self.send_job(None, |_, _| async {})
5889 }
5890
5891 fn spawn_job_with_tracking<AsyncFn>(
5892 &mut self,
5893 paths: Vec<RepoPath>,
5894 git_status: pending_op::GitStatus,
5895 cx: &mut Context<Self>,
5896 f: AsyncFn,
5897 ) -> Task<Result<()>>
5898 where
5899 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5900 {
5901 let ids = self.new_pending_ops_for_paths(paths, git_status);
5902
5903 cx.spawn(async move |this, cx| {
5904 let (job_status, result) = match f(this.clone(), cx).await {
5905 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5906 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5907 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5908 };
5909
5910 this.update(cx, |this, _| {
5911 let mut edits = Vec::with_capacity(ids.len());
5912 for (id, entry) in ids {
5913 if let Some(mut ops) = this
5914 .pending_ops
5915 .get(&PathKey(entry.as_ref().clone()), ())
5916 .cloned()
5917 {
5918 if let Some(op) = ops.op_by_id_mut(id) {
5919 op.job_status = job_status;
5920 }
5921 edits.push(sum_tree::Edit::Insert(ops));
5922 }
5923 }
5924 this.pending_ops.edit(edits, ());
5925 })?;
5926
5927 result
5928 })
5929 }
5930
5931 fn new_pending_ops_for_paths(
5932 &mut self,
5933 paths: Vec<RepoPath>,
5934 git_status: pending_op::GitStatus,
5935 ) -> Vec<(PendingOpId, RepoPath)> {
5936 let mut edits = Vec::with_capacity(paths.len());
5937 let mut ids = Vec::with_capacity(paths.len());
5938 for path in paths {
5939 let mut ops = self
5940 .pending_ops
5941 .get(&PathKey(path.as_ref().clone()), ())
5942 .cloned()
5943 .unwrap_or_else(|| PendingOps::new(&path));
5944 let id = ops.max_id() + 1;
5945 ops.ops.push(PendingOp {
5946 id,
5947 git_status,
5948 job_status: pending_op::JobStatus::Running,
5949 });
5950 edits.push(sum_tree::Edit::Insert(ops));
5951 ids.push((id, path));
5952 }
5953 self.pending_ops.edit(edits, ());
5954 ids
5955 }
5956 pub fn default_remote_url(&self) -> Option<String> {
5957 self.remote_upstream_url
5958 .clone()
5959 .or(self.remote_origin_url.clone())
5960 }
5961}
5962
5963fn get_permalink_in_rust_registry_src(
5964 provider_registry: Arc<GitHostingProviderRegistry>,
5965 path: PathBuf,
5966 selection: Range<u32>,
5967) -> Result<url::Url> {
5968 #[derive(Deserialize)]
5969 struct CargoVcsGit {
5970 sha1: String,
5971 }
5972
5973 #[derive(Deserialize)]
5974 struct CargoVcsInfo {
5975 git: CargoVcsGit,
5976 path_in_vcs: String,
5977 }
5978
5979 #[derive(Deserialize)]
5980 struct CargoPackage {
5981 repository: String,
5982 }
5983
5984 #[derive(Deserialize)]
5985 struct CargoToml {
5986 package: CargoPackage,
5987 }
5988
5989 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5990 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5991 Some((dir, json))
5992 }) else {
5993 bail!("No .cargo_vcs_info.json found in parent directories")
5994 };
5995 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5996 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5997 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5998 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5999 .context("parsing package.repository field of manifest")?;
6000 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6001 let permalink = provider.build_permalink(
6002 remote,
6003 BuildPermalinkParams::new(
6004 &cargo_vcs_info.git.sha1,
6005 &RepoPath::from_rel_path(
6006 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6007 ),
6008 Some(selection),
6009 ),
6010 );
6011 Ok(permalink)
6012}
6013
6014fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6015 let Some(blame) = blame else {
6016 return proto::BlameBufferResponse {
6017 blame_response: None,
6018 };
6019 };
6020
6021 let entries = blame
6022 .entries
6023 .into_iter()
6024 .map(|entry| proto::BlameEntry {
6025 sha: entry.sha.as_bytes().into(),
6026 start_line: entry.range.start,
6027 end_line: entry.range.end,
6028 original_line_number: entry.original_line_number,
6029 author: entry.author,
6030 author_mail: entry.author_mail,
6031 author_time: entry.author_time,
6032 author_tz: entry.author_tz,
6033 committer: entry.committer_name,
6034 committer_mail: entry.committer_email,
6035 committer_time: entry.committer_time,
6036 committer_tz: entry.committer_tz,
6037 summary: entry.summary,
6038 previous: entry.previous,
6039 filename: entry.filename,
6040 })
6041 .collect::<Vec<_>>();
6042
6043 let messages = blame
6044 .messages
6045 .into_iter()
6046 .map(|(oid, message)| proto::CommitMessage {
6047 oid: oid.as_bytes().into(),
6048 message,
6049 })
6050 .collect::<Vec<_>>();
6051
6052 proto::BlameBufferResponse {
6053 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6054 }
6055}
6056
6057fn deserialize_blame_buffer_response(
6058 response: proto::BlameBufferResponse,
6059) -> Option<git::blame::Blame> {
6060 let response = response.blame_response?;
6061 let entries = response
6062 .entries
6063 .into_iter()
6064 .filter_map(|entry| {
6065 Some(git::blame::BlameEntry {
6066 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6067 range: entry.start_line..entry.end_line,
6068 original_line_number: entry.original_line_number,
6069 committer_name: entry.committer,
6070 committer_time: entry.committer_time,
6071 committer_tz: entry.committer_tz,
6072 committer_email: entry.committer_mail,
6073 author: entry.author,
6074 author_mail: entry.author_mail,
6075 author_time: entry.author_time,
6076 author_tz: entry.author_tz,
6077 summary: entry.summary,
6078 previous: entry.previous,
6079 filename: entry.filename,
6080 })
6081 })
6082 .collect::<Vec<_>>();
6083
6084 let messages = response
6085 .messages
6086 .into_iter()
6087 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6088 .collect::<HashMap<_, _>>();
6089
6090 Some(Blame { entries, messages })
6091}
6092
6093fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6094 proto::Branch {
6095 is_head: branch.is_head,
6096 ref_name: branch.ref_name.to_string(),
6097 unix_timestamp: branch
6098 .most_recent_commit
6099 .as_ref()
6100 .map(|commit| commit.commit_timestamp as u64),
6101 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6102 ref_name: upstream.ref_name.to_string(),
6103 tracking: upstream
6104 .tracking
6105 .status()
6106 .map(|upstream| proto::UpstreamTracking {
6107 ahead: upstream.ahead as u64,
6108 behind: upstream.behind as u64,
6109 }),
6110 }),
6111 most_recent_commit: branch
6112 .most_recent_commit
6113 .as_ref()
6114 .map(|commit| proto::CommitSummary {
6115 sha: commit.sha.to_string(),
6116 subject: commit.subject.to_string(),
6117 commit_timestamp: commit.commit_timestamp,
6118 author_name: commit.author_name.to_string(),
6119 }),
6120 }
6121}
6122
6123fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6124 proto::Worktree {
6125 path: worktree.path.to_string_lossy().to_string(),
6126 ref_name: worktree.ref_name.to_string(),
6127 sha: worktree.sha.to_string(),
6128 }
6129}
6130
6131fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6132 git::repository::Worktree {
6133 path: PathBuf::from(proto.path.clone()),
6134 ref_name: proto.ref_name.clone().into(),
6135 sha: proto.sha.clone().into(),
6136 }
6137}
6138
6139fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6140 git::repository::Branch {
6141 is_head: proto.is_head,
6142 ref_name: proto.ref_name.clone().into(),
6143 upstream: proto
6144 .upstream
6145 .as_ref()
6146 .map(|upstream| git::repository::Upstream {
6147 ref_name: upstream.ref_name.to_string().into(),
6148 tracking: upstream
6149 .tracking
6150 .as_ref()
6151 .map(|tracking| {
6152 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6153 ahead: tracking.ahead as u32,
6154 behind: tracking.behind as u32,
6155 })
6156 })
6157 .unwrap_or(git::repository::UpstreamTracking::Gone),
6158 }),
6159 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6160 git::repository::CommitSummary {
6161 sha: commit.sha.to_string().into(),
6162 subject: commit.subject.to_string().into(),
6163 commit_timestamp: commit.commit_timestamp,
6164 author_name: commit.author_name.to_string().into(),
6165 has_parent: true,
6166 }
6167 }),
6168 }
6169}
6170
6171fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6172 proto::GitCommitDetails {
6173 sha: commit.sha.to_string(),
6174 message: commit.message.to_string(),
6175 commit_timestamp: commit.commit_timestamp,
6176 author_email: commit.author_email.to_string(),
6177 author_name: commit.author_name.to_string(),
6178 }
6179}
6180
6181fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6182 CommitDetails {
6183 sha: proto.sha.clone().into(),
6184 message: proto.message.clone().into(),
6185 commit_timestamp: proto.commit_timestamp,
6186 author_email: proto.author_email.clone().into(),
6187 author_name: proto.author_name.clone().into(),
6188 }
6189}
6190
6191async fn compute_snapshot(
6192 id: RepositoryId,
6193 work_directory_abs_path: Arc<Path>,
6194 prev_snapshot: RepositorySnapshot,
6195 backend: Arc<dyn GitRepository>,
6196) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6197 let mut events = Vec::new();
6198 let branches = backend.branches().await?;
6199 let branch = branches.into_iter().find(|branch| branch.is_head);
6200 let statuses = backend
6201 .status(&[RepoPath::from_rel_path(
6202 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6203 )])
6204 .await?;
6205 let stash_entries = backend.stash_entries().await?;
6206 let statuses_by_path = SumTree::from_iter(
6207 statuses
6208 .entries
6209 .iter()
6210 .map(|(repo_path, status)| StatusEntry {
6211 repo_path: repo_path.clone(),
6212 status: *status,
6213 }),
6214 (),
6215 );
6216 let (merge_details, merge_heads_changed) =
6217 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6218 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6219
6220 if merge_heads_changed {
6221 events.push(RepositoryEvent::MergeHeadsChanged);
6222 }
6223
6224 if statuses_by_path != prev_snapshot.statuses_by_path {
6225 events.push(RepositoryEvent::StatusesChanged)
6226 }
6227
6228 // Useful when branch is None in detached head state
6229 let head_commit = match backend.head_sha().await {
6230 Some(head_sha) => backend.show(head_sha).await.log_err(),
6231 None => None,
6232 };
6233
6234 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6235 events.push(RepositoryEvent::BranchChanged);
6236 }
6237
6238 let remote_origin_url = backend.remote_url("origin").await;
6239 let remote_upstream_url = backend.remote_url("upstream").await;
6240
6241 let snapshot = RepositorySnapshot {
6242 id,
6243 statuses_by_path,
6244 work_directory_abs_path,
6245 path_style: prev_snapshot.path_style,
6246 scan_id: prev_snapshot.scan_id + 1,
6247 branch,
6248 head_commit,
6249 merge: merge_details,
6250 remote_origin_url,
6251 remote_upstream_url,
6252 stash_entries,
6253 };
6254
6255 Ok((snapshot, events))
6256}
6257
6258fn status_from_proto(
6259 simple_status: i32,
6260 status: Option<proto::GitFileStatus>,
6261) -> anyhow::Result<FileStatus> {
6262 use proto::git_file_status::Variant;
6263
6264 let Some(variant) = status.and_then(|status| status.variant) else {
6265 let code = proto::GitStatus::from_i32(simple_status)
6266 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6267 let result = match code {
6268 proto::GitStatus::Added => TrackedStatus {
6269 worktree_status: StatusCode::Added,
6270 index_status: StatusCode::Unmodified,
6271 }
6272 .into(),
6273 proto::GitStatus::Modified => TrackedStatus {
6274 worktree_status: StatusCode::Modified,
6275 index_status: StatusCode::Unmodified,
6276 }
6277 .into(),
6278 proto::GitStatus::Conflict => UnmergedStatus {
6279 first_head: UnmergedStatusCode::Updated,
6280 second_head: UnmergedStatusCode::Updated,
6281 }
6282 .into(),
6283 proto::GitStatus::Deleted => TrackedStatus {
6284 worktree_status: StatusCode::Deleted,
6285 index_status: StatusCode::Unmodified,
6286 }
6287 .into(),
6288 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6289 };
6290 return Ok(result);
6291 };
6292
6293 let result = match variant {
6294 Variant::Untracked(_) => FileStatus::Untracked,
6295 Variant::Ignored(_) => FileStatus::Ignored,
6296 Variant::Unmerged(unmerged) => {
6297 let [first_head, second_head] =
6298 [unmerged.first_head, unmerged.second_head].map(|head| {
6299 let code = proto::GitStatus::from_i32(head)
6300 .with_context(|| format!("Invalid git status code: {head}"))?;
6301 let result = match code {
6302 proto::GitStatus::Added => UnmergedStatusCode::Added,
6303 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6304 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6305 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6306 };
6307 Ok(result)
6308 });
6309 let [first_head, second_head] = [first_head?, second_head?];
6310 UnmergedStatus {
6311 first_head,
6312 second_head,
6313 }
6314 .into()
6315 }
6316 Variant::Tracked(tracked) => {
6317 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6318 .map(|status| {
6319 let code = proto::GitStatus::from_i32(status)
6320 .with_context(|| format!("Invalid git status code: {status}"))?;
6321 let result = match code {
6322 proto::GitStatus::Modified => StatusCode::Modified,
6323 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6324 proto::GitStatus::Added => StatusCode::Added,
6325 proto::GitStatus::Deleted => StatusCode::Deleted,
6326 proto::GitStatus::Renamed => StatusCode::Renamed,
6327 proto::GitStatus::Copied => StatusCode::Copied,
6328 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6329 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6330 };
6331 Ok(result)
6332 });
6333 let [index_status, worktree_status] = [index_status?, worktree_status?];
6334 TrackedStatus {
6335 index_status,
6336 worktree_status,
6337 }
6338 .into()
6339 }
6340 };
6341 Ok(result)
6342}
6343
6344fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6345 use proto::git_file_status::{Tracked, Unmerged, Variant};
6346
6347 let variant = match status {
6348 FileStatus::Untracked => Variant::Untracked(Default::default()),
6349 FileStatus::Ignored => Variant::Ignored(Default::default()),
6350 FileStatus::Unmerged(UnmergedStatus {
6351 first_head,
6352 second_head,
6353 }) => Variant::Unmerged(Unmerged {
6354 first_head: unmerged_status_to_proto(first_head),
6355 second_head: unmerged_status_to_proto(second_head),
6356 }),
6357 FileStatus::Tracked(TrackedStatus {
6358 index_status,
6359 worktree_status,
6360 }) => Variant::Tracked(Tracked {
6361 index_status: tracked_status_to_proto(index_status),
6362 worktree_status: tracked_status_to_proto(worktree_status),
6363 }),
6364 };
6365 proto::GitFileStatus {
6366 variant: Some(variant),
6367 }
6368}
6369
6370fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6371 match code {
6372 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6373 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6374 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6375 }
6376}
6377
6378fn tracked_status_to_proto(code: StatusCode) -> i32 {
6379 match code {
6380 StatusCode::Added => proto::GitStatus::Added as _,
6381 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6382 StatusCode::Modified => proto::GitStatus::Modified as _,
6383 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6384 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6385 StatusCode::Copied => proto::GitStatus::Copied as _,
6386 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6387 }
6388}