1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
127 /// text do disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
700 let language_registry = buffer.update(cx, |buffer, _| buffer.language_registry());
701 let content = match oid {
702 None => None,
703 Some(oid) => Some(
704 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
705 .await?,
706 ),
707 };
708 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));
709
710 buffer_diff
711 .update(cx, |buffer_diff, cx| {
712 buffer_diff.language_changed(
713 buffer_snapshot.language().cloned(),
714 language_registry,
715 cx,
716 );
717 buffer_diff.set_base_text(
718 content.map(|s| s.as_str().into()),
719 buffer_snapshot.language().cloned(),
720 buffer_snapshot.text,
721 cx,
722 )
723 })
724 .await?;
725 let unstaged_diff = this
726 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
727 .await?;
728 buffer_diff.update(cx, |buffer_diff, _| {
729 buffer_diff.set_secondary_diff(unstaged_diff);
730 });
731
732 this.update(cx, |_, cx| {
733 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
734 .detach();
735 })?;
736
737 Ok(buffer_diff)
738 })
739 }
740
741 pub fn open_uncommitted_diff(
742 &mut self,
743 buffer: Entity<Buffer>,
744 cx: &mut Context<Self>,
745 ) -> Task<Result<Entity<BufferDiff>>> {
746 let buffer_id = buffer.read(cx).remote_id();
747
748 if let Some(diff_state) = self.diffs.get(&buffer_id)
749 && let Some(uncommitted_diff) = diff_state
750 .read(cx)
751 .uncommitted_diff
752 .as_ref()
753 .and_then(|weak| weak.upgrade())
754 {
755 if let Some(task) =
756 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
757 {
758 return cx.background_executor().spawn(async move {
759 task.await;
760 Ok(uncommitted_diff)
761 });
762 }
763 return Task::ready(Ok(uncommitted_diff));
764 }
765
766 let Some((repo, repo_path)) =
767 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
768 else {
769 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
770 };
771
772 let task = self
773 .loading_diffs
774 .entry((buffer_id, DiffKind::Uncommitted))
775 .or_insert_with(|| {
776 let changes = repo.update(cx, |repo, cx| {
777 repo.load_committed_text(buffer_id, repo_path, cx)
778 });
779
780 // todo(lw): hot foreground spawn
781 cx.spawn(async move |this, cx| {
782 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
783 .await
784 .map_err(Arc::new)
785 })
786 .shared()
787 })
788 .clone();
789
790 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
791 }
792
793 async fn open_diff_internal(
794 this: WeakEntity<Self>,
795 kind: DiffKind,
796 texts: Result<DiffBasesChange>,
797 buffer_entity: Entity<Buffer>,
798 cx: &mut AsyncApp,
799 ) -> Result<Entity<BufferDiff>> {
800 let diff_bases_change = match texts {
801 Err(e) => {
802 this.update(cx, |this, cx| {
803 let buffer = buffer_entity.read(cx);
804 let buffer_id = buffer.remote_id();
805 this.loading_diffs.remove(&(buffer_id, kind));
806 })?;
807 return Err(e);
808 }
809 Ok(change) => change,
810 };
811
812 this.update(cx, |this, cx| {
813 let buffer = buffer_entity.read(cx);
814 let buffer_id = buffer.remote_id();
815 let language = buffer.language().cloned();
816 let language_registry = buffer.language_registry();
817 let text_snapshot = buffer.text_snapshot();
818 this.loading_diffs.remove(&(buffer_id, kind));
819
820 let git_store = cx.weak_entity();
821 let diff_state = this
822 .diffs
823 .entry(buffer_id)
824 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
825
826 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
827
828 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
829 diff_state.update(cx, |diff_state, cx| {
830 diff_state.language_changed = true;
831 diff_state.language = language;
832 diff_state.language_registry = language_registry;
833
834 match kind {
835 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
836 DiffKind::Uncommitted => {
837 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
838 diff
839 } else {
840 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
841 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
842 unstaged_diff
843 };
844
845 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
846 diff_state.uncommitted_diff = Some(diff.downgrade())
847 }
848 }
849
850 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
851 let rx = diff_state.wait_for_recalculation();
852
853 anyhow::Ok(async move {
854 if let Some(rx) = rx {
855 rx.await;
856 }
857 Ok(diff)
858 })
859 })
860 })??
861 .await
862 }
863
864 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
865 let diff_state = self.diffs.get(&buffer_id)?;
866 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
867 }
868
869 pub fn get_uncommitted_diff(
870 &self,
871 buffer_id: BufferId,
872 cx: &App,
873 ) -> Option<Entity<BufferDiff>> {
874 let diff_state = self.diffs.get(&buffer_id)?;
875 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
876 }
877
878 pub fn open_conflict_set(
879 &mut self,
880 buffer: Entity<Buffer>,
881 cx: &mut Context<Self>,
882 ) -> Entity<ConflictSet> {
883 log::debug!("open conflict set");
884 let buffer_id = buffer.read(cx).remote_id();
885
886 if let Some(git_state) = self.diffs.get(&buffer_id)
887 && let Some(conflict_set) = git_state
888 .read(cx)
889 .conflict_set
890 .as_ref()
891 .and_then(|weak| weak.upgrade())
892 {
893 let conflict_set = conflict_set;
894 let buffer_snapshot = buffer.read(cx).text_snapshot();
895
896 git_state.update(cx, |state, cx| {
897 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
898 });
899
900 return conflict_set;
901 }
902
903 let is_unmerged = self
904 .repository_and_path_for_buffer_id(buffer_id, cx)
905 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
906 let git_store = cx.weak_entity();
907 let buffer_git_state = self
908 .diffs
909 .entry(buffer_id)
910 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
911 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
912
913 self._subscriptions
914 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
915 cx.emit(GitStoreEvent::ConflictsUpdated);
916 }));
917
918 buffer_git_state.update(cx, |state, cx| {
919 state.conflict_set = Some(conflict_set.downgrade());
920 let buffer_snapshot = buffer.read(cx).text_snapshot();
921 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
922 });
923
924 conflict_set
925 }
926
927 pub fn project_path_git_status(
928 &self,
929 project_path: &ProjectPath,
930 cx: &App,
931 ) -> Option<FileStatus> {
932 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
933 Some(repo.read(cx).status_for_path(&repo_path)?.status)
934 }
935
936 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
937 let mut work_directory_abs_paths = Vec::new();
938 let mut checkpoints = Vec::new();
939 for repository in self.repositories.values() {
940 repository.update(cx, |repository, _| {
941 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
942 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
943 });
944 }
945
946 cx.background_executor().spawn(async move {
947 let checkpoints = future::try_join_all(checkpoints).await?;
948 Ok(GitStoreCheckpoint {
949 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
950 .into_iter()
951 .zip(checkpoints)
952 .collect(),
953 })
954 })
955 }
956
957 pub fn restore_checkpoint(
958 &self,
959 checkpoint: GitStoreCheckpoint,
960 cx: &mut App,
961 ) -> Task<Result<()>> {
962 let repositories_by_work_dir_abs_path = self
963 .repositories
964 .values()
965 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
966 .collect::<HashMap<_, _>>();
967
968 let mut tasks = Vec::new();
969 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
970 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
971 let restore = repository.update(cx, |repository, _| {
972 repository.restore_checkpoint(checkpoint)
973 });
974 tasks.push(async move { restore.await? });
975 }
976 }
977 cx.background_spawn(async move {
978 future::try_join_all(tasks).await?;
979 Ok(())
980 })
981 }
982
983 /// Compares two checkpoints, returning true if they are equal.
984 pub fn compare_checkpoints(
985 &self,
986 left: GitStoreCheckpoint,
987 mut right: GitStoreCheckpoint,
988 cx: &mut App,
989 ) -> Task<Result<bool>> {
990 let repositories_by_work_dir_abs_path = self
991 .repositories
992 .values()
993 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
994 .collect::<HashMap<_, _>>();
995
996 let mut tasks = Vec::new();
997 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
998 if let Some(right_checkpoint) = right
999 .checkpoints_by_work_dir_abs_path
1000 .remove(&work_dir_abs_path)
1001 {
1002 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1003 {
1004 let compare = repository.update(cx, |repository, _| {
1005 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1006 });
1007
1008 tasks.push(async move { compare.await? });
1009 }
1010 } else {
1011 return Task::ready(Ok(false));
1012 }
1013 }
1014 cx.background_spawn(async move {
1015 Ok(future::try_join_all(tasks)
1016 .await?
1017 .into_iter()
1018 .all(|result| result))
1019 })
1020 }
1021
1022 /// Blames a buffer.
1023 pub fn blame_buffer(
1024 &self,
1025 buffer: &Entity<Buffer>,
1026 version: Option<clock::Global>,
1027 cx: &mut Context<Self>,
1028 ) -> Task<Result<Option<Blame>>> {
1029 let buffer = buffer.read(cx);
1030 let Some((repo, repo_path)) =
1031 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1032 else {
1033 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1034 };
1035 let content = match &version {
1036 Some(version) => buffer.rope_for_version(version),
1037 None => buffer.as_rope().clone(),
1038 };
1039 let line_ending = buffer.line_ending();
1040 let version = version.unwrap_or(buffer.version());
1041 let buffer_id = buffer.remote_id();
1042
1043 let repo = repo.downgrade();
1044 cx.spawn(async move |_, cx| {
1045 let repository_state = repo
1046 .update(cx, |repo, _| repo.repository_state.clone())?
1047 .await
1048 .map_err(|err| anyhow::anyhow!(err))?;
1049 match repository_state {
1050 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1051 .blame(repo_path.clone(), content, line_ending)
1052 .await
1053 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1054 .map(Some),
1055 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1056 let response = client
1057 .request(proto::BlameBuffer {
1058 project_id: project_id.to_proto(),
1059 buffer_id: buffer_id.into(),
1060 version: serialize_version(&version),
1061 })
1062 .await?;
1063 Ok(deserialize_blame_buffer_response(response))
1064 }
1065 }
1066 })
1067 }
1068
1069 pub fn file_history(
1070 &self,
1071 repo: &Entity<Repository>,
1072 path: RepoPath,
1073 cx: &mut App,
1074 ) -> Task<Result<git::repository::FileHistory>> {
1075 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1076
1077 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1078 }
1079
1080 pub fn file_history_paginated(
1081 &self,
1082 repo: &Entity<Repository>,
1083 path: RepoPath,
1084 skip: usize,
1085 limit: Option<usize>,
1086 cx: &mut App,
1087 ) -> Task<Result<git::repository::FileHistory>> {
1088 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1089
1090 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1091 }
1092
1093 pub fn get_permalink_to_line(
1094 &self,
1095 buffer: &Entity<Buffer>,
1096 selection: Range<u32>,
1097 cx: &mut App,
1098 ) -> Task<Result<url::Url>> {
1099 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1100 return Task::ready(Err(anyhow!("buffer has no file")));
1101 };
1102
1103 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1104 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1105 cx,
1106 ) else {
1107 // If we're not in a Git repo, check whether this is a Rust source
1108 // file in the Cargo registry (presumably opened with go-to-definition
1109 // from a normal Rust file). If so, we can put together a permalink
1110 // using crate metadata.
1111 if buffer
1112 .read(cx)
1113 .language()
1114 .is_none_or(|lang| lang.name() != "Rust".into())
1115 {
1116 return Task::ready(Err(anyhow!("no permalink available")));
1117 }
1118 let file_path = file.worktree.read(cx).absolutize(&file.path);
1119 return cx.spawn(async move |cx| {
1120 let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
1121 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1122 .context("no permalink available")
1123 });
1124 };
1125
1126 let buffer_id = buffer.read(cx).remote_id();
1127 let branch = repo.read(cx).branch.clone();
1128 let remote = branch
1129 .as_ref()
1130 .and_then(|b| b.upstream.as_ref())
1131 .and_then(|b| b.remote_name())
1132 .unwrap_or("origin")
1133 .to_string();
1134
1135 let rx = repo.update(cx, |repo, _| {
1136 repo.send_job(None, move |state, cx| async move {
1137 match state {
1138 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1139 let origin_url = backend
1140 .remote_url(&remote)
1141 .await
1142 .with_context(|| format!("remote \"{remote}\" not found"))?;
1143
1144 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1145
1146 let provider_registry =
1147 cx.update(GitHostingProviderRegistry::default_global);
1148
1149 let (provider, remote) =
1150 parse_git_remote_url(provider_registry, &origin_url)
1151 .context("parsing Git remote URL")?;
1152
1153 Ok(provider.build_permalink(
1154 remote,
1155 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1156 ))
1157 }
1158 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1159 let response = client
1160 .request(proto::GetPermalinkToLine {
1161 project_id: project_id.to_proto(),
1162 buffer_id: buffer_id.into(),
1163 selection: Some(proto::Range {
1164 start: selection.start as u64,
1165 end: selection.end as u64,
1166 }),
1167 })
1168 .await?;
1169
1170 url::Url::parse(&response.permalink).context("failed to parse permalink")
1171 }
1172 }
1173 })
1174 });
1175 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1176 }
1177
1178 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1179 match &self.state {
1180 GitStoreState::Local {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client
1184 .as_ref()
1185 .map(|state| (state.client.clone(), state.project_id)),
1186 GitStoreState::Remote {
1187 downstream: downstream_client,
1188 ..
1189 } => downstream_client.clone(),
1190 }
1191 }
1192
1193 fn upstream_client(&self) -> Option<AnyProtoClient> {
1194 match &self.state {
1195 GitStoreState::Local { .. } => None,
1196 GitStoreState::Remote {
1197 upstream_client, ..
1198 } => Some(upstream_client.clone()),
1199 }
1200 }
1201
1202 fn on_worktree_store_event(
1203 &mut self,
1204 worktree_store: Entity<WorktreeStore>,
1205 event: &WorktreeStoreEvent,
1206 cx: &mut Context<Self>,
1207 ) {
1208 let GitStoreState::Local {
1209 project_environment,
1210 downstream,
1211 next_repository_id,
1212 fs,
1213 } = &self.state
1214 else {
1215 return;
1216 };
1217
1218 match event {
1219 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1220 if let Some(worktree) = self
1221 .worktree_store
1222 .read(cx)
1223 .worktree_for_id(*worktree_id, cx)
1224 {
1225 let paths_by_git_repo =
1226 self.process_updated_entries(&worktree, updated_entries, cx);
1227 let downstream = downstream
1228 .as_ref()
1229 .map(|downstream| downstream.updates_tx.clone());
1230 cx.spawn(async move |_, cx| {
1231 let paths_by_git_repo = paths_by_git_repo.await;
1232 for (repo, paths) in paths_by_git_repo {
1233 repo.update(cx, |repo, cx| {
1234 repo.paths_changed(paths, downstream.clone(), cx);
1235 });
1236 }
1237 })
1238 .detach();
1239 }
1240 }
1241 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1242 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1243 else {
1244 return;
1245 };
1246 if !worktree.read(cx).is_visible() {
1247 log::debug!(
1248 "not adding repositories for local worktree {:?} because it's not visible",
1249 worktree.read(cx).abs_path()
1250 );
1251 return;
1252 }
1253 self.update_repositories_from_worktree(
1254 *worktree_id,
1255 project_environment.clone(),
1256 next_repository_id.clone(),
1257 downstream
1258 .as_ref()
1259 .map(|downstream| downstream.updates_tx.clone()),
1260 changed_repos.clone(),
1261 fs.clone(),
1262 cx,
1263 );
1264 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1265 }
1266 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1267 let repos_without_worktree: Vec<RepositoryId> = self
1268 .worktree_ids
1269 .iter_mut()
1270 .filter_map(|(repo_id, worktree_ids)| {
1271 worktree_ids.remove(worktree_id);
1272 if worktree_ids.is_empty() {
1273 Some(*repo_id)
1274 } else {
1275 None
1276 }
1277 })
1278 .collect();
1279 let is_active_repo_removed = repos_without_worktree
1280 .iter()
1281 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1282
1283 for repo_id in repos_without_worktree {
1284 self.repositories.remove(&repo_id);
1285 self.worktree_ids.remove(&repo_id);
1286 if let Some(updates_tx) =
1287 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1288 {
1289 updates_tx
1290 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1291 .ok();
1292 }
1293 }
1294
1295 if is_active_repo_removed {
1296 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1297 self.active_repo_id = Some(repo_id);
1298 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1299 } else {
1300 self.active_repo_id = None;
1301 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1302 }
1303 }
1304 }
1305 _ => {}
1306 }
1307 }
1308 fn on_repository_event(
1309 &mut self,
1310 repo: Entity<Repository>,
1311 event: &RepositoryEvent,
1312 cx: &mut Context<Self>,
1313 ) {
1314 let id = repo.read(cx).id;
1315 let repo_snapshot = repo.read(cx).snapshot.clone();
1316 for (buffer_id, diff) in self.diffs.iter() {
1317 if let Some((buffer_repo, repo_path)) =
1318 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1319 && buffer_repo == repo
1320 {
1321 diff.update(cx, |diff, cx| {
1322 if let Some(conflict_set) = &diff.conflict_set {
1323 let conflict_status_changed =
1324 conflict_set.update(cx, |conflict_set, cx| {
1325 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1326 conflict_set.set_has_conflict(has_conflict, cx)
1327 })?;
1328 if conflict_status_changed {
1329 let buffer_store = self.buffer_store.read(cx);
1330 if let Some(buffer) = buffer_store.get(*buffer_id) {
1331 let _ = diff
1332 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1333 }
1334 }
1335 }
1336 anyhow::Ok(())
1337 })
1338 .ok();
1339 }
1340 }
1341 cx.emit(GitStoreEvent::RepositoryUpdated(
1342 id,
1343 event.clone(),
1344 self.active_repo_id == Some(id),
1345 ))
1346 }
1347
1348 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1349 cx.emit(GitStoreEvent::JobsUpdated)
1350 }
1351
1352 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1353 fn update_repositories_from_worktree(
1354 &mut self,
1355 worktree_id: WorktreeId,
1356 project_environment: Entity<ProjectEnvironment>,
1357 next_repository_id: Arc<AtomicU64>,
1358 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1359 updated_git_repositories: UpdatedGitRepositoriesSet,
1360 fs: Arc<dyn Fs>,
1361 cx: &mut Context<Self>,
1362 ) {
1363 let mut removed_ids = Vec::new();
1364 for update in updated_git_repositories.iter() {
1365 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1366 let existing_work_directory_abs_path =
1367 repo.read(cx).work_directory_abs_path.clone();
1368 Some(&existing_work_directory_abs_path)
1369 == update.old_work_directory_abs_path.as_ref()
1370 || Some(&existing_work_directory_abs_path)
1371 == update.new_work_directory_abs_path.as_ref()
1372 }) {
1373 let repo_id = *id;
1374 if let Some(new_work_directory_abs_path) =
1375 update.new_work_directory_abs_path.clone()
1376 {
1377 self.worktree_ids
1378 .entry(repo_id)
1379 .or_insert_with(HashSet::new)
1380 .insert(worktree_id);
1381 existing.update(cx, |existing, cx| {
1382 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1383 existing.schedule_scan(updates_tx.clone(), cx);
1384 });
1385 } else {
1386 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1387 worktree_ids.remove(&worktree_id);
1388 if worktree_ids.is_empty() {
1389 removed_ids.push(repo_id);
1390 }
1391 }
1392 }
1393 } else if let UpdatedGitRepository {
1394 new_work_directory_abs_path: Some(work_directory_abs_path),
1395 dot_git_abs_path: Some(dot_git_abs_path),
1396 repository_dir_abs_path: Some(_repository_dir_abs_path),
1397 common_dir_abs_path: Some(_common_dir_abs_path),
1398 ..
1399 } = update
1400 {
1401 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1402 let git_store = cx.weak_entity();
1403 let repo = cx.new(|cx| {
1404 let mut repo = Repository::local(
1405 id,
1406 work_directory_abs_path.clone(),
1407 dot_git_abs_path.clone(),
1408 project_environment.downgrade(),
1409 fs.clone(),
1410 git_store,
1411 cx,
1412 );
1413 if let Some(updates_tx) = updates_tx.as_ref() {
1414 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1415 updates_tx
1416 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1417 .ok();
1418 }
1419 repo.schedule_scan(updates_tx.clone(), cx);
1420 repo
1421 });
1422 self._subscriptions
1423 .push(cx.subscribe(&repo, Self::on_repository_event));
1424 self._subscriptions
1425 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1426 self.repositories.insert(id, repo);
1427 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1428 cx.emit(GitStoreEvent::RepositoryAdded);
1429 self.active_repo_id.get_or_insert_with(|| {
1430 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1431 id
1432 });
1433 }
1434 }
1435
1436 for id in removed_ids {
1437 if self.active_repo_id == Some(id) {
1438 self.active_repo_id = None;
1439 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1440 }
1441 self.repositories.remove(&id);
1442 if let Some(updates_tx) = updates_tx.as_ref() {
1443 updates_tx
1444 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1445 .ok();
1446 }
1447 }
1448 }
1449
1450 fn on_buffer_store_event(
1451 &mut self,
1452 _: Entity<BufferStore>,
1453 event: &BufferStoreEvent,
1454 cx: &mut Context<Self>,
1455 ) {
1456 match event {
1457 BufferStoreEvent::BufferAdded(buffer) => {
1458 cx.subscribe(buffer, |this, buffer, event, cx| {
1459 if let BufferEvent::LanguageChanged(_) = event {
1460 let buffer_id = buffer.read(cx).remote_id();
1461 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1462 diff_state.update(cx, |diff_state, cx| {
1463 diff_state.buffer_language_changed(buffer, cx);
1464 });
1465 }
1466 }
1467 })
1468 .detach();
1469 }
1470 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1471 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1472 diffs.remove(buffer_id);
1473 }
1474 }
1475 BufferStoreEvent::BufferDropped(buffer_id) => {
1476 self.diffs.remove(buffer_id);
1477 for diffs in self.shared_diffs.values_mut() {
1478 diffs.remove(buffer_id);
1479 }
1480 }
1481 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1482 // Whenever a buffer's file path changes, it's possible that the
1483 // new path is actually a path that is being tracked by a git
1484 // repository. In that case, we'll want to update the buffer's
1485 // `BufferDiffState`, in case it already has one.
1486 let buffer_id = buffer.read(cx).remote_id();
1487 let diff_state = self.diffs.get(&buffer_id);
1488 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1489
1490 if let Some(diff_state) = diff_state
1491 && let Some((repo, repo_path)) = repo
1492 {
1493 let buffer = buffer.clone();
1494 let diff_state = diff_state.clone();
1495
1496 cx.spawn(async move |_git_store, cx| {
1497 async {
1498 let diff_bases_change = repo
1499 .update(cx, |repo, cx| {
1500 repo.load_committed_text(buffer_id, repo_path, cx)
1501 })
1502 .await?;
1503
1504 diff_state.update(cx, |diff_state, cx| {
1505 let buffer_snapshot = buffer.read(cx).text_snapshot();
1506 diff_state.diff_bases_changed(
1507 buffer_snapshot,
1508 Some(diff_bases_change),
1509 cx,
1510 );
1511 });
1512 anyhow::Ok(())
1513 }
1514 .await
1515 .log_err();
1516 })
1517 .detach();
1518 }
1519 }
1520 _ => {}
1521 }
1522 }
1523
1524 pub fn recalculate_buffer_diffs(
1525 &mut self,
1526 buffers: Vec<Entity<Buffer>>,
1527 cx: &mut Context<Self>,
1528 ) -> impl Future<Output = ()> + use<> {
1529 let mut futures = Vec::new();
1530 for buffer in buffers {
1531 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1532 let buffer = buffer.read(cx).text_snapshot();
1533 diff_state.update(cx, |diff_state, cx| {
1534 diff_state.recalculate_diffs(buffer.clone(), cx);
1535 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1536 });
1537 futures.push(diff_state.update(cx, |diff_state, cx| {
1538 diff_state
1539 .reparse_conflict_markers(buffer, cx)
1540 .map(|_| {})
1541 .boxed()
1542 }));
1543 }
1544 }
1545 async move {
1546 futures::future::join_all(futures).await;
1547 }
1548 }
1549
1550 fn on_buffer_diff_event(
1551 &mut self,
1552 diff: Entity<buffer_diff::BufferDiff>,
1553 event: &BufferDiffEvent,
1554 cx: &mut Context<Self>,
1555 ) {
1556 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1557 let buffer_id = diff.read(cx).buffer_id;
1558 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1559 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1560 diff_state.hunk_staging_operation_count += 1;
1561 diff_state.hunk_staging_operation_count
1562 });
1563 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1564 let recv = repo.update(cx, |repo, cx| {
1565 log::debug!("hunks changed for {}", path.as_unix_str());
1566 repo.spawn_set_index_text_job(
1567 path,
1568 new_index_text.as_ref().map(|rope| rope.to_string()),
1569 Some(hunk_staging_operation_count),
1570 cx,
1571 )
1572 });
1573 let diff = diff.downgrade();
1574 cx.spawn(async move |this, cx| {
1575 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1576 diff.update(cx, |diff, cx| {
1577 diff.clear_pending_hunks(cx);
1578 })
1579 .ok();
1580 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1581 .ok();
1582 }
1583 })
1584 .detach();
1585 }
1586 }
1587 }
1588 }
1589
1590 fn local_worktree_git_repos_changed(
1591 &mut self,
1592 worktree: Entity<Worktree>,
1593 changed_repos: &UpdatedGitRepositoriesSet,
1594 cx: &mut Context<Self>,
1595 ) {
1596 log::debug!("local worktree repos changed");
1597 debug_assert!(worktree.read(cx).is_local());
1598
1599 for repository in self.repositories.values() {
1600 repository.update(cx, |repository, cx| {
1601 let repo_abs_path = &repository.work_directory_abs_path;
1602 if changed_repos.iter().any(|update| {
1603 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1604 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1605 }) {
1606 repository.reload_buffer_diff_bases(cx);
1607 }
1608 });
1609 }
1610 }
1611
1612 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1613 &self.repositories
1614 }
1615
1616 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1617 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1618 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1619 Some(status.status)
1620 }
1621
1622 pub fn repository_and_path_for_buffer_id(
1623 &self,
1624 buffer_id: BufferId,
1625 cx: &App,
1626 ) -> Option<(Entity<Repository>, RepoPath)> {
1627 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1628 let project_path = buffer.read(cx).project_path(cx)?;
1629 self.repository_and_path_for_project_path(&project_path, cx)
1630 }
1631
1632 pub fn repository_and_path_for_project_path(
1633 &self,
1634 path: &ProjectPath,
1635 cx: &App,
1636 ) -> Option<(Entity<Repository>, RepoPath)> {
1637 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1638 self.repositories
1639 .values()
1640 .filter_map(|repo| {
1641 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1642 Some((repo.clone(), repo_path))
1643 })
1644 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1645 }
1646
1647 pub fn git_init(
1648 &self,
1649 path: Arc<Path>,
1650 fallback_branch_name: String,
1651 cx: &App,
1652 ) -> Task<Result<()>> {
1653 match &self.state {
1654 GitStoreState::Local { fs, .. } => {
1655 let fs = fs.clone();
1656 cx.background_executor()
1657 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1658 }
1659 GitStoreState::Remote {
1660 upstream_client,
1661 upstream_project_id: project_id,
1662 ..
1663 } => {
1664 let client = upstream_client.clone();
1665 let project_id = *project_id;
1666 cx.background_executor().spawn(async move {
1667 client
1668 .request(proto::GitInit {
1669 project_id: project_id,
1670 abs_path: path.to_string_lossy().into_owned(),
1671 fallback_branch_name,
1672 })
1673 .await?;
1674 Ok(())
1675 })
1676 }
1677 }
1678 }
1679
1680 pub fn git_clone(
1681 &self,
1682 repo: String,
1683 path: impl Into<Arc<std::path::Path>>,
1684 cx: &App,
1685 ) -> Task<Result<()>> {
1686 let path = path.into();
1687 match &self.state {
1688 GitStoreState::Local { fs, .. } => {
1689 let fs = fs.clone();
1690 cx.background_executor()
1691 .spawn(async move { fs.git_clone(&repo, &path).await })
1692 }
1693 GitStoreState::Remote {
1694 upstream_client,
1695 upstream_project_id,
1696 ..
1697 } => {
1698 if upstream_client.is_via_collab() {
1699 return Task::ready(Err(anyhow!(
1700 "Git Clone isn't supported for project guests"
1701 )));
1702 }
1703 let request = upstream_client.request(proto::GitClone {
1704 project_id: *upstream_project_id,
1705 abs_path: path.to_string_lossy().into_owned(),
1706 remote_repo: repo,
1707 });
1708
1709 cx.background_spawn(async move {
1710 let result = request.await?;
1711
1712 match result.success {
1713 true => Ok(()),
1714 false => Err(anyhow!("Git Clone failed")),
1715 }
1716 })
1717 }
1718 }
1719 }
1720
1721 async fn handle_update_repository(
1722 this: Entity<Self>,
1723 envelope: TypedEnvelope<proto::UpdateRepository>,
1724 mut cx: AsyncApp,
1725 ) -> Result<()> {
1726 this.update(&mut cx, |this, cx| {
1727 let path_style = this.worktree_store.read(cx).path_style();
1728 let mut update = envelope.payload;
1729
1730 let id = RepositoryId::from_proto(update.id);
1731 let client = this.upstream_client().context("no upstream client")?;
1732
1733 let mut repo_subscription = None;
1734 let repo = this.repositories.entry(id).or_insert_with(|| {
1735 let git_store = cx.weak_entity();
1736 let repo = cx.new(|cx| {
1737 Repository::remote(
1738 id,
1739 Path::new(&update.abs_path).into(),
1740 path_style,
1741 ProjectId(update.project_id),
1742 client,
1743 git_store,
1744 cx,
1745 )
1746 });
1747 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1748 cx.emit(GitStoreEvent::RepositoryAdded);
1749 repo
1750 });
1751 this._subscriptions.extend(repo_subscription);
1752
1753 repo.update(cx, {
1754 let update = update.clone();
1755 |repo, cx| repo.apply_remote_update(update, cx)
1756 })?;
1757
1758 this.active_repo_id.get_or_insert_with(|| {
1759 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1760 id
1761 });
1762
1763 if let Some((client, project_id)) = this.downstream_client() {
1764 update.project_id = project_id.to_proto();
1765 client.send(update).log_err();
1766 }
1767 Ok(())
1768 })
1769 }
1770
1771 async fn handle_remove_repository(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::RemoveRepository>,
1774 mut cx: AsyncApp,
1775 ) -> Result<()> {
1776 this.update(&mut cx, |this, cx| {
1777 let mut update = envelope.payload;
1778 let id = RepositoryId::from_proto(update.id);
1779 this.repositories.remove(&id);
1780 if let Some((client, project_id)) = this.downstream_client() {
1781 update.project_id = project_id.to_proto();
1782 client.send(update).log_err();
1783 }
1784 if this.active_repo_id == Some(id) {
1785 this.active_repo_id = None;
1786 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1787 }
1788 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1789 });
1790 Ok(())
1791 }
1792
1793 async fn handle_git_init(
1794 this: Entity<Self>,
1795 envelope: TypedEnvelope<proto::GitInit>,
1796 cx: AsyncApp,
1797 ) -> Result<proto::Ack> {
1798 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1799 let name = envelope.payload.fallback_branch_name;
1800 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1801 .await?;
1802
1803 Ok(proto::Ack {})
1804 }
1805
1806 async fn handle_git_clone(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::GitClone>,
1809 cx: AsyncApp,
1810 ) -> Result<proto::GitCloneResponse> {
1811 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1812 let repo_name = envelope.payload.remote_repo;
1813 let result = cx
1814 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
1815 .await;
1816
1817 Ok(proto::GitCloneResponse {
1818 success: result.is_ok(),
1819 })
1820 }
1821
1822 async fn handle_fetch(
1823 this: Entity<Self>,
1824 envelope: TypedEnvelope<proto::Fetch>,
1825 mut cx: AsyncApp,
1826 ) -> Result<proto::RemoteMessageResponse> {
1827 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1828 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1829 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1830 let askpass_id = envelope.payload.askpass_id;
1831
1832 let askpass = make_remote_delegate(
1833 this,
1834 envelope.payload.project_id,
1835 repository_id,
1836 askpass_id,
1837 &mut cx,
1838 );
1839
1840 let remote_output = repository_handle
1841 .update(&mut cx, |repository_handle, cx| {
1842 repository_handle.fetch(fetch_options, askpass, cx)
1843 })
1844 .await??;
1845
1846 Ok(proto::RemoteMessageResponse {
1847 stdout: remote_output.stdout,
1848 stderr: remote_output.stderr,
1849 })
1850 }
1851
1852 async fn handle_push(
1853 this: Entity<Self>,
1854 envelope: TypedEnvelope<proto::Push>,
1855 mut cx: AsyncApp,
1856 ) -> Result<proto::RemoteMessageResponse> {
1857 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1858 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1859
1860 let askpass_id = envelope.payload.askpass_id;
1861 let askpass = make_remote_delegate(
1862 this,
1863 envelope.payload.project_id,
1864 repository_id,
1865 askpass_id,
1866 &mut cx,
1867 );
1868
1869 let options = envelope
1870 .payload
1871 .options
1872 .as_ref()
1873 .map(|_| match envelope.payload.options() {
1874 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1875 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1876 });
1877
1878 let branch_name = envelope.payload.branch_name.into();
1879 let remote_branch_name = envelope.payload.remote_branch_name.into();
1880 let remote_name = envelope.payload.remote_name.into();
1881
1882 let remote_output = repository_handle
1883 .update(&mut cx, |repository_handle, cx| {
1884 repository_handle.push(
1885 branch_name,
1886 remote_branch_name,
1887 remote_name,
1888 options,
1889 askpass,
1890 cx,
1891 )
1892 })
1893 .await??;
1894 Ok(proto::RemoteMessageResponse {
1895 stdout: remote_output.stdout,
1896 stderr: remote_output.stderr,
1897 })
1898 }
1899
1900 async fn handle_pull(
1901 this: Entity<Self>,
1902 envelope: TypedEnvelope<proto::Pull>,
1903 mut cx: AsyncApp,
1904 ) -> Result<proto::RemoteMessageResponse> {
1905 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1906 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1907 let askpass_id = envelope.payload.askpass_id;
1908 let askpass = make_remote_delegate(
1909 this,
1910 envelope.payload.project_id,
1911 repository_id,
1912 askpass_id,
1913 &mut cx,
1914 );
1915
1916 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1917 let remote_name = envelope.payload.remote_name.into();
1918 let rebase = envelope.payload.rebase;
1919
1920 let remote_message = repository_handle
1921 .update(&mut cx, |repository_handle, cx| {
1922 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1923 })
1924 .await??;
1925
1926 Ok(proto::RemoteMessageResponse {
1927 stdout: remote_message.stdout,
1928 stderr: remote_message.stderr,
1929 })
1930 }
1931
1932 async fn handle_stage(
1933 this: Entity<Self>,
1934 envelope: TypedEnvelope<proto::Stage>,
1935 mut cx: AsyncApp,
1936 ) -> Result<proto::Ack> {
1937 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1938 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1939
1940 let entries = envelope
1941 .payload
1942 .paths
1943 .into_iter()
1944 .map(|path| RepoPath::new(&path))
1945 .collect::<Result<Vec<_>>>()?;
1946
1947 repository_handle
1948 .update(&mut cx, |repository_handle, cx| {
1949 repository_handle.stage_entries(entries, cx)
1950 })
1951 .await?;
1952 Ok(proto::Ack {})
1953 }
1954
1955 async fn handle_unstage(
1956 this: Entity<Self>,
1957 envelope: TypedEnvelope<proto::Unstage>,
1958 mut cx: AsyncApp,
1959 ) -> Result<proto::Ack> {
1960 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1961 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1962
1963 let entries = envelope
1964 .payload
1965 .paths
1966 .into_iter()
1967 .map(|path| RepoPath::new(&path))
1968 .collect::<Result<Vec<_>>>()?;
1969
1970 repository_handle
1971 .update(&mut cx, |repository_handle, cx| {
1972 repository_handle.unstage_entries(entries, cx)
1973 })
1974 .await?;
1975
1976 Ok(proto::Ack {})
1977 }
1978
1979 async fn handle_stash(
1980 this: Entity<Self>,
1981 envelope: TypedEnvelope<proto::Stash>,
1982 mut cx: AsyncApp,
1983 ) -> Result<proto::Ack> {
1984 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1985 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1986
1987 let entries = envelope
1988 .payload
1989 .paths
1990 .into_iter()
1991 .map(|path| RepoPath::new(&path))
1992 .collect::<Result<Vec<_>>>()?;
1993
1994 repository_handle
1995 .update(&mut cx, |repository_handle, cx| {
1996 repository_handle.stash_entries(entries, cx)
1997 })
1998 .await?;
1999
2000 Ok(proto::Ack {})
2001 }
2002
2003 async fn handle_stash_pop(
2004 this: Entity<Self>,
2005 envelope: TypedEnvelope<proto::StashPop>,
2006 mut cx: AsyncApp,
2007 ) -> Result<proto::Ack> {
2008 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2009 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2010 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2011
2012 repository_handle
2013 .update(&mut cx, |repository_handle, cx| {
2014 repository_handle.stash_pop(stash_index, cx)
2015 })
2016 .await?;
2017
2018 Ok(proto::Ack {})
2019 }
2020
2021 async fn handle_stash_apply(
2022 this: Entity<Self>,
2023 envelope: TypedEnvelope<proto::StashApply>,
2024 mut cx: AsyncApp,
2025 ) -> Result<proto::Ack> {
2026 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2027 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2028 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, cx| {
2032 repository_handle.stash_apply(stash_index, cx)
2033 })
2034 .await?;
2035
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_stash_drop(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::StashDrop>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2047
2048 repository_handle
2049 .update(&mut cx, |repository_handle, cx| {
2050 repository_handle.stash_drop(stash_index, cx)
2051 })
2052 .await??;
2053
2054 Ok(proto::Ack {})
2055 }
2056
2057 async fn handle_set_index_text(
2058 this: Entity<Self>,
2059 envelope: TypedEnvelope<proto::SetIndexText>,
2060 mut cx: AsyncApp,
2061 ) -> Result<proto::Ack> {
2062 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2063 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2064 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2065
2066 repository_handle
2067 .update(&mut cx, |repository_handle, cx| {
2068 repository_handle.spawn_set_index_text_job(
2069 repo_path,
2070 envelope.payload.text,
2071 None,
2072 cx,
2073 )
2074 })
2075 .await??;
2076 Ok(proto::Ack {})
2077 }
2078
2079 async fn handle_run_hook(
2080 this: Entity<Self>,
2081 envelope: TypedEnvelope<proto::RunGitHook>,
2082 mut cx: AsyncApp,
2083 ) -> Result<proto::Ack> {
2084 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2085 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2086 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2087 repository_handle
2088 .update(&mut cx, |repository_handle, cx| {
2089 repository_handle.run_hook(hook, cx)
2090 })
2091 .await??;
2092 Ok(proto::Ack {})
2093 }
2094
2095 async fn handle_commit(
2096 this: Entity<Self>,
2097 envelope: TypedEnvelope<proto::Commit>,
2098 mut cx: AsyncApp,
2099 ) -> Result<proto::Ack> {
2100 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2101 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2102 let askpass_id = envelope.payload.askpass_id;
2103
2104 let askpass = make_remote_delegate(
2105 this,
2106 envelope.payload.project_id,
2107 repository_id,
2108 askpass_id,
2109 &mut cx,
2110 );
2111
2112 let message = SharedString::from(envelope.payload.message);
2113 let name = envelope.payload.name.map(SharedString::from);
2114 let email = envelope.payload.email.map(SharedString::from);
2115 let options = envelope.payload.options.unwrap_or_default();
2116
2117 repository_handle
2118 .update(&mut cx, |repository_handle, cx| {
2119 repository_handle.commit(
2120 message,
2121 name.zip(email),
2122 CommitOptions {
2123 amend: options.amend,
2124 signoff: options.signoff,
2125 },
2126 askpass,
2127 cx,
2128 )
2129 })
2130 .await??;
2131 Ok(proto::Ack {})
2132 }
2133
2134 async fn handle_get_remotes(
2135 this: Entity<Self>,
2136 envelope: TypedEnvelope<proto::GetRemotes>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::GetRemotesResponse> {
2139 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2140 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2141
2142 let branch_name = envelope.payload.branch_name;
2143 let is_push = envelope.payload.is_push;
2144
2145 let remotes = repository_handle
2146 .update(&mut cx, |repository_handle, _| {
2147 repository_handle.get_remotes(branch_name, is_push)
2148 })
2149 .await??;
2150
2151 Ok(proto::GetRemotesResponse {
2152 remotes: remotes
2153 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2156 })
2157 .collect::<Vec<_>>(),
2158 })
2159 }
2160
2161 async fn handle_get_worktrees(
2162 this: Entity<Self>,
2163 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2164 mut cx: AsyncApp,
2165 ) -> Result<proto::GitWorktreesResponse> {
2166 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2167 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2168
2169 let worktrees = repository_handle
2170 .update(&mut cx, |repository_handle, _| {
2171 repository_handle.worktrees()
2172 })
2173 .await??;
2174
2175 Ok(proto::GitWorktreesResponse {
2176 worktrees: worktrees
2177 .into_iter()
2178 .map(|worktree| worktree_to_proto(&worktree))
2179 .collect::<Vec<_>>(),
2180 })
2181 }
2182
2183 async fn handle_create_worktree(
2184 this: Entity<Self>,
2185 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2186 mut cx: AsyncApp,
2187 ) -> Result<proto::Ack> {
2188 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2189 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2190 let directory = PathBuf::from(envelope.payload.directory);
2191 let name = envelope.payload.name;
2192 let commit = envelope.payload.commit;
2193
2194 repository_handle
2195 .update(&mut cx, |repository_handle, _| {
2196 repository_handle.create_worktree(name, directory, commit)
2197 })
2198 .await??;
2199
2200 Ok(proto::Ack {})
2201 }
2202
2203 async fn handle_get_branches(
2204 this: Entity<Self>,
2205 envelope: TypedEnvelope<proto::GitGetBranches>,
2206 mut cx: AsyncApp,
2207 ) -> Result<proto::GitBranchesResponse> {
2208 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2209 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2210
2211 let branches = repository_handle
2212 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2213 .await??;
2214
2215 Ok(proto::GitBranchesResponse {
2216 branches: branches
2217 .into_iter()
2218 .map(|branch| branch_to_proto(&branch))
2219 .collect::<Vec<_>>(),
2220 })
2221 }

    async fn handle_get_default_branch(
2223 this: Entity<Self>,
2224 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::GetDefaultBranchResponse> {
2227 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2228 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2229
2230 let branch = repository_handle
2231 .update(&mut cx, |repository_handle, _| {
2232 repository_handle.default_branch(false)
2233 })
2234 .await??
2235 .map(Into::into);
2236
2237 Ok(proto::GetDefaultBranchResponse { branch })
2238 }

    async fn handle_create_branch(
2240 this: Entity<Self>,
2241 envelope: TypedEnvelope<proto::GitCreateBranch>,
2242 mut cx: AsyncApp,
2243 ) -> Result<proto::Ack> {
2244 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2245 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2246 let branch_name = envelope.payload.branch_name;
2247
2248 repository_handle
2249 .update(&mut cx, |repository_handle, _| {
2250 repository_handle.create_branch(branch_name, None)
2251 })
2252 .await??;
2253
2254 Ok(proto::Ack {})
2255 }
2256
2257 async fn handle_change_branch(
2258 this: Entity<Self>,
2259 envelope: TypedEnvelope<proto::GitChangeBranch>,
2260 mut cx: AsyncApp,
2261 ) -> Result<proto::Ack> {
2262 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2263 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2264 let branch_name = envelope.payload.branch_name;
2265
2266 repository_handle
2267 .update(&mut cx, |repository_handle, _| {
2268 repository_handle.change_branch(branch_name)
2269 })
2270 .await??;
2271
2272 Ok(proto::Ack {})
2273 }
2274
2275 async fn handle_rename_branch(
2276 this: Entity<Self>,
2277 envelope: TypedEnvelope<proto::GitRenameBranch>,
2278 mut cx: AsyncApp,
2279 ) -> Result<proto::Ack> {
2280 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2281 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2282 let branch = envelope.payload.branch;
2283 let new_name = envelope.payload.new_name;
2284
2285 repository_handle
2286 .update(&mut cx, |repository_handle, _| {
2287 repository_handle.rename_branch(branch, new_name)
2288 })
2289 .await??;
2290
2291 Ok(proto::Ack {})
2292 }
2293
2294 async fn handle_create_remote(
2295 this: Entity<Self>,
2296 envelope: TypedEnvelope<proto::GitCreateRemote>,
2297 mut cx: AsyncApp,
2298 ) -> Result<proto::Ack> {
2299 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2300 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2301 let remote_name = envelope.payload.remote_name;
2302 let remote_url = envelope.payload.remote_url;
2303
2304 repository_handle
2305 .update(&mut cx, |repository_handle, _| {
2306 repository_handle.create_remote(remote_name, remote_url)
2307 })
2308 .await??;
2309
2310 Ok(proto::Ack {})
2311 }
2312
2313 async fn handle_delete_branch(
2314 this: Entity<Self>,
2315 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2316 mut cx: AsyncApp,
2317 ) -> Result<proto::Ack> {
2318 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2319 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2320 let branch_name = envelope.payload.branch_name;
2321
2322 repository_handle
2323 .update(&mut cx, |repository_handle, _| {
2324 repository_handle.delete_branch(branch_name)
2325 })
2326 .await??;
2327
2328 Ok(proto::Ack {})
2329 }
2330
2331 async fn handle_remove_remote(
2332 this: Entity<Self>,
2333 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2334 mut cx: AsyncApp,
2335 ) -> Result<proto::Ack> {
2336 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2337 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2338 let remote_name = envelope.payload.remote_name;
2339
2340 repository_handle
2341 .update(&mut cx, |repository_handle, _| {
2342 repository_handle.remove_remote(remote_name)
2343 })
2344 .await??;
2345
2346 Ok(proto::Ack {})
2347 }
2348
2349 async fn handle_show(
2350 this: Entity<Self>,
2351 envelope: TypedEnvelope<proto::GitShow>,
2352 mut cx: AsyncApp,
2353 ) -> Result<proto::GitCommitDetails> {
2354 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2355 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2356
2357 let commit = repository_handle
2358 .update(&mut cx, |repository_handle, _| {
2359 repository_handle.show(envelope.payload.commit)
2360 })
2361 .await??;
2362 Ok(proto::GitCommitDetails {
2363 sha: commit.sha.into(),
2364 message: commit.message.into(),
2365 commit_timestamp: commit.commit_timestamp,
2366 author_email: commit.author_email.into(),
2367 author_name: commit.author_name.into(),
2368 })
2369 }
2370
2371 async fn handle_load_commit_diff(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::LoadCommitDiffResponse> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378
2379 let commit_diff = repository_handle
2380 .update(&mut cx, |repository_handle, _| {
2381 repository_handle.load_commit_diff(envelope.payload.commit)
2382 })
2383 .await??;
2384 Ok(proto::LoadCommitDiffResponse {
2385 files: commit_diff
2386 .files
2387 .into_iter()
2388 .map(|file| proto::CommitFile {
2389 path: file.path.to_proto(),
2390 old_text: file.old_text,
2391 new_text: file.new_text,
2392 is_binary: file.is_binary,
2393 })
2394 .collect(),
2395 })
2396 }
2397
2398 async fn handle_file_history(
2399 this: Entity<Self>,
2400 envelope: TypedEnvelope<proto::GitFileHistory>,
2401 mut cx: AsyncApp,
2402 ) -> Result<proto::GitFileHistoryResponse> {
2403 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2404 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2405 let path = RepoPath::from_proto(&envelope.payload.path)?;
2406 let skip = envelope.payload.skip as usize;
2407 let limit = envelope.payload.limit.map(|l| l as usize);
2408
2409 let file_history = repository_handle
2410 .update(&mut cx, |repository_handle, _| {
2411 repository_handle.file_history_paginated(path, skip, limit)
2412 })
2413 .await??;
2414
2415 Ok(proto::GitFileHistoryResponse {
2416 entries: file_history
2417 .entries
2418 .into_iter()
2419 .map(|entry| proto::FileHistoryEntry {
2420 sha: entry.sha.to_string(),
2421 subject: entry.subject.to_string(),
2422 message: entry.message.to_string(),
2423 commit_timestamp: entry.commit_timestamp,
2424 author_name: entry.author_name.to_string(),
2425 author_email: entry.author_email.to_string(),
2426 })
2427 .collect(),
2428 path: file_history.path.to_proto(),
2429 })
2430 }
2431
2432 async fn handle_reset(
2433 this: Entity<Self>,
2434 envelope: TypedEnvelope<proto::GitReset>,
2435 mut cx: AsyncApp,
2436 ) -> Result<proto::Ack> {
2437 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2438 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2439
2440 let mode = match envelope.payload.mode() {
2441 git_reset::ResetMode::Soft => ResetMode::Soft,
2442 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2443 };
2444
2445 repository_handle
2446 .update(&mut cx, |repository_handle, cx| {
2447 repository_handle.reset(envelope.payload.commit, mode, cx)
2448 })
2449 .await??;
2450 Ok(proto::Ack {})
2451 }
2452
2453 async fn handle_checkout_files(
2454 this: Entity<Self>,
2455 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2456 mut cx: AsyncApp,
2457 ) -> Result<proto::Ack> {
2458 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2459 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2460 let paths = envelope
2461 .payload
2462 .paths
2463 .iter()
2464 .map(|s| RepoPath::from_proto(s))
2465 .collect::<Result<Vec<_>>>()?;
2466
2467 repository_handle
2468 .update(&mut cx, |repository_handle, cx| {
2469 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2470 })
2471 .await?;
2472 Ok(proto::Ack {})
2473 }
2474
2475 async fn handle_open_commit_message_buffer(
2476 this: Entity<Self>,
2477 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2478 mut cx: AsyncApp,
2479 ) -> Result<proto::OpenBufferResponse> {
2480 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2481 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2482 let buffer = repository
2483 .update(&mut cx, |repository, cx| {
2484 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2485 })
2486 .await?;
2487
2488 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2489 this.update(&mut cx, |this, cx| {
2490 this.buffer_store.update(cx, |buffer_store, cx| {
2491 buffer_store
2492 .create_buffer_for_peer(
2493 &buffer,
2494 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2495 cx,
2496 )
2497 .detach_and_log_err(cx);
2498 })
2499 });
2500
2501 Ok(proto::OpenBufferResponse {
2502 buffer_id: buffer_id.to_proto(),
2503 })
2504 }
2505
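    /// Handles an `AskPassRequest` from a peer by forwarding the prompt to the askpass
    /// delegate registered under `askpass_id` for this repository, then re-inserting the
    /// delegate so it can service further prompts for the same remote operation.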
2506 async fn handle_askpass(
2507 this: Entity<Self>,
2508 envelope: TypedEnvelope<proto::AskPassRequest>,
2509 mut cx: AsyncApp,
2510 ) -> Result<proto::AskPassResponse> {
2511 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2512 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2513
2514 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2515 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2516 debug_panic!("no askpass found");
2517 anyhow::bail!("no askpass found");
2518 };
2519
2520 let response = askpass
2521 .ask_password(envelope.payload.prompt)
2522 .await
2523 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2524
2525 delegates
2526 .lock()
2527 .insert(envelope.payload.askpass_id, askpass);
2528
        // The askpass response is returned to the requesting peer in plaintext, so we decrypt
        // it here; `IKnowWhatIAmDoingAndIHaveReadTheDocs` is the explicit acknowledgement of that.
2530 Ok(proto::AskPassResponse {
2531 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2532 })
2533 }
2534
2535 async fn handle_check_for_pushed_commits(
2536 this: Entity<Self>,
2537 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2538 mut cx: AsyncApp,
2539 ) -> Result<proto::CheckForPushedCommitsResponse> {
2540 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2541 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2542
2543 let branches = repository_handle
2544 .update(&mut cx, |repository_handle, _| {
2545 repository_handle.check_for_pushed_commits()
2546 })
2547 .await??;
2548 Ok(proto::CheckForPushedCommitsResponse {
2549 pushed_to: branches
2550 .into_iter()
2551 .map(|commit| commit.to_string())
2552 .collect(),
2553 })
2554 }
2555
2556 async fn handle_git_diff(
2557 this: Entity<Self>,
2558 envelope: TypedEnvelope<proto::GitDiff>,
2559 mut cx: AsyncApp,
2560 ) -> Result<proto::GitDiffResponse> {
2561 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2562 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2563 let diff_type = match envelope.payload.diff_type() {
2564 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2565 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2566 };
2567
2568 let mut diff = repository_handle
2569 .update(&mut cx, |repository_handle, cx| {
2570 repository_handle.diff(diff_type, cx)
2571 })
2572 .await??;
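        // Limit the size of the diff sent back to the client, truncating on a character
        // boundary so the result remains valid UTF-8.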
2573 const ONE_MB: usize = 1_000_000;
2574 if diff.len() > ONE_MB {
2575 diff = diff.chars().take(ONE_MB).collect()
2576 }
2577
2578 Ok(proto::GitDiffResponse { diff })
2579 }
2580
2581 async fn handle_tree_diff(
2582 this: Entity<Self>,
2583 request: TypedEnvelope<proto::GetTreeDiff>,
2584 mut cx: AsyncApp,
2585 ) -> Result<proto::GetTreeDiffResponse> {
2586 let repository_id = RepositoryId(request.payload.repository_id);
2587 let diff_type = if request.payload.is_merge {
2588 DiffTreeType::MergeBase {
2589 base: request.payload.base.into(),
2590 head: request.payload.head.into(),
2591 }
2592 } else {
2593 DiffTreeType::Since {
2594 base: request.payload.base.into(),
2595 head: request.payload.head.into(),
2596 }
2597 };
2598
2599 let diff = this
2600 .update(&mut cx, |this, cx| {
2601 let repository = this.repositories().get(&repository_id)?;
2602 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2603 })
2604 .context("missing repository")?
2605 .await??;
2606
2607 Ok(proto::GetTreeDiffResponse {
2608 entries: diff
2609 .entries
2610 .into_iter()
2611 .map(|(path, status)| proto::TreeDiffStatus {
2612 path: path.as_ref().to_proto(),
2613 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2615 TreeDiffStatus::Modified { .. } => {
2616 proto::tree_diff_status::Status::Modified.into()
2617 }
2618 TreeDiffStatus::Deleted { .. } => {
2619 proto::tree_diff_status::Status::Deleted.into()
2620 }
2621 },
2622 oid: match status {
2623 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2624 Some(old.to_string())
2625 }
2626 TreeDiffStatus::Added => None,
2627 },
2628 })
2629 .collect(),
2630 })
2631 }
2632
2633 async fn handle_get_blob_content(
2634 this: Entity<Self>,
2635 request: TypedEnvelope<proto::GetBlobContent>,
2636 mut cx: AsyncApp,
2637 ) -> Result<proto::GetBlobContentResponse> {
2638 let oid = git::Oid::from_str(&request.payload.oid)?;
2639 let repository_id = RepositoryId(request.payload.repository_id);
2640 let content = this
2641 .update(&mut cx, |this, cx| {
2642 let repository = this.repositories().get(&repository_id)?;
2643 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2644 })
2645 .context("missing repository")?
2646 .await?;
2647 Ok(proto::GetBlobContentResponse { content })
2648 }
2649
2650 async fn handle_open_unstaged_diff(
2651 this: Entity<Self>,
2652 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2653 mut cx: AsyncApp,
2654 ) -> Result<proto::OpenUnstagedDiffResponse> {
2655 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2656 let diff = this
2657 .update(&mut cx, |this, cx| {
2658 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2659 Some(this.open_unstaged_diff(buffer, cx))
2660 })
2661 .context("missing buffer")?
2662 .await?;
2663 this.update(&mut cx, |this, _| {
2664 let shared_diffs = this
2665 .shared_diffs
2666 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2667 .or_default();
2668 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2669 });
2670 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2671 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2672 }
2673
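    /// Opens the uncommitted diff for the requested buffer and records it as shared with the
    /// requesting peer. The response carries the HEAD and staged texts where they exist; when
    /// the index matches HEAD, the staged text is omitted and `mode` tells the client to reuse
    /// the committed text.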
2674 async fn handle_open_uncommitted_diff(
2675 this: Entity<Self>,
2676 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2677 mut cx: AsyncApp,
2678 ) -> Result<proto::OpenUncommittedDiffResponse> {
2679 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2680 let diff = this
2681 .update(&mut cx, |this, cx| {
2682 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2683 Some(this.open_uncommitted_diff(buffer, cx))
2684 })
2685 .context("missing buffer")?
2686 .await?;
2687 this.update(&mut cx, |this, _| {
2688 let shared_diffs = this
2689 .shared_diffs
2690 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2691 .or_default();
2692 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2693 });
2694 Ok(diff.read_with(&cx, |diff, cx| {
2695 use proto::open_uncommitted_diff_response::Mode;
2696
2697 let unstaged_diff = diff.secondary_diff();
2698 let index_snapshot = unstaged_diff.and_then(|diff| {
2699 let diff = diff.read(cx);
2700 diff.base_text_exists().then(|| diff.base_text(cx))
2701 });
2702
2703 let mode;
2704 let staged_text;
2705 let committed_text;
2706 if diff.base_text_exists() {
2707 let committed_snapshot = diff.base_text(cx);
2708 committed_text = Some(committed_snapshot.text());
2709 if let Some(index_text) = index_snapshot {
2710 if index_text.remote_id() == committed_snapshot.remote_id() {
2711 mode = Mode::IndexMatchesHead;
2712 staged_text = None;
2713 } else {
2714 mode = Mode::IndexAndHead;
2715 staged_text = Some(index_text.text());
2716 }
2717 } else {
2718 mode = Mode::IndexAndHead;
2719 staged_text = None;
2720 }
2721 } else {
2722 mode = Mode::IndexAndHead;
2723 committed_text = None;
2724 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2725 }
2726
2727 proto::OpenUncommittedDiffResponse {
2728 committed_text,
2729 staged_text,
2730 mode: mode.into(),
2731 }
2732 }))
2733 }
2734
2735 async fn handle_update_diff_bases(
2736 this: Entity<Self>,
2737 request: TypedEnvelope<proto::UpdateDiffBases>,
2738 mut cx: AsyncApp,
2739 ) -> Result<()> {
2740 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2741 this.update(&mut cx, |this, cx| {
2742 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2743 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2744 {
2745 let buffer = buffer.read(cx).text_snapshot();
2746 diff_state.update(cx, |diff_state, cx| {
2747 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2748 })
2749 }
2750 });
2751 Ok(())
2752 }
2753
2754 async fn handle_blame_buffer(
2755 this: Entity<Self>,
2756 envelope: TypedEnvelope<proto::BlameBuffer>,
2757 mut cx: AsyncApp,
2758 ) -> Result<proto::BlameBufferResponse> {
2759 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2760 let version = deserialize_version(&envelope.payload.version);
2761 let buffer = this.read_with(&cx, |this, cx| {
2762 this.buffer_store.read(cx).get_existing(buffer_id)
2763 })?;
2764 buffer
2765 .update(&mut cx, |buffer, _| {
2766 buffer.wait_for_version(version.clone())
2767 })
2768 .await?;
2769 let blame = this
2770 .update(&mut cx, |this, cx| {
2771 this.blame_buffer(&buffer, Some(version), cx)
2772 })
2773 .await?;
2774 Ok(serialize_blame_buffer_response(blame))
2775 }
2776
2777 async fn handle_get_permalink_to_line(
2778 this: Entity<Self>,
2779 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2780 mut cx: AsyncApp,
2781 ) -> Result<proto::GetPermalinkToLineResponse> {
2782 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2784 let selection = {
2785 let proto_selection = envelope
2786 .payload
2787 .selection
2788 .context("no selection to get permalink for defined")?;
2789 proto_selection.start as u32..proto_selection.end as u32
2790 };
2791 let buffer = this.read_with(&cx, |this, cx| {
2792 this.buffer_store.read(cx).get_existing(buffer_id)
2793 })?;
2794 let permalink = this
2795 .update(&mut cx, |this, cx| {
2796 this.get_permalink_to_line(&buffer, selection, cx)
2797 })
2798 .await?;
2799 Ok(proto::GetPermalinkToLineResponse {
2800 permalink: permalink.to_string(),
2801 })
2802 }
2803
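    /// Resolves the repository entity referenced by a proto request, failing if the id is no
    /// longer present in this store.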
2804 fn repository_for_request(
2805 this: &Entity<Self>,
2806 id: RepositoryId,
2807 cx: &mut AsyncApp,
2808 ) -> Result<Entity<Repository>> {
2809 this.read_with(cx, |this, _| {
2810 this.repositories
2811 .get(&id)
2812 .context("missing repository handle")
2813 .cloned()
2814 })
2815 }
2816
2817 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2818 self.repositories
2819 .iter()
2820 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2821 .collect()
2822 }
2823
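    /// Maps a batch of updated worktree entries to the repositories that contain them,
    /// converting each absolute path into a `RepoPath`. When repositories are nested, a path
    /// is attributed only to its innermost containing repository. The work runs on the
    /// background executor and the resulting map is returned as a task.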
2824 fn process_updated_entries(
2825 &self,
2826 worktree: &Entity<Worktree>,
2827 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2828 cx: &mut App,
2829 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2830 let path_style = worktree.read(cx).path_style();
2831 let mut repo_paths = self
2832 .repositories
2833 .values()
2834 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2835 .collect::<Vec<_>>();
2836 let mut entries: Vec<_> = updated_entries
2837 .iter()
2838 .map(|(path, _, _)| path.clone())
2839 .collect();
2840 entries.sort();
2841 let worktree = worktree.read(cx);
2842
2843 let entries = entries
2844 .into_iter()
2845 .map(|path| worktree.absolutize(&path))
2846 .collect::<Arc<[_]>>();
2847
2848 let executor = cx.background_executor().clone();
2849 cx.background_executor().spawn(async move {
2850 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2851 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2852 let mut tasks = FuturesOrdered::new();
2853 for (repo_path, repo) in repo_paths.into_iter().rev() {
2854 let entries = entries.clone();
2855 let task = executor.spawn(async move {
2856 // Find all repository paths that belong to this repo
2857 let mut ix = entries.partition_point(|path| path < &*repo_path);
2858 if ix == entries.len() {
2859 return None;
2860 };
2861
2862 let mut paths = Vec::new();
                    // All paths prefixed by a given repo form a contiguous range.
2864 while let Some(path) = entries.get(ix)
2865 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2866 &repo_path, path, path_style,
2867 )
2868 {
2869 paths.push((repo_path, ix));
2870 ix += 1;
2871 }
2872 if paths.is_empty() {
2873 None
2874 } else {
2875 Some((repo, paths))
2876 }
2877 });
2878 tasks.push_back(task);
2879 }
2880
2881 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2882 let mut path_was_used = vec![false; entries.len()];
2883 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned over repositories in reverse path order, so more deeply
            // nested work directories are visited first. A path is always assigned to its
            // innermost repository.
2886 for t in tasks {
2887 let Some((repo, paths)) = t else {
2888 continue;
2889 };
2890 let entry = paths_by_git_repo.entry(repo).or_default();
2891 for (repo_path, ix) in paths {
2892 if path_was_used[ix] {
2893 continue;
2894 }
2895 path_was_used[ix] = true;
2896 entry.push(repo_path);
2897 }
2898 }
2899
2900 paths_by_git_repo
2901 })
2902 }
2903}
2904
2905impl BufferGitState {
2906 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2907 Self {
2908 unstaged_diff: Default::default(),
2909 uncommitted_diff: Default::default(),
2910 recalculate_diff_task: Default::default(),
2911 language: Default::default(),
2912 language_registry: Default::default(),
2913 recalculating_tx: postage::watch::channel_with(false).0,
2914 hunk_staging_operation_count: 0,
2915 hunk_staging_operation_count_as_of_write: 0,
2916 head_text: Default::default(),
2917 index_text: Default::default(),
2918 head_changed: Default::default(),
2919 index_changed: Default::default(),
2920 language_changed: Default::default(),
2921 conflict_updated_futures: Default::default(),
2922 conflict_set: Default::default(),
2923 reparse_conflict_markers_task: Default::default(),
2924 }
2925 }
2926
2927 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2928 self.language = buffer.read(cx).language().cloned();
2929 self.language_changed = true;
2930 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2931 }
2932
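    /// Re-parses conflict markers in the given buffer snapshot and updates the associated
    /// `ConflictSet`, if any. Returns a receiver that completes once the conflict set has been
    /// updated; if there is no upgradable conflict set, or it currently reports no conflict,
    /// no reparse is scheduled.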
2933 fn reparse_conflict_markers(
2934 &mut self,
2935 buffer: text::BufferSnapshot,
2936 cx: &mut Context<Self>,
2937 ) -> oneshot::Receiver<()> {
2938 let (tx, rx) = oneshot::channel();
2939
2940 let Some(conflict_set) = self
2941 .conflict_set
2942 .as_ref()
2943 .and_then(|conflict_set| conflict_set.upgrade())
2944 else {
2945 return rx;
2946 };
2947
2948 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2949 if conflict_set.has_conflict {
2950 Some(conflict_set.snapshot())
2951 } else {
2952 None
2953 }
2954 });
2955
2956 if let Some(old_snapshot) = old_snapshot {
2957 self.conflict_updated_futures.push(tx);
2958 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2959 let (snapshot, changed_range) = cx
2960 .background_spawn(async move {
2961 let new_snapshot = ConflictSet::parse(&buffer);
2962 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2963 (new_snapshot, changed_range)
2964 })
2965 .await;
2966 this.update(cx, |this, cx| {
2967 if let Some(conflict_set) = &this.conflict_set {
2968 conflict_set
2969 .update(cx, |conflict_set, cx| {
2970 conflict_set.set_snapshot(snapshot, changed_range, cx);
2971 })
2972 .ok();
2973 }
2974 let futures = std::mem::take(&mut this.conflict_updated_futures);
2975 for tx in futures {
2976 tx.send(()).ok();
2977 }
2978 })
2979 }))
2980 }
2981
2982 rx
2983 }
2984
2985 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2986 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2987 }
2988
2989 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2990 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2991 }
2992
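    /// Applies a `proto::UpdateDiffBases` message from the remote side, translating its mode
    /// into the corresponding `DiffBasesChange` and kicking off a diff recalculation.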
2993 fn handle_base_texts_updated(
2994 &mut self,
2995 buffer: text::BufferSnapshot,
2996 message: proto::UpdateDiffBases,
2997 cx: &mut Context<Self>,
2998 ) {
2999 use proto::update_diff_bases::Mode;
3000
3001 let Some(mode) = Mode::from_i32(message.mode) else {
3002 return;
3003 };
3004
3005 let diff_bases_change = match mode {
3006 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3007 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3008 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3009 Mode::IndexAndHead => DiffBasesChange::SetEach {
3010 index: message.staged_text,
3011 head: message.committed_text,
3012 },
3013 };
3014
3015 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3016 }
3017
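    /// Returns a future that resolves once the in-flight diff recalculation finishes, or
    /// `None` if no recalculation is currently running.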
3018 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3019 if *self.recalculating_tx.borrow() {
3020 let mut rx = self.recalculating_tx.subscribe();
3021 Some(async move {
3022 loop {
3023 let is_recalculating = rx.recv().await;
3024 if is_recalculating != Some(true) {
3025 break;
3026 }
3027 }
3028 })
3029 } else {
3030 None
3031 }
3032 }
3033
3034 fn diff_bases_changed(
3035 &mut self,
3036 buffer: text::BufferSnapshot,
3037 diff_bases_change: Option<DiffBasesChange>,
3038 cx: &mut Context<Self>,
3039 ) {
3040 match diff_bases_change {
3041 Some(DiffBasesChange::SetIndex(index)) => {
3042 self.index_text = index.map(|mut index| {
3043 text::LineEnding::normalize(&mut index);
3044 Arc::from(index.as_str())
3045 });
3046 self.index_changed = true;
3047 }
3048 Some(DiffBasesChange::SetHead(head)) => {
3049 self.head_text = head.map(|mut head| {
3050 text::LineEnding::normalize(&mut head);
3051 Arc::from(head.as_str())
3052 });
3053 self.head_changed = true;
3054 }
3055 Some(DiffBasesChange::SetBoth(text)) => {
3056 let text = text.map(|mut text| {
3057 text::LineEnding::normalize(&mut text);
3058 Arc::from(text.as_str())
3059 });
3060 self.head_text = text.clone();
3061 self.index_text = text;
3062 self.head_changed = true;
3063 self.index_changed = true;
3064 }
3065 Some(DiffBasesChange::SetEach { index, head }) => {
3066 self.index_text = index.map(|mut index| {
3067 text::LineEnding::normalize(&mut index);
3068 Arc::from(index.as_str())
3069 });
3070 self.index_changed = true;
3071 self.head_text = head.map(|mut head| {
3072 text::LineEnding::normalize(&mut head);
3073 Arc::from(head.as_str())
3074 });
3075 self.head_changed = true;
3076 }
3077 None => {}
3078 }
3079
3080 self.recalculate_diffs(buffer, cx)
3081 }
3082
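    /// Recomputes the unstaged and uncommitted diffs against the current index and HEAD texts.
    /// When the index matches HEAD, the unstaged diff is reused for the uncommitted diff. The
    /// work is abandoned if new hunk staging operations arrive while it is in flight, leaving a
    /// later recalculation to pick up the settled index state.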
3083 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3084 *self.recalculating_tx.borrow_mut() = true;
3085
3086 let language = self.language.clone();
3087 let language_registry = self.language_registry.clone();
3088 let unstaged_diff = self.unstaged_diff();
3089 let uncommitted_diff = self.uncommitted_diff();
3090 let head = self.head_text.clone();
3091 let index = self.index_text.clone();
3092 let index_changed = self.index_changed;
3093 let head_changed = self.head_changed;
3094 let language_changed = self.language_changed;
3095 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
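        // Index and HEAD texts are shared `Arc`s, so pointer equality is enough to detect the
        // case where the index matches HEAD (see `DiffBasesChange::SetBoth`).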
3096 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3097 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3098 (None, None) => true,
3099 _ => false,
3100 };
3101 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3102 log::debug!(
3103 "start recalculating diffs for buffer {}",
3104 buffer.remote_id()
3105 );
3106
3107 let mut new_unstaged_diff = None;
3108 if let Some(unstaged_diff) = &unstaged_diff {
3109 new_unstaged_diff = Some(
3110 cx.update(|cx| {
3111 unstaged_diff.read(cx).update_diff(
3112 buffer.clone(),
3113 index,
3114 index_changed,
3115 language.clone(),
3116 cx,
3117 )
3118 })
3119 .await,
3120 );
3121 }
3122
3123 // Dropping BufferDiff can be expensive, so yield back to the event loop
3124 // for a bit
3125 yield_now().await;
3126
3127 let mut new_uncommitted_diff = None;
3128 if let Some(uncommitted_diff) = &uncommitted_diff {
3129 new_uncommitted_diff = if index_matches_head {
3130 new_unstaged_diff.clone()
3131 } else {
3132 Some(
3133 cx.update(|cx| {
3134 uncommitted_diff.read(cx).update_diff(
3135 buffer.clone(),
3136 head,
3137 head_changed,
3138 language.clone(),
3139 cx,
3140 )
3141 })
3142 .await,
3143 )
3144 }
3145 }
3146
3147 // Dropping BufferDiff can be expensive, so yield back to the event loop
3148 // for a bit
3149 yield_now().await;
3150
3151 let cancel = this.update(cx, |this, _| {
3152 // This checks whether all pending stage/unstage operations
3153 // have quiesced (i.e. both the corresponding write and the
3154 // read of that write have completed). If not, then we cancel
3155 // this recalculation attempt to avoid invalidating pending
3156 // state too quickly; another recalculation will come along
3157 // later and clear the pending state once the state of the index has settled.
3158 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3159 *this.recalculating_tx.borrow_mut() = false;
3160 true
3161 } else {
3162 false
3163 }
3164 })?;
3165 if cancel {
3166 log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
3171 buffer.remote_id()
3172 );
3173 return Ok(());
3174 }
3175
3176 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3177 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3178 {
3179 let task = unstaged_diff.update(cx, |diff, cx| {
3180 if language_changed {
3181 diff.language_changed(language.clone(), language_registry.clone(), cx);
3182 }
3183 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3184 });
3185 Some(task.await)
3186 } else {
3187 None
3188 };
3189
3190 yield_now().await;
3191
3192 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3193 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3194 {
3195 uncommitted_diff
3196 .update(cx, |diff, cx| {
3197 if language_changed {
3198 diff.language_changed(language, language_registry, cx);
3199 }
3200 diff.set_snapshot_with_secondary(
3201 new_uncommitted_diff,
3202 &buffer,
3203 unstaged_changed_range.flatten(),
3204 true,
3205 cx,
3206 )
3207 })
3208 .await;
3209 }
3210
3211 log::debug!(
3212 "finished recalculating diffs for buffer {}",
3213 buffer.remote_id()
3214 );
3215
3216 if let Some(this) = this.upgrade() {
3217 this.update(cx, |this, _| {
3218 this.index_changed = false;
3219 this.head_changed = false;
3220 this.language_changed = false;
3221 *this.recalculating_tx.borrow_mut() = false;
3222 });
3223 }
3224
3225 Ok(())
3226 }));
3227 }
3228}
3229
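/// Builds an `AskPassDelegate` that forwards password prompts to the downstream client as
/// `AskPassRequest` messages and relays the encrypted response back to the caller, zeroizing
/// the plaintext buffer afterwards.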
3230fn make_remote_delegate(
3231 this: Entity<GitStore>,
3232 project_id: u64,
3233 repository_id: RepositoryId,
3234 askpass_id: u64,
3235 cx: &mut AsyncApp,
3236) -> AskPassDelegate {
3237 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3238 this.update(cx, |this, cx| {
3239 let Some((client, _)) = this.downstream_client() else {
3240 return;
3241 };
3242 let response = client.request(proto::AskPassRequest {
3243 project_id,
3244 repository_id: repository_id.to_proto(),
3245 askpass_id,
3246 prompt,
3247 });
3248 cx.spawn(async move |_, _| {
3249 let mut response = response.await?.response;
3250 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3251 .ok();
3252 response.zeroize();
3253 anyhow::Ok(())
3254 })
3255 .detach_and_log_err(cx);
3256 });
3257 })
3258}
3259
3260impl RepositoryId {
3261 pub fn to_proto(self) -> u64 {
3262 self.0
3263 }
3264
3265 pub fn from_proto(id: u64) -> Self {
3266 RepositoryId(id)
3267 }
3268}
3269
3270impl RepositorySnapshot {
3271 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3272 Self {
3273 id,
3274 statuses_by_path: Default::default(),
3275 work_directory_abs_path,
3276 branch: None,
3277 head_commit: None,
3278 scan_id: 0,
3279 merge: Default::default(),
3280 remote_origin_url: None,
3281 remote_upstream_url: None,
3282 stash_entries: Default::default(),
3283 path_style,
3284 }
3285 }
3286
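    /// Builds a full `UpdateRepository` message for this snapshot, including every status
    /// entry, for the initial share with a downstream client.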
3287 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3288 proto::UpdateRepository {
3289 branch_summary: self.branch.as_ref().map(branch_to_proto),
3290 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3291 updated_statuses: self
3292 .statuses_by_path
3293 .iter()
3294 .map(|entry| entry.to_proto())
3295 .collect(),
3296 removed_statuses: Default::default(),
3297 current_merge_conflicts: self
3298 .merge
3299 .conflicted_paths
3300 .iter()
3301 .map(|repo_path| repo_path.to_proto())
3302 .collect(),
3303 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3304 project_id,
3305 id: self.id.to_proto(),
3306 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3307 entry_ids: vec![self.id.to_proto()],
3308 scan_id: self.scan_id,
3309 is_last_update: true,
3310 stash_entries: self
3311 .stash_entries
3312 .entries
3313 .iter()
3314 .map(stash_to_proto)
3315 .collect(),
3316 remote_upstream_url: self.remote_upstream_url.clone(),
3317 remote_origin_url: self.remote_origin_url.clone(),
3318 }
3319 }
3320
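    /// Builds an incremental `UpdateRepository` message by diffing this snapshot's statuses
    /// against `old`, emitting only the entries that were added, changed, or removed.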
3321 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3322 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3323 let mut removed_statuses: Vec<String> = Vec::new();
3324
3325 let mut new_statuses = self.statuses_by_path.iter().peekable();
3326 let mut old_statuses = old.statuses_by_path.iter().peekable();
3327
3328 let mut current_new_entry = new_statuses.next();
3329 let mut current_old_entry = old_statuses.next();
3330 loop {
3331 match (current_new_entry, current_old_entry) {
3332 (Some(new_entry), Some(old_entry)) => {
3333 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3334 Ordering::Less => {
3335 updated_statuses.push(new_entry.to_proto());
3336 current_new_entry = new_statuses.next();
3337 }
3338 Ordering::Equal => {
3339 if new_entry.status != old_entry.status {
3340 updated_statuses.push(new_entry.to_proto());
3341 }
3342 current_old_entry = old_statuses.next();
3343 current_new_entry = new_statuses.next();
3344 }
3345 Ordering::Greater => {
3346 removed_statuses.push(old_entry.repo_path.to_proto());
3347 current_old_entry = old_statuses.next();
3348 }
3349 }
3350 }
3351 (None, Some(old_entry)) => {
3352 removed_statuses.push(old_entry.repo_path.to_proto());
3353 current_old_entry = old_statuses.next();
3354 }
3355 (Some(new_entry), None) => {
3356 updated_statuses.push(new_entry.to_proto());
3357 current_new_entry = new_statuses.next();
3358 }
3359 (None, None) => break,
3360 }
3361 }
3362
3363 proto::UpdateRepository {
3364 branch_summary: self.branch.as_ref().map(branch_to_proto),
3365 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3366 updated_statuses,
3367 removed_statuses,
3368 current_merge_conflicts: self
3369 .merge
3370 .conflicted_paths
3371 .iter()
3372 .map(|path| path.to_proto())
3373 .collect(),
3374 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3375 project_id,
3376 id: self.id.to_proto(),
3377 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3378 entry_ids: vec![],
3379 scan_id: self.scan_id,
3380 is_last_update: true,
3381 stash_entries: self
3382 .stash_entries
3383 .entries
3384 .iter()
3385 .map(stash_to_proto)
3386 .collect(),
3387 remote_upstream_url: self.remote_upstream_url.clone(),
3388 remote_origin_url: self.remote_origin_url.clone(),
3389 }
3390 }
3391
3392 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3393 self.statuses_by_path.iter().cloned()
3394 }
3395
3396 pub fn status_summary(&self) -> GitSummary {
3397 self.statuses_by_path.summary().item_summary
3398 }
3399
3400 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3401 self.statuses_by_path
3402 .get(&PathKey(path.as_ref().clone()), ())
3403 .cloned()
3404 }
3405
3406 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3407 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3408 }
3409
3410 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3411 self.path_style
3412 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3413 .unwrap()
3414 .into()
3415 }
3416
3417 #[inline]
3418 fn abs_path_to_repo_path_inner(
3419 work_directory_abs_path: &Path,
3420 abs_path: &Path,
3421 path_style: PathStyle,
3422 ) -> Option<RepoPath> {
3423 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3424 Some(RepoPath::from_rel_path(&rel_path))
3425 }
3426
3427 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3428 self.merge.conflicted_paths.contains(repo_path)
3429 }
3430
3431 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3432 let had_conflict_on_last_merge_head_change =
3433 self.merge.conflicted_paths.contains(repo_path);
3434 let has_conflict_currently = self
3435 .status_for_path(repo_path)
3436 .is_some_and(|entry| entry.status.is_conflicted());
3437 had_conflict_on_last_merge_head_change || has_conflict_currently
3438 }
3439
3440 /// This is the name that will be displayed in the repository selector for this repository.
3441 pub fn display_name(&self) -> SharedString {
3442 self.work_directory_abs_path
3443 .file_name()
3444 .unwrap_or_default()
3445 .to_string_lossy()
3446 .to_string()
3447 .into()
3448 }
3449}
3450
3451pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3452 proto::StashEntry {
3453 oid: entry.oid.as_bytes().to_vec(),
3454 message: entry.message.clone(),
3455 branch: entry.branch.clone(),
3456 index: entry.index as u64,
3457 timestamp: entry.timestamp,
3458 }
3459}
3460
3461pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3462 Ok(StashEntry {
3463 oid: Oid::from_bytes(&entry.oid)?,
3464 message: entry.message.clone(),
3465 index: entry.index as usize,
3466 branch: entry.branch.clone(),
3467 timestamp: entry.timestamp,
3468 })
3469}
3470
3471impl MergeDetails {
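    /// Loads the current merge state (merge message, merge/cherry-pick/rebase/revert/apply
    /// heads, and conflicted paths) from the repository, returning the new details along with
    /// a flag indicating whether the recorded merge heads were updated.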
3472 async fn load(
3473 backend: &Arc<dyn GitRepository>,
3474 status: &SumTree<StatusEntry>,
3475 prev_snapshot: &RepositorySnapshot,
3476 ) -> Result<(MergeDetails, bool)> {
3477 log::debug!("load merge details");
3478 let message = backend.merge_message().await;
3479 let heads = backend
3480 .revparse_batch(vec![
3481 "MERGE_HEAD".into(),
3482 "CHERRY_PICK_HEAD".into(),
3483 "REBASE_HEAD".into(),
3484 "REVERT_HEAD".into(),
3485 "APPLY_HEAD".into(),
3486 ])
3487 .await
3488 .log_err()
3489 .unwrap_or_default()
3490 .into_iter()
3491 .map(|opt| opt.map(SharedString::from))
3492 .collect::<Vec<_>>();
3493 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3494 let conflicted_paths = if merge_heads_changed {
3495 let current_conflicted_paths = TreeSet::from_ordered_entries(
3496 status
3497 .iter()
3498 .filter(|entry| entry.status.is_conflicted())
3499 .map(|entry| entry.repo_path.clone()),
3500 );
3501
            // A scan can run while a lengthy merge is still in progress: the merge heads are
            // already written, but the resulting conflicts are not yet reported by `git status`.
            // Since, for now, we only use the merge-heads state to track conflicts, don't update
            // it until some conflicts actually appear.
3507 if heads.iter().any(Option::is_some)
3508 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3509 && current_conflicted_paths.is_empty()
3510 {
3511 log::debug!("not updating merge heads because no conflicts found");
3512 return Ok((
3513 MergeDetails {
3514 message: message.map(SharedString::from),
3515 ..prev_snapshot.merge.clone()
3516 },
3517 false,
3518 ));
3519 }
3520
3521 current_conflicted_paths
3522 } else {
3523 prev_snapshot.merge.conflicted_paths.clone()
3524 };
3525 let details = MergeDetails {
3526 conflicted_paths,
3527 message: message.map(SharedString::from),
3528 heads,
3529 };
3530 Ok((details, merge_heads_changed))
3531 }
3532}
3533
3534impl Repository {
3535 pub fn snapshot(&self) -> RepositorySnapshot {
3536 self.snapshot.clone()
3537 }
3538
3539 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3540 self.pending_ops.iter().cloned()
3541 }
3542
3543 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3544 self.pending_ops.summary().clone()
3545 }
3546
3547 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3548 self.pending_ops
3549 .get(&PathKey(path.as_ref().clone()), ())
3550 .cloned()
3551 }
3552
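    /// Creates a repository backed by a local git working directory. The underlying
    /// `LocalRepositoryState` is initialized asynchronously and shared with a background
    /// worker that executes git jobs for this repository.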
3553 fn local(
3554 id: RepositoryId,
3555 work_directory_abs_path: Arc<Path>,
3556 dot_git_abs_path: Arc<Path>,
3557 project_environment: WeakEntity<ProjectEnvironment>,
3558 fs: Arc<dyn Fs>,
3559 git_store: WeakEntity<GitStore>,
3560 cx: &mut Context<Self>,
3561 ) -> Self {
3562 let snapshot =
3563 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3564 let state = cx
3565 .spawn(async move |_, cx| {
3566 LocalRepositoryState::new(
3567 work_directory_abs_path,
3568 dot_git_abs_path,
3569 project_environment,
3570 fs,
3571 cx,
3572 )
3573 .await
3574 .map_err(|err| err.to_string())
3575 })
3576 .shared();
3577 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3578 let state = cx
3579 .spawn(async move |_, _| {
3580 let state = state.await?;
3581 Ok(RepositoryState::Local(state))
3582 })
3583 .shared();
3584
3585 Repository {
3586 this: cx.weak_entity(),
3587 git_store,
3588 snapshot,
3589 pending_ops: Default::default(),
3590 repository_state: state,
3591 commit_message_buffer: None,
3592 askpass_delegates: Default::default(),
3593 paths_needing_status_update: Default::default(),
3594 latest_askpass_id: 0,
3595 job_sender,
3596 job_id: 0,
3597 active_jobs: Default::default(),
3598 }
3599 }
3600
3601 fn remote(
3602 id: RepositoryId,
3603 work_directory_abs_path: Arc<Path>,
3604 path_style: PathStyle,
3605 project_id: ProjectId,
3606 client: AnyProtoClient,
3607 git_store: WeakEntity<GitStore>,
3608 cx: &mut Context<Self>,
3609 ) -> Self {
3610 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3611 let repository_state = RemoteRepositoryState { project_id, client };
3612 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3613 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3614 Self {
3615 this: cx.weak_entity(),
3616 snapshot,
3617 commit_message_buffer: None,
3618 git_store,
3619 pending_ops: Default::default(),
3620 paths_needing_status_update: Default::default(),
3621 job_sender,
3622 repository_state,
3623 askpass_delegates: Default::default(),
3624 latest_askpass_id: 0,
3625 active_jobs: Default::default(),
3626 job_id: 0,
3627 }
3628 }
3629
3630 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3631 self.git_store.upgrade()
3632 }
3633
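    /// Reloads the index and HEAD texts for every open buffer whose diffs belong to this
    /// repository, then notifies the corresponding `BufferGitState`s (and any downstream
    /// client) about bases that actually changed.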
3634 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3635 let this = cx.weak_entity();
3636 let git_store = self.git_store.clone();
3637 let _ = self.send_keyed_job(
3638 Some(GitJobKey::ReloadBufferDiffBases),
3639 None,
3640 |state, mut cx| async move {
3641 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3642 log::error!("tried to recompute diffs for a non-local repository");
3643 return Ok(());
3644 };
3645
3646 let Some(this) = this.upgrade() else {
3647 return Ok(());
3648 };
3649
3650 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3651 git_store.update(cx, |git_store, cx| {
3652 git_store
3653 .diffs
3654 .iter()
3655 .filter_map(|(buffer_id, diff_state)| {
3656 let buffer_store = git_store.buffer_store.read(cx);
3657 let buffer = buffer_store.get(*buffer_id)?;
3658 let file = File::from_dyn(buffer.read(cx).file())?;
3659 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3660 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3661 log::debug!(
3662 "start reload diff bases for repo path {}",
3663 repo_path.as_unix_str()
3664 );
3665 diff_state.update(cx, |diff_state, _| {
3666 let has_unstaged_diff = diff_state
3667 .unstaged_diff
3668 .as_ref()
3669 .is_some_and(|diff| diff.is_upgradable());
3670 let has_uncommitted_diff = diff_state
3671 .uncommitted_diff
3672 .as_ref()
3673 .is_some_and(|set| set.is_upgradable());
3674
3675 Some((
3676 buffer,
3677 repo_path,
3678 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3679 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3680 ))
3681 })
3682 })
3683 .collect::<Vec<_>>()
3684 })
})??;
3686
3687 let buffer_diff_base_changes = cx
3688 .background_spawn(async move {
3689 let mut changes = Vec::new();
3690 for (buffer, repo_path, current_index_text, current_head_text) in
3691 &repo_diff_state_updates
3692 {
3693 let index_text = if current_index_text.is_some() {
3694 backend.load_index_text(repo_path.clone()).await
3695 } else {
3696 None
3697 };
3698 let head_text = if current_head_text.is_some() {
3699 backend.load_committed_text(repo_path.clone()).await
3700 } else {
3701 None
3702 };
3703
3704 let change =
3705 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3706 (Some(current_index), Some(current_head)) => {
3707 let index_changed =
3708 index_text.as_deref() != current_index.as_deref();
3709 let head_changed =
3710 head_text.as_deref() != current_head.as_deref();
3711 if index_changed && head_changed {
3712 if index_text == head_text {
3713 Some(DiffBasesChange::SetBoth(head_text))
3714 } else {
3715 Some(DiffBasesChange::SetEach {
3716 index: index_text,
3717 head: head_text,
3718 })
3719 }
3720 } else if index_changed {
3721 Some(DiffBasesChange::SetIndex(index_text))
3722 } else if head_changed {
3723 Some(DiffBasesChange::SetHead(head_text))
3724 } else {
3725 None
3726 }
3727 }
3728 (Some(current_index), None) => {
3729 let index_changed =
3730 index_text.as_deref() != current_index.as_deref();
3731 index_changed
3732 .then_some(DiffBasesChange::SetIndex(index_text))
3733 }
3734 (None, Some(current_head)) => {
3735 let head_changed =
3736 head_text.as_deref() != current_head.as_deref();
3737 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3738 }
3739 (None, None) => None,
3740 };
3741
3742 changes.push((buffer.clone(), change))
3743 }
3744 changes
3745 })
3746 .await;
3747
3748 git_store.update(&mut cx, |git_store, cx| {
3749 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3750 let buffer_snapshot = buffer.read(cx).text_snapshot();
3751 let buffer_id = buffer_snapshot.remote_id();
3752 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3753 continue;
3754 };
3755
3756 let downstream_client = git_store.downstream_client();
3757 diff_state.update(cx, |diff_state, cx| {
3758 use proto::update_diff_bases::Mode;
3759
3760 if let Some((diff_bases_change, (client, project_id))) =
3761 diff_bases_change.clone().zip(downstream_client)
3762 {
3763 let (staged_text, committed_text, mode) = match diff_bases_change {
3764 DiffBasesChange::SetIndex(index) => {
3765 (index, None, Mode::IndexOnly)
3766 }
3767 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3768 DiffBasesChange::SetEach { index, head } => {
3769 (index, head, Mode::IndexAndHead)
3770 }
3771 DiffBasesChange::SetBoth(text) => {
3772 (None, text, Mode::IndexMatchesHead)
3773 }
3774 };
3775 client
3776 .send(proto::UpdateDiffBases {
3777 project_id: project_id.to_proto(),
3778 buffer_id: buffer_id.to_proto(),
3779 staged_text,
3780 committed_text,
3781 mode: mode as i32,
3782 })
3783 .log_err();
3784 }
3785
3786 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3787 });
3788 }
3789 })
3790 },
3791 );
3792 }
3793
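/// Enqueues a git job with no deduplication key; see [`Self::send_keyed_job`].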
3794 pub fn send_job<F, Fut, R>(
3795 &mut self,
3796 status: Option<SharedString>,
3797 job: F,
3798 ) -> oneshot::Receiver<R>
3799 where
3800 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3801 Fut: Future<Output = R> + 'static,
3802 R: Send + 'static,
3803 {
3804 self.send_keyed_job(None, status, job)
3805 }
3806
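/// Enqueues a git job on this repository's worker. If `key` is provided and a newer
/// job with the same key is already queued, the worker skips this one. While the job
/// runs, `status` (when provided) is recorded in `active_jobs`.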
3807 fn send_keyed_job<F, Fut, R>(
3808 &mut self,
3809 key: Option<GitJobKey>,
3810 status: Option<SharedString>,
3811 job: F,
3812 ) -> oneshot::Receiver<R>
3813 where
3814 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3815 Fut: Future<Output = R> + 'static,
3816 R: Send + 'static,
3817 {
3818 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3819 let job_id = post_inc(&mut self.job_id);
3820 let this = self.this.clone();
3821 self.job_sender
3822 .unbounded_send(GitJob {
3823 key,
3824 job: Box::new(move |state, cx: &mut AsyncApp| {
3825 let job = job(state, cx.clone());
3826 cx.spawn(async move |cx| {
3827 if let Some(s) = status.clone() {
3828 this.update(cx, |this, cx| {
3829 this.active_jobs.insert(
3830 job_id,
3831 JobInfo {
3832 start: Instant::now(),
3833 message: s.clone(),
3834 },
3835 );
3836
3837 cx.notify();
3838 })
3839 .ok();
3840 }
3841 let result = job.await;
3842
3843 this.update(cx, |this, cx| {
3844 this.active_jobs.remove(&job_id);
3845 cx.notify();
3846 })
3847 .ok();
3848
3849 result_tx.send(result).ok();
3850 })
3851 }),
3852 })
3853 .ok();
3854 result_rx
3855 }
3856
3857 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3858 let Some(git_store) = self.git_store.upgrade() else {
3859 return;
3860 };
3861 let entity = cx.entity();
3862 git_store.update(cx, |git_store, cx| {
3863 let Some((&id, _)) = git_store
3864 .repositories
3865 .iter()
3866 .find(|(_, handle)| *handle == &entity)
3867 else {
3868 return;
3869 };
3870 git_store.active_repo_id = Some(id);
3871 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3872 });
3873 }
3874
3875 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3876 self.snapshot.status()
3877 }
3878
3879 pub fn cached_stash(&self) -> GitStash {
3880 self.snapshot.stash_entries.clone()
3881 }
3882
3883 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3884 let git_store = self.git_store.upgrade()?;
3885 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3886 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3887 let abs_path = SanitizedPath::new(&abs_path);
3888 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3889 Some(ProjectPath {
3890 worktree_id: worktree.read(cx).id(),
3891 path: relative_path,
3892 })
3893 }
3894
3895 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3896 let git_store = self.git_store.upgrade()?;
3897 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3898 let abs_path = worktree_store.absolutize(path, cx)?;
3899 self.snapshot.abs_path_to_repo_path(&abs_path)
3900 }
3901
3902 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3903 other
3904 .read(cx)
3905 .snapshot
3906 .work_directory_abs_path
3907 .starts_with(&self.snapshot.work_directory_abs_path)
3908 }
3909
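/// Returns the shared commit message buffer for this repository, creating it on first
/// use. For remote repositories the buffer is opened via RPC; in both cases it is
/// given the "Git Commit" language when a language registry is available.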
3910 pub fn open_commit_buffer(
3911 &mut self,
3912 languages: Option<Arc<LanguageRegistry>>,
3913 buffer_store: Entity<BufferStore>,
3914 cx: &mut Context<Self>,
3915 ) -> Task<Result<Entity<Buffer>>> {
3916 let id = self.id;
3917 if let Some(buffer) = self.commit_message_buffer.clone() {
3918 return Task::ready(Ok(buffer));
3919 }
3920 let this = cx.weak_entity();
3921
3922 let rx = self.send_job(None, move |state, mut cx| async move {
3923 let Some(this) = this.upgrade() else {
3924 bail!("git store was dropped");
3925 };
3926 match state {
3927 RepositoryState::Local(..) => {
3928 this.update(&mut cx, |_, cx| {
3929 Self::open_local_commit_buffer(languages, buffer_store, cx)
})?
.await
3932 }
3933 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3934 let request = client.request(proto::OpenCommitMessageBuffer {
3935 project_id: project_id.0,
3936 repository_id: id.to_proto(),
3937 });
3938 let response = request.await.context("requesting to open commit buffer")?;
3939 let buffer_id = BufferId::new(response.buffer_id)?;
3940 let buffer = buffer_store
3941 .update(&mut cx, |buffer_store, cx| {
3942 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3943 })
3944 .await?;
3945 if let Some(language_registry) = languages {
3946 let git_commit_language =
3947 language_registry.language_for_name("Git Commit").await?;
3948 buffer.update(&mut cx, |buffer, cx| {
3949 buffer.set_language(Some(git_commit_language), cx);
})?;
3951 }
3952 this.update(&mut cx, |this, _| {
3953 this.commit_message_buffer = Some(buffer.clone());
})?;
3955 Ok(buffer)
3956 }
3957 }
3958 });
3959
3960 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3961 }
3962
3963 fn open_local_commit_buffer(
3964 language_registry: Option<Arc<LanguageRegistry>>,
3965 buffer_store: Entity<BufferStore>,
3966 cx: &mut Context<Self>,
3967 ) -> Task<Result<Entity<Buffer>>> {
3968 cx.spawn(async move |repository, cx| {
3969 let buffer = buffer_store
3970 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))
3971 .await?;
3972
3973 if let Some(language_registry) = language_registry {
3974 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3975 buffer.update(cx, |buffer, cx| {
3976 buffer.set_language(Some(git_commit_language), cx);
3977 });
3978 }
3979
3980 repository.update(cx, |repository, _| {
3981 repository.commit_message_buffer = Some(buffer.clone());
3982 })?;
3983 Ok(buffer)
3984 })
3985 }
3986
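/// Restores the given paths to their contents at `commit`, tracking them as reverted
/// in the pending-ops state while the job runs.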
3987 pub fn checkout_files(
3988 &mut self,
3989 commit: &str,
3990 paths: Vec<RepoPath>,
3991 cx: &mut Context<Self>,
3992 ) -> Task<Result<()>> {
3993 let commit = commit.to_string();
3994 let id = self.id;
3995
3996 self.spawn_job_with_tracking(
3997 paths.clone(),
3998 pending_op::GitStatus::Reverted,
3999 cx,
4000 async move |this, cx| {
4001 this.update(cx, |this, _cx| {
4002 this.send_job(
4003 Some(format!("git checkout {}", commit).into()),
4004 move |git_repo, _| async move {
4005 match git_repo {
4006 RepositoryState::Local(LocalRepositoryState {
4007 backend,
4008 environment,
4009 ..
4010 }) => {
4011 backend
4012 .checkout_files(commit, paths, environment.clone())
4013 .await
4014 }
4015 RepositoryState::Remote(RemoteRepositoryState {
4016 project_id,
4017 client,
4018 }) => {
4019 client
4020 .request(proto::GitCheckoutFiles {
4021 project_id: project_id.0,
4022 repository_id: id.to_proto(),
4023 commit,
4024 paths: paths
4025 .into_iter()
4026 .map(|p| p.to_proto())
4027 .collect(),
4028 })
4029 .await?;
4030
4031 Ok(())
4032 }
4033 }
4034 },
4035 )
4036 })?
4037 .await?
4038 },
4039 )
4040 }
4041
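/// Resets the current branch's HEAD to `commit` using the given [`ResetMode`].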
4042 pub fn reset(
4043 &mut self,
4044 commit: String,
4045 reset_mode: ResetMode,
4046 _cx: &mut App,
4047 ) -> oneshot::Receiver<Result<()>> {
4048 let id = self.id;
4049
4050 self.send_job(None, move |git_repo, _| async move {
4051 match git_repo {
4052 RepositoryState::Local(LocalRepositoryState {
4053 backend,
4054 environment,
4055 ..
4056 }) => backend.reset(commit, reset_mode, environment).await,
4057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4058 client
4059 .request(proto::GitReset {
4060 project_id: project_id.0,
4061 repository_id: id.to_proto(),
4062 commit,
4063 mode: match reset_mode {
4064 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4065 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4066 },
4067 })
4068 .await?;
4069
4070 Ok(())
4071 }
4072 }
4073 })
4074 }
4075
4076 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4077 let id = self.id;
4078 self.send_job(None, move |git_repo, _cx| async move {
4079 match git_repo {
4080 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4081 backend.show(commit).await
4082 }
4083 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4084 let resp = client
4085 .request(proto::GitShow {
4086 project_id: project_id.0,
4087 repository_id: id.to_proto(),
4088 commit,
4089 })
4090 .await?;
4091
4092 Ok(CommitDetails {
4093 sha: resp.sha.into(),
4094 message: resp.message.into(),
4095 commit_timestamp: resp.commit_timestamp,
4096 author_email: resp.author_email.into(),
4097 author_name: resp.author_name.into(),
4098 })
4099 }
4100 }
4101 })
4102 }
4103
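/// Loads the diff introduced by `commit`, including the old and new text of each
/// changed file.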
4104 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4105 let id = self.id;
4106 self.send_job(None, move |git_repo, cx| async move {
4107 match git_repo {
4108 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4109 backend.load_commit(commit, cx).await
4110 }
4111 RepositoryState::Remote(RemoteRepositoryState {
4112 client, project_id, ..
4113 }) => {
4114 let response = client
4115 .request(proto::LoadCommitDiff {
4116 project_id: project_id.0,
4117 repository_id: id.to_proto(),
4118 commit,
4119 })
4120 .await?;
4121 Ok(CommitDiff {
4122 files: response
4123 .files
4124 .into_iter()
4125 .map(|file| {
4126 Ok(CommitFile {
4127 path: RepoPath::from_proto(&file.path)?,
4128 old_text: file.old_text,
4129 new_text: file.new_text,
4130 is_binary: file.is_binary,
4131 })
4132 })
4133 .collect::<Result<Vec<_>>>()?,
4134 })
4135 }
4136 }
4137 })
4138 }
4139
4140 pub fn file_history(
4141 &mut self,
4142 path: RepoPath,
4143 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4144 self.file_history_paginated(path, 0, None)
4145 }
4146
4147 pub fn file_history_paginated(
4148 &mut self,
4149 path: RepoPath,
4150 skip: usize,
4151 limit: Option<usize>,
4152 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4153 let id = self.id;
4154 self.send_job(None, move |git_repo, _cx| async move {
4155 match git_repo {
4156 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4157 backend.file_history_paginated(path, skip, limit).await
4158 }
4159 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4160 let response = client
4161 .request(proto::GitFileHistory {
4162 project_id: project_id.0,
4163 repository_id: id.to_proto(),
4164 path: path.to_proto(),
4165 skip: skip as u64,
4166 limit: limit.map(|l| l as u64),
4167 })
4168 .await?;
4169 Ok(git::repository::FileHistory {
4170 entries: response
4171 .entries
4172 .into_iter()
4173 .map(|entry| git::repository::FileHistoryEntry {
4174 sha: entry.sha.into(),
4175 subject: entry.subject.into(),
4176 message: entry.message.into(),
4177 commit_timestamp: entry.commit_timestamp,
4178 author_name: entry.author_name.into(),
4179 author_email: entry.author_email.into(),
4180 })
4181 .collect(),
4182 path: RepoPath::from_proto(&response.path)?,
4183 })
4184 }
4185 }
4186 })
4187 }
4188
4189 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4190 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4191 }
4192
4193 fn save_buffers<'a>(
4194 &self,
4195 entries: impl IntoIterator<Item = &'a RepoPath>,
4196 cx: &mut Context<Self>,
4197 ) -> Vec<Task<anyhow::Result<()>>> {
4198 let mut save_futures = Vec::new();
4199 if let Some(buffer_store) = self.buffer_store(cx) {
4200 buffer_store.update(cx, |buffer_store, cx| {
4201 for path in entries {
4202 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4203 continue;
4204 };
4205 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4206 && buffer
4207 .read(cx)
4208 .file()
4209 .is_some_and(|file| file.disk_state().exists())
4210 && buffer.read(cx).has_unsaved_edits()
4211 {
4212 save_futures.push(buffer_store.save_buffer(buffer, cx));
4213 }
4214 }
4215 })
4216 }
4217 save_futures
4218 }
4219
4220 pub fn stage_entries(
4221 &mut self,
4222 entries: Vec<RepoPath>,
4223 cx: &mut Context<Self>,
4224 ) -> Task<anyhow::Result<()>> {
4225 self.stage_or_unstage_entries(true, entries, cx)
4226 }
4227
4228 pub fn unstage_entries(
4229 &mut self,
4230 entries: Vec<RepoPath>,
4231 cx: &mut Context<Self>,
4232 ) -> Task<anyhow::Result<()>> {
4233 self.stage_or_unstage_entries(false, entries, cx)
4234 }
4235
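/// Saves any edited buffers for the given paths, then stages or unstages those paths.
/// Open uncommitted diffs are updated eagerly and their hunk-staging counters recorded,
/// so a failed index write clears the pending hunks again.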
4236 fn stage_or_unstage_entries(
4237 &mut self,
4238 stage: bool,
4239 entries: Vec<RepoPath>,
4240 cx: &mut Context<Self>,
4241 ) -> Task<anyhow::Result<()>> {
4242 if entries.is_empty() {
4243 return Task::ready(Ok(()));
4244 }
4245 let Some(git_store) = self.git_store.upgrade() else {
4246 return Task::ready(Ok(()));
4247 };
4248 let id = self.id;
4249 let save_tasks = self.save_buffers(&entries, cx);
4250 let paths = entries
4251 .iter()
4252 .map(|p| p.as_unix_str())
4253 .collect::<Vec<_>>()
4254 .join(" ");
4255 let status = if stage {
4256 format!("git add {paths}")
4257 } else {
4258 format!("git reset {paths}")
4259 };
4260 let job_key = GitJobKey::WriteIndex(entries.clone());
4261
4262 self.spawn_job_with_tracking(
4263 entries.clone(),
4264 if stage {
4265 pending_op::GitStatus::Staged
4266 } else {
4267 pending_op::GitStatus::Unstaged
4268 },
4269 cx,
4270 async move |this, cx| {
4271 for save_task in save_tasks {
4272 save_task.await?;
4273 }
4274
4275 this.update(cx, |this, cx| {
4276 let weak_this = cx.weak_entity();
4277 this.send_keyed_job(
4278 Some(job_key),
4279 Some(status.into()),
4280 move |git_repo, mut cx| async move {
4281 let hunk_staging_operation_counts = weak_this
4282 .update(&mut cx, |this, cx| {
4283 let mut hunk_staging_operation_counts = HashMap::default();
4284 for path in &entries {
4285 let Some(project_path) =
4286 this.repo_path_to_project_path(path, cx)
4287 else {
4288 continue;
4289 };
4290 let Some(buffer) = git_store
4291 .read(cx)
4292 .buffer_store
4293 .read(cx)
4294 .get_by_path(&project_path)
4295 else {
4296 continue;
4297 };
4298 let Some(diff_state) = git_store
4299 .read(cx)
4300 .diffs
4301 .get(&buffer.read(cx).remote_id())
4302 .cloned()
4303 else {
4304 continue;
4305 };
4306 let Some(uncommitted_diff) =
4307 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4308 |uncommitted_diff| uncommitted_diff.upgrade(),
4309 )
4310 else {
4311 continue;
4312 };
4313 let buffer_snapshot = buffer.read(cx).text_snapshot();
4314 let file_exists = buffer
4315 .read(cx)
4316 .file()
4317 .is_some_and(|file| file.disk_state().exists());
4318 let hunk_staging_operation_count =
4319 diff_state.update(cx, |diff_state, cx| {
4320 uncommitted_diff.update(
4321 cx,
4322 |uncommitted_diff, cx| {
4323 uncommitted_diff
4324 .stage_or_unstage_all_hunks(
4325 stage,
4326 &buffer_snapshot,
4327 file_exists,
4328 cx,
4329 );
4330 },
4331 );
4332
4333 diff_state.hunk_staging_operation_count += 1;
4334 diff_state.hunk_staging_operation_count
4335 });
4336 hunk_staging_operation_counts.insert(
4337 diff_state.downgrade(),
4338 hunk_staging_operation_count,
4339 );
4340 }
4341 hunk_staging_operation_counts
4342 })
4343 .unwrap_or_default();
4344
4345 let result = match git_repo {
4346 RepositoryState::Local(LocalRepositoryState {
4347 backend,
4348 environment,
4349 ..
4350 }) => {
4351 if stage {
4352 backend.stage_paths(entries, environment.clone()).await
4353 } else {
4354 backend.unstage_paths(entries, environment.clone()).await
4355 }
4356 }
4357 RepositoryState::Remote(RemoteRepositoryState {
4358 project_id,
4359 client,
4360 }) => {
4361 if stage {
4362 client
4363 .request(proto::Stage {
4364 project_id: project_id.0,
4365 repository_id: id.to_proto(),
4366 paths: entries
4367 .into_iter()
4368 .map(|repo_path| repo_path.to_proto())
4369 .collect(),
4370 })
4371 .await
4372 .context("sending stage request")
4373 .map(|_| ())
4374 } else {
4375 client
4376 .request(proto::Unstage {
4377 project_id: project_id.0,
4378 repository_id: id.to_proto(),
4379 paths: entries
4380 .into_iter()
4381 .map(|repo_path| repo_path.to_proto())
4382 .collect(),
4383 })
4384 .await
4385 .context("sending unstage request")
4386 .map(|_| ())
4387 }
4388 }
4389 };
4390
4391 for (diff_state, hunk_staging_operation_count) in
4392 hunk_staging_operation_counts
4393 {
4394 diff_state
4395 .update(&mut cx, |diff_state, cx| {
4396 if result.is_ok() {
4397 diff_state.hunk_staging_operation_count_as_of_write =
4398 hunk_staging_operation_count;
4399 } else if let Some(uncommitted_diff) =
4400 &diff_state.uncommitted_diff
4401 {
4402 uncommitted_diff
4403 .update(cx, |uncommitted_diff, cx| {
4404 uncommitted_diff.clear_pending_hunks(cx);
4405 })
4406 .ok();
4407 }
4408 })
4409 .ok();
4410 }
4411
4412 result
4413 },
4414 )
4415 })?
4416 .await?
4417 },
4418 )
4419 }
4420
4421 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4422 let to_stage = self
4423 .cached_status()
4424 .filter_map(|entry| {
4425 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4426 if ops.staging() || ops.staged() {
4427 None
4428 } else {
4429 Some(entry.repo_path)
4430 }
4431 } else if entry.status.staging().is_fully_staged() {
4432 None
4433 } else {
4434 Some(entry.repo_path)
4435 }
4436 })
4437 .collect();
4438 self.stage_or_unstage_entries(true, to_stage, cx)
4439 }
4440
4441 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4442 let to_unstage = self
4443 .cached_status()
4444 .filter_map(|entry| {
4445 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4446 if !ops.staging() && !ops.staged() {
4447 None
4448 } else {
4449 Some(entry.repo_path)
4450 }
4451 } else if entry.status.staging().is_fully_unstaged() {
4452 None
4453 } else {
4454 Some(entry.repo_path)
4455 }
4456 })
4457 .collect();
4458 self.stage_or_unstage_entries(false, to_unstage, cx)
4459 }
4460
4461 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4462 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4463
4464 self.stash_entries(to_stash, cx)
4465 }
4466
4467 pub fn stash_entries(
4468 &mut self,
4469 entries: Vec<RepoPath>,
4470 cx: &mut Context<Self>,
4471 ) -> Task<anyhow::Result<()>> {
4472 let id = self.id;
4473
4474 cx.spawn(async move |this, cx| {
4475 this.update(cx, |this, _| {
4476 this.send_job(None, move |git_repo, _cx| async move {
4477 match git_repo {
4478 RepositoryState::Local(LocalRepositoryState {
4479 backend,
4480 environment,
4481 ..
4482 }) => backend.stash_paths(entries, environment).await,
4483 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4484 client
4485 .request(proto::Stash {
4486 project_id: project_id.0,
4487 repository_id: id.to_proto(),
4488 paths: entries
4489 .into_iter()
4490 .map(|repo_path| repo_path.to_proto())
4491 .collect(),
4492 })
4493 .await
4494 .context("sending stash request")?;
4495 Ok(())
4496 }
4497 }
4498 })
4499 })?
4500 .await??;
4501 Ok(())
4502 })
4503 }
4504
4505 pub fn stash_pop(
4506 &mut self,
4507 index: Option<usize>,
4508 cx: &mut Context<Self>,
4509 ) -> Task<anyhow::Result<()>> {
4510 let id = self.id;
4511 cx.spawn(async move |this, cx| {
4512 this.update(cx, |this, _| {
4513 this.send_job(None, move |git_repo, _cx| async move {
4514 match git_repo {
4515 RepositoryState::Local(LocalRepositoryState {
4516 backend,
4517 environment,
4518 ..
4519 }) => backend.stash_pop(index, environment).await,
4520 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4521 client
4522 .request(proto::StashPop {
4523 project_id: project_id.0,
4524 repository_id: id.to_proto(),
4525 stash_index: index.map(|i| i as u64),
4526 })
4527 .await
4528 .context("sending stash pop request")?;
4529 Ok(())
4530 }
4531 }
4532 })
4533 })?
4534 .await??;
4535 Ok(())
4536 })
4537 }
4538
4539 pub fn stash_apply(
4540 &mut self,
4541 index: Option<usize>,
4542 cx: &mut Context<Self>,
4543 ) -> Task<anyhow::Result<()>> {
4544 let id = self.id;
4545 cx.spawn(async move |this, cx| {
4546 this.update(cx, |this, _| {
4547 this.send_job(None, move |git_repo, _cx| async move {
4548 match git_repo {
4549 RepositoryState::Local(LocalRepositoryState {
4550 backend,
4551 environment,
4552 ..
4553 }) => backend.stash_apply(index, environment).await,
4554 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4555 client
4556 .request(proto::StashApply {
4557 project_id: project_id.0,
4558 repository_id: id.to_proto(),
4559 stash_index: index.map(|i| i as u64),
4560 })
4561 .await
4562 .context("sending stash apply request")?;
4563 Ok(())
4564 }
4565 }
4566 })
4567 })?
4568 .await??;
4569 Ok(())
4570 })
4571 }
4572
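/// Drops the stash entry at `index` (or the latest entry when `None`). For local
/// repositories the cached stash list is refreshed afterwards and, when the project
/// is shared, forwarded downstream.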
4573 pub fn stash_drop(
4574 &mut self,
4575 index: Option<usize>,
4576 cx: &mut Context<Self>,
4577 ) -> oneshot::Receiver<anyhow::Result<()>> {
4578 let id = self.id;
4579 let updates_tx = self
4580 .git_store()
4581 .and_then(|git_store| match &git_store.read(cx).state {
4582 GitStoreState::Local { downstream, .. } => downstream
4583 .as_ref()
4584 .map(|downstream| downstream.updates_tx.clone()),
4585 _ => None,
4586 });
4587 let this = cx.weak_entity();
4588 self.send_job(None, move |git_repo, mut cx| async move {
4589 match git_repo {
4590 RepositoryState::Local(LocalRepositoryState {
4591 backend,
4592 environment,
4593 ..
4594 }) => {
4595 // TODO would be nice to not have to do this manually
4596 let result = backend.stash_drop(index, environment).await;
4597 if result.is_ok()
4598 && let Ok(stash_entries) = backend.stash_entries().await
4599 {
4600 let snapshot = this.update(&mut cx, |this, cx| {
4601 this.snapshot.stash_entries = stash_entries;
4602 cx.emit(RepositoryEvent::StashEntriesChanged);
4603 this.snapshot.clone()
4604 })?;
4605 if let Some(updates_tx) = updates_tx {
4606 updates_tx
4607 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4608 .ok();
4609 }
4610 }
4611
4612 result
4613 }
4614 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4615 client
4616 .request(proto::StashDrop {
4617 project_id: project_id.0,
4618 repository_id: id.to_proto(),
4619 stash_index: index.map(|i| i as u64),
4620 })
4621 .await
.context("sending stash drop request")?;
4623 Ok(())
4624 }
4625 }
4626 })
4627 }
4628
4629 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4630 let id = self.id;
4631 self.send_job(
4632 Some(format!("git hook {}", hook.as_str()).into()),
4633 move |git_repo, _cx| async move {
4634 match git_repo {
4635 RepositoryState::Local(LocalRepositoryState {
4636 backend,
4637 environment,
4638 ..
4639 }) => backend.run_hook(hook, environment.clone()).await,
4640 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4641 client
4642 .request(proto::RunGitHook {
4643 project_id: project_id.0,
4644 repository_id: id.to_proto(),
4645 hook: hook.to_proto(),
4646 })
4647 .await?;
4648
4649 Ok(())
4650 }
4651 }
4652 },
4653 )
4654 }
4655
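/// Runs the pre-commit hook, then creates a commit with `message`, an optional
/// author `name_and_email` override, and the given [`CommitOptions`].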
4656 pub fn commit(
4657 &mut self,
4658 message: SharedString,
4659 name_and_email: Option<(SharedString, SharedString)>,
4660 options: CommitOptions,
4661 askpass: AskPassDelegate,
4662 cx: &mut App,
4663 ) -> oneshot::Receiver<Result<()>> {
4664 let id = self.id;
4665 let askpass_delegates = self.askpass_delegates.clone();
4666 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4667
4668 let rx = self.run_hook(RunHook::PreCommit, cx);
4669
4670 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4671 rx.await??;
4672
4673 match git_repo {
4674 RepositoryState::Local(LocalRepositoryState {
4675 backend,
4676 environment,
4677 ..
4678 }) => {
4679 backend
4680 .commit(message, name_and_email, options, askpass, environment)
4681 .await
4682 }
4683 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4684 askpass_delegates.lock().insert(askpass_id, askpass);
4685 let _defer = util::defer(|| {
4686 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4687 debug_assert!(askpass_delegate.is_some());
4688 });
4689 let (name, email) = name_and_email.unzip();
4690 client
4691 .request(proto::Commit {
4692 project_id: project_id.0,
4693 repository_id: id.to_proto(),
4694 message: String::from(message),
4695 name: name.map(String::from),
4696 email: email.map(String::from),
4697 options: Some(proto::commit::CommitOptions {
4698 amend: options.amend,
4699 signoff: options.signoff,
4700 }),
4701 askpass_id,
4702 })
4703 .await
4704 .context("sending commit request")?;
4705
4706 Ok(())
4707 }
4708 }
4709 })
4710 }
4711
4712 pub fn fetch(
4713 &mut self,
4714 fetch_options: FetchOptions,
4715 askpass: AskPassDelegate,
4716 _cx: &mut App,
4717 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4718 let askpass_delegates = self.askpass_delegates.clone();
4719 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4720 let id = self.id;
4721
4722 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4723 match git_repo {
4724 RepositoryState::Local(LocalRepositoryState {
4725 backend,
4726 environment,
4727 ..
4728 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4729 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4730 askpass_delegates.lock().insert(askpass_id, askpass);
4731 let _defer = util::defer(|| {
4732 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4733 debug_assert!(askpass_delegate.is_some());
4734 });
4735
4736 let response = client
4737 .request(proto::Fetch {
4738 project_id: project_id.0,
4739 repository_id: id.to_proto(),
4740 askpass_id,
4741 remote: fetch_options.to_proto(),
4742 })
4743 .await
4744 .context("sending fetch request")?;
4745
4746 Ok(RemoteCommandOutput {
4747 stdout: response.stdout,
4748 stderr: response.stderr,
4749 })
4750 }
4751 }
4752 })
4753 }
4754
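/// Pushes `branch` to `remote_branch` on `remote`. After a successful local push the
/// head branch is re-read so its upstream tracking state can be reported and shared.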
4755 pub fn push(
4756 &mut self,
4757 branch: SharedString,
4758 remote_branch: SharedString,
4759 remote: SharedString,
4760 options: Option<PushOptions>,
4761 askpass: AskPassDelegate,
4762 cx: &mut Context<Self>,
4763 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4764 let askpass_delegates = self.askpass_delegates.clone();
4765 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4766 let id = self.id;
4767
4768 let args = options
4769 .map(|option| match option {
4770 PushOptions::SetUpstream => " --set-upstream",
4771 PushOptions::Force => " --force-with-lease",
4772 })
4773 .unwrap_or("");
4774
4775 let updates_tx = self
4776 .git_store()
4777 .and_then(|git_store| match &git_store.read(cx).state {
4778 GitStoreState::Local { downstream, .. } => downstream
4779 .as_ref()
4780 .map(|downstream| downstream.updates_tx.clone()),
4781 _ => None,
4782 });
4783
4784 let this = cx.weak_entity();
4785 self.send_job(
Some(format!("git push{} {} {}:{}", args, remote, branch, remote_branch).into()),
4787 move |git_repo, mut cx| async move {
4788 match git_repo {
4789 RepositoryState::Local(LocalRepositoryState {
4790 backend,
4791 environment,
4792 ..
4793 }) => {
4794 let result = backend
4795 .push(
4796 branch.to_string(),
4797 remote_branch.to_string(),
4798 remote.to_string(),
4799 options,
4800 askpass,
4801 environment.clone(),
4802 cx.clone(),
4803 )
4804 .await;
4805 // TODO would be nice to not have to do this manually
4806 if result.is_ok() {
4807 let branches = backend.branches().await?;
4808 let branch = branches.into_iter().find(|branch| branch.is_head);
4809 log::info!("head branch after scan is {branch:?}");
4810 let snapshot = this.update(&mut cx, |this, cx| {
4811 this.snapshot.branch = branch;
4812 cx.emit(RepositoryEvent::BranchChanged);
4813 this.snapshot.clone()
4814 })?;
4815 if let Some(updates_tx) = updates_tx {
4816 updates_tx
4817 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4818 .ok();
4819 }
4820 }
4821 result
4822 }
4823 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4824 askpass_delegates.lock().insert(askpass_id, askpass);
4825 let _defer = util::defer(|| {
4826 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4827 debug_assert!(askpass_delegate.is_some());
4828 });
4829 let response = client
4830 .request(proto::Push {
4831 project_id: project_id.0,
4832 repository_id: id.to_proto(),
4833 askpass_id,
4834 branch_name: branch.to_string(),
4835 remote_branch_name: remote_branch.to_string(),
4836 remote_name: remote.to_string(),
options: options.map(|options| {
    let options = match options {
        PushOptions::Force => proto::push::PushOptions::Force,
        PushOptions::SetUpstream => proto::push::PushOptions::SetUpstream,
    };
    options as i32
}),
4844 })
4845 .await
4846 .context("sending push request")?;
4847
4848 Ok(RemoteCommandOutput {
4849 stdout: response.stdout,
4850 stderr: response.stderr,
4851 })
4852 }
4853 }
4854 },
4855 )
4856 }
4857
4858 pub fn pull(
4859 &mut self,
4860 branch: Option<SharedString>,
4861 remote: SharedString,
4862 rebase: bool,
4863 askpass: AskPassDelegate,
4864 _cx: &mut App,
4865 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4866 let askpass_delegates = self.askpass_delegates.clone();
4867 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4868 let id = self.id;
4869
4870 let mut status = "git pull".to_string();
4871 if rebase {
4872 status.push_str(" --rebase");
4873 }
4874 status.push_str(&format!(" {}", remote));
4875 if let Some(b) = &branch {
4876 status.push_str(&format!(" {}", b));
4877 }
4878
4879 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4880 match git_repo {
4881 RepositoryState::Local(LocalRepositoryState {
4882 backend,
4883 environment,
4884 ..
4885 }) => {
4886 backend
4887 .pull(
4888 branch.as_ref().map(|b| b.to_string()),
4889 remote.to_string(),
4890 rebase,
4891 askpass,
4892 environment.clone(),
4893 cx,
4894 )
4895 .await
4896 }
4897 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4898 askpass_delegates.lock().insert(askpass_id, askpass);
4899 let _defer = util::defer(|| {
4900 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4901 debug_assert!(askpass_delegate.is_some());
4902 });
4903 let response = client
4904 .request(proto::Pull {
4905 project_id: project_id.0,
4906 repository_id: id.to_proto(),
4907 askpass_id,
4908 rebase,
4909 branch_name: branch.as_ref().map(|b| b.to_string()),
4910 remote_name: remote.to_string(),
4911 })
4912 .await
4913 .context("sending pull request")?;
4914
4915 Ok(RemoteCommandOutput {
4916 stdout: response.stdout,
4917 stderr: response.stderr,
4918 })
4919 }
4920 }
4921 })
4922 }
4923
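/// Queues a write of the index text for `path`; writes to the same path share a job
/// key so only the newest queued write runs. On success, the buffer's hunk-staging
/// operation count is recorded as of this write.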
4924 fn spawn_set_index_text_job(
4925 &mut self,
4926 path: RepoPath,
4927 content: Option<String>,
4928 hunk_staging_operation_count: Option<usize>,
4929 cx: &mut Context<Self>,
4930 ) -> oneshot::Receiver<anyhow::Result<()>> {
4931 let id = self.id;
4932 let this = cx.weak_entity();
4933 let git_store = self.git_store.clone();
4934 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4935 self.send_keyed_job(
4936 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4937 None,
4938 move |git_repo, mut cx| async move {
4939 log::debug!(
4940 "start updating index text for buffer {}",
4941 path.as_unix_str()
4942 );
4943
4944 match git_repo {
4945 RepositoryState::Local(LocalRepositoryState {
4946 fs,
4947 backend,
4948 environment,
4949 ..
4950 }) => {
4951 let executable = match fs.metadata(&abs_path).await {
4952 Ok(Some(meta)) => meta.is_executable,
4953 Ok(None) => false,
4954 Err(_err) => false,
4955 };
4956 backend
4957 .set_index_text(path.clone(), content, environment.clone(), executable)
4958 .await?;
4959 }
4960 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4961 client
4962 .request(proto::SetIndexText {
4963 project_id: project_id.0,
4964 repository_id: id.to_proto(),
4965 path: path.to_proto(),
4966 text: content,
4967 })
4968 .await?;
4969 }
4970 }
4971 log::debug!(
4972 "finish updating index text for buffer {}",
4973 path.as_unix_str()
4974 );
4975
4976 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4977 let project_path = this
4978 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4979 .ok()
4980 .flatten();
4981 git_store
4982 .update(&mut cx, |git_store, cx| {
4983 let buffer_id = git_store
4984 .buffer_store
4985 .read(cx)
4986 .get_by_path(&project_path?)?
4987 .read(cx)
4988 .remote_id();
4989 let diff_state = git_store.diffs.get(&buffer_id)?;
4990 diff_state.update(cx, |diff_state, _| {
4991 diff_state.hunk_staging_operation_count_as_of_write =
4992 hunk_staging_operation_count;
4993 });
4994 Some(())
4995 })
4996 .context("Git store dropped")?;
4997 }
4998 Ok(())
4999 },
5000 )
5001 }
5002
5003 pub fn create_remote(
5004 &mut self,
5005 remote_name: String,
5006 remote_url: String,
5007 ) -> oneshot::Receiver<Result<()>> {
5008 let id = self.id;
5009 self.send_job(
5010 Some(format!("git remote add {remote_name} {remote_url}").into()),
5011 move |repo, _cx| async move {
5012 match repo {
5013 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5014 backend.create_remote(remote_name, remote_url).await
5015 }
5016 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5017 client
5018 .request(proto::GitCreateRemote {
5019 project_id: project_id.0,
5020 repository_id: id.to_proto(),
5021 remote_name,
5022 remote_url,
5023 })
5024 .await?;
5025
5026 Ok(())
5027 }
5028 }
5029 },
5030 )
5031 }
5032
5033 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5034 let id = self.id;
5035 self.send_job(
Some(format!("git remote remove {remote_name}").into()),
5037 move |repo, _cx| async move {
5038 match repo {
5039 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5040 backend.remove_remote(remote_name).await
5041 }
5042 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5043 client
5044 .request(proto::GitRemoveRemote {
5045 project_id: project_id.0,
5046 repository_id: id.to_proto(),
5047 remote_name,
5048 })
5049 .await?;
5050
5051 Ok(())
5052 }
5053 }
5054 },
5055 )
5056 }
5057
5058 pub fn get_remotes(
5059 &mut self,
5060 branch_name: Option<String>,
5061 is_push: bool,
5062 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5063 let id = self.id;
5064 self.send_job(None, move |repo, _cx| async move {
5065 match repo {
5066 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5067 let remote = if let Some(branch_name) = branch_name {
5068 if is_push {
5069 backend.get_push_remote(branch_name).await?
5070 } else {
5071 backend.get_branch_remote(branch_name).await?
5072 }
5073 } else {
5074 None
5075 };
5076
5077 match remote {
5078 Some(remote) => Ok(vec![remote]),
5079 None => backend.get_all_remotes().await,
5080 }
5081 }
5082 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5083 let response = client
5084 .request(proto::GetRemotes {
5085 project_id: project_id.0,
5086 repository_id: id.to_proto(),
5087 branch_name,
5088 is_push,
5089 })
5090 .await?;
5091
5092 let remotes = response
5093 .remotes
5094 .into_iter()
5095 .map(|remotes| Remote {
5096 name: remotes.name.into(),
5097 })
5098 .collect();
5099
5100 Ok(remotes)
5101 }
5102 }
5103 })
5104 }
5105
5106 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5107 let id = self.id;
5108 self.send_job(None, move |repo, _| async move {
5109 match repo {
5110 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5111 backend.branches().await
5112 }
5113 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5114 let response = client
5115 .request(proto::GitGetBranches {
5116 project_id: project_id.0,
5117 repository_id: id.to_proto(),
5118 })
5119 .await?;
5120
5121 let branches = response
5122 .branches
5123 .into_iter()
5124 .map(|branch| proto_to_branch(&branch))
5125 .collect();
5126
5127 Ok(branches)
5128 }
5129 }
5130 })
5131 }
5132
5133 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5134 let id = self.id;
5135 self.send_job(None, move |repo, _| async move {
5136 match repo {
5137 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5138 backend.worktrees().await
5139 }
5140 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5141 let response = client
5142 .request(proto::GitGetWorktrees {
5143 project_id: project_id.0,
5144 repository_id: id.to_proto(),
5145 })
5146 .await?;
5147
5148 let worktrees = response
5149 .worktrees
5150 .into_iter()
5151 .map(|worktree| proto_to_worktree(&worktree))
5152 .collect();
5153
5154 Ok(worktrees)
5155 }
5156 }
5157 })
5158 }
5159
5160 pub fn create_worktree(
5161 &mut self,
5162 name: String,
5163 path: PathBuf,
5164 commit: Option<String>,
5165 ) -> oneshot::Receiver<Result<()>> {
5166 let id = self.id;
5167 self.send_job(
5168 Some("git worktree add".into()),
5169 move |repo, _cx| async move {
5170 match repo {
5171 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5172 backend.create_worktree(name, path, commit).await
5173 }
5174 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5175 client
5176 .request(proto::GitCreateWorktree {
5177 project_id: project_id.0,
5178 repository_id: id.to_proto(),
5179 name,
5180 directory: path.to_string_lossy().to_string(),
5181 commit,
5182 })
5183 .await?;
5184
5185 Ok(())
5186 }
5187 }
5188 },
5189 )
5190 }
5191
5192 pub fn default_branch(
5193 &mut self,
5194 include_remote_name: bool,
5195 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5196 let id = self.id;
5197 self.send_job(None, move |repo, _| async move {
5198 match repo {
5199 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5200 backend.default_branch(include_remote_name).await
5201 }
5202 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5203 let response = client
5204 .request(proto::GetDefaultBranch {
5205 project_id: project_id.0,
5206 repository_id: id.to_proto(),
5207 })
5208 .await?;
5209
5210 anyhow::Ok(response.branch.map(SharedString::from))
5211 }
5212 }
5213 })
5214 }
5215
5216 pub fn diff_tree(
5217 &mut self,
5218 diff_type: DiffTreeType,
5219 _cx: &App,
5220 ) -> oneshot::Receiver<Result<TreeDiff>> {
5221 let repository_id = self.snapshot.id;
5222 self.send_job(None, move |repo, _cx| async move {
5223 match repo {
5224 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5225 backend.diff_tree(diff_type).await
5226 }
5227 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5228 let response = client
5229 .request(proto::GetTreeDiff {
5230 project_id: project_id.0,
5231 repository_id: repository_id.0,
5232 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5233 base: diff_type.base().to_string(),
5234 head: diff_type.head().to_string(),
5235 })
5236 .await?;
5237
5238 let entries = response
5239 .entries
5240 .into_iter()
5241 .filter_map(|entry| {
5242 let status = match entry.status() {
5243 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5244 proto::tree_diff_status::Status::Modified => {
5245 TreeDiffStatus::Modified {
5246 old: git::Oid::from_str(
5247 &entry.oid.context("missing oid").log_err()?,
5248 )
5249 .log_err()?,
5250 }
5251 }
5252 proto::tree_diff_status::Status::Deleted => {
5253 TreeDiffStatus::Deleted {
5254 old: git::Oid::from_str(
5255 &entry.oid.context("missing oid").log_err()?,
5256 )
5257 .log_err()?,
5258 }
5259 }
5260 };
5261 Some((
5262 RepoPath::from_rel_path(
5263 &RelPath::from_proto(&entry.path).log_err()?,
5264 ),
5265 status,
5266 ))
5267 })
5268 .collect();
5269
5270 Ok(TreeDiff { entries })
5271 }
5272 }
5273 })
5274 }
5275
5276 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5277 let id = self.id;
5278 self.send_job(None, move |repo, _cx| async move {
5279 match repo {
5280 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5281 backend.diff(diff_type).await
5282 }
5283 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5284 let response = client
5285 .request(proto::GitDiff {
5286 project_id: project_id.0,
5287 repository_id: id.to_proto(),
5288 diff_type: match diff_type {
5289 DiffType::HeadToIndex => {
5290 proto::git_diff::DiffType::HeadToIndex.into()
5291 }
5292 DiffType::HeadToWorktree => {
5293 proto::git_diff::DiffType::HeadToWorktree.into()
5294 }
5295 },
5296 })
5297 .await?;
5298
5299 Ok(response.diff)
5300 }
5301 }
5302 })
5303 }
5304
5305 pub fn create_branch(
5306 &mut self,
5307 branch_name: String,
5308 base_branch: Option<String>,
5309 ) -> oneshot::Receiver<Result<()>> {
5310 let id = self.id;
5311 let status_msg = if let Some(ref base) = base_branch {
5312 format!("git switch -c {branch_name} {base}").into()
5313 } else {
5314 format!("git switch -c {branch_name}").into()
5315 };
5316 self.send_job(Some(status_msg), move |repo, _cx| async move {
5317 match repo {
5318 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5319 backend.create_branch(branch_name, base_branch).await
5320 }
5321 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5322 client
5323 .request(proto::GitCreateBranch {
5324 project_id: project_id.0,
5325 repository_id: id.to_proto(),
5326 branch_name,
5327 })
5328 .await?;
5329
5330 Ok(())
5331 }
5332 }
5333 })
5334 }
5335
5336 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5337 let id = self.id;
5338 self.send_job(
5339 Some(format!("git switch {branch_name}").into()),
5340 move |repo, _cx| async move {
5341 match repo {
5342 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5343 backend.change_branch(branch_name).await
5344 }
5345 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5346 client
5347 .request(proto::GitChangeBranch {
5348 project_id: project_id.0,
5349 repository_id: id.to_proto(),
5350 branch_name,
5351 })
5352 .await?;
5353
5354 Ok(())
5355 }
5356 }
5357 },
5358 )
5359 }
5360
5361 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5362 let id = self.id;
5363 self.send_job(
5364 Some(format!("git branch -d {branch_name}").into()),
5365 move |repo, _cx| async move {
5366 match repo {
5367 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5368 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5369 client
5370 .request(proto::GitDeleteBranch {
5371 project_id: project_id.0,
5372 repository_id: id.to_proto(),
5373 branch_name,
5374 })
5375 .await?;
5376
5377 Ok(())
5378 }
5379 }
5380 },
5381 )
5382 }
5383
5384 pub fn rename_branch(
5385 &mut self,
5386 branch: String,
5387 new_name: String,
5388 ) -> oneshot::Receiver<Result<()>> {
5389 let id = self.id;
5390 self.send_job(
5391 Some(format!("git branch -m {branch} {new_name}").into()),
5392 move |repo, _cx| async move {
5393 match repo {
5394 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5395 backend.rename_branch(branch, new_name).await
5396 }
5397 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5398 client
5399 .request(proto::GitRenameBranch {
5400 project_id: project_id.0,
5401 repository_id: id.to_proto(),
5402 branch,
5403 new_name,
5404 })
5405 .await?;
5406
5407 Ok(())
5408 }
5409 }
5410 },
5411 )
5412 }
5413
5414 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5415 let id = self.id;
5416 self.send_job(None, move |repo, _cx| async move {
5417 match repo {
5418 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5419 backend.check_for_pushed_commit().await
5420 }
5421 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5422 let response = client
5423 .request(proto::CheckForPushedCommits {
5424 project_id: project_id.0,
5425 repository_id: id.to_proto(),
5426 })
5427 .await?;
5428
5429 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5430
5431 Ok(branches)
5432 }
5433 }
5434 })
5435 }
5436
5437 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5438 self.send_job(None, |repo, _cx| async move {
5439 match repo {
5440 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5441 backend.checkpoint().await
5442 }
5443 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5444 }
5445 })
5446 }
5447
5448 pub fn restore_checkpoint(
5449 &mut self,
5450 checkpoint: GitRepositoryCheckpoint,
5451 ) -> oneshot::Receiver<Result<()>> {
5452 self.send_job(None, move |repo, _cx| async move {
5453 match repo {
5454 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5455 backend.restore_checkpoint(checkpoint).await
5456 }
5457 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5458 }
5459 })
5460 }
5461
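/// Applies a repository update received from the remote host: branch, head commit,
/// merge conflicts, stash entries, remote URLs, and per-path status edits, emitting
/// events for branch, stash, and status changes.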
5462 pub(crate) fn apply_remote_update(
5463 &mut self,
5464 update: proto::UpdateRepository,
5465 cx: &mut Context<Self>,
5466 ) -> Result<()> {
5467 let conflicted_paths = TreeSet::from_ordered_entries(
5468 update
5469 .current_merge_conflicts
5470 .into_iter()
5471 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5472 );
5473 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5474 let new_head_commit = update
5475 .head_commit_details
5476 .as_ref()
5477 .map(proto_to_commit_details);
5478 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5479 cx.emit(RepositoryEvent::BranchChanged)
5480 }
5481 self.snapshot.branch = new_branch;
5482 self.snapshot.head_commit = new_head_commit;
5483
5484 self.snapshot.merge.conflicted_paths = conflicted_paths;
5485 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5486 let new_stash_entries = GitStash {
5487 entries: update
5488 .stash_entries
5489 .iter()
5490 .filter_map(|entry| proto_to_stash(entry).ok())
5491 .collect(),
5492 };
5493 if self.snapshot.stash_entries != new_stash_entries {
5494 cx.emit(RepositoryEvent::StashEntriesChanged)
5495 }
5496 self.snapshot.stash_entries = new_stash_entries;
5497 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5498 self.snapshot.remote_origin_url = update.remote_origin_url;
5499
5500 let edits = update
5501 .removed_statuses
5502 .into_iter()
5503 .filter_map(|path| {
5504 Some(sum_tree::Edit::Remove(PathKey(
5505 RelPath::from_proto(&path).log_err()?,
5506 )))
5507 })
5508 .chain(
5509 update
5510 .updated_statuses
5511 .into_iter()
5512 .filter_map(|updated_status| {
5513 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5514 }),
5515 )
5516 .collect::<Vec<_>>();
5517 if !edits.is_empty() {
5518 cx.emit(RepositoryEvent::StatusesChanged);
5519 }
5520 self.snapshot.statuses_by_path.edit(edits, ());
5521 if update.is_last_update {
5522 self.snapshot.scan_id = update.scan_id;
5523 }
5524 self.clear_pending_ops(cx);
5525 Ok(())
5526 }
5527
5528 pub fn compare_checkpoints(
5529 &mut self,
5530 left: GitRepositoryCheckpoint,
5531 right: GitRepositoryCheckpoint,
5532 ) -> oneshot::Receiver<Result<bool>> {
5533 self.send_job(None, move |repo, _cx| async move {
5534 match repo {
5535 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5536 backend.compare_checkpoints(left, right).await
5537 }
5538 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5539 }
5540 })
5541 }
5542
5543 pub fn diff_checkpoints(
5544 &mut self,
5545 base_checkpoint: GitRepositoryCheckpoint,
5546 target_checkpoint: GitRepositoryCheckpoint,
5547 ) -> oneshot::Receiver<Result<String>> {
5548 self.send_job(None, move |repo, _cx| async move {
5549 match repo {
5550 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5551 backend
5552 .diff_checkpoints(base_checkpoint, target_checkpoint)
5553 .await
5554 }
5555 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5556 }
5557 })
5558 }
5559
5560 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5561 let updated = SumTree::from_iter(
5562 self.pending_ops.iter().filter_map(|ops| {
5563 let inner_ops: Vec<PendingOp> =
5564 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5565 if inner_ops.is_empty() {
5566 None
5567 } else {
5568 Some(PendingOps {
5569 repo_path: ops.repo_path.clone(),
5570 ops: inner_ops,
5571 })
5572 }
5573 }),
5574 (),
5575 );
5576
5577 if updated != self.pending_ops {
5578 cx.emit(RepositoryEvent::PendingOpsChanged {
5579 pending_ops: self.pending_ops.clone(),
5580 })
5581 }
5582
5583 self.pending_ops = updated;
5584 }
5585
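/// Schedules a full status rescan of this repository. Repeated requests collapse onto
/// a single keyed job; the resulting snapshot replaces the current one and is sent to
/// `updates_tx` when provided.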
5586 fn schedule_scan(
5587 &mut self,
5588 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5589 cx: &mut Context<Self>,
5590 ) {
5591 let this = cx.weak_entity();
5592 let _ = self.send_keyed_job(
5593 Some(GitJobKey::ReloadGitState),
5594 None,
5595 |state, mut cx| async move {
5596 log::debug!("run scheduled git status scan");
5597
5598 let Some(this) = this.upgrade() else {
5599 return Ok(());
5600 };
5601 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5602 bail!("not a local repository")
5603 };
5604 let (snapshot, events) = this
5605 .update(&mut cx, |this, _| {
5606 this.paths_needing_status_update.clear();
5607 compute_snapshot(
5608 this.id,
5609 this.work_directory_abs_path.clone(),
5610 this.snapshot.clone(),
5611 backend.clone(),
5612 )
})?
5614 .await?;
5615 this.update(&mut cx, |this, cx| {
5616 this.snapshot = snapshot.clone();
5617 this.clear_pending_ops(cx);
5618 for event in events {
5619 cx.emit(event);
5620 }
})?;
5622 if let Some(updates_tx) = updates_tx {
5623 updates_tx
5624 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5625 .ok();
5626 }
5627 Ok(())
5628 },
5629 );
5630 }
5631
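/// Spawns the worker loop that runs queued git jobs for a local repository once its
/// backend state has resolved, registering additional git hosting providers first.
/// Jobs run one at a time, in order.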
5632 fn spawn_local_git_worker(
5633 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5634 cx: &mut Context<Self>,
5635 ) -> mpsc::UnboundedSender<GitJob> {
5636 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5637
5638 cx.spawn(async move |_, cx| {
5639 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5640 if let Some(git_hosting_provider_registry) =
cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5642 {
5643 git_hosting_providers::register_additional_providers(
5644 git_hosting_provider_registry,
5645 state.backend.clone(),
5646 )
5647 .await;
5648 }
5649 let state = RepositoryState::Local(state);
5650 let mut jobs = VecDeque::new();
5651 loop {
5652 while let Ok(Some(next_job)) = job_rx.try_next() {
5653 jobs.push_back(next_job);
5654 }
5655
5656 if let Some(job) = jobs.pop_front() {
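// If a newer job with the same key is queued behind this one, skip this job;
// only the most recent keyed job needs to run.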
5657 if let Some(current_key) = &job.key
5658 && jobs
5659 .iter()
5660 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5661 {
5662 continue;
5663 }
5664 (job.job)(state.clone(), cx).await;
5665 } else if let Some(job) = job_rx.next().await {
5666 jobs.push_back(job);
5667 } else {
5668 break;
5669 }
5670 }
5671 anyhow::Ok(())
5672 })
5673 .detach_and_log_err(cx);
5674
5675 job_tx
5676 }
5677
5678 fn spawn_remote_git_worker(
5679 state: RemoteRepositoryState,
5680 cx: &mut Context<Self>,
5681 ) -> mpsc::UnboundedSender<GitJob> {
5682 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5683
5684 cx.spawn(async move |_, cx| {
5685 let state = RepositoryState::Remote(state);
5686 let mut jobs = VecDeque::new();
5687 loop {
5688 while let Ok(Some(next_job)) = job_rx.try_next() {
5689 jobs.push_back(next_job);
5690 }
5691
5692 if let Some(job) = jobs.pop_front() {
5693 if let Some(current_key) = &job.key
5694 && jobs
5695 .iter()
5696 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5697 {
5698 continue;
5699 }
5700 (job.job)(state.clone(), cx).await;
5701 } else if let Some(job) = job_rx.next().await {
5702 jobs.push_back(job);
5703 } else {
5704 break;
5705 }
5706 }
5707 anyhow::Ok(())
5708 })
5709 .detach_and_log_err(cx);
5710
5711 job_tx
5712 }
5713
5714 fn load_staged_text(
5715 &mut self,
5716 buffer_id: BufferId,
5717 repo_path: RepoPath,
5718 cx: &App,
5719 ) -> Task<Result<Option<String>>> {
5720 let rx = self.send_job(None, move |state, _| async move {
5721 match state {
5722 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5723 anyhow::Ok(backend.load_index_text(repo_path).await)
5724 }
5725 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5726 let response = client
5727 .request(proto::OpenUnstagedDiff {
5728 project_id: project_id.to_proto(),
5729 buffer_id: buffer_id.to_proto(),
5730 })
5731 .await?;
5732 Ok(response.staged_text)
5733 }
5734 }
5735 });
5736 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5737 }
5738
    fn load_committed_text(
        &mut self,
        buffer_id: BufferId,
        repo_path: RepoPath,
        cx: &App,
    ) -> Task<Result<DiffBasesChange>> {
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    let committed_text = backend.load_committed_text(repo_path.clone()).await;
                    let staged_text = backend.load_index_text(repo_path).await;
                    let diff_bases_change = if committed_text == staged_text {
                        DiffBasesChange::SetBoth(committed_text)
                    } else {
                        DiffBasesChange::SetEach {
                            index: staged_text,
                            head: committed_text,
                        }
                    };
                    anyhow::Ok(diff_bases_change)
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    use proto::open_uncommitted_diff_response::Mode;

                    let response = client
                        .request(proto::OpenUncommittedDiff {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.to_proto(),
                        })
                        .await?;
                    let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
                    let bases = match mode {
                        Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
                        Mode::IndexAndHead => DiffBasesChange::SetEach {
                            head: response.committed_text,
                            index: response.staged_text,
                        },
                    };
                    Ok(bases)
                }
            }
        });

        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

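    /// Loads the content of the blob with the given object id, either from the local backend or
    /// from the host via `GetBlobContent`.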
    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
        let repository_id = self.snapshot.id;
        let rx = self.send_job(None, move |state, _| async move {
            match state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_blob_content(oid).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetBlobContent {
                            project_id: project_id.to_proto(),
                            repository_id: repository_id.0,
                            oid: oid.to_string(),
                        })
                        .await?;
                    Ok(response.content)
                }
            }
        });
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

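    /// Notes that the given paths may have changed and schedules a keyed `RefreshStatuses` job
    /// that re-queries their git status, applies any resulting edits to the snapshot, emits the
    /// corresponding repository events, and forwards the updated snapshot downstream when a
    /// sender is provided. The job bails if this is not a local repository.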
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        self.paths_needing_status_update.extend(paths);

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
                if paths.is_empty() {
                    return Ok(());
                }
                let statuses = backend.status(&paths).await?;
                let stash_entries = backend.stash_entries().await?;

                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        changed_path_statuses
                    })
                    .await;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }

    /// Returns the currently running git command, if any, along with when it started.
    pub fn current_job(&self) -> Option<JobInfo> {
        self.active_jobs.values().next().cloned()
    }

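    /// Enqueues a no-op job and returns a receiver that fires once every job enqueued before it
    /// has finished, since jobs are executed one at a time in order.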
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }

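    /// Runs `f` while tracking it as a pending operation on each of the given paths: every path
    /// is marked `Running` up front, and once `f` completes the ops are marked `Finished`,
    /// `Skipped` (when the job was canceled), or `Error`.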
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }

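    /// Registers a new `Running` pending op for each path and returns the `(id, path)` pairs so
    /// that the caller can update each op's job status once the work completes.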
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
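
    /// Returns the URL of the `upstream` remote if one is configured, otherwise the URL of the
    /// `origin` remote.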
    pub fn default_remote_url(&self) -> Option<String> {
        self.remote_upstream_url
            .clone()
            .or(self.remote_origin_url.clone())
    }
}

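/// Builds a permalink for a file inside a Cargo registry source checkout by reading the
/// `.cargo_vcs_info.json` and `Cargo.toml` files published with the crate, then asking the
/// matching git hosting provider for a URL at the recorded commit.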
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}

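/// Converts an optional `Blame` into its protobuf representation; `None` is encoded as a
/// response with no `blame_response` payload.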
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
    }
}

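/// Inverse of `serialize_blame_buffer_response`. Entries or commit messages whose object ids
/// fail to parse are dropped.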
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame { entries, messages })
}

fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}

fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree.ref_name.to_string(),
        sha: worktree.sha.to_string(),
    }
}

fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

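/// Builds a fresh `RepositorySnapshot` by querying the backend for branches, statuses, stash
/// entries, merge details, the head commit, and remote URLs, returning it along with the
/// `RepositoryEvent`s that describe what changed relative to `prev_snapshot`.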
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    // Look up the head commit by SHA; `branch` can be `None`, e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

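/// Reconstructs a `FileStatus` from its protobuf form. When no structured variant is present,
/// the flat `simple_status` code is used instead.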
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

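/// Converts a `FileStatus` into its structured protobuf representation.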
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}