1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, once the new index text has been
    /// written to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
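    // A minimal sketch of the intended comparison, using a hypothetical helper
    // that is not part of this struct's API: a head/index text read from the
    // repository is only trusted if no staging operation has happened since the
    // write that produced it, i.e.
    //
    //     fn index_read_is_current(&self) -> bool {
    //         self.hunk_staging_operation_count_as_of_write >= self.hunk_staging_operation_count
    //     }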
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
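/// Distinguishes the two kinds of buffer diffs the store loads: a diff against
/// the index ("unstaged") or against HEAD ("uncommitted").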
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
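/// Whether this store operates on local repositories directly or forwards
/// requests to an upstream project over RPC.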
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(
                    &Shell::System,
                    work_directory_abs_path.clone(),
                    cx,
                )
321 })?
322 .await
323 .unwrap_or_else(|| {
                log::error!(
                    "failed to get working directory environment for repository {work_directory_abs_path:?}"
                );
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
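/// A unit of work queued for a repository's background job loop. The closure is
/// given the repository's current [`RepositoryState`]; `key`, when present,
/// identifies the logical operation the job performs (see [`GitJobKey`]).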
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
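    /// Returns a [`BufferDiff`] whose base text is the buffer's staged (index)
    /// content, loading that text from the repository if needed. Concurrent
    /// callers share a single in-flight load via `loading_diffs`.
    ///
    /// A minimal usage sketch (assumes an open `buffer` and a `git_store`
    /// entity; not a doctest):
    ///
    /// ```ignore
    /// let task = git_store.update(cx, |store, cx| store.open_unstaged_diff(buffer.clone(), cx));
    /// let unstaged_diff = task.await?;
    /// ```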
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
700 let language_registry = buffer.update(cx, |buffer, _| buffer.language_registry())?;
701 let content = match oid {
702 None => None,
703 Some(oid) => Some(
704 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
705 .await?,
706 ),
707 };
708 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
709
710 buffer_diff
711 .update(cx, |buffer_diff, cx| {
712 buffer_diff.language_changed(
713 buffer_snapshot.language().cloned(),
714 language_registry,
715 cx,
716 );
717 buffer_diff.set_base_text(
718 content.map(|s| s.as_str().into()),
719 buffer_snapshot.language().cloned(),
720 buffer_snapshot.text,
721 cx,
722 )
723 })?
724 .await?;
725 let unstaged_diff = this
726 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
727 .await?;
728 buffer_diff.update(cx, |buffer_diff, _| {
729 buffer_diff.set_secondary_diff(unstaged_diff);
730 })?;
731
732 this.update(cx, |_, cx| {
733 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
734 .detach();
735 })?;
736
737 Ok(buffer_diff)
738 })
739 }
740
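    /// Returns a [`BufferDiff`] whose base text is the buffer's committed (HEAD)
    /// content, with the corresponding unstaged diff attached as its secondary
    /// diff. Concurrent callers share a single in-flight load via `loading_diffs`.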
741 pub fn open_uncommitted_diff(
742 &mut self,
743 buffer: Entity<Buffer>,
744 cx: &mut Context<Self>,
745 ) -> Task<Result<Entity<BufferDiff>>> {
746 let buffer_id = buffer.read(cx).remote_id();
747
748 if let Some(diff_state) = self.diffs.get(&buffer_id)
749 && let Some(uncommitted_diff) = diff_state
750 .read(cx)
751 .uncommitted_diff
752 .as_ref()
753 .and_then(|weak| weak.upgrade())
754 {
755 if let Some(task) =
756 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
757 {
758 return cx.background_executor().spawn(async move {
759 task.await;
760 Ok(uncommitted_diff)
761 });
762 }
763 return Task::ready(Ok(uncommitted_diff));
764 }
765
766 let Some((repo, repo_path)) =
767 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
768 else {
769 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
770 };
771
772 let task = self
773 .loading_diffs
774 .entry((buffer_id, DiffKind::Uncommitted))
775 .or_insert_with(|| {
776 let changes = repo.update(cx, |repo, cx| {
777 repo.load_committed_text(buffer_id, repo_path, cx)
778 });
779
780 // todo(lw): hot foreground spawn
781 cx.spawn(async move |this, cx| {
782 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
783 .await
784 .map_err(Arc::new)
785 })
786 .shared()
787 })
788 .clone();
789
790 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
791 }
792
793 async fn open_diff_internal(
794 this: WeakEntity<Self>,
795 kind: DiffKind,
796 texts: Result<DiffBasesChange>,
797 buffer_entity: Entity<Buffer>,
798 cx: &mut AsyncApp,
799 ) -> Result<Entity<BufferDiff>> {
800 let diff_bases_change = match texts {
801 Err(e) => {
802 this.update(cx, |this, cx| {
803 let buffer = buffer_entity.read(cx);
804 let buffer_id = buffer.remote_id();
805 this.loading_diffs.remove(&(buffer_id, kind));
806 })?;
807 return Err(e);
808 }
809 Ok(change) => change,
810 };
811
812 this.update(cx, |this, cx| {
813 let buffer = buffer_entity.read(cx);
814 let buffer_id = buffer.remote_id();
815 let language = buffer.language().cloned();
816 let language_registry = buffer.language_registry();
817 let text_snapshot = buffer.text_snapshot();
818 this.loading_diffs.remove(&(buffer_id, kind));
819
820 let git_store = cx.weak_entity();
821 let diff_state = this
822 .diffs
823 .entry(buffer_id)
824 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
825
826 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
827
828 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
829 diff_state.update(cx, |diff_state, cx| {
830 diff_state.language_changed = true;
831 diff_state.language = language;
832 diff_state.language_registry = language_registry;
833
834 match kind {
835 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
836 DiffKind::Uncommitted => {
837 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
838 diff
839 } else {
840 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
841 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
842 unstaged_diff
843 };
844
845 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
846 diff_state.uncommitted_diff = Some(diff.downgrade())
847 }
848 }
849
850 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
851 let rx = diff_state.wait_for_recalculation();
852
853 anyhow::Ok(async move {
854 if let Some(rx) = rx {
855 rx.await;
856 }
857 Ok(diff)
858 })
859 })
860 })??
861 .await
862 }
863
864 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
865 let diff_state = self.diffs.get(&buffer_id)?;
866 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
867 }
868
869 pub fn get_uncommitted_diff(
870 &self,
871 buffer_id: BufferId,
872 cx: &App,
873 ) -> Option<Entity<BufferDiff>> {
874 let diff_state = self.diffs.get(&buffer_id)?;
875 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
876 }
877
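    /// Returns the [`ConflictSet`] for the given buffer, creating it if
    /// necessary and scheduling a reparse of the buffer's conflict markers.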
878 pub fn open_conflict_set(
879 &mut self,
880 buffer: Entity<Buffer>,
881 cx: &mut Context<Self>,
882 ) -> Entity<ConflictSet> {
883 log::debug!("open conflict set");
884 let buffer_id = buffer.read(cx).remote_id();
885
886 if let Some(git_state) = self.diffs.get(&buffer_id)
887 && let Some(conflict_set) = git_state
888 .read(cx)
889 .conflict_set
890 .as_ref()
891 .and_then(|weak| weak.upgrade())
892 {
893 let conflict_set = conflict_set;
894 let buffer_snapshot = buffer.read(cx).text_snapshot();
895
896 git_state.update(cx, |state, cx| {
897 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
898 });
899
900 return conflict_set;
901 }
902
903 let is_unmerged = self
904 .repository_and_path_for_buffer_id(buffer_id, cx)
905 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
906 let git_store = cx.weak_entity();
907 let buffer_git_state = self
908 .diffs
909 .entry(buffer_id)
910 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
911 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
912
913 self._subscriptions
914 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
915 cx.emit(GitStoreEvent::ConflictsUpdated);
916 }));
917
918 buffer_git_state.update(cx, |state, cx| {
919 state.conflict_set = Some(conflict_set.downgrade());
920 let buffer_snapshot = buffer.read(cx).text_snapshot();
921 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
922 });
923
924 conflict_set
925 }
926
927 pub fn project_path_git_status(
928 &self,
929 project_path: &ProjectPath,
930 cx: &App,
931 ) -> Option<FileStatus> {
932 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
933 Some(repo.read(cx).status_for_path(&repo_path)?.status)
934 }
935
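    /// Captures a checkpoint of every repository in this store, keyed by
    /// working directory. The result can later be passed to
    /// [`Self::restore_checkpoint`] or [`Self::compare_checkpoints`].
    ///
    /// A minimal usage sketch (assumes a `git_store` entity; not a doctest):
    ///
    /// ```ignore
    /// let task = git_store.update(cx, |store, cx| store.checkpoint(cx));
    /// let checkpoint = task.await?;
    /// // ...modify the working tree...
    /// let restore = git_store.update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx));
    /// restore.await?;
    /// ```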
936 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
937 let mut work_directory_abs_paths = Vec::new();
938 let mut checkpoints = Vec::new();
939 for repository in self.repositories.values() {
940 repository.update(cx, |repository, _| {
941 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
942 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
943 });
944 }
945
946 cx.background_executor().spawn(async move {
947 let checkpoints = future::try_join_all(checkpoints).await?;
948 Ok(GitStoreCheckpoint {
949 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
950 .into_iter()
951 .zip(checkpoints)
952 .collect(),
953 })
954 })
955 }
956
957 pub fn restore_checkpoint(
958 &self,
959 checkpoint: GitStoreCheckpoint,
960 cx: &mut App,
961 ) -> Task<Result<()>> {
962 let repositories_by_work_dir_abs_path = self
963 .repositories
964 .values()
965 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
966 .collect::<HashMap<_, _>>();
967
968 let mut tasks = Vec::new();
969 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
970 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
971 let restore = repository.update(cx, |repository, _| {
972 repository.restore_checkpoint(checkpoint)
973 });
974 tasks.push(async move { restore.await? });
975 }
976 }
977 cx.background_spawn(async move {
978 future::try_join_all(tasks).await?;
979 Ok(())
980 })
981 }
982
983 /// Compares two checkpoints, returning true if they are equal.
984 pub fn compare_checkpoints(
985 &self,
986 left: GitStoreCheckpoint,
987 mut right: GitStoreCheckpoint,
988 cx: &mut App,
989 ) -> Task<Result<bool>> {
990 let repositories_by_work_dir_abs_path = self
991 .repositories
992 .values()
993 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
994 .collect::<HashMap<_, _>>();
995
996 let mut tasks = Vec::new();
997 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
998 if let Some(right_checkpoint) = right
999 .checkpoints_by_work_dir_abs_path
1000 .remove(&work_dir_abs_path)
1001 {
1002 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1003 {
1004 let compare = repository.update(cx, |repository, _| {
1005 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1006 });
1007
1008 tasks.push(async move { compare.await? });
1009 }
1010 } else {
1011 return Task::ready(Ok(false));
1012 }
1013 }
1014 cx.background_spawn(async move {
1015 Ok(future::try_join_all(tasks)
1016 .await?
1017 .into_iter()
1018 .all(|result| result))
1019 })
1020 }
1021
1022 /// Blames a buffer.
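    ///
    /// A minimal usage sketch (assumes a `git_store` entity; not a doctest):
    ///
    /// ```ignore
    /// let task = git_store.update(cx, |store, cx| store.blame_buffer(&buffer, None, cx));
    /// let blame = task.await?;
    /// ```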
1023 pub fn blame_buffer(
1024 &self,
1025 buffer: &Entity<Buffer>,
1026 version: Option<clock::Global>,
1027 cx: &mut Context<Self>,
1028 ) -> Task<Result<Option<Blame>>> {
1029 let buffer = buffer.read(cx);
1030 let Some((repo, repo_path)) =
1031 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1032 else {
1033 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1034 };
1035 let content = match &version {
1036 Some(version) => buffer.rope_for_version(version),
1037 None => buffer.as_rope().clone(),
1038 };
1039 let line_ending = buffer.line_ending();
1040 let version = version.unwrap_or(buffer.version());
1041 let buffer_id = buffer.remote_id();
1042
1043 let repo = repo.downgrade();
1044 cx.spawn(async move |_, cx| {
1045 let repository_state = repo
1046 .update(cx, |repo, _| repo.repository_state.clone())?
1047 .await
1048 .map_err(|err| anyhow::anyhow!(err))?;
1049 match repository_state {
1050 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1051 .blame(repo_path.clone(), content, line_ending)
1052 .await
1053 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1054 .map(Some),
1055 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1056 let response = client
1057 .request(proto::BlameBuffer {
1058 project_id: project_id.to_proto(),
1059 buffer_id: buffer_id.into(),
1060 version: serialize_version(&version),
1061 })
1062 .await?;
1063 Ok(deserialize_blame_buffer_response(response))
1064 }
1065 }
1066 })
1067 }
1068
1069 pub fn file_history(
1070 &self,
1071 repo: &Entity<Repository>,
1072 path: RepoPath,
1073 cx: &mut App,
1074 ) -> Task<Result<git::repository::FileHistory>> {
1075 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1076
1077 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1078 }
1079
1080 pub fn file_history_paginated(
1081 &self,
1082 repo: &Entity<Repository>,
1083 path: RepoPath,
1084 skip: usize,
1085 limit: Option<usize>,
1086 cx: &mut App,
1087 ) -> Task<Result<git::repository::FileHistory>> {
1088 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1089
1090 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1091 }
1092
1093 pub fn get_permalink_to_line(
1094 &self,
1095 buffer: &Entity<Buffer>,
1096 selection: Range<u32>,
1097 cx: &mut App,
1098 ) -> Task<Result<url::Url>> {
1099 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1100 return Task::ready(Err(anyhow!("buffer has no file")));
1101 };
1102
1103 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1104 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1105 cx,
1106 ) else {
1107 // If we're not in a Git repo, check whether this is a Rust source
1108 // file in the Cargo registry (presumably opened with go-to-definition
1109 // from a normal Rust file). If so, we can put together a permalink
1110 // using crate metadata.
1111 if buffer
1112 .read(cx)
1113 .language()
1114 .is_none_or(|lang| lang.name() != "Rust".into())
1115 {
1116 return Task::ready(Err(anyhow!("no permalink available")));
1117 }
1118 let file_path = file.worktree.read(cx).absolutize(&file.path);
1119 return cx.spawn(async move |cx| {
1120 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1121 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1122 .context("no permalink available")
1123 });
1124 };
1125
1126 let buffer_id = buffer.read(cx).remote_id();
1127 let branch = repo.read(cx).branch.clone();
1128 let remote = branch
1129 .as_ref()
1130 .and_then(|b| b.upstream.as_ref())
1131 .and_then(|b| b.remote_name())
1132 .unwrap_or("origin")
1133 .to_string();
1134
1135 let rx = repo.update(cx, |repo, _| {
1136 repo.send_job(None, move |state, cx| async move {
1137 match state {
1138 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1139 let origin_url = backend
1140 .remote_url(&remote)
1141 .await
1142 .with_context(|| format!("remote \"{remote}\" not found"))?;
1143
1144 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1145
1146 let provider_registry =
1147 cx.update(GitHostingProviderRegistry::default_global)?;
1148
1149 let (provider, remote) =
1150 parse_git_remote_url(provider_registry, &origin_url)
1151 .context("parsing Git remote URL")?;
1152
1153 Ok(provider.build_permalink(
1154 remote,
1155 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1156 ))
1157 }
1158 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1159 let response = client
1160 .request(proto::GetPermalinkToLine {
1161 project_id: project_id.to_proto(),
1162 buffer_id: buffer_id.into(),
1163 selection: Some(proto::Range {
1164 start: selection.start as u64,
1165 end: selection.end as u64,
1166 }),
1167 })
1168 .await?;
1169
1170 url::Url::parse(&response.permalink).context("failed to parse permalink")
1171 }
1172 }
1173 })
1174 });
1175 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1176 }
1177
1178 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1179 match &self.state {
1180 GitStoreState::Local {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client
1184 .as_ref()
1185 .map(|state| (state.client.clone(), state.project_id)),
1186 GitStoreState::Remote {
1187 downstream: downstream_client,
1188 ..
1189 } => downstream_client.clone(),
1190 }
1191 }
1192
1193 fn upstream_client(&self) -> Option<AnyProtoClient> {
1194 match &self.state {
1195 GitStoreState::Local { .. } => None,
1196 GitStoreState::Remote {
1197 upstream_client, ..
1198 } => Some(upstream_client.clone()),
1199 }
1200 }
1201
1202 fn on_worktree_store_event(
1203 &mut self,
1204 worktree_store: Entity<WorktreeStore>,
1205 event: &WorktreeStoreEvent,
1206 cx: &mut Context<Self>,
1207 ) {
1208 let GitStoreState::Local {
1209 project_environment,
1210 downstream,
1211 next_repository_id,
1212 fs,
1213 } = &self.state
1214 else {
1215 return;
1216 };
1217
1218 match event {
1219 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1220 if let Some(worktree) = self
1221 .worktree_store
1222 .read(cx)
1223 .worktree_for_id(*worktree_id, cx)
1224 {
1225 let paths_by_git_repo =
1226 self.process_updated_entries(&worktree, updated_entries, cx);
1227 let downstream = downstream
1228 .as_ref()
1229 .map(|downstream| downstream.updates_tx.clone());
1230 cx.spawn(async move |_, cx| {
1231 let paths_by_git_repo = paths_by_git_repo.await;
1232 for (repo, paths) in paths_by_git_repo {
1233 repo.update(cx, |repo, cx| {
1234 repo.paths_changed(paths, downstream.clone(), cx);
1235 })
1236 .ok();
1237 }
1238 })
1239 .detach();
1240 }
1241 }
1242 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1243 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1244 else {
1245 return;
1246 };
1247 if !worktree.read(cx).is_visible() {
1248 log::debug!(
1249 "not adding repositories for local worktree {:?} because it's not visible",
1250 worktree.read(cx).abs_path()
1251 );
1252 return;
1253 }
1254 self.update_repositories_from_worktree(
1255 *worktree_id,
1256 project_environment.clone(),
1257 next_repository_id.clone(),
1258 downstream
1259 .as_ref()
1260 .map(|downstream| downstream.updates_tx.clone()),
1261 changed_repos.clone(),
1262 fs.clone(),
1263 cx,
1264 );
1265 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1266 }
1267 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1268 let repos_without_worktree: Vec<RepositoryId> = self
1269 .worktree_ids
1270 .iter_mut()
1271 .filter_map(|(repo_id, worktree_ids)| {
1272 worktree_ids.remove(worktree_id);
1273 if worktree_ids.is_empty() {
1274 Some(*repo_id)
1275 } else {
1276 None
1277 }
1278 })
1279 .collect();
1280 let is_active_repo_removed = repos_without_worktree
1281 .iter()
1282 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1283
1284 for repo_id in repos_without_worktree {
1285 self.repositories.remove(&repo_id);
1286 self.worktree_ids.remove(&repo_id);
1287 if let Some(updates_tx) =
1288 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1289 {
1290 updates_tx
1291 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1292 .ok();
1293 }
1294 }
1295
1296 if is_active_repo_removed {
1297 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1298 self.active_repo_id = Some(repo_id);
1299 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1300 } else {
1301 self.active_repo_id = None;
1302 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1303 }
1304 }
1305 }
1306 _ => {}
1307 }
1308 }
1309 fn on_repository_event(
1310 &mut self,
1311 repo: Entity<Repository>,
1312 event: &RepositoryEvent,
1313 cx: &mut Context<Self>,
1314 ) {
1315 let id = repo.read(cx).id;
1316 let repo_snapshot = repo.read(cx).snapshot.clone();
1317 for (buffer_id, diff) in self.diffs.iter() {
1318 if let Some((buffer_repo, repo_path)) =
1319 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1320 && buffer_repo == repo
1321 {
1322 diff.update(cx, |diff, cx| {
1323 if let Some(conflict_set) = &diff.conflict_set {
1324 let conflict_status_changed =
1325 conflict_set.update(cx, |conflict_set, cx| {
1326 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1327 conflict_set.set_has_conflict(has_conflict, cx)
1328 })?;
1329 if conflict_status_changed {
1330 let buffer_store = self.buffer_store.read(cx);
1331 if let Some(buffer) = buffer_store.get(*buffer_id) {
1332 let _ = diff
1333 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1334 }
1335 }
1336 }
1337 anyhow::Ok(())
1338 })
1339 .ok();
1340 }
1341 }
1342 cx.emit(GitStoreEvent::RepositoryUpdated(
1343 id,
1344 event.clone(),
1345 self.active_repo_id == Some(id),
1346 ))
1347 }
1348
1349 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1350 cx.emit(GitStoreEvent::JobsUpdated)
1351 }
1352
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
1354 fn update_repositories_from_worktree(
1355 &mut self,
1356 worktree_id: WorktreeId,
1357 project_environment: Entity<ProjectEnvironment>,
1358 next_repository_id: Arc<AtomicU64>,
1359 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1360 updated_git_repositories: UpdatedGitRepositoriesSet,
1361 fs: Arc<dyn Fs>,
1362 cx: &mut Context<Self>,
1363 ) {
1364 let mut removed_ids = Vec::new();
1365 for update in updated_git_repositories.iter() {
1366 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1367 let existing_work_directory_abs_path =
1368 repo.read(cx).work_directory_abs_path.clone();
1369 Some(&existing_work_directory_abs_path)
1370 == update.old_work_directory_abs_path.as_ref()
1371 || Some(&existing_work_directory_abs_path)
1372 == update.new_work_directory_abs_path.as_ref()
1373 }) {
1374 let repo_id = *id;
1375 if let Some(new_work_directory_abs_path) =
1376 update.new_work_directory_abs_path.clone()
1377 {
1378 self.worktree_ids
1379 .entry(repo_id)
1380 .or_insert_with(HashSet::new)
1381 .insert(worktree_id);
1382 existing.update(cx, |existing, cx| {
1383 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1384 existing.schedule_scan(updates_tx.clone(), cx);
1385 });
1386 } else {
1387 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1388 worktree_ids.remove(&worktree_id);
1389 if worktree_ids.is_empty() {
1390 removed_ids.push(repo_id);
1391 }
1392 }
1393 }
1394 } else if let UpdatedGitRepository {
1395 new_work_directory_abs_path: Some(work_directory_abs_path),
1396 dot_git_abs_path: Some(dot_git_abs_path),
1397 repository_dir_abs_path: Some(_repository_dir_abs_path),
1398 common_dir_abs_path: Some(_common_dir_abs_path),
1399 ..
1400 } = update
1401 {
1402 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1403 let git_store = cx.weak_entity();
1404 let repo = cx.new(|cx| {
1405 let mut repo = Repository::local(
1406 id,
1407 work_directory_abs_path.clone(),
1408 dot_git_abs_path.clone(),
1409 project_environment.downgrade(),
1410 fs.clone(),
1411 git_store,
1412 cx,
1413 );
1414 if let Some(updates_tx) = updates_tx.as_ref() {
1415 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1416 updates_tx
1417 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1418 .ok();
1419 }
1420 repo.schedule_scan(updates_tx.clone(), cx);
1421 repo
1422 });
1423 self._subscriptions
1424 .push(cx.subscribe(&repo, Self::on_repository_event));
1425 self._subscriptions
1426 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1427 self.repositories.insert(id, repo);
1428 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1429 cx.emit(GitStoreEvent::RepositoryAdded);
1430 self.active_repo_id.get_or_insert_with(|| {
1431 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1432 id
1433 });
1434 }
1435 }
1436
1437 for id in removed_ids {
1438 if self.active_repo_id == Some(id) {
1439 self.active_repo_id = None;
1440 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1441 }
1442 self.repositories.remove(&id);
1443 if let Some(updates_tx) = updates_tx.as_ref() {
1444 updates_tx
1445 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1446 .ok();
1447 }
1448 }
1449 }
1450
1451 fn on_buffer_store_event(
1452 &mut self,
1453 _: Entity<BufferStore>,
1454 event: &BufferStoreEvent,
1455 cx: &mut Context<Self>,
1456 ) {
1457 match event {
1458 BufferStoreEvent::BufferAdded(buffer) => {
1459 cx.subscribe(buffer, |this, buffer, event, cx| {
1460 if let BufferEvent::LanguageChanged(_) = event {
1461 let buffer_id = buffer.read(cx).remote_id();
1462 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1463 diff_state.update(cx, |diff_state, cx| {
1464 diff_state.buffer_language_changed(buffer, cx);
1465 });
1466 }
1467 }
1468 })
1469 .detach();
1470 }
1471 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1472 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1473 diffs.remove(buffer_id);
1474 }
1475 }
1476 BufferStoreEvent::BufferDropped(buffer_id) => {
1477 self.diffs.remove(buffer_id);
1478 for diffs in self.shared_diffs.values_mut() {
1479 diffs.remove(buffer_id);
1480 }
1481 }
1482 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1487 let buffer_id = buffer.read(cx).remote_id();
1488 let diff_state = self.diffs.get(&buffer_id);
1489 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1490
1491 if let Some(diff_state) = diff_state
1492 && let Some((repo, repo_path)) = repo
1493 {
1494 let buffer = buffer.clone();
1495 let diff_state = diff_state.clone();
1496
1497 cx.spawn(async move |_git_store, cx| {
1498 async {
1499 let diff_bases_change = repo
1500 .update(cx, |repo, cx| {
1501 repo.load_committed_text(buffer_id, repo_path, cx)
1502 })?
1503 .await?;
1504
1505 diff_state.update(cx, |diff_state, cx| {
1506 let buffer_snapshot = buffer.read(cx).text_snapshot();
1507 diff_state.diff_bases_changed(
1508 buffer_snapshot,
1509 Some(diff_bases_change),
1510 cx,
1511 );
1512 })
1513 }
1514 .await
1515 .log_err();
1516 })
1517 .detach();
1518 }
1519 }
1520 _ => {}
1521 }
1522 }
1523
1524 pub fn recalculate_buffer_diffs(
1525 &mut self,
1526 buffers: Vec<Entity<Buffer>>,
1527 cx: &mut Context<Self>,
1528 ) -> impl Future<Output = ()> + use<> {
1529 let mut futures = Vec::new();
1530 for buffer in buffers {
1531 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1532 let buffer = buffer.read(cx).text_snapshot();
1533 diff_state.update(cx, |diff_state, cx| {
1534 diff_state.recalculate_diffs(buffer.clone(), cx);
1535 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1536 });
1537 futures.push(diff_state.update(cx, |diff_state, cx| {
1538 diff_state
1539 .reparse_conflict_markers(buffer, cx)
1540 .map(|_| {})
1541 .boxed()
1542 }));
1543 }
1544 }
1545 async move {
1546 futures::future::join_all(futures).await;
1547 }
1548 }
1549
1550 fn on_buffer_diff_event(
1551 &mut self,
1552 diff: Entity<buffer_diff::BufferDiff>,
1553 event: &BufferDiffEvent,
1554 cx: &mut Context<Self>,
1555 ) {
1556 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1557 let buffer_id = diff.read(cx).buffer_id;
1558 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1559 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1560 diff_state.hunk_staging_operation_count += 1;
1561 diff_state.hunk_staging_operation_count
1562 });
1563 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1564 let recv = repo.update(cx, |repo, cx| {
1565 log::debug!("hunks changed for {}", path.as_unix_str());
1566 repo.spawn_set_index_text_job(
1567 path,
1568 new_index_text.as_ref().map(|rope| rope.to_string()),
1569 Some(hunk_staging_operation_count),
1570 cx,
1571 )
1572 });
1573 let diff = diff.downgrade();
1574 cx.spawn(async move |this, cx| {
1575 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1576 diff.update(cx, |diff, cx| {
1577 diff.clear_pending_hunks(cx);
1578 })
1579 .ok();
1580 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1581 .ok();
1582 }
1583 })
1584 .detach();
1585 }
1586 }
1587 }
1588 }
1589
1590 fn local_worktree_git_repos_changed(
1591 &mut self,
1592 worktree: Entity<Worktree>,
1593 changed_repos: &UpdatedGitRepositoriesSet,
1594 cx: &mut Context<Self>,
1595 ) {
1596 log::debug!("local worktree repos changed");
1597 debug_assert!(worktree.read(cx).is_local());
1598
1599 for repository in self.repositories.values() {
1600 repository.update(cx, |repository, cx| {
1601 let repo_abs_path = &repository.work_directory_abs_path;
1602 if changed_repos.iter().any(|update| {
1603 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1604 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1605 }) {
1606 repository.reload_buffer_diff_bases(cx);
1607 }
1608 });
1609 }
1610 }
1611
1612 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1613 &self.repositories
1614 }
1615
1616 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1617 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1618 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1619 Some(status.status)
1620 }
1621
1622 pub fn repository_and_path_for_buffer_id(
1623 &self,
1624 buffer_id: BufferId,
1625 cx: &App,
1626 ) -> Option<(Entity<Repository>, RepoPath)> {
1627 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1628 let project_path = buffer.read(cx).project_path(cx)?;
1629 self.repository_and_path_for_project_path(&project_path, cx)
1630 }
1631
1632 pub fn repository_and_path_for_project_path(
1633 &self,
1634 path: &ProjectPath,
1635 cx: &App,
1636 ) -> Option<(Entity<Repository>, RepoPath)> {
1637 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
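        // Every matching repository's work directory is an ancestor of `abs_path`,
        // so taking the maximum path below selects the innermost repository that
        // contains it.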
1638 self.repositories
1639 .values()
1640 .filter_map(|repo| {
1641 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1642 Some((repo.clone(), repo_path))
1643 })
1644 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1645 }
1646
1647 pub fn git_init(
1648 &self,
1649 path: Arc<Path>,
1650 fallback_branch_name: String,
1651 cx: &App,
1652 ) -> Task<Result<()>> {
1653 match &self.state {
1654 GitStoreState::Local { fs, .. } => {
1655 let fs = fs.clone();
1656 cx.background_executor()
1657 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1658 }
1659 GitStoreState::Remote {
1660 upstream_client,
1661 upstream_project_id: project_id,
1662 ..
1663 } => {
1664 let client = upstream_client.clone();
1665 let project_id = *project_id;
1666 cx.background_executor().spawn(async move {
1667 client
1668 .request(proto::GitInit {
                            project_id,
1670 abs_path: path.to_string_lossy().into_owned(),
1671 fallback_branch_name,
1672 })
1673 .await?;
1674 Ok(())
1675 })
1676 }
1677 }
1678 }
1679
1680 pub fn git_clone(
1681 &self,
1682 repo: String,
1683 path: impl Into<Arc<std::path::Path>>,
1684 cx: &App,
1685 ) -> Task<Result<()>> {
1686 let path = path.into();
1687 match &self.state {
1688 GitStoreState::Local { fs, .. } => {
1689 let fs = fs.clone();
1690 cx.background_executor()
1691 .spawn(async move { fs.git_clone(&repo, &path).await })
1692 }
1693 GitStoreState::Remote {
1694 upstream_client,
1695 upstream_project_id,
1696 ..
1697 } => {
1698 if upstream_client.is_via_collab() {
1699 return Task::ready(Err(anyhow!(
1700 "Git Clone isn't supported for project guests"
1701 )));
1702 }
1703 let request = upstream_client.request(proto::GitClone {
1704 project_id: *upstream_project_id,
1705 abs_path: path.to_string_lossy().into_owned(),
1706 remote_repo: repo,
1707 });
1708
1709 cx.background_spawn(async move {
1710 let result = request.await?;
1711
1712 match result.success {
1713 true => Ok(()),
1714 false => Err(anyhow!("Git Clone failed")),
1715 }
1716 })
1717 }
1718 }
1719 }
1720
1721 async fn handle_update_repository(
1722 this: Entity<Self>,
1723 envelope: TypedEnvelope<proto::UpdateRepository>,
1724 mut cx: AsyncApp,
1725 ) -> Result<()> {
1726 this.update(&mut cx, |this, cx| {
1727 let path_style = this.worktree_store.read(cx).path_style();
1728 let mut update = envelope.payload;
1729
1730 let id = RepositoryId::from_proto(update.id);
1731 let client = this.upstream_client().context("no upstream client")?;
1732
1733 let mut repo_subscription = None;
1734 let repo = this.repositories.entry(id).or_insert_with(|| {
1735 let git_store = cx.weak_entity();
1736 let repo = cx.new(|cx| {
1737 Repository::remote(
1738 id,
1739 Path::new(&update.abs_path).into(),
1740 path_style,
1741 ProjectId(update.project_id),
1742 client,
1743 git_store,
1744 cx,
1745 )
1746 });
1747 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1748 cx.emit(GitStoreEvent::RepositoryAdded);
1749 repo
1750 });
1751 this._subscriptions.extend(repo_subscription);
1752
1753 repo.update(cx, {
1754 let update = update.clone();
1755 |repo, cx| repo.apply_remote_update(update, cx)
1756 })?;
1757
1758 this.active_repo_id.get_or_insert_with(|| {
1759 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1760 id
1761 });
1762
1763 if let Some((client, project_id)) = this.downstream_client() {
1764 update.project_id = project_id.to_proto();
1765 client.send(update).log_err();
1766 }
1767 Ok(())
1768 })?
1769 }
1770
1771 async fn handle_remove_repository(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::RemoveRepository>,
1774 mut cx: AsyncApp,
1775 ) -> Result<()> {
1776 this.update(&mut cx, |this, cx| {
1777 let mut update = envelope.payload;
1778 let id = RepositoryId::from_proto(update.id);
1779 this.repositories.remove(&id);
1780 if let Some((client, project_id)) = this.downstream_client() {
1781 update.project_id = project_id.to_proto();
1782 client.send(update).log_err();
1783 }
1784 if this.active_repo_id == Some(id) {
1785 this.active_repo_id = None;
1786 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1787 }
1788 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1789 })
1790 }
1791
1792 async fn handle_git_init(
1793 this: Entity<Self>,
1794 envelope: TypedEnvelope<proto::GitInit>,
1795 cx: AsyncApp,
1796 ) -> Result<proto::Ack> {
1797 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1798 let name = envelope.payload.fallback_branch_name;
1799 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1800 .await?;
1801
1802 Ok(proto::Ack {})
1803 }
1804
1805 async fn handle_git_clone(
1806 this: Entity<Self>,
1807 envelope: TypedEnvelope<proto::GitClone>,
1808 cx: AsyncApp,
1809 ) -> Result<proto::GitCloneResponse> {
1810 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1811 let repo_name = envelope.payload.remote_repo;
1812 let result = cx
1813 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1814 .await;
1815
1816 Ok(proto::GitCloneResponse {
1817 success: result.is_ok(),
1818 })
1819 }
1820
1821 async fn handle_fetch(
1822 this: Entity<Self>,
1823 envelope: TypedEnvelope<proto::Fetch>,
1824 mut cx: AsyncApp,
1825 ) -> Result<proto::RemoteMessageResponse> {
1826 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1827 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1828 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1829 let askpass_id = envelope.payload.askpass_id;
1830
1831 let askpass = make_remote_delegate(
1832 this,
1833 envelope.payload.project_id,
1834 repository_id,
1835 askpass_id,
1836 &mut cx,
1837 );
1838
1839 let remote_output = repository_handle
1840 .update(&mut cx, |repository_handle, cx| {
1841 repository_handle.fetch(fetch_options, askpass, cx)
1842 })?
1843 .await??;
1844
1845 Ok(proto::RemoteMessageResponse {
1846 stdout: remote_output.stdout,
1847 stderr: remote_output.stderr,
1848 })
1849 }
1850
1851 async fn handle_push(
1852 this: Entity<Self>,
1853 envelope: TypedEnvelope<proto::Push>,
1854 mut cx: AsyncApp,
1855 ) -> Result<proto::RemoteMessageResponse> {
1856 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1857 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1858
1859 let askpass_id = envelope.payload.askpass_id;
1860 let askpass = make_remote_delegate(
1861 this,
1862 envelope.payload.project_id,
1863 repository_id,
1864 askpass_id,
1865 &mut cx,
1866 );
1867
1868 let options = envelope
1869 .payload
1870 .options
1871 .as_ref()
1872 .map(|_| match envelope.payload.options() {
1873 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1874 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1875 });
1876
1877 let branch_name = envelope.payload.branch_name.into();
1878 let remote_branch_name = envelope.payload.remote_branch_name.into();
1879 let remote_name = envelope.payload.remote_name.into();
1880
1881 let remote_output = repository_handle
1882 .update(&mut cx, |repository_handle, cx| {
1883 repository_handle.push(
1884 branch_name,
1885 remote_branch_name,
1886 remote_name,
1887 options,
1888 askpass,
1889 cx,
1890 )
1891 })?
1892 .await??;
1893 Ok(proto::RemoteMessageResponse {
1894 stdout: remote_output.stdout,
1895 stderr: remote_output.stderr,
1896 })
1897 }
1898
1899 async fn handle_pull(
1900 this: Entity<Self>,
1901 envelope: TypedEnvelope<proto::Pull>,
1902 mut cx: AsyncApp,
1903 ) -> Result<proto::RemoteMessageResponse> {
1904 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1905 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1906 let askpass_id = envelope.payload.askpass_id;
1907 let askpass = make_remote_delegate(
1908 this,
1909 envelope.payload.project_id,
1910 repository_id,
1911 askpass_id,
1912 &mut cx,
1913 );
1914
1915 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1916 let remote_name = envelope.payload.remote_name.into();
1917 let rebase = envelope.payload.rebase;
1918
1919 let remote_message = repository_handle
1920 .update(&mut cx, |repository_handle, cx| {
1921 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1922 })?
1923 .await??;
1924
1925 Ok(proto::RemoteMessageResponse {
1926 stdout: remote_message.stdout,
1927 stderr: remote_message.stderr,
1928 })
1929 }
1930
1931 async fn handle_stage(
1932 this: Entity<Self>,
1933 envelope: TypedEnvelope<proto::Stage>,
1934 mut cx: AsyncApp,
1935 ) -> Result<proto::Ack> {
1936 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1937 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1938
1939 let entries = envelope
1940 .payload
1941 .paths
1942 .into_iter()
1943 .map(|path| RepoPath::new(&path))
1944 .collect::<Result<Vec<_>>>()?;
1945
1946 repository_handle
1947 .update(&mut cx, |repository_handle, cx| {
1948 repository_handle.stage_entries(entries, cx)
1949 })?
1950 .await?;
1951 Ok(proto::Ack {})
1952 }
1953
1954 async fn handle_unstage(
1955 this: Entity<Self>,
1956 envelope: TypedEnvelope<proto::Unstage>,
1957 mut cx: AsyncApp,
1958 ) -> Result<proto::Ack> {
1959 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1960 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1961
1962 let entries = envelope
1963 .payload
1964 .paths
1965 .into_iter()
1966 .map(|path| RepoPath::new(&path))
1967 .collect::<Result<Vec<_>>>()?;
1968
1969 repository_handle
1970 .update(&mut cx, |repository_handle, cx| {
1971 repository_handle.unstage_entries(entries, cx)
1972 })?
1973 .await?;
1974
1975 Ok(proto::Ack {})
1976 }
1977
1978 async fn handle_stash(
1979 this: Entity<Self>,
1980 envelope: TypedEnvelope<proto::Stash>,
1981 mut cx: AsyncApp,
1982 ) -> Result<proto::Ack> {
1983 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1984 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1985
1986 let entries = envelope
1987 .payload
1988 .paths
1989 .into_iter()
1990 .map(|path| RepoPath::new(&path))
1991 .collect::<Result<Vec<_>>>()?;
1992
1993 repository_handle
1994 .update(&mut cx, |repository_handle, cx| {
1995 repository_handle.stash_entries(entries, cx)
1996 })?
1997 .await?;
1998
1999 Ok(proto::Ack {})
2000 }
2001
2002 async fn handle_stash_pop(
2003 this: Entity<Self>,
2004 envelope: TypedEnvelope<proto::StashPop>,
2005 mut cx: AsyncApp,
2006 ) -> Result<proto::Ack> {
2007 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2008 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2009 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2010
2011 repository_handle
2012 .update(&mut cx, |repository_handle, cx| {
2013 repository_handle.stash_pop(stash_index, cx)
2014 })?
2015 .await?;
2016
2017 Ok(proto::Ack {})
2018 }
2019
2020 async fn handle_stash_apply(
2021 this: Entity<Self>,
2022 envelope: TypedEnvelope<proto::StashApply>,
2023 mut cx: AsyncApp,
2024 ) -> Result<proto::Ack> {
2025 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2026 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2027 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2028
2029 repository_handle
2030 .update(&mut cx, |repository_handle, cx| {
2031 repository_handle.stash_apply(stash_index, cx)
2032 })?
2033 .await?;
2034
2035 Ok(proto::Ack {})
2036 }
2037
2038 async fn handle_stash_drop(
2039 this: Entity<Self>,
2040 envelope: TypedEnvelope<proto::StashDrop>,
2041 mut cx: AsyncApp,
2042 ) -> Result<proto::Ack> {
2043 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2044 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2045 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2046
2047 repository_handle
2048 .update(&mut cx, |repository_handle, cx| {
2049 repository_handle.stash_drop(stash_index, cx)
2050 })?
2051 .await??;
2052
2053 Ok(proto::Ack {})
2054 }
2055
2056 async fn handle_set_index_text(
2057 this: Entity<Self>,
2058 envelope: TypedEnvelope<proto::SetIndexText>,
2059 mut cx: AsyncApp,
2060 ) -> Result<proto::Ack> {
2061 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2062 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2063 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2064
2065 repository_handle
2066 .update(&mut cx, |repository_handle, cx| {
2067 repository_handle.spawn_set_index_text_job(
2068 repo_path,
2069 envelope.payload.text,
2070 None,
2071 cx,
2072 )
2073 })?
2074 .await??;
2075 Ok(proto::Ack {})
2076 }
2077
2078 async fn handle_run_hook(
2079 this: Entity<Self>,
2080 envelope: TypedEnvelope<proto::RunGitHook>,
2081 mut cx: AsyncApp,
2082 ) -> Result<proto::Ack> {
2083 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2084 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2085 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2086 repository_handle
2087 .update(&mut cx, |repository_handle, cx| {
2088 repository_handle.run_hook(hook, cx)
2089 })?
2090 .await??;
2091 Ok(proto::Ack {})
2092 }
2093
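    /// Handles a `Commit` request: commits with the given message, optionally
    /// overriding the author (the name and email must both be present for the
    /// override to apply), and honoring the amend and signoff options.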
2094 async fn handle_commit(
2095 this: Entity<Self>,
2096 envelope: TypedEnvelope<proto::Commit>,
2097 mut cx: AsyncApp,
2098 ) -> Result<proto::Ack> {
2099 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2100 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2101 let askpass_id = envelope.payload.askpass_id;
2102
2103 let askpass = make_remote_delegate(
2104 this,
2105 envelope.payload.project_id,
2106 repository_id,
2107 askpass_id,
2108 &mut cx,
2109 );
2110
2111 let message = SharedString::from(envelope.payload.message);
2112 let name = envelope.payload.name.map(SharedString::from);
2113 let email = envelope.payload.email.map(SharedString::from);
2114 let options = envelope.payload.options.unwrap_or_default();
2115
2116 repository_handle
2117 .update(&mut cx, |repository_handle, cx| {
2118 repository_handle.commit(
2119 message,
2120 name.zip(email),
2121 CommitOptions {
2122 amend: options.amend,
2123 signoff: options.signoff,
2124 },
2125 askpass,
2126 cx,
2127 )
2128 })?
2129 .await??;
2130 Ok(proto::Ack {})
2131 }
2132
2133 async fn handle_get_remotes(
2134 this: Entity<Self>,
2135 envelope: TypedEnvelope<proto::GetRemotes>,
2136 mut cx: AsyncApp,
2137 ) -> Result<proto::GetRemotesResponse> {
2138 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2139 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2140
2141 let branch_name = envelope.payload.branch_name;
2142 let is_push = envelope.payload.is_push;
2143
2144 let remotes = repository_handle
2145 .update(&mut cx, |repository_handle, _| {
2146 repository_handle.get_remotes(branch_name, is_push)
2147 })?
2148 .await??;
2149
2150 Ok(proto::GetRemotesResponse {
2151 remotes: remotes
2152 .into_iter()
2153 .map(|remotes| proto::get_remotes_response::Remote {
2154 name: remotes.name.to_string(),
2155 })
2156 .collect::<Vec<_>>(),
2157 })
2158 }
2159
2160 async fn handle_get_worktrees(
2161 this: Entity<Self>,
2162 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2163 mut cx: AsyncApp,
2164 ) -> Result<proto::GitWorktreesResponse> {
2165 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2166 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2167
2168 let worktrees = repository_handle
2169 .update(&mut cx, |repository_handle, _| {
2170 repository_handle.worktrees()
2171 })?
2172 .await??;
2173
2174 Ok(proto::GitWorktreesResponse {
2175 worktrees: worktrees
2176 .into_iter()
2177 .map(|worktree| worktree_to_proto(&worktree))
2178 .collect::<Vec<_>>(),
2179 })
2180 }
2181
2182 async fn handle_create_worktree(
2183 this: Entity<Self>,
2184 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2185 mut cx: AsyncApp,
2186 ) -> Result<proto::Ack> {
2187 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2188 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2189 let directory = PathBuf::from(envelope.payload.directory);
2190 let name = envelope.payload.name;
2191 let commit = envelope.payload.commit;
2192
2193 repository_handle
2194 .update(&mut cx, |repository_handle, _| {
2195 repository_handle.create_worktree(name, directory, commit)
2196 })?
2197 .await??;
2198
2199 Ok(proto::Ack {})
2200 }
2201
2202 async fn handle_get_branches(
2203 this: Entity<Self>,
2204 envelope: TypedEnvelope<proto::GitGetBranches>,
2205 mut cx: AsyncApp,
2206 ) -> Result<proto::GitBranchesResponse> {
2207 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2208 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2209
2210 let branches = repository_handle
2211 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2212 .await??;
2213
2214 Ok(proto::GitBranchesResponse {
2215 branches: branches
2216 .into_iter()
2217 .map(|branch| branch_to_proto(&branch))
2218 .collect::<Vec<_>>(),
2219 })
    }

    async fn handle_get_default_branch(
2222 this: Entity<Self>,
2223 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2224 mut cx: AsyncApp,
2225 ) -> Result<proto::GetDefaultBranchResponse> {
2226 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2227 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2228
2229 let branch = repository_handle
2230 .update(&mut cx, |repository_handle, _| {
2231 repository_handle.default_branch()
2232 })?
2233 .await??
2234 .map(Into::into);
2235
2236 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
2239 this: Entity<Self>,
2240 envelope: TypedEnvelope<proto::GitCreateBranch>,
2241 mut cx: AsyncApp,
2242 ) -> Result<proto::Ack> {
2243 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2244 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2245 let branch_name = envelope.payload.branch_name;
2246
2247 repository_handle
2248 .update(&mut cx, |repository_handle, _| {
2249 repository_handle.create_branch(branch_name, None)
2250 })?
2251 .await??;
2252
2253 Ok(proto::Ack {})
2254 }
2255
2256 async fn handle_change_branch(
2257 this: Entity<Self>,
2258 envelope: TypedEnvelope<proto::GitChangeBranch>,
2259 mut cx: AsyncApp,
2260 ) -> Result<proto::Ack> {
2261 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2262 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2263 let branch_name = envelope.payload.branch_name;
2264
2265 repository_handle
2266 .update(&mut cx, |repository_handle, _| {
2267 repository_handle.change_branch(branch_name)
2268 })?
2269 .await??;
2270
2271 Ok(proto::Ack {})
2272 }
2273
2274 async fn handle_rename_branch(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::GitRenameBranch>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::Ack> {
2279 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2280 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2281 let branch = envelope.payload.branch;
2282 let new_name = envelope.payload.new_name;
2283
2284 repository_handle
2285 .update(&mut cx, |repository_handle, _| {
2286 repository_handle.rename_branch(branch, new_name)
2287 })?
2288 .await??;
2289
2290 Ok(proto::Ack {})
2291 }
2292
2293 async fn handle_create_remote(
2294 this: Entity<Self>,
2295 envelope: TypedEnvelope<proto::GitCreateRemote>,
2296 mut cx: AsyncApp,
2297 ) -> Result<proto::Ack> {
2298 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2299 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2300 let remote_name = envelope.payload.remote_name;
2301 let remote_url = envelope.payload.remote_url;
2302
2303 repository_handle
2304 .update(&mut cx, |repository_handle, _| {
2305 repository_handle.create_remote(remote_name, remote_url)
2306 })?
2307 .await??;
2308
2309 Ok(proto::Ack {})
2310 }
2311
2312 async fn handle_delete_branch(
2313 this: Entity<Self>,
2314 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2315 mut cx: AsyncApp,
2316 ) -> Result<proto::Ack> {
2317 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2318 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2319 let branch_name = envelope.payload.branch_name;
2320
2321 repository_handle
2322 .update(&mut cx, |repository_handle, _| {
2323 repository_handle.delete_branch(branch_name)
2324 })?
2325 .await??;
2326
2327 Ok(proto::Ack {})
2328 }
2329
2330 async fn handle_remove_remote(
2331 this: Entity<Self>,
2332 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2333 mut cx: AsyncApp,
2334 ) -> Result<proto::Ack> {
2335 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2336 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2337 let remote_name = envelope.payload.remote_name;
2338
2339 repository_handle
2340 .update(&mut cx, |repository_handle, _| {
2341 repository_handle.remove_remote(remote_name)
2342 })?
2343 .await??;
2344
2345 Ok(proto::Ack {})
2346 }
2347
2348 async fn handle_show(
2349 this: Entity<Self>,
2350 envelope: TypedEnvelope<proto::GitShow>,
2351 mut cx: AsyncApp,
2352 ) -> Result<proto::GitCommitDetails> {
2353 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2354 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2355
2356 let commit = repository_handle
2357 .update(&mut cx, |repository_handle, _| {
2358 repository_handle.show(envelope.payload.commit)
2359 })?
2360 .await??;
2361 Ok(proto::GitCommitDetails {
2362 sha: commit.sha.into(),
2363 message: commit.message.into(),
2364 commit_timestamp: commit.commit_timestamp,
2365 author_email: commit.author_email.into(),
2366 author_name: commit.author_name.into(),
2367 })
2368 }
2369
2370 async fn handle_load_commit_diff(
2371 this: Entity<Self>,
2372 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2373 mut cx: AsyncApp,
2374 ) -> Result<proto::LoadCommitDiffResponse> {
2375 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2376 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2377
2378 let commit_diff = repository_handle
2379 .update(&mut cx, |repository_handle, _| {
2380 repository_handle.load_commit_diff(envelope.payload.commit)
2381 })?
2382 .await??;
2383 Ok(proto::LoadCommitDiffResponse {
2384 files: commit_diff
2385 .files
2386 .into_iter()
2387 .map(|file| proto::CommitFile {
2388 path: file.path.to_proto(),
2389 old_text: file.old_text,
2390 new_text: file.new_text,
2391 })
2392 .collect(),
2393 })
2394 }
2395
2396 async fn handle_file_history(
2397 this: Entity<Self>,
2398 envelope: TypedEnvelope<proto::GitFileHistory>,
2399 mut cx: AsyncApp,
2400 ) -> Result<proto::GitFileHistoryResponse> {
2401 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2402 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2403 let path = RepoPath::from_proto(&envelope.payload.path)?;
2404 let skip = envelope.payload.skip as usize;
2405 let limit = envelope.payload.limit.map(|l| l as usize);
2406
2407 let file_history = repository_handle
2408 .update(&mut cx, |repository_handle, _| {
2409 repository_handle.file_history_paginated(path, skip, limit)
2410 })?
2411 .await??;
2412
2413 Ok(proto::GitFileHistoryResponse {
2414 entries: file_history
2415 .entries
2416 .into_iter()
2417 .map(|entry| proto::FileHistoryEntry {
2418 sha: entry.sha.to_string(),
2419 subject: entry.subject.to_string(),
2420 message: entry.message.to_string(),
2421 commit_timestamp: entry.commit_timestamp,
2422 author_name: entry.author_name.to_string(),
2423 author_email: entry.author_email.to_string(),
2424 })
2425 .collect(),
2426 path: file_history.path.to_proto(),
2427 })
2428 }
2429
2430 async fn handle_reset(
2431 this: Entity<Self>,
2432 envelope: TypedEnvelope<proto::GitReset>,
2433 mut cx: AsyncApp,
2434 ) -> Result<proto::Ack> {
2435 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2436 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2437
2438 let mode = match envelope.payload.mode() {
2439 git_reset::ResetMode::Soft => ResetMode::Soft,
2440 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2441 };
2442
2443 repository_handle
2444 .update(&mut cx, |repository_handle, cx| {
2445 repository_handle.reset(envelope.payload.commit, mode, cx)
2446 })?
2447 .await??;
2448 Ok(proto::Ack {})
2449 }
2450
2451 async fn handle_checkout_files(
2452 this: Entity<Self>,
2453 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2454 mut cx: AsyncApp,
2455 ) -> Result<proto::Ack> {
2456 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2457 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2458 let paths = envelope
2459 .payload
2460 .paths
2461 .iter()
2462 .map(|s| RepoPath::from_proto(s))
2463 .collect::<Result<Vec<_>>>()?;
2464
2465 repository_handle
2466 .update(&mut cx, |repository_handle, cx| {
2467 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2468 })?
2469 .await?;
2470 Ok(proto::Ack {})
2471 }
2472
2473 async fn handle_open_commit_message_buffer(
2474 this: Entity<Self>,
2475 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2476 mut cx: AsyncApp,
2477 ) -> Result<proto::OpenBufferResponse> {
2478 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2479 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2480 let buffer = repository
2481 .update(&mut cx, |repository, cx| {
2482 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2483 })?
2484 .await?;
2485
2486 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2487 this.update(&mut cx, |this, cx| {
2488 this.buffer_store.update(cx, |buffer_store, cx| {
2489 buffer_store
2490 .create_buffer_for_peer(
2491 &buffer,
2492 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2493 cx,
2494 )
2495 .detach_and_log_err(cx);
2496 })
2497 })?;
2498
2499 Ok(proto::OpenBufferResponse {
2500 buffer_id: buffer_id.to_proto(),
2501 })
2502 }
2503
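    /// Handles an `AskPassRequest` issued by the remote side of a git operation:
    /// looks up the pending askpass delegate by id, prompts for the secret, and
    /// returns the response to the caller.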
2504 async fn handle_askpass(
2505 this: Entity<Self>,
2506 envelope: TypedEnvelope<proto::AskPassRequest>,
2507 mut cx: AsyncApp,
2508 ) -> Result<proto::AskPassResponse> {
2509 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2510 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2511
2512 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2513 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2514 debug_panic!("no askpass found");
2515 anyhow::bail!("no askpass found");
2516 };
2517
2518 let response = askpass
2519 .ask_password(envelope.payload.prompt)
2520 .await
2521 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2522
2523 delegates
2524 .lock()
2525 .insert(envelope.payload.askpass_id, askpass);
2526
        // Note: despite the marker type's name, the askpass response is sent back to
        // the requesting peer in plaintext here.
2528 Ok(proto::AskPassResponse {
2529 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2530 })
2531 }
2532
2533 async fn handle_check_for_pushed_commits(
2534 this: Entity<Self>,
2535 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2536 mut cx: AsyncApp,
2537 ) -> Result<proto::CheckForPushedCommitsResponse> {
2538 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2539 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2540
2541 let branches = repository_handle
2542 .update(&mut cx, |repository_handle, _| {
2543 repository_handle.check_for_pushed_commits()
2544 })?
2545 .await??;
2546 Ok(proto::CheckForPushedCommitsResponse {
2547 pushed_to: branches
2548 .into_iter()
2549 .map(|commit| commit.to_string())
2550 .collect(),
2551 })
2552 }
2553
2554 async fn handle_git_diff(
2555 this: Entity<Self>,
2556 envelope: TypedEnvelope<proto::GitDiff>,
2557 mut cx: AsyncApp,
2558 ) -> Result<proto::GitDiffResponse> {
2559 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2560 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2561 let diff_type = match envelope.payload.diff_type() {
2562 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2563 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2564 };
2565
2566 let mut diff = repository_handle
2567 .update(&mut cx, |repository_handle, cx| {
2568 repository_handle.diff(diff_type, cx)
2569 })?
2570 .await??;
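        // Cap the response: anything beyond the first million characters of the diff is dropped.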
2571 const ONE_MB: usize = 1_000_000;
2572 if diff.len() > ONE_MB {
2573 diff = diff.chars().take(ONE_MB).collect()
2574 }
2575
2576 Ok(proto::GitDiffResponse { diff })
2577 }
2578
2579 async fn handle_tree_diff(
2580 this: Entity<Self>,
2581 request: TypedEnvelope<proto::GetTreeDiff>,
2582 mut cx: AsyncApp,
2583 ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
2585 let diff_type = if request.payload.is_merge {
2586 DiffTreeType::MergeBase {
2587 base: request.payload.base.into(),
2588 head: request.payload.head.into(),
2589 }
2590 } else {
2591 DiffTreeType::Since {
2592 base: request.payload.base.into(),
2593 head: request.payload.head.into(),
2594 }
2595 };
2596
2597 let diff = this
2598 .update(&mut cx, |this, cx| {
2599 let repository = this.repositories().get(&repository_id)?;
2600 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2601 })?
2602 .context("missing repository")?
2603 .await??;
2604
2605 Ok(proto::GetTreeDiffResponse {
2606 entries: diff
2607 .entries
2608 .into_iter()
2609 .map(|(path, status)| proto::TreeDiffStatus {
2610 path: path.as_ref().to_proto(),
2611 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2613 TreeDiffStatus::Modified { .. } => {
2614 proto::tree_diff_status::Status::Modified.into()
2615 }
2616 TreeDiffStatus::Deleted { .. } => {
2617 proto::tree_diff_status::Status::Deleted.into()
2618 }
2619 },
2620 oid: match status {
2621 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2622 Some(old.to_string())
2623 }
2624 TreeDiffStatus::Added => None,
2625 },
2626 })
2627 .collect(),
2628 })
2629 }
2630
2631 async fn handle_get_blob_content(
2632 this: Entity<Self>,
2633 request: TypedEnvelope<proto::GetBlobContent>,
2634 mut cx: AsyncApp,
2635 ) -> Result<proto::GetBlobContentResponse> {
2636 let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
2638 let content = this
2639 .update(&mut cx, |this, cx| {
2640 let repository = this.repositories().get(&repository_id)?;
2641 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2642 })?
2643 .context("missing repository")?
2644 .await?;
2645 Ok(proto::GetBlobContentResponse { content })
2646 }
2647
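    /// Opens the unstaged diff for the requested buffer, records it as shared with
    /// the requesting peer, and returns the staged (index) base text.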
2648 async fn handle_open_unstaged_diff(
2649 this: Entity<Self>,
2650 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2651 mut cx: AsyncApp,
2652 ) -> Result<proto::OpenUnstagedDiffResponse> {
2653 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2654 let diff = this
2655 .update(&mut cx, |this, cx| {
2656 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2657 Some(this.open_unstaged_diff(buffer, cx))
2658 })?
2659 .context("missing buffer")?
2660 .await?;
2661 this.update(&mut cx, |this, _| {
2662 let shared_diffs = this
2663 .shared_diffs
2664 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2665 .or_default();
2666 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2667 })?;
2668 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
2669 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2670 }
2671
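    /// Opens the uncommitted diff for the requested buffer, records it as shared
    /// with the requesting peer, and returns the committed and staged base texts.
    /// When the index snapshot is the same text buffer as the committed snapshot,
    /// `Mode::IndexMatchesHead` is reported so the shared text is only sent once.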
2672 async fn handle_open_uncommitted_diff(
2673 this: Entity<Self>,
2674 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2675 mut cx: AsyncApp,
2676 ) -> Result<proto::OpenUncommittedDiffResponse> {
2677 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2678 let diff = this
2679 .update(&mut cx, |this, cx| {
2680 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2681 Some(this.open_uncommitted_diff(buffer, cx))
2682 })?
2683 .context("missing buffer")?
2684 .await?;
2685 this.update(&mut cx, |this, _| {
2686 let shared_diffs = this
2687 .shared_diffs
2688 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2689 .or_default();
2690 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2691 })?;
2692 diff.read_with(&cx, |diff, cx| {
2693 use proto::open_uncommitted_diff_response::Mode;
2694
2695 let unstaged_diff = diff.secondary_diff();
2696 let index_snapshot = unstaged_diff.and_then(|diff| {
2697 let diff = diff.read(cx);
2698 diff.base_text_exists().then(|| diff.base_text(cx))
2699 });
2700
2701 let mode;
2702 let staged_text;
2703 let committed_text;
2704 if diff.base_text_exists() {
2705 let committed_snapshot = diff.base_text(cx);
2706 committed_text = Some(committed_snapshot.text());
2707 if let Some(index_text) = index_snapshot {
2708 if index_text.remote_id() == committed_snapshot.remote_id() {
2709 mode = Mode::IndexMatchesHead;
2710 staged_text = None;
2711 } else {
2712 mode = Mode::IndexAndHead;
2713 staged_text = Some(index_text.text());
2714 }
2715 } else {
2716 mode = Mode::IndexAndHead;
2717 staged_text = None;
2718 }
2719 } else {
2720 mode = Mode::IndexAndHead;
2721 committed_text = None;
2722 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2723 }
2724
2725 proto::OpenUncommittedDiffResponse {
2726 committed_text,
2727 staged_text,
2728 mode: mode.into(),
2729 }
2730 })
2731 }
2732
2733 async fn handle_update_diff_bases(
2734 this: Entity<Self>,
2735 request: TypedEnvelope<proto::UpdateDiffBases>,
2736 mut cx: AsyncApp,
2737 ) -> Result<()> {
2738 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2739 this.update(&mut cx, |this, cx| {
2740 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2741 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2742 {
2743 let buffer = buffer.read(cx).text_snapshot();
2744 diff_state.update(cx, |diff_state, cx| {
2745 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2746 })
2747 }
2748 })
2749 }
2750
2751 async fn handle_blame_buffer(
2752 this: Entity<Self>,
2753 envelope: TypedEnvelope<proto::BlameBuffer>,
2754 mut cx: AsyncApp,
2755 ) -> Result<proto::BlameBufferResponse> {
2756 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2757 let version = deserialize_version(&envelope.payload.version);
2758 let buffer = this.read_with(&cx, |this, cx| {
2759 this.buffer_store.read(cx).get_existing(buffer_id)
2760 })??;
2761 buffer
2762 .update(&mut cx, |buffer, _| {
2763 buffer.wait_for_version(version.clone())
2764 })?
2765 .await?;
2766 let blame = this
2767 .update(&mut cx, |this, cx| {
2768 this.blame_buffer(&buffer, Some(version), cx)
2769 })?
2770 .await?;
2771 Ok(serialize_blame_buffer_response(blame))
2772 }
2773
2774 async fn handle_get_permalink_to_line(
2775 this: Entity<Self>,
2776 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2777 mut cx: AsyncApp,
2778 ) -> Result<proto::GetPermalinkToLineResponse> {
2779 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2780 // let version = deserialize_version(&envelope.payload.version);
2781 let selection = {
2782 let proto_selection = envelope
2783 .payload
2784 .selection
2785 .context("no selection to get permalink for defined")?;
2786 proto_selection.start as u32..proto_selection.end as u32
2787 };
2788 let buffer = this.read_with(&cx, |this, cx| {
2789 this.buffer_store.read(cx).get_existing(buffer_id)
2790 })??;
2791 let permalink = this
2792 .update(&mut cx, |this, cx| {
2793 this.get_permalink_to_line(&buffer, selection, cx)
2794 })?
2795 .await?;
2796 Ok(proto::GetPermalinkToLineResponse {
2797 permalink: permalink.to_string(),
2798 })
2799 }
2800
2801 fn repository_for_request(
2802 this: &Entity<Self>,
2803 id: RepositoryId,
2804 cx: &mut AsyncApp,
2805 ) -> Result<Entity<Repository>> {
2806 this.read_with(cx, |this, _| {
2807 this.repositories
2808 .get(&id)
2809 .context("missing repository handle")
2810 .cloned()
2811 })?
2812 }
2813
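    /// Returns a point-in-time snapshot of every known repository, keyed by repository id.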
2814 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2815 self.repositories
2816 .iter()
2817 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2818 .collect()
2819 }
2820
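    /// Groups a batch of updated worktree entries by the repository that contains
    /// them. The matching runs on the background executor, and a path contained in
    /// nested repositories is attributed only to the innermost one.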
2821 fn process_updated_entries(
2822 &self,
2823 worktree: &Entity<Worktree>,
2824 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2825 cx: &mut App,
2826 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2827 let path_style = worktree.read(cx).path_style();
2828 let mut repo_paths = self
2829 .repositories
2830 .values()
2831 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2832 .collect::<Vec<_>>();
2833 let mut entries: Vec<_> = updated_entries
2834 .iter()
2835 .map(|(path, _, _)| path.clone())
2836 .collect();
2837 entries.sort();
2838 let worktree = worktree.read(cx);
2839
2840 let entries = entries
2841 .into_iter()
2842 .map(|path| worktree.absolutize(&path))
2843 .collect::<Arc<[_]>>();
2844
2845 let executor = cx.background_executor().clone();
2846 cx.background_executor().spawn(async move {
2847 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2848 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2849 let mut tasks = FuturesOrdered::new();
2850 for (repo_path, repo) in repo_paths.into_iter().rev() {
2851 let entries = entries.clone();
2852 let task = executor.spawn(async move {
                // Find the updated paths that fall inside this repository's work directory.
2854 let mut ix = entries.partition_point(|path| path < &*repo_path);
2855 if ix == entries.len() {
2856 return None;
2857 };
2858
2859 let mut paths = Vec::new();
                // All paths under a given repository's work directory form a contiguous range.
2861 while let Some(path) = entries.get(ix)
2862 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2863 &repo_path, path, path_style,
2864 )
2865 {
2866 paths.push((repo_path, ix));
2867 ix += 1;
2868 }
2869 if paths.is_empty() {
2870 None
2871 } else {
2872 Some((repo, paths))
2873 }
2874 });
2875 tasks.push_back(task);
2876 }
2877
            // Filter out entries that were matched by more than one repository, so each
            // path is attributed to exactly one.
2879 let mut path_was_used = vec![false; entries.len()];
2880 let tasks = tasks.collect::<Vec<_>>().await;
            // The repositories were enqueued in reverse path order, so more deeply nested
            // work directories are seen first; a path is always assigned to its innermost repository.
2883 for t in tasks {
2884 let Some((repo, paths)) = t else {
2885 continue;
2886 };
2887 let entry = paths_by_git_repo.entry(repo).or_default();
2888 for (repo_path, ix) in paths {
2889 if path_was_used[ix] {
2890 continue;
2891 }
2892 path_was_used[ix] = true;
2893 entry.push(repo_path);
2894 }
2895 }
2896
2897 paths_by_git_repo
2898 })
2899 }
2900}
2901
2902impl BufferGitState {
2903 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2904 Self {
2905 unstaged_diff: Default::default(),
2906 uncommitted_diff: Default::default(),
2907 recalculate_diff_task: Default::default(),
2908 language: Default::default(),
2909 language_registry: Default::default(),
2910 recalculating_tx: postage::watch::channel_with(false).0,
2911 hunk_staging_operation_count: 0,
2912 hunk_staging_operation_count_as_of_write: 0,
2913 head_text: Default::default(),
2914 index_text: Default::default(),
2915 head_changed: Default::default(),
2916 index_changed: Default::default(),
2917 language_changed: Default::default(),
2918 conflict_updated_futures: Default::default(),
2919 conflict_set: Default::default(),
2920 reparse_conflict_markers_task: Default::default(),
2921 }
2922 }
2923
2924 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2925 self.language = buffer.read(cx).language().cloned();
2926 self.language_changed = true;
2927 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2928 }
2929
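    /// Re-parses merge conflict markers for the given buffer snapshot on a
    /// background task and applies the result to the associated `ConflictSet`.
    /// The returned receiver fires once the update has been applied; if there is
    /// no live conflict set, or it currently has no conflicts, the sender is
    /// dropped without firing.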
2930 fn reparse_conflict_markers(
2931 &mut self,
2932 buffer: text::BufferSnapshot,
2933 cx: &mut Context<Self>,
2934 ) -> oneshot::Receiver<()> {
2935 let (tx, rx) = oneshot::channel();
2936
2937 let Some(conflict_set) = self
2938 .conflict_set
2939 .as_ref()
2940 .and_then(|conflict_set| conflict_set.upgrade())
2941 else {
2942 return rx;
2943 };
2944
2945 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2946 if conflict_set.has_conflict {
2947 Some(conflict_set.snapshot())
2948 } else {
2949 None
2950 }
2951 });
2952
2953 if let Some(old_snapshot) = old_snapshot {
2954 self.conflict_updated_futures.push(tx);
2955 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2956 let (snapshot, changed_range) = cx
2957 .background_spawn(async move {
2958 let new_snapshot = ConflictSet::parse(&buffer);
2959 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2960 (new_snapshot, changed_range)
2961 })
2962 .await;
2963 this.update(cx, |this, cx| {
2964 if let Some(conflict_set) = &this.conflict_set {
2965 conflict_set
2966 .update(cx, |conflict_set, cx| {
2967 conflict_set.set_snapshot(snapshot, changed_range, cx);
2968 })
2969 .ok();
2970 }
2971 let futures = std::mem::take(&mut this.conflict_updated_futures);
2972 for tx in futures {
2973 tx.send(()).ok();
2974 }
2975 })
2976 }))
2977 }
2978
2979 rx
2980 }
2981
2982 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2983 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2984 }
2985
2986 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2987 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2988 }
2989
2990 fn handle_base_texts_updated(
2991 &mut self,
2992 buffer: text::BufferSnapshot,
2993 message: proto::UpdateDiffBases,
2994 cx: &mut Context<Self>,
2995 ) {
2996 use proto::update_diff_bases::Mode;
2997
2998 let Some(mode) = Mode::from_i32(message.mode) else {
2999 return;
3000 };
3001
3002 let diff_bases_change = match mode {
3003 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3004 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3005 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3006 Mode::IndexAndHead => DiffBasesChange::SetEach {
3007 index: message.staged_text,
3008 head: message.committed_text,
3009 },
3010 };
3011
3012 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3013 }
3014
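    /// If a diff recalculation is in progress, returns a future that resolves once
    /// it completes; otherwise returns `None`.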
3015 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3016 if *self.recalculating_tx.borrow() {
3017 let mut rx = self.recalculating_tx.subscribe();
3018 Some(async move {
3019 loop {
3020 let is_recalculating = rx.recv().await;
3021 if is_recalculating != Some(true) {
3022 break;
3023 }
3024 }
3025 })
3026 } else {
3027 None
3028 }
3029 }
3030
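    /// Applies new index and/or HEAD base texts (normalizing their line endings)
    /// and kicks off a diff recalculation for the buffer.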
3031 fn diff_bases_changed(
3032 &mut self,
3033 buffer: text::BufferSnapshot,
3034 diff_bases_change: Option<DiffBasesChange>,
3035 cx: &mut Context<Self>,
3036 ) {
3037 match diff_bases_change {
3038 Some(DiffBasesChange::SetIndex(index)) => {
3039 self.index_text = index.map(|mut index| {
3040 text::LineEnding::normalize(&mut index);
3041 Arc::from(index.as_str())
3042 });
3043 self.index_changed = true;
3044 }
3045 Some(DiffBasesChange::SetHead(head)) => {
3046 self.head_text = head.map(|mut head| {
3047 text::LineEnding::normalize(&mut head);
3048 Arc::from(head.as_str())
3049 });
3050 self.head_changed = true;
3051 }
3052 Some(DiffBasesChange::SetBoth(text)) => {
3053 let text = text.map(|mut text| {
3054 text::LineEnding::normalize(&mut text);
3055 Arc::from(text.as_str())
3056 });
3057 self.head_text = text.clone();
3058 self.index_text = text;
3059 self.head_changed = true;
3060 self.index_changed = true;
3061 }
3062 Some(DiffBasesChange::SetEach { index, head }) => {
3063 self.index_text = index.map(|mut index| {
3064 text::LineEnding::normalize(&mut index);
3065 Arc::from(index.as_str())
3066 });
3067 self.index_changed = true;
3068 self.head_text = head.map(|mut head| {
3069 text::LineEnding::normalize(&mut head);
3070 Arc::from(head.as_str())
3071 });
3072 self.head_changed = true;
3073 }
3074 None => {}
3075 }
3076
3077 self.recalculate_diffs(buffer, cx)
3078 }
3079
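    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background
    /// task. When the index matches HEAD, the unstaged snapshot is reused for the
    /// uncommitted diff rather than recomputed. The task bails out early if new hunk
    /// staging operations arrive while it runs, leaving a later recalculation to
    /// settle the state.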
3080 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3081 *self.recalculating_tx.borrow_mut() = true;
3082
3083 let language = self.language.clone();
3084 let language_registry = self.language_registry.clone();
3085 let unstaged_diff = self.unstaged_diff();
3086 let uncommitted_diff = self.uncommitted_diff();
3087 let head = self.head_text.clone();
3088 let index = self.index_text.clone();
3089 let index_changed = self.index_changed;
3090 let head_changed = self.head_changed;
3091 let language_changed = self.language_changed;
3092 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3093 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3094 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3095 (None, None) => true,
3096 _ => false,
3097 };
3098 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3099 log::debug!(
3100 "start recalculating diffs for buffer {}",
3101 buffer.remote_id()
3102 );
3103
3104 let mut new_unstaged_diff = None;
3105 if let Some(unstaged_diff) = &unstaged_diff {
3106 new_unstaged_diff = Some(
3107 cx.update(|cx| {
3108 unstaged_diff.read(cx).update_diff(
3109 buffer.clone(),
3110 index,
3111 index_changed,
3112 language.clone(),
3113 cx,
3114 )
3115 })?
3116 .await,
3117 );
3118 }
3119
3120 // Dropping BufferDiff can be expensive, so yield back to the event loop
3121 // for a bit
3122 yield_now().await;
3123
3124 let mut new_uncommitted_diff = None;
3125 if let Some(uncommitted_diff) = &uncommitted_diff {
3126 new_uncommitted_diff = if index_matches_head {
3127 new_unstaged_diff.clone()
3128 } else {
3129 Some(
3130 cx.update(|cx| {
3131 uncommitted_diff.read(cx).update_diff(
3132 buffer.clone(),
3133 head,
3134 head_changed,
3135 language.clone(),
3136 cx,
3137 )
3138 })?
3139 .await,
3140 )
3141 }
3142 }
3143
3144 // Dropping BufferDiff can be expensive, so yield back to the event loop
3145 // for a bit
3146 yield_now().await;
3147
3148 let cancel = this.update(cx, |this, _| {
3149 // This checks whether all pending stage/unstage operations
3150 // have quiesced (i.e. both the corresponding write and the
3151 // read of that write have completed). If not, then we cancel
3152 // this recalculation attempt to avoid invalidating pending
3153 // state too quickly; another recalculation will come along
3154 // later and clear the pending state once the state of the index has settled.
3155 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3156 *this.recalculating_tx.borrow_mut() = false;
3157 true
3158 } else {
3159 false
3160 }
3161 })?;
3162 if cancel {
3163 log::debug!(
3164 concat!(
3165 "aborting recalculating diffs for buffer {}",
3166 "due to subsequent hunk operations",
3167 ),
3168 buffer.remote_id()
3169 );
3170 return Ok(());
3171 }
3172
3173 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3174 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3175 {
3176 let task = unstaged_diff.update(cx, |diff, cx| {
3177 if language_changed {
3178 diff.language_changed(language.clone(), language_registry.clone(), cx);
3179 }
3180 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3181 })?;
3182 Some(task.await)
3183 } else {
3184 None
3185 };
3186
3187 yield_now().await;
3188
3189 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3190 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3191 {
3192 uncommitted_diff
3193 .update(cx, |diff, cx| {
3194 if language_changed {
3195 diff.language_changed(language, language_registry, cx);
3196 }
3197 diff.set_snapshot_with_secondary(
3198 new_uncommitted_diff,
3199 &buffer,
3200 unstaged_changed_range.flatten(),
3201 true,
3202 cx,
3203 )
3204 })?
3205 .await;
3206 }
3207
3208 log::debug!(
3209 "finished recalculating diffs for buffer {}",
3210 buffer.remote_id()
3211 );
3212
3213 if let Some(this) = this.upgrade() {
3214 this.update(cx, |this, _| {
3215 this.index_changed = false;
3216 this.head_changed = false;
3217 this.language_changed = false;
3218 *this.recalculating_tx.borrow_mut() = false;
3219 })?;
3220 }
3221
3222 Ok(())
3223 }));
3224 }
3225}
3226
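/// Builds an `AskPassDelegate` that forwards password prompts raised by a local git
/// operation to the downstream client over RPC, zeroizing the response once it has
/// been handed off.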
3227fn make_remote_delegate(
3228 this: Entity<GitStore>,
3229 project_id: u64,
3230 repository_id: RepositoryId,
3231 askpass_id: u64,
3232 cx: &mut AsyncApp,
3233) -> AskPassDelegate {
3234 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3235 this.update(cx, |this, cx| {
3236 let Some((client, _)) = this.downstream_client() else {
3237 return;
3238 };
3239 let response = client.request(proto::AskPassRequest {
3240 project_id,
3241 repository_id: repository_id.to_proto(),
3242 askpass_id,
3243 prompt,
3244 });
3245 cx.spawn(async move |_, _| {
3246 let mut response = response.await?.response;
3247 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3248 .ok();
3249 response.zeroize();
3250 anyhow::Ok(())
3251 })
3252 .detach_and_log_err(cx);
3253 })
3254 .log_err();
3255 })
3256}
3257
3258impl RepositoryId {
3259 pub fn to_proto(self) -> u64 {
3260 self.0
3261 }
3262
3263 pub fn from_proto(id: u64) -> Self {
3264 RepositoryId(id)
3265 }
3266}
3267
3268impl RepositorySnapshot {
3269 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3270 Self {
3271 id,
3272 statuses_by_path: Default::default(),
3273 work_directory_abs_path,
3274 branch: None,
3275 head_commit: None,
3276 scan_id: 0,
3277 merge: Default::default(),
3278 remote_origin_url: None,
3279 remote_upstream_url: None,
3280 stash_entries: Default::default(),
3281 path_style,
3282 }
3283 }
3284
3285 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3286 proto::UpdateRepository {
3287 branch_summary: self.branch.as_ref().map(branch_to_proto),
3288 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3289 updated_statuses: self
3290 .statuses_by_path
3291 .iter()
3292 .map(|entry| entry.to_proto())
3293 .collect(),
3294 removed_statuses: Default::default(),
3295 current_merge_conflicts: self
3296 .merge
3297 .conflicted_paths
3298 .iter()
3299 .map(|repo_path| repo_path.to_proto())
3300 .collect(),
3301 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3302 project_id,
3303 id: self.id.to_proto(),
3304 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3305 entry_ids: vec![self.id.to_proto()],
3306 scan_id: self.scan_id,
3307 is_last_update: true,
3308 stash_entries: self
3309 .stash_entries
3310 .entries
3311 .iter()
3312 .map(stash_to_proto)
3313 .collect(),
3314 remote_upstream_url: self.remote_upstream_url.clone(),
3315 remote_origin_url: self.remote_origin_url.clone(),
3316 }
3317 }
3318
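    /// Builds an incremental `UpdateRepository` message by walking the old and new
    /// status trees in lockstep and emitting only the entries that were added,
    /// changed, or removed since `old`.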
3319 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3320 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3321 let mut removed_statuses: Vec<String> = Vec::new();
3322
3323 let mut new_statuses = self.statuses_by_path.iter().peekable();
3324 let mut old_statuses = old.statuses_by_path.iter().peekable();
3325
3326 let mut current_new_entry = new_statuses.next();
3327 let mut current_old_entry = old_statuses.next();
3328 loop {
3329 match (current_new_entry, current_old_entry) {
3330 (Some(new_entry), Some(old_entry)) => {
3331 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3332 Ordering::Less => {
3333 updated_statuses.push(new_entry.to_proto());
3334 current_new_entry = new_statuses.next();
3335 }
3336 Ordering::Equal => {
3337 if new_entry.status != old_entry.status {
3338 updated_statuses.push(new_entry.to_proto());
3339 }
3340 current_old_entry = old_statuses.next();
3341 current_new_entry = new_statuses.next();
3342 }
3343 Ordering::Greater => {
3344 removed_statuses.push(old_entry.repo_path.to_proto());
3345 current_old_entry = old_statuses.next();
3346 }
3347 }
3348 }
3349 (None, Some(old_entry)) => {
3350 removed_statuses.push(old_entry.repo_path.to_proto());
3351 current_old_entry = old_statuses.next();
3352 }
3353 (Some(new_entry), None) => {
3354 updated_statuses.push(new_entry.to_proto());
3355 current_new_entry = new_statuses.next();
3356 }
3357 (None, None) => break,
3358 }
3359 }
3360
3361 proto::UpdateRepository {
3362 branch_summary: self.branch.as_ref().map(branch_to_proto),
3363 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3364 updated_statuses,
3365 removed_statuses,
3366 current_merge_conflicts: self
3367 .merge
3368 .conflicted_paths
3369 .iter()
3370 .map(|path| path.to_proto())
3371 .collect(),
3372 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3373 project_id,
3374 id: self.id.to_proto(),
3375 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3376 entry_ids: vec![],
3377 scan_id: self.scan_id,
3378 is_last_update: true,
3379 stash_entries: self
3380 .stash_entries
3381 .entries
3382 .iter()
3383 .map(stash_to_proto)
3384 .collect(),
3385 remote_upstream_url: self.remote_upstream_url.clone(),
3386 remote_origin_url: self.remote_origin_url.clone(),
3387 }
3388 }
3389
3390 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3391 self.statuses_by_path.iter().cloned()
3392 }
3393
3394 pub fn status_summary(&self) -> GitSummary {
3395 self.statuses_by_path.summary().item_summary
3396 }
3397
3398 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3399 self.statuses_by_path
3400 .get(&PathKey(path.as_ref().clone()), ())
3401 .cloned()
3402 }
3403
3404 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3405 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3406 }
3407
3408 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3409 self.path_style
3410 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3411 .unwrap()
3412 .into()
3413 }
3414
3415 #[inline]
3416 fn abs_path_to_repo_path_inner(
3417 work_directory_abs_path: &Path,
3418 abs_path: &Path,
3419 path_style: PathStyle,
3420 ) -> Option<RepoPath> {
3421 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3422 Some(RepoPath::from_rel_path(&rel_path))
3423 }
3424
3425 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3426 self.merge.conflicted_paths.contains(repo_path)
3427 }
3428
3429 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3430 let had_conflict_on_last_merge_head_change =
3431 self.merge.conflicted_paths.contains(repo_path);
3432 let has_conflict_currently = self
3433 .status_for_path(repo_path)
3434 .is_some_and(|entry| entry.status.is_conflicted());
3435 had_conflict_on_last_merge_head_change || has_conflict_currently
3436 }
3437
3438 /// This is the name that will be displayed in the repository selector for this repository.
3439 pub fn display_name(&self) -> SharedString {
3440 self.work_directory_abs_path
3441 .file_name()
3442 .unwrap_or_default()
3443 .to_string_lossy()
3444 .to_string()
3445 .into()
3446 }
3447}
3448
3449pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3450 proto::StashEntry {
3451 oid: entry.oid.as_bytes().to_vec(),
3452 message: entry.message.clone(),
3453 branch: entry.branch.clone(),
3454 index: entry.index as u64,
3455 timestamp: entry.timestamp,
3456 }
3457}
3458
3459pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3460 Ok(StashEntry {
3461 oid: Oid::from_bytes(&entry.oid)?,
3462 message: entry.message.clone(),
3463 index: entry.index as usize,
3464 branch: entry.branch.clone(),
3465 timestamp: entry.timestamp,
3466 })
3467}
3468
3469impl MergeDetails {
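    /// Loads the repository's merge state (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD,
    /// REVERT_HEAD, APPLY_HEAD) and recomputes the set of conflicted paths when the
    /// merge heads have changed. Returns the new details together with a flag
    /// indicating whether the merge heads changed.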
3470 async fn load(
3471 backend: &Arc<dyn GitRepository>,
3472 status: &SumTree<StatusEntry>,
3473 prev_snapshot: &RepositorySnapshot,
3474 ) -> Result<(MergeDetails, bool)> {
3475 log::debug!("load merge details");
3476 let message = backend.merge_message().await;
3477 let heads = backend
3478 .revparse_batch(vec![
3479 "MERGE_HEAD".into(),
3480 "CHERRY_PICK_HEAD".into(),
3481 "REBASE_HEAD".into(),
3482 "REVERT_HEAD".into(),
3483 "APPLY_HEAD".into(),
3484 ])
3485 .await
3486 .log_err()
3487 .unwrap_or_default()
3488 .into_iter()
3489 .map(|opt| opt.map(SharedString::from))
3490 .collect::<Vec<_>>();
3491 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3492 let conflicted_paths = if merge_heads_changed {
3493 let current_conflicted_paths = TreeSet::from_ordered_entries(
3494 status
3495 .iter()
3496 .filter(|entry| entry.status.is_conflicted())
3497 .map(|entry| entry.repo_path.clone()),
3498 );
3499
3500 // It can happen that we run a scan while a lengthy merge is in progress
3501 // that will eventually result in conflicts, but before those conflicts
3502 // are reported by `git status`. Since for the moment we only care about
3503 // the merge heads state for the purposes of tracking conflicts, don't update
3504 // this state until we see some conflicts.
3505 if heads.iter().any(Option::is_some)
3506 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3507 && current_conflicted_paths.is_empty()
3508 {
3509 log::debug!("not updating merge heads because no conflicts found");
3510 return Ok((
3511 MergeDetails {
3512 message: message.map(SharedString::from),
3513 ..prev_snapshot.merge.clone()
3514 },
3515 false,
3516 ));
3517 }
3518
3519 current_conflicted_paths
3520 } else {
3521 prev_snapshot.merge.conflicted_paths.clone()
3522 };
3523 let details = MergeDetails {
3524 conflicted_paths,
3525 message: message.map(SharedString::from),
3526 heads,
3527 };
3528 Ok((details, merge_heads_changed))
3529 }
3530}
3531
3532impl Repository {
3533 pub fn snapshot(&self) -> RepositorySnapshot {
3534 self.snapshot.clone()
3535 }
3536
3537 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3538 self.pending_ops.iter().cloned()
3539 }
3540
3541 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3542 self.pending_ops.summary().clone()
3543 }
3544
3545 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3546 self.pending_ops
3547 .get(&PathKey(path.as_ref().clone()), ())
3548 .cloned()
3549 }
3550
3551 fn local(
3552 id: RepositoryId,
3553 work_directory_abs_path: Arc<Path>,
3554 dot_git_abs_path: Arc<Path>,
3555 project_environment: WeakEntity<ProjectEnvironment>,
3556 fs: Arc<dyn Fs>,
3557 git_store: WeakEntity<GitStore>,
3558 cx: &mut Context<Self>,
3559 ) -> Self {
3560 let snapshot =
3561 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3562 let state = cx
3563 .spawn(async move |_, cx| {
3564 LocalRepositoryState::new(
3565 work_directory_abs_path,
3566 dot_git_abs_path,
3567 project_environment,
3568 fs,
3569 cx,
3570 )
3571 .await
3572 .map_err(|err| err.to_string())
3573 })
3574 .shared();
3575 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3576 let state = cx
3577 .spawn(async move |_, _| {
3578 let state = state.await?;
3579 Ok(RepositoryState::Local(state))
3580 })
3581 .shared();
3582
3583 Repository {
3584 this: cx.weak_entity(),
3585 git_store,
3586 snapshot,
3587 pending_ops: Default::default(),
3588 repository_state: state,
3589 commit_message_buffer: None,
3590 askpass_delegates: Default::default(),
3591 paths_needing_status_update: Default::default(),
3592 latest_askpass_id: 0,
3593 job_sender,
3594 job_id: 0,
3595 active_jobs: Default::default(),
3596 }
3597 }
3598
3599 fn remote(
3600 id: RepositoryId,
3601 work_directory_abs_path: Arc<Path>,
3602 path_style: PathStyle,
3603 project_id: ProjectId,
3604 client: AnyProtoClient,
3605 git_store: WeakEntity<GitStore>,
3606 cx: &mut Context<Self>,
3607 ) -> Self {
3608 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3609 let repository_state = RemoteRepositoryState { project_id, client };
3610 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3611 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3612 Self {
3613 this: cx.weak_entity(),
3614 snapshot,
3615 commit_message_buffer: None,
3616 git_store,
3617 pending_ops: Default::default(),
3618 paths_needing_status_update: Default::default(),
3619 job_sender,
3620 repository_state,
3621 askpass_delegates: Default::default(),
3622 latest_askpass_id: 0,
3623 active_jobs: Default::default(),
3624 job_id: 0,
3625 }
3626 }
3627
3628 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3629 self.git_store.upgrade()
3630 }
3631
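    /// Reloads the index and HEAD base texts for every open buffer that belongs to
    /// this repository, forwards any changes to the downstream client, and applies
    /// them to the buffers' diff state.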
3632 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3633 let this = cx.weak_entity();
3634 let git_store = self.git_store.clone();
3635 let _ = self.send_keyed_job(
3636 Some(GitJobKey::ReloadBufferDiffBases),
3637 None,
3638 |state, mut cx| async move {
3639 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3640 log::error!("tried to recompute diffs for a non-local repository");
3641 return Ok(());
3642 };
3643
3644 let Some(this) = this.upgrade() else {
3645 return Ok(());
3646 };
3647
3648 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3649 git_store.update(cx, |git_store, cx| {
3650 git_store
3651 .diffs
3652 .iter()
3653 .filter_map(|(buffer_id, diff_state)| {
3654 let buffer_store = git_store.buffer_store.read(cx);
3655 let buffer = buffer_store.get(*buffer_id)?;
3656 let file = File::from_dyn(buffer.read(cx).file())?;
3657 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3658 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3659 log::debug!(
3660 "start reload diff bases for repo path {}",
3661 repo_path.as_unix_str()
3662 );
3663 diff_state.update(cx, |diff_state, _| {
3664 let has_unstaged_diff = diff_state
3665 .unstaged_diff
3666 .as_ref()
3667 .is_some_and(|diff| diff.is_upgradable());
3668 let has_uncommitted_diff = diff_state
3669 .uncommitted_diff
3670 .as_ref()
3671 .is_some_and(|set| set.is_upgradable());
3672
3673 Some((
3674 buffer,
3675 repo_path,
3676 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3677 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3678 ))
3679 })
3680 })
3681 .collect::<Vec<_>>()
3682 })
3683 })??;
3684
3685 let buffer_diff_base_changes = cx
3686 .background_spawn(async move {
3687 let mut changes = Vec::new();
3688 for (buffer, repo_path, current_index_text, current_head_text) in
3689 &repo_diff_state_updates
3690 {
3691 let index_text = if current_index_text.is_some() {
3692 backend.load_index_text(repo_path.clone()).await
3693 } else {
3694 None
3695 };
3696 let head_text = if current_head_text.is_some() {
3697 backend.load_committed_text(repo_path.clone()).await
3698 } else {
3699 None
3700 };
3701
3702 let change =
3703 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3704 (Some(current_index), Some(current_head)) => {
3705 let index_changed =
3706 index_text.as_deref() != current_index.as_deref();
3707 let head_changed =
3708 head_text.as_deref() != current_head.as_deref();
3709 if index_changed && head_changed {
3710 if index_text == head_text {
3711 Some(DiffBasesChange::SetBoth(head_text))
3712 } else {
3713 Some(DiffBasesChange::SetEach {
3714 index: index_text,
3715 head: head_text,
3716 })
3717 }
3718 } else if index_changed {
3719 Some(DiffBasesChange::SetIndex(index_text))
3720 } else if head_changed {
3721 Some(DiffBasesChange::SetHead(head_text))
3722 } else {
3723 None
3724 }
3725 }
3726 (Some(current_index), None) => {
3727 let index_changed =
3728 index_text.as_deref() != current_index.as_deref();
3729 index_changed
3730 .then_some(DiffBasesChange::SetIndex(index_text))
3731 }
3732 (None, Some(current_head)) => {
3733 let head_changed =
3734 head_text.as_deref() != current_head.as_deref();
3735 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3736 }
3737 (None, None) => None,
3738 };
3739
3740 changes.push((buffer.clone(), change))
3741 }
3742 changes
3743 })
3744 .await;
3745
3746 git_store.update(&mut cx, |git_store, cx| {
3747 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3748 let buffer_snapshot = buffer.read(cx).text_snapshot();
3749 let buffer_id = buffer_snapshot.remote_id();
3750 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3751 continue;
3752 };
3753
3754 let downstream_client = git_store.downstream_client();
3755 diff_state.update(cx, |diff_state, cx| {
3756 use proto::update_diff_bases::Mode;
3757
3758 if let Some((diff_bases_change, (client, project_id))) =
3759 diff_bases_change.clone().zip(downstream_client)
3760 {
3761 let (staged_text, committed_text, mode) = match diff_bases_change {
3762 DiffBasesChange::SetIndex(index) => {
3763 (index, None, Mode::IndexOnly)
3764 }
3765 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3766 DiffBasesChange::SetEach { index, head } => {
3767 (index, head, Mode::IndexAndHead)
3768 }
3769 DiffBasesChange::SetBoth(text) => {
3770 (None, text, Mode::IndexMatchesHead)
3771 }
3772 };
3773 client
3774 .send(proto::UpdateDiffBases {
3775 project_id: project_id.to_proto(),
3776 buffer_id: buffer_id.to_proto(),
3777 staged_text,
3778 committed_text,
3779 mode: mode as i32,
3780 })
3781 .log_err();
3782 }
3783
3784 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3785 });
3786 }
3787 })
3788 },
3789 );
3790 }
3791
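    /// Enqueues a git job on this repository's serial worker without a
    /// deduplication key; see [`Self::send_keyed_job`].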
3792 pub fn send_job<F, Fut, R>(
3793 &mut self,
3794 status: Option<SharedString>,
3795 job: F,
3796 ) -> oneshot::Receiver<R>
3797 where
3798 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3799 Fut: Future<Output = R> + 'static,
3800 R: Send + 'static,
3801 {
3802 self.send_keyed_job(None, status, job)
3803 }
3804
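    /// Enqueues a git job on this repository's serial worker. When `key` is
    /// `Some`, the worker drops this job if a newer job with the same key is
    /// already queued behind it. While the job runs, `status` (if provided) is
    /// surfaced via `active_jobs`. The returned receiver yields the job's
    /// result, or is canceled if the worker shuts down first.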
3805 fn send_keyed_job<F, Fut, R>(
3806 &mut self,
3807 key: Option<GitJobKey>,
3808 status: Option<SharedString>,
3809 job: F,
3810 ) -> oneshot::Receiver<R>
3811 where
3812 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3813 Fut: Future<Output = R> + 'static,
3814 R: Send + 'static,
3815 {
3816 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3817 let job_id = post_inc(&mut self.job_id);
3818 let this = self.this.clone();
3819 self.job_sender
3820 .unbounded_send(GitJob {
3821 key,
3822 job: Box::new(move |state, cx: &mut AsyncApp| {
3823 let job = job(state, cx.clone());
3824 cx.spawn(async move |cx| {
3825 if let Some(s) = status.clone() {
3826 this.update(cx, |this, cx| {
3827 this.active_jobs.insert(
3828 job_id,
3829 JobInfo {
3830 start: Instant::now(),
3831 message: s.clone(),
3832 },
3833 );
3834
3835 cx.notify();
3836 })
3837 .ok();
3838 }
3839 let result = job.await;
3840
3841 this.update(cx, |this, cx| {
3842 this.active_jobs.remove(&job_id);
3843 cx.notify();
3844 })
3845 .ok();
3846
3847 result_tx.send(result).ok();
3848 })
3849 }),
3850 })
3851 .ok();
3852 result_rx
3853 }
3854
3855 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3856 let Some(git_store) = self.git_store.upgrade() else {
3857 return;
3858 };
3859 let entity = cx.entity();
3860 git_store.update(cx, |git_store, cx| {
3861 let Some((&id, _)) = git_store
3862 .repositories
3863 .iter()
3864 .find(|(_, handle)| *handle == &entity)
3865 else {
3866 return;
3867 };
3868 git_store.active_repo_id = Some(id);
3869 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3870 });
3871 }
3872
3873 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3874 self.snapshot.status()
3875 }
3876
3877 pub fn cached_stash(&self) -> GitStash {
3878 self.snapshot.stash_entries.clone()
3879 }
3880
3881 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3882 let git_store = self.git_store.upgrade()?;
3883 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3884 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3885 let abs_path = SanitizedPath::new(&abs_path);
3886 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3887 Some(ProjectPath {
3888 worktree_id: worktree.read(cx).id(),
3889 path: relative_path,
3890 })
3891 }
3892
3893 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3894 let git_store = self.git_store.upgrade()?;
3895 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3896 let abs_path = worktree_store.absolutize(path, cx)?;
3897 self.snapshot.abs_path_to_repo_path(&abs_path)
3898 }
3899
3900 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3901 other
3902 .read(cx)
3903 .snapshot
3904 .work_directory_abs_path
3905 .starts_with(&self.snapshot.work_directory_abs_path)
3906 }
3907
3908 pub fn open_commit_buffer(
3909 &mut self,
3910 languages: Option<Arc<LanguageRegistry>>,
3911 buffer_store: Entity<BufferStore>,
3912 cx: &mut Context<Self>,
3913 ) -> Task<Result<Entity<Buffer>>> {
3914 let id = self.id;
3915 if let Some(buffer) = self.commit_message_buffer.clone() {
3916 return Task::ready(Ok(buffer));
3917 }
3918 let this = cx.weak_entity();
3919
3920 let rx = self.send_job(None, move |state, mut cx| async move {
3921 let Some(this) = this.upgrade() else {
3922 bail!("git store was dropped");
3923 };
3924 match state {
3925 RepositoryState::Local(..) => {
3926 this.update(&mut cx, |_, cx| {
3927 Self::open_local_commit_buffer(languages, buffer_store, cx)
3928 })?
3929 .await
3930 }
3931 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3932 let request = client.request(proto::OpenCommitMessageBuffer {
3933 project_id: project_id.0,
3934 repository_id: id.to_proto(),
3935 });
3936 let response = request.await.context("requesting to open commit buffer")?;
3937 let buffer_id = BufferId::new(response.buffer_id)?;
3938 let buffer = buffer_store
3939 .update(&mut cx, |buffer_store, cx| {
3940 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3941 })?
3942 .await?;
3943 if let Some(language_registry) = languages {
3944 let git_commit_language =
3945 language_registry.language_for_name("Git Commit").await?;
3946 buffer.update(&mut cx, |buffer, cx| {
3947 buffer.set_language(Some(git_commit_language), cx);
3948 })?;
3949 }
3950 this.update(&mut cx, |this, _| {
3951 this.commit_message_buffer = Some(buffer.clone());
3952 })?;
3953 Ok(buffer)
3954 }
3955 }
3956 });
3957
3958 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3959 }
3960
3961 fn open_local_commit_buffer(
3962 language_registry: Option<Arc<LanguageRegistry>>,
3963 buffer_store: Entity<BufferStore>,
3964 cx: &mut Context<Self>,
3965 ) -> Task<Result<Entity<Buffer>>> {
3966 cx.spawn(async move |repository, cx| {
3967 let buffer = buffer_store
3968 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3969 .await?;
3970
3971 if let Some(language_registry) = language_registry {
3972 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3973 buffer.update(cx, |buffer, cx| {
3974 buffer.set_language(Some(git_commit_language), cx);
3975 })?;
3976 }
3977
3978 repository.update(cx, |repository, _| {
3979 repository.commit_message_buffer = Some(buffer.clone());
3980 })?;
3981 Ok(buffer)
3982 })
3983 }
3984
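    /// Restores the given paths to their contents at `commit`, tracking them
    /// as pending "reverted" operations while the checkout runs.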
3985 pub fn checkout_files(
3986 &mut self,
3987 commit: &str,
3988 paths: Vec<RepoPath>,
3989 cx: &mut Context<Self>,
3990 ) -> Task<Result<()>> {
3991 let commit = commit.to_string();
3992 let id = self.id;
3993
3994 self.spawn_job_with_tracking(
3995 paths.clone(),
3996 pending_op::GitStatus::Reverted,
3997 cx,
3998 async move |this, cx| {
3999 this.update(cx, |this, _cx| {
4000 this.send_job(
4001 Some(format!("git checkout {}", commit).into()),
4002 move |git_repo, _| async move {
4003 match git_repo {
4004 RepositoryState::Local(LocalRepositoryState {
4005 backend,
4006 environment,
4007 ..
4008 }) => {
4009 backend
4010 .checkout_files(commit, paths, environment.clone())
4011 .await
4012 }
4013 RepositoryState::Remote(RemoteRepositoryState {
4014 project_id,
4015 client,
4016 }) => {
4017 client
4018 .request(proto::GitCheckoutFiles {
4019 project_id: project_id.0,
4020 repository_id: id.to_proto(),
4021 commit,
4022 paths: paths
4023 .into_iter()
4024 .map(|p| p.to_proto())
4025 .collect(),
4026 })
4027 .await?;
4028
4029 Ok(())
4030 }
4031 }
4032 },
4033 )
4034 })?
4035 .await?
4036 },
4037 )
4038 }
4039
4040 pub fn reset(
4041 &mut self,
4042 commit: String,
4043 reset_mode: ResetMode,
4044 _cx: &mut App,
4045 ) -> oneshot::Receiver<Result<()>> {
4046 let id = self.id;
4047
4048 self.send_job(None, move |git_repo, _| async move {
4049 match git_repo {
4050 RepositoryState::Local(LocalRepositoryState {
4051 backend,
4052 environment,
4053 ..
4054 }) => backend.reset(commit, reset_mode, environment).await,
4055 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4056 client
4057 .request(proto::GitReset {
4058 project_id: project_id.0,
4059 repository_id: id.to_proto(),
4060 commit,
4061 mode: match reset_mode {
4062 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4063 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4064 },
4065 })
4066 .await?;
4067
4068 Ok(())
4069 }
4070 }
4071 })
4072 }
4073
4074 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4075 let id = self.id;
4076 self.send_job(None, move |git_repo, _cx| async move {
4077 match git_repo {
4078 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4079 backend.show(commit).await
4080 }
4081 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4082 let resp = client
4083 .request(proto::GitShow {
4084 project_id: project_id.0,
4085 repository_id: id.to_proto(),
4086 commit,
4087 })
4088 .await?;
4089
4090 Ok(CommitDetails {
4091 sha: resp.sha.into(),
4092 message: resp.message.into(),
4093 commit_timestamp: resp.commit_timestamp,
4094 author_email: resp.author_email.into(),
4095 author_name: resp.author_name.into(),
4096 })
4097 }
4098 }
4099 })
4100 }
4101
4102 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4103 let id = self.id;
4104 self.send_job(None, move |git_repo, cx| async move {
4105 match git_repo {
4106 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4107 backend.load_commit(commit, cx).await
4108 }
4109 RepositoryState::Remote(RemoteRepositoryState {
4110 client, project_id, ..
4111 }) => {
4112 let response = client
4113 .request(proto::LoadCommitDiff {
4114 project_id: project_id.0,
4115 repository_id: id.to_proto(),
4116 commit,
4117 })
4118 .await?;
4119 Ok(CommitDiff {
4120 files: response
4121 .files
4122 .into_iter()
4123 .map(|file| {
4124 Ok(CommitFile {
4125 path: RepoPath::from_proto(&file.path)?,
4126 old_text: file.old_text,
4127 new_text: file.new_text,
4128 })
4129 })
4130 .collect::<Result<Vec<_>>>()?,
4131 })
4132 }
4133 }
4134 })
4135 }
4136
4137 pub fn file_history(
4138 &mut self,
4139 path: RepoPath,
4140 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4141 self.file_history_paginated(path, 0, None)
4142 }
4143
4144 pub fn file_history_paginated(
4145 &mut self,
4146 path: RepoPath,
4147 skip: usize,
4148 limit: Option<usize>,
4149 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4150 let id = self.id;
4151 self.send_job(None, move |git_repo, _cx| async move {
4152 match git_repo {
4153 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4154 backend.file_history_paginated(path, skip, limit).await
4155 }
4156 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4157 let response = client
4158 .request(proto::GitFileHistory {
4159 project_id: project_id.0,
4160 repository_id: id.to_proto(),
4161 path: path.to_proto(),
4162 skip: skip as u64,
4163 limit: limit.map(|l| l as u64),
4164 })
4165 .await?;
4166 Ok(git::repository::FileHistory {
4167 entries: response
4168 .entries
4169 .into_iter()
4170 .map(|entry| git::repository::FileHistoryEntry {
4171 sha: entry.sha.into(),
4172 subject: entry.subject.into(),
4173 message: entry.message.into(),
4174 commit_timestamp: entry.commit_timestamp,
4175 author_name: entry.author_name.into(),
4176 author_email: entry.author_email.into(),
4177 })
4178 .collect(),
4179 path: RepoPath::from_proto(&response.path)?,
4180 })
4181 }
4182 }
4183 })
4184 }
4185
4186 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4187 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4188 }
4189
4190 fn save_buffers<'a>(
4191 &self,
4192 entries: impl IntoIterator<Item = &'a RepoPath>,
4193 cx: &mut Context<Self>,
4194 ) -> Vec<Task<anyhow::Result<()>>> {
4195 let mut save_futures = Vec::new();
4196 if let Some(buffer_store) = self.buffer_store(cx) {
4197 buffer_store.update(cx, |buffer_store, cx| {
4198 for path in entries {
4199 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4200 continue;
4201 };
4202 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4203 && buffer
4204 .read(cx)
4205 .file()
4206 .is_some_and(|file| file.disk_state().exists())
4207 && buffer.read(cx).has_unsaved_edits()
4208 {
4209 save_futures.push(buffer_store.save_buffer(buffer, cx));
4210 }
4211 }
4212 })
4213 }
4214 save_futures
4215 }
4216
4217 pub fn stage_entries(
4218 &mut self,
4219 entries: Vec<RepoPath>,
4220 cx: &mut Context<Self>,
4221 ) -> Task<anyhow::Result<()>> {
4222 self.stage_or_unstage_entries(true, entries, cx)
4223 }
4224
4225 pub fn unstage_entries(
4226 &mut self,
4227 entries: Vec<RepoPath>,
4228 cx: &mut Context<Self>,
4229 ) -> Task<anyhow::Result<()>> {
4230 self.stage_or_unstage_entries(false, entries, cx)
4231 }
4232
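    /// Saves any dirty buffers for the given paths, then stages or unstages
    /// them via a single keyed index-write job. Hunks in the corresponding
    /// uncommitted diffs are updated optimistically, and their pending state is
    /// cleared if the git operation fails.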
4233 fn stage_or_unstage_entries(
4234 &mut self,
4235 stage: bool,
4236 entries: Vec<RepoPath>,
4237 cx: &mut Context<Self>,
4238 ) -> Task<anyhow::Result<()>> {
4239 if entries.is_empty() {
4240 return Task::ready(Ok(()));
4241 }
4242 let Some(git_store) = self.git_store.upgrade() else {
4243 return Task::ready(Ok(()));
4244 };
4245 let id = self.id;
4246 let save_tasks = self.save_buffers(&entries, cx);
4247 let paths = entries
4248 .iter()
4249 .map(|p| p.as_unix_str())
4250 .collect::<Vec<_>>()
4251 .join(" ");
4252 let status = if stage {
4253 format!("git add {paths}")
4254 } else {
4255 format!("git reset {paths}")
4256 };
4257 let job_key = GitJobKey::WriteIndex(entries.clone());
4258
4259 self.spawn_job_with_tracking(
4260 entries.clone(),
4261 if stage {
4262 pending_op::GitStatus::Staged
4263 } else {
4264 pending_op::GitStatus::Unstaged
4265 },
4266 cx,
4267 async move |this, cx| {
4268 for save_task in save_tasks {
4269 save_task.await?;
4270 }
4271
4272 this.update(cx, |this, cx| {
4273 let weak_this = cx.weak_entity();
4274 this.send_keyed_job(
4275 Some(job_key),
4276 Some(status.into()),
4277 move |git_repo, mut cx| async move {
4278 let hunk_staging_operation_counts = weak_this
4279 .update(&mut cx, |this, cx| {
4280 let mut hunk_staging_operation_counts = HashMap::default();
4281 for path in &entries {
4282 let Some(project_path) =
4283 this.repo_path_to_project_path(path, cx)
4284 else {
4285 continue;
4286 };
4287 let Some(buffer) = git_store
4288 .read(cx)
4289 .buffer_store
4290 .read(cx)
4291 .get_by_path(&project_path)
4292 else {
4293 continue;
4294 };
4295 let Some(diff_state) = git_store
4296 .read(cx)
4297 .diffs
4298 .get(&buffer.read(cx).remote_id())
4299 .cloned()
4300 else {
4301 continue;
4302 };
4303 let Some(uncommitted_diff) =
4304 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4305 |uncommitted_diff| uncommitted_diff.upgrade(),
4306 )
4307 else {
4308 continue;
4309 };
4310 let buffer_snapshot = buffer.read(cx).text_snapshot();
4311 let file_exists = buffer
4312 .read(cx)
4313 .file()
4314 .is_some_and(|file| file.disk_state().exists());
4315 let hunk_staging_operation_count =
4316 diff_state.update(cx, |diff_state, cx| {
4317 uncommitted_diff.update(
4318 cx,
4319 |uncommitted_diff, cx| {
4320 uncommitted_diff
4321 .stage_or_unstage_all_hunks(
4322 stage,
4323 &buffer_snapshot,
4324 file_exists,
4325 cx,
4326 );
4327 },
4328 );
4329
4330 diff_state.hunk_staging_operation_count += 1;
4331 diff_state.hunk_staging_operation_count
4332 });
4333 hunk_staging_operation_counts.insert(
4334 diff_state.downgrade(),
4335 hunk_staging_operation_count,
4336 );
4337 }
4338 hunk_staging_operation_counts
4339 })
4340 .unwrap_or_default();
4341
4342 let result = match git_repo {
4343 RepositoryState::Local(LocalRepositoryState {
4344 backend,
4345 environment,
4346 ..
4347 }) => {
4348 if stage {
4349 backend.stage_paths(entries, environment.clone()).await
4350 } else {
4351 backend.unstage_paths(entries, environment.clone()).await
4352 }
4353 }
4354 RepositoryState::Remote(RemoteRepositoryState {
4355 project_id,
4356 client,
4357 }) => {
4358 if stage {
4359 client
4360 .request(proto::Stage {
4361 project_id: project_id.0,
4362 repository_id: id.to_proto(),
4363 paths: entries
4364 .into_iter()
4365 .map(|repo_path| repo_path.to_proto())
4366 .collect(),
4367 })
4368 .await
4369 .context("sending stage request")
4370 .map(|_| ())
4371 } else {
4372 client
4373 .request(proto::Unstage {
4374 project_id: project_id.0,
4375 repository_id: id.to_proto(),
4376 paths: entries
4377 .into_iter()
4378 .map(|repo_path| repo_path.to_proto())
4379 .collect(),
4380 })
4381 .await
4382 .context("sending unstage request")
4383 .map(|_| ())
4384 }
4385 }
4386 };
4387
4388 for (diff_state, hunk_staging_operation_count) in
4389 hunk_staging_operation_counts
4390 {
4391 diff_state
4392 .update(&mut cx, |diff_state, cx| {
4393 if result.is_ok() {
4394 diff_state.hunk_staging_operation_count_as_of_write =
4395 hunk_staging_operation_count;
4396 } else if let Some(uncommitted_diff) =
4397 &diff_state.uncommitted_diff
4398 {
4399 uncommitted_diff
4400 .update(cx, |uncommitted_diff, cx| {
4401 uncommitted_diff.clear_pending_hunks(cx);
4402 })
4403 .ok();
4404 }
4405 })
4406 .ok();
4407 }
4408
4409 result
4410 },
4411 )
4412 })?
4413 .await?
4414 },
4415 )
4416 }
4417
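    /// Stages every file in the cached status that is not already staged or
    /// currently being staged.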
4418 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4419 let to_stage = self
4420 .cached_status()
4421 .filter_map(|entry| {
4422 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4423 if ops.staging() || ops.staged() {
4424 None
4425 } else {
4426 Some(entry.repo_path)
4427 }
4428 } else if entry.status.staging().is_fully_staged() {
4429 None
4430 } else {
4431 Some(entry.repo_path)
4432 }
4433 })
4434 .collect();
4435 self.stage_or_unstage_entries(true, to_stage, cx)
4436 }
4437
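    /// Unstages every file in the cached status that is staged or currently
    /// being staged.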
4438 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4439 let to_unstage = self
4440 .cached_status()
4441 .filter_map(|entry| {
4442 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4443 if !ops.staging() && !ops.staged() {
4444 None
4445 } else {
4446 Some(entry.repo_path)
4447 }
4448 } else if entry.status.staging().is_fully_unstaged() {
4449 None
4450 } else {
4451 Some(entry.repo_path)
4452 }
4453 })
4454 .collect();
4455 self.stage_or_unstage_entries(false, to_unstage, cx)
4456 }
4457
4458 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4459 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4460
4461 self.stash_entries(to_stash, cx)
4462 }
4463
4464 pub fn stash_entries(
4465 &mut self,
4466 entries: Vec<RepoPath>,
4467 cx: &mut Context<Self>,
4468 ) -> Task<anyhow::Result<()>> {
4469 let id = self.id;
4470
4471 cx.spawn(async move |this, cx| {
4472 this.update(cx, |this, _| {
4473 this.send_job(None, move |git_repo, _cx| async move {
4474 match git_repo {
4475 RepositoryState::Local(LocalRepositoryState {
4476 backend,
4477 environment,
4478 ..
4479 }) => backend.stash_paths(entries, environment).await,
4480 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4481 client
4482 .request(proto::Stash {
4483 project_id: project_id.0,
4484 repository_id: id.to_proto(),
4485 paths: entries
4486 .into_iter()
4487 .map(|repo_path| repo_path.to_proto())
4488 .collect(),
4489 })
4490 .await
4491 .context("sending stash request")?;
4492 Ok(())
4493 }
4494 }
4495 })
4496 })?
4497 .await??;
4498 Ok(())
4499 })
4500 }
4501
4502 pub fn stash_pop(
4503 &mut self,
4504 index: Option<usize>,
4505 cx: &mut Context<Self>,
4506 ) -> Task<anyhow::Result<()>> {
4507 let id = self.id;
4508 cx.spawn(async move |this, cx| {
4509 this.update(cx, |this, _| {
4510 this.send_job(None, move |git_repo, _cx| async move {
4511 match git_repo {
4512 RepositoryState::Local(LocalRepositoryState {
4513 backend,
4514 environment,
4515 ..
4516 }) => backend.stash_pop(index, environment).await,
4517 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4518 client
4519 .request(proto::StashPop {
4520 project_id: project_id.0,
4521 repository_id: id.to_proto(),
4522 stash_index: index.map(|i| i as u64),
4523 })
4524 .await
4525 .context("sending stash pop request")?;
4526 Ok(())
4527 }
4528 }
4529 })
4530 })?
4531 .await??;
4532 Ok(())
4533 })
4534 }
4535
4536 pub fn stash_apply(
4537 &mut self,
4538 index: Option<usize>,
4539 cx: &mut Context<Self>,
4540 ) -> Task<anyhow::Result<()>> {
4541 let id = self.id;
4542 cx.spawn(async move |this, cx| {
4543 this.update(cx, |this, _| {
4544 this.send_job(None, move |git_repo, _cx| async move {
4545 match git_repo {
4546 RepositoryState::Local(LocalRepositoryState {
4547 backend,
4548 environment,
4549 ..
4550 }) => backend.stash_apply(index, environment).await,
4551 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4552 client
4553 .request(proto::StashApply {
4554 project_id: project_id.0,
4555 repository_id: id.to_proto(),
4556 stash_index: index.map(|i| i as u64),
4557 })
4558 .await
4559 .context("sending stash apply request")?;
4560 Ok(())
4561 }
4562 }
4563 })
4564 })?
4565 .await??;
4566 Ok(())
4567 })
4568 }
4569
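    /// Drops a stash entry and, for local repositories, reloads the cached
    /// stash list and forwards the updated snapshot to any downstream client.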
4570 pub fn stash_drop(
4571 &mut self,
4572 index: Option<usize>,
4573 cx: &mut Context<Self>,
4574 ) -> oneshot::Receiver<anyhow::Result<()>> {
4575 let id = self.id;
4576 let updates_tx = self
4577 .git_store()
4578 .and_then(|git_store| match &git_store.read(cx).state {
4579 GitStoreState::Local { downstream, .. } => downstream
4580 .as_ref()
4581 .map(|downstream| downstream.updates_tx.clone()),
4582 _ => None,
4583 });
4584 let this = cx.weak_entity();
4585 self.send_job(None, move |git_repo, mut cx| async move {
4586 match git_repo {
4587 RepositoryState::Local(LocalRepositoryState {
4588 backend,
4589 environment,
4590 ..
4591 }) => {
4592 // TODO would be nice to not have to do this manually
4593 let result = backend.stash_drop(index, environment).await;
4594 if result.is_ok()
4595 && let Ok(stash_entries) = backend.stash_entries().await
4596 {
4597 let snapshot = this.update(&mut cx, |this, cx| {
4598 this.snapshot.stash_entries = stash_entries;
4599 cx.emit(RepositoryEvent::StashEntriesChanged);
4600 this.snapshot.clone()
4601 })?;
4602 if let Some(updates_tx) = updates_tx {
4603 updates_tx
4604 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4605 .ok();
4606 }
4607 }
4608
4609 result
4610 }
4611 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4612 client
4613 .request(proto::StashDrop {
4614 project_id: project_id.0,
4615 repository_id: id.to_proto(),
4616 stash_index: index.map(|i| i as u64),
4617 })
4618 .await
4619 .context("sending stash pop request")?;
4620 Ok(())
4621 }
4622 }
4623 })
4624 }
4625
4626 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4627 let id = self.id;
4628 self.send_job(
4629 Some(format!("git hook {}", hook.as_str()).into()),
4630 move |git_repo, _cx| async move {
4631 match git_repo {
4632 RepositoryState::Local(LocalRepositoryState {
4633 backend,
4634 environment,
4635 ..
4636 }) => backend.run_hook(hook, environment.clone()).await,
4637 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4638 client
4639 .request(proto::RunGitHook {
4640 project_id: project_id.0,
4641 repository_id: id.to_proto(),
4642 hook: hook.to_proto(),
4643 })
4644 .await?;
4645
4646 Ok(())
4647 }
4648 }
4649 },
4650 )
4651 }
4652
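    /// Runs the pre-commit hook, then commits the staged changes with the
    /// given message, optional author name and email, and commit options.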
4653 pub fn commit(
4654 &mut self,
4655 message: SharedString,
4656 name_and_email: Option<(SharedString, SharedString)>,
4657 options: CommitOptions,
4658 askpass: AskPassDelegate,
4659 cx: &mut App,
4660 ) -> oneshot::Receiver<Result<()>> {
4661 let id = self.id;
4662 let askpass_delegates = self.askpass_delegates.clone();
4663 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4664
4665 let rx = self.run_hook(RunHook::PreCommit, cx);
4666
4667 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4668 rx.await??;
4669
4670 match git_repo {
4671 RepositoryState::Local(LocalRepositoryState {
4672 backend,
4673 environment,
4674 ..
4675 }) => {
4676 backend
4677 .commit(message, name_and_email, options, askpass, environment)
4678 .await
4679 }
4680 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4681 askpass_delegates.lock().insert(askpass_id, askpass);
4682 let _defer = util::defer(|| {
4683 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4684 debug_assert!(askpass_delegate.is_some());
4685 });
4686 let (name, email) = name_and_email.unzip();
4687 client
4688 .request(proto::Commit {
4689 project_id: project_id.0,
4690 repository_id: id.to_proto(),
4691 message: String::from(message),
4692 name: name.map(String::from),
4693 email: email.map(String::from),
4694 options: Some(proto::commit::CommitOptions {
4695 amend: options.amend,
4696 signoff: options.signoff,
4697 }),
4698 askpass_id,
4699 })
4700 .await
4701 .context("sending commit request")?;
4702
4703 Ok(())
4704 }
4705 }
4706 })
4707 }
4708
4709 pub fn fetch(
4710 &mut self,
4711 fetch_options: FetchOptions,
4712 askpass: AskPassDelegate,
4713 _cx: &mut App,
4714 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4715 let askpass_delegates = self.askpass_delegates.clone();
4716 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4717 let id = self.id;
4718
4719 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4720 match git_repo {
4721 RepositoryState::Local(LocalRepositoryState {
4722 backend,
4723 environment,
4724 ..
4725 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4726 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4727 askpass_delegates.lock().insert(askpass_id, askpass);
4728 let _defer = util::defer(|| {
4729 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4730 debug_assert!(askpass_delegate.is_some());
4731 });
4732
4733 let response = client
4734 .request(proto::Fetch {
4735 project_id: project_id.0,
4736 repository_id: id.to_proto(),
4737 askpass_id,
4738 remote: fetch_options.to_proto(),
4739 })
4740 .await
4741 .context("sending fetch request")?;
4742
4743 Ok(RemoteCommandOutput {
4744 stdout: response.stdout,
4745 stderr: response.stderr,
4746 })
4747 }
4748 }
4749 })
4750 }
4751
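    /// Pushes `branch` to `remote_branch` on `remote`. For local repositories,
    /// the cached head branch is refreshed after a successful push and the
    /// updated snapshot is forwarded to any downstream client.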
4752 pub fn push(
4753 &mut self,
4754 branch: SharedString,
4755 remote_branch: SharedString,
4756 remote: SharedString,
4757 options: Option<PushOptions>,
4758 askpass: AskPassDelegate,
4759 cx: &mut Context<Self>,
4760 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4761 let askpass_delegates = self.askpass_delegates.clone();
4762 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4763 let id = self.id;
4764
4765 let args = options
4766 .map(|option| match option {
4767 PushOptions::SetUpstream => " --set-upstream",
4768 PushOptions::Force => " --force-with-lease",
4769 })
4770 .unwrap_or("");
4771
4772 let updates_tx = self
4773 .git_store()
4774 .and_then(|git_store| match &git_store.read(cx).state {
4775 GitStoreState::Local { downstream, .. } => downstream
4776 .as_ref()
4777 .map(|downstream| downstream.updates_tx.clone()),
4778 _ => None,
4779 });
4780
4781 let this = cx.weak_entity();
4782 self.send_job(
4783 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
4784 move |git_repo, mut cx| async move {
4785 match git_repo {
4786 RepositoryState::Local(LocalRepositoryState {
4787 backend,
4788 environment,
4789 ..
4790 }) => {
4791 let result = backend
4792 .push(
4793 branch.to_string(),
4794 remote_branch.to_string(),
4795 remote.to_string(),
4796 options,
4797 askpass,
4798 environment.clone(),
4799 cx.clone(),
4800 )
4801 .await;
4802 // TODO would be nice to not have to do this manually
4803 if result.is_ok() {
4804 let branches = backend.branches().await?;
4805 let branch = branches.into_iter().find(|branch| branch.is_head);
4806 log::info!("head branch after scan is {branch:?}");
4807 let snapshot = this.update(&mut cx, |this, cx| {
4808 this.snapshot.branch = branch;
4809 cx.emit(RepositoryEvent::BranchChanged);
4810 this.snapshot.clone()
4811 })?;
4812 if let Some(updates_tx) = updates_tx {
4813 updates_tx
4814 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4815 .ok();
4816 }
4817 }
4818 result
4819 }
4820 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4821 askpass_delegates.lock().insert(askpass_id, askpass);
4822 let _defer = util::defer(|| {
4823 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4824 debug_assert!(askpass_delegate.is_some());
4825 });
4826 let response = client
4827 .request(proto::Push {
4828 project_id: project_id.0,
4829 repository_id: id.to_proto(),
4830 askpass_id,
4831 branch_name: branch.to_string(),
4832 remote_branch_name: remote_branch.to_string(),
4833 remote_name: remote.to_string(),
4834 options: options.map(|options| match options {
4835 PushOptions::Force => proto::push::PushOptions::Force,
4836 PushOptions::SetUpstream => {
4837 proto::push::PushOptions::SetUpstream
4838 }
4839 }
4840 as i32),
4841 })
4842 .await
4843 .context("sending push request")?;
4844
4845 Ok(RemoteCommandOutput {
4846 stdout: response.stdout,
4847 stderr: response.stderr,
4848 })
4849 }
4850 }
4851 },
4852 )
4853 }
4854
4855 pub fn pull(
4856 &mut self,
4857 branch: Option<SharedString>,
4858 remote: SharedString,
4859 rebase: bool,
4860 askpass: AskPassDelegate,
4861 _cx: &mut App,
4862 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4863 let askpass_delegates = self.askpass_delegates.clone();
4864 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4865 let id = self.id;
4866
4867 let mut status = "git pull".to_string();
4868 if rebase {
4869 status.push_str(" --rebase");
4870 }
4871 status.push_str(&format!(" {}", remote));
4872 if let Some(b) = &branch {
4873 status.push_str(&format!(" {}", b));
4874 }
4875
4876 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4877 match git_repo {
4878 RepositoryState::Local(LocalRepositoryState {
4879 backend,
4880 environment,
4881 ..
4882 }) => {
4883 backend
4884 .pull(
4885 branch.as_ref().map(|b| b.to_string()),
4886 remote.to_string(),
4887 rebase,
4888 askpass,
4889 environment.clone(),
4890 cx,
4891 )
4892 .await
4893 }
4894 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4895 askpass_delegates.lock().insert(askpass_id, askpass);
4896 let _defer = util::defer(|| {
4897 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4898 debug_assert!(askpass_delegate.is_some());
4899 });
4900 let response = client
4901 .request(proto::Pull {
4902 project_id: project_id.0,
4903 repository_id: id.to_proto(),
4904 askpass_id,
4905 rebase,
4906 branch_name: branch.as_ref().map(|b| b.to_string()),
4907 remote_name: remote.to_string(),
4908 })
4909 .await
4910 .context("sending pull request")?;
4911
4912 Ok(RemoteCommandOutput {
4913 stdout: response.stdout,
4914 stderr: response.stderr,
4915 })
4916 }
4917 }
4918 })
4919 }
4920
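    /// Enqueues a keyed job that writes `content` as the index text for
    /// `path`. If a hunk-staging operation count is supplied, it is recorded
    /// on the buffer's diff state once the write completes.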
4921 fn spawn_set_index_text_job(
4922 &mut self,
4923 path: RepoPath,
4924 content: Option<String>,
4925 hunk_staging_operation_count: Option<usize>,
4926 cx: &mut Context<Self>,
4927 ) -> oneshot::Receiver<anyhow::Result<()>> {
4928 let id = self.id;
4929 let this = cx.weak_entity();
4930 let git_store = self.git_store.clone();
4931 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4932 self.send_keyed_job(
4933 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4934 None,
4935 move |git_repo, mut cx| async move {
4936 log::debug!(
4937 "start updating index text for buffer {}",
4938 path.as_unix_str()
4939 );
4940
4941 match git_repo {
4942 RepositoryState::Local(LocalRepositoryState {
4943 fs,
4944 backend,
4945 environment,
4946 ..
4947 }) => {
4948 let executable = match fs.metadata(&abs_path).await {
4949 Ok(Some(meta)) => meta.is_executable,
4950 Ok(None) => false,
4951 Err(_err) => false,
4952 };
4953 backend
4954 .set_index_text(path.clone(), content, environment.clone(), executable)
4955 .await?;
4956 }
4957 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4958 client
4959 .request(proto::SetIndexText {
4960 project_id: project_id.0,
4961 repository_id: id.to_proto(),
4962 path: path.to_proto(),
4963 text: content,
4964 })
4965 .await?;
4966 }
4967 }
4968 log::debug!(
4969 "finish updating index text for buffer {}",
4970 path.as_unix_str()
4971 );
4972
4973 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4974 let project_path = this
4975 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4976 .ok()
4977 .flatten();
4978 git_store.update(&mut cx, |git_store, cx| {
4979 let buffer_id = git_store
4980 .buffer_store
4981 .read(cx)
4982 .get_by_path(&project_path?)?
4983 .read(cx)
4984 .remote_id();
4985 let diff_state = git_store.diffs.get(&buffer_id)?;
4986 diff_state.update(cx, |diff_state, _| {
4987 diff_state.hunk_staging_operation_count_as_of_write =
4988 hunk_staging_operation_count;
4989 });
4990 Some(())
4991 })?;
4992 }
4993 Ok(())
4994 },
4995 )
4996 }
4997
4998 pub fn create_remote(
4999 &mut self,
5000 remote_name: String,
5001 remote_url: String,
5002 ) -> oneshot::Receiver<Result<()>> {
5003 let id = self.id;
5004 self.send_job(
5005 Some(format!("git remote add {remote_name} {remote_url}").into()),
5006 move |repo, _cx| async move {
5007 match repo {
5008 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5009 backend.create_remote(remote_name, remote_url).await
5010 }
5011 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5012 client
5013 .request(proto::GitCreateRemote {
5014 project_id: project_id.0,
5015 repository_id: id.to_proto(),
5016 remote_name,
5017 remote_url,
5018 })
5019 .await?;
5020
5021 Ok(())
5022 }
5023 }
5024 },
5025 )
5026 }
5027
5028 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5029 let id = self.id;
5030 self.send_job(
5031 Some(format!("git remove remote {remote_name}").into()),
5032 move |repo, _cx| async move {
5033 match repo {
5034 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5035 backend.remove_remote(remote_name).await
5036 }
5037 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5038 client
5039 .request(proto::GitRemoveRemote {
5040 project_id: project_id.0,
5041 repository_id: id.to_proto(),
5042 remote_name,
5043 })
5044 .await?;
5045
5046 Ok(())
5047 }
5048 }
5049 },
5050 )
5051 }
5052
5053 pub fn get_remotes(
5054 &mut self,
5055 branch_name: Option<String>,
5056 is_push: bool,
5057 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5058 let id = self.id;
5059 self.send_job(None, move |repo, _cx| async move {
5060 match repo {
5061 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5062 let remote = if let Some(branch_name) = branch_name {
5063 if is_push {
5064 backend.get_push_remote(branch_name).await?
5065 } else {
5066 backend.get_branch_remote(branch_name).await?
5067 }
5068 } else {
5069 None
5070 };
5071
5072 match remote {
5073 Some(remote) => Ok(vec![remote]),
5074 None => backend.get_all_remotes().await,
5075 }
5076 }
5077 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5078 let response = client
5079 .request(proto::GetRemotes {
5080 project_id: project_id.0,
5081 repository_id: id.to_proto(),
5082 branch_name,
5083 is_push,
5084 })
5085 .await?;
5086
5087 let remotes = response
5088 .remotes
5089 .into_iter()
5090 .map(|remotes| Remote {
5091 name: remotes.name.into(),
5092 })
5093 .collect();
5094
5095 Ok(remotes)
5096 }
5097 }
5098 })
5099 }
5100
5101 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5102 let id = self.id;
5103 self.send_job(None, move |repo, _| async move {
5104 match repo {
5105 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5106 backend.branches().await
5107 }
5108 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5109 let response = client
5110 .request(proto::GitGetBranches {
5111 project_id: project_id.0,
5112 repository_id: id.to_proto(),
5113 })
5114 .await?;
5115
5116 let branches = response
5117 .branches
5118 .into_iter()
5119 .map(|branch| proto_to_branch(&branch))
5120 .collect();
5121
5122 Ok(branches)
5123 }
5124 }
5125 })
5126 }
5127
5128 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5129 let id = self.id;
5130 self.send_job(None, move |repo, _| async move {
5131 match repo {
5132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5133 backend.worktrees().await
5134 }
5135 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5136 let response = client
5137 .request(proto::GitGetWorktrees {
5138 project_id: project_id.0,
5139 repository_id: id.to_proto(),
5140 })
5141 .await?;
5142
5143 let worktrees = response
5144 .worktrees
5145 .into_iter()
5146 .map(|worktree| proto_to_worktree(&worktree))
5147 .collect();
5148
5149 Ok(worktrees)
5150 }
5151 }
5152 })
5153 }
5154
5155 pub fn create_worktree(
5156 &mut self,
5157 name: String,
5158 path: PathBuf,
5159 commit: Option<String>,
5160 ) -> oneshot::Receiver<Result<()>> {
5161 let id = self.id;
5162 self.send_job(
5163 Some("git worktree add".into()),
5164 move |repo, _cx| async move {
5165 match repo {
5166 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5167 backend.create_worktree(name, path, commit).await
5168 }
5169 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5170 client
5171 .request(proto::GitCreateWorktree {
5172 project_id: project_id.0,
5173 repository_id: id.to_proto(),
5174 name,
5175 directory: path.to_string_lossy().to_string(),
5176 commit,
5177 })
5178 .await?;
5179
5180 Ok(())
5181 }
5182 }
5183 },
5184 )
5185 }
5186
5187 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5188 let id = self.id;
5189 self.send_job(None, move |repo, _| async move {
5190 match repo {
5191 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5192 backend.default_branch().await
5193 }
5194 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5195 let response = client
5196 .request(proto::GetDefaultBranch {
5197 project_id: project_id.0,
5198 repository_id: id.to_proto(),
5199 })
5200 .await?;
5201
5202 anyhow::Ok(response.branch.map(SharedString::from))
5203 }
5204 }
5205 })
5206 }
5207
5208 pub fn diff_tree(
5209 &mut self,
5210 diff_type: DiffTreeType,
5211 _cx: &App,
5212 ) -> oneshot::Receiver<Result<TreeDiff>> {
5213 let repository_id = self.snapshot.id;
5214 self.send_job(None, move |repo, _cx| async move {
5215 match repo {
5216 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5217 backend.diff_tree(diff_type).await
5218 }
5219 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5220 let response = client
5221 .request(proto::GetTreeDiff {
5222 project_id: project_id.0,
5223 repository_id: repository_id.0,
5224 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5225 base: diff_type.base().to_string(),
5226 head: diff_type.head().to_string(),
5227 })
5228 .await?;
5229
5230 let entries = response
5231 .entries
5232 .into_iter()
5233 .filter_map(|entry| {
5234 let status = match entry.status() {
5235 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5236 proto::tree_diff_status::Status::Modified => {
5237 TreeDiffStatus::Modified {
5238 old: git::Oid::from_str(
5239 &entry.oid.context("missing oid").log_err()?,
5240 )
5241 .log_err()?,
5242 }
5243 }
5244 proto::tree_diff_status::Status::Deleted => {
5245 TreeDiffStatus::Deleted {
5246 old: git::Oid::from_str(
5247 &entry.oid.context("missing oid").log_err()?,
5248 )
5249 .log_err()?,
5250 }
5251 }
5252 };
5253 Some((
5254 RepoPath::from_rel_path(
5255 &RelPath::from_proto(&entry.path).log_err()?,
5256 ),
5257 status,
5258 ))
5259 })
5260 .collect();
5261
5262 Ok(TreeDiff { entries })
5263 }
5264 }
5265 })
5266 }
5267
5268 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5269 let id = self.id;
5270 self.send_job(None, move |repo, _cx| async move {
5271 match repo {
5272 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5273 backend.diff(diff_type).await
5274 }
5275 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5276 let response = client
5277 .request(proto::GitDiff {
5278 project_id: project_id.0,
5279 repository_id: id.to_proto(),
5280 diff_type: match diff_type {
5281 DiffType::HeadToIndex => {
5282 proto::git_diff::DiffType::HeadToIndex.into()
5283 }
5284 DiffType::HeadToWorktree => {
5285 proto::git_diff::DiffType::HeadToWorktree.into()
5286 }
5287 },
5288 })
5289 .await?;
5290
5291 Ok(response.diff)
5292 }
5293 }
5294 })
5295 }
5296
5297 pub fn create_branch(
5298 &mut self,
5299 branch_name: String,
5300 base_branch: Option<String>,
5301 ) -> oneshot::Receiver<Result<()>> {
5302 let id = self.id;
5303 let status_msg = if let Some(ref base) = base_branch {
5304 format!("git switch -c {branch_name} {base}").into()
5305 } else {
5306 format!("git switch -c {branch_name}").into()
5307 };
5308 self.send_job(Some(status_msg), move |repo, _cx| async move {
5309 match repo {
5310 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5311 backend.create_branch(branch_name, base_branch).await
5312 }
5313 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5314 client
5315 .request(proto::GitCreateBranch {
5316 project_id: project_id.0,
5317 repository_id: id.to_proto(),
5318 branch_name,
5319 })
5320 .await?;
5321
5322 Ok(())
5323 }
5324 }
5325 })
5326 }
5327
5328 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5329 let id = self.id;
5330 self.send_job(
5331 Some(format!("git switch {branch_name}").into()),
5332 move |repo, _cx| async move {
5333 match repo {
5334 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5335 backend.change_branch(branch_name).await
5336 }
5337 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5338 client
5339 .request(proto::GitChangeBranch {
5340 project_id: project_id.0,
5341 repository_id: id.to_proto(),
5342 branch_name,
5343 })
5344 .await?;
5345
5346 Ok(())
5347 }
5348 }
5349 },
5350 )
5351 }
5352
5353 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5354 let id = self.id;
5355 self.send_job(
5356 Some(format!("git branch -d {branch_name}").into()),
5357 move |repo, _cx| async move {
5358 match repo {
5359 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5360 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5361 client
5362 .request(proto::GitDeleteBranch {
5363 project_id: project_id.0,
5364 repository_id: id.to_proto(),
5365 branch_name,
5366 })
5367 .await?;
5368
5369 Ok(())
5370 }
5371 }
5372 },
5373 )
5374 }
5375
5376 pub fn rename_branch(
5377 &mut self,
5378 branch: String,
5379 new_name: String,
5380 ) -> oneshot::Receiver<Result<()>> {
5381 let id = self.id;
5382 self.send_job(
5383 Some(format!("git branch -m {branch} {new_name}").into()),
5384 move |repo, _cx| async move {
5385 match repo {
5386 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5387 backend.rename_branch(branch, new_name).await
5388 }
5389 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5390 client
5391 .request(proto::GitRenameBranch {
5392 project_id: project_id.0,
5393 repository_id: id.to_proto(),
5394 branch,
5395 new_name,
5396 })
5397 .await?;
5398
5399 Ok(())
5400 }
5401 }
5402 },
5403 )
5404 }
5405
5406 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5407 let id = self.id;
5408 self.send_job(None, move |repo, _cx| async move {
5409 match repo {
5410 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5411 backend.check_for_pushed_commit().await
5412 }
5413 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5414 let response = client
5415 .request(proto::CheckForPushedCommits {
5416 project_id: project_id.0,
5417 repository_id: id.to_proto(),
5418 })
5419 .await?;
5420
5421 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5422
5423 Ok(branches)
5424 }
5425 }
5426 })
5427 }
5428
5429 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5430 self.send_job(None, |repo, _cx| async move {
5431 match repo {
5432 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5433 backend.checkpoint().await
5434 }
5435 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5436 }
5437 })
5438 }
5439
5440 pub fn restore_checkpoint(
5441 &mut self,
5442 checkpoint: GitRepositoryCheckpoint,
5443 ) -> oneshot::Receiver<Result<()>> {
5444 self.send_job(None, move |repo, _cx| async move {
5445 match repo {
5446 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5447 backend.restore_checkpoint(checkpoint).await
5448 }
5449 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5450 }
5451 })
5452 }
5453
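    /// Applies an `UpdateRepository` message from the remote host to this
    /// repository's snapshot (branch, head commit, merge conflicts, stash
    /// entries, remote URLs, and per-path statuses), emitting events for
    /// whatever changed and pruning pending operations that are no longer
    /// running.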
5454 pub(crate) fn apply_remote_update(
5455 &mut self,
5456 update: proto::UpdateRepository,
5457 cx: &mut Context<Self>,
5458 ) -> Result<()> {
5459 let conflicted_paths = TreeSet::from_ordered_entries(
5460 update
5461 .current_merge_conflicts
5462 .into_iter()
5463 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5464 );
5465 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5466 let new_head_commit = update
5467 .head_commit_details
5468 .as_ref()
5469 .map(proto_to_commit_details);
5470 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5471 cx.emit(RepositoryEvent::BranchChanged)
5472 }
5473 self.snapshot.branch = new_branch;
5474 self.snapshot.head_commit = new_head_commit;
5475
5476 self.snapshot.merge.conflicted_paths = conflicted_paths;
5477 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5478 let new_stash_entries = GitStash {
5479 entries: update
5480 .stash_entries
5481 .iter()
5482 .filter_map(|entry| proto_to_stash(entry).ok())
5483 .collect(),
5484 };
5485 if self.snapshot.stash_entries != new_stash_entries {
5486 cx.emit(RepositoryEvent::StashEntriesChanged)
5487 }
5488 self.snapshot.stash_entries = new_stash_entries;
5489 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5490 self.snapshot.remote_origin_url = update.remote_origin_url;
5491
5492 let edits = update
5493 .removed_statuses
5494 .into_iter()
5495 .filter_map(|path| {
5496 Some(sum_tree::Edit::Remove(PathKey(
5497 RelPath::from_proto(&path).log_err()?,
5498 )))
5499 })
5500 .chain(
5501 update
5502 .updated_statuses
5503 .into_iter()
5504 .filter_map(|updated_status| {
5505 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5506 }),
5507 )
5508 .collect::<Vec<_>>();
5509 if !edits.is_empty() {
5510 cx.emit(RepositoryEvent::StatusesChanged);
5511 }
5512 self.snapshot.statuses_by_path.edit(edits, ());
5513 if update.is_last_update {
5514 self.snapshot.scan_id = update.scan_id;
5515 }
5516 self.clear_pending_ops(cx);
5517 Ok(())
5518 }
5519
5520 pub fn compare_checkpoints(
5521 &mut self,
5522 left: GitRepositoryCheckpoint,
5523 right: GitRepositoryCheckpoint,
5524 ) -> oneshot::Receiver<Result<bool>> {
5525 self.send_job(None, move |repo, _cx| async move {
5526 match repo {
5527 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5528 backend.compare_checkpoints(left, right).await
5529 }
5530 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5531 }
5532 })
5533 }
5534
5535 pub fn diff_checkpoints(
5536 &mut self,
5537 base_checkpoint: GitRepositoryCheckpoint,
5538 target_checkpoint: GitRepositoryCheckpoint,
5539 ) -> oneshot::Receiver<Result<String>> {
5540 self.send_job(None, move |repo, _cx| async move {
5541 match repo {
5542 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5543 backend
5544 .diff_checkpoints(base_checkpoint, target_checkpoint)
5545 .await
5546 }
5547 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5548 }
5549 })
5550 }
5551
5552 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5553 let updated = SumTree::from_iter(
5554 self.pending_ops.iter().filter_map(|ops| {
5555 let inner_ops: Vec<PendingOp> =
5556 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5557 if inner_ops.is_empty() {
5558 None
5559 } else {
5560 Some(PendingOps {
5561 repo_path: ops.repo_path.clone(),
5562 ops: inner_ops,
5563 })
5564 }
5565 }),
5566 (),
5567 );
5568
5569 if updated != self.pending_ops {
5570 cx.emit(RepositoryEvent::PendingOpsChanged {
5571 pending_ops: self.pending_ops.clone(),
5572 })
5573 }
5574
5575 self.pending_ops = updated;
5576 }
5577
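    /// Schedules a full status scan as a keyed job, replacing this
    /// repository's snapshot with a freshly computed one and forwarding it to
    /// any downstream client.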
5578 fn schedule_scan(
5579 &mut self,
5580 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5581 cx: &mut Context<Self>,
5582 ) {
5583 let this = cx.weak_entity();
5584 let _ = self.send_keyed_job(
5585 Some(GitJobKey::ReloadGitState),
5586 None,
5587 |state, mut cx| async move {
5588 log::debug!("run scheduled git status scan");
5589
5590 let Some(this) = this.upgrade() else {
5591 return Ok(());
5592 };
5593 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5594 bail!("not a local repository")
5595 };
5596 let (snapshot, events) = this
5597 .update(&mut cx, |this, _| {
5598 this.paths_needing_status_update.clear();
5599 compute_snapshot(
5600 this.id,
5601 this.work_directory_abs_path.clone(),
5602 this.snapshot.clone(),
5603 backend.clone(),
5604 )
5605 })?
5606 .await?;
5607 this.update(&mut cx, |this, cx| {
5608 this.snapshot = snapshot.clone();
5609 this.clear_pending_ops(cx);
5610 for event in events {
5611 cx.emit(event);
5612 }
5613 })?;
5614 if let Some(updates_tx) = updates_tx {
5615 updates_tx
5616 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5617 .ok();
5618 }
5619 Ok(())
5620 },
5621 );
5622 }
5623
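    /// Spawns the serial worker loop for a local repository. Jobs run one at a
    /// time in FIFO order; a keyed job is skipped when a newer job with the
    /// same key is already waiting in the queue. Additional git hosting
    /// providers are registered from the repository's backend before the loop
    /// starts.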
5624 fn spawn_local_git_worker(
5625 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5626 cx: &mut Context<Self>,
5627 ) -> mpsc::UnboundedSender<GitJob> {
5628 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5629
5630 cx.spawn(async move |_, cx| {
5631 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5632 if let Some(git_hosting_provider_registry) =
5633 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5634 {
5635 git_hosting_providers::register_additional_providers(
5636 git_hosting_provider_registry,
5637 state.backend.clone(),
5638 )
5639 .await;
5640 }
5641 let state = RepositoryState::Local(state);
5642 let mut jobs = VecDeque::new();
5643 loop {
5644 while let Ok(Some(next_job)) = job_rx.try_next() {
5645 jobs.push_back(next_job);
5646 }
5647
5648 if let Some(job) = jobs.pop_front() {
5649 if let Some(current_key) = &job.key
5650 && jobs
5651 .iter()
5652 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5653 {
5654 continue;
5655 }
5656 (job.job)(state.clone(), cx).await;
5657 } else if let Some(job) = job_rx.next().await {
5658 jobs.push_back(job);
5659 } else {
5660 break;
5661 }
5662 }
5663 anyhow::Ok(())
5664 })
5665 .detach_and_log_err(cx);
5666
5667 job_tx
5668 }
5669
5670 fn spawn_remote_git_worker(
5671 state: RemoteRepositoryState,
5672 cx: &mut Context<Self>,
5673 ) -> mpsc::UnboundedSender<GitJob> {
5674 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5675
5676 cx.spawn(async move |_, cx| {
5677 let state = RepositoryState::Remote(state);
5678 let mut jobs = VecDeque::new();
5679 loop {
5680 while let Ok(Some(next_job)) = job_rx.try_next() {
5681 jobs.push_back(next_job);
5682 }
5683
5684 if let Some(job) = jobs.pop_front() {
5685 if let Some(current_key) = &job.key
5686 && jobs
5687 .iter()
5688 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5689 {
5690 continue;
5691 }
5692 (job.job)(state.clone(), cx).await;
5693 } else if let Some(job) = job_rx.next().await {
5694 jobs.push_back(job);
5695 } else {
5696 break;
5697 }
5698 }
5699 anyhow::Ok(())
5700 })
5701 .detach_and_log_err(cx);
5702
5703 job_tx
5704 }
5705
5706 fn load_staged_text(
5707 &mut self,
5708 buffer_id: BufferId,
5709 repo_path: RepoPath,
5710 cx: &App,
5711 ) -> Task<Result<Option<String>>> {
5712 let rx = self.send_job(None, move |state, _| async move {
5713 match state {
5714 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5715 anyhow::Ok(backend.load_index_text(repo_path).await)
5716 }
5717 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5718 let response = client
5719 .request(proto::OpenUnstagedDiff {
5720 project_id: project_id.to_proto(),
5721 buffer_id: buffer_id.to_proto(),
5722 })
5723 .await?;
5724 Ok(response.staged_text)
5725 }
5726 }
5727 });
5728 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5729 }
5730
5731 fn load_committed_text(
5732 &mut self,
5733 buffer_id: BufferId,
5734 repo_path: RepoPath,
5735 cx: &App,
5736 ) -> Task<Result<DiffBasesChange>> {
5737 let rx = self.send_job(None, move |state, _| async move {
5738 match state {
5739 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5740 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5741 let staged_text = backend.load_index_text(repo_path).await;
5742 let diff_bases_change = if committed_text == staged_text {
5743 DiffBasesChange::SetBoth(committed_text)
5744 } else {
5745 DiffBasesChange::SetEach {
5746 index: staged_text,
5747 head: committed_text,
5748 }
5749 };
5750 anyhow::Ok(diff_bases_change)
5751 }
5752 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5753 use proto::open_uncommitted_diff_response::Mode;
5754
5755 let response = client
5756 .request(proto::OpenUncommittedDiff {
5757 project_id: project_id.to_proto(),
5758 buffer_id: buffer_id.to_proto(),
5759 })
5760 .await?;
5761 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5762 let bases = match mode {
5763 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5764 Mode::IndexAndHead => DiffBasesChange::SetEach {
5765 head: response.committed_text,
5766 index: response.staged_text,
5767 },
5768 };
5769 Ok(bases)
5770 }
5771 }
5772 });
5773
5774 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5775 }
5776
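    /// Loads the content of the blob with the given object id, either from the
    /// local git backend or via a `GetBlobContent` request to the remote host.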
5777 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5778 let repository_id = self.snapshot.id;
5779 let rx = self.send_job(None, move |state, _| async move {
5780 match state {
5781 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5782 backend.load_blob_content(oid).await
5783 }
5784 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5785 let response = client
5786 .request(proto::GetBlobContent {
5787 project_id: project_id.to_proto(),
5788 repository_id: repository_id.0,
5789 oid: oid.to_string(),
5790 })
5791 .await?;
5792 Ok(response.content)
5793 }
5794 }
5795 });
5796 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5797 }
5798
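    /// Schedules a git status refresh for the given paths. Refreshes run as a
    /// keyed job, so rapid successive calls are coalesced into a single rescan;
    /// resulting status changes update the snapshot and are sent downstream
    /// when an update channel is provided.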
5799 fn paths_changed(
5800 &mut self,
5801 paths: Vec<RepoPath>,
5802 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5803 cx: &mut Context<Self>,
5804 ) {
5805 self.paths_needing_status_update.extend(paths);
5806
5807 let this = cx.weak_entity();
5808 let _ = self.send_keyed_job(
5809 Some(GitJobKey::RefreshStatuses),
5810 None,
5811 |state, mut cx| async move {
5812 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5813 (
5814 this.snapshot.clone(),
5815 mem::take(&mut this.paths_needing_status_update),
5816 )
5817 })?;
5818 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5819 bail!("not a local repository")
5820 };
5821
5822 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5823 if paths.is_empty() {
5824 return Ok(());
5825 }
5826 let statuses = backend.status(&paths).await?;
5827 let stash_entries = backend.stash_entries().await?;
5828
5829 let changed_path_statuses = cx
5830 .background_spawn(async move {
5831 let mut changed_path_statuses = Vec::new();
5832 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5833 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5834
5835 for (repo_path, status) in &*statuses.entries {
5836 changed_paths.remove(repo_path);
5837 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5838 && cursor.item().is_some_and(|entry| entry.status == *status)
5839 {
5840 continue;
5841 }
5842
5843 changed_path_statuses.push(Edit::Insert(StatusEntry {
5844 repo_path: repo_path.clone(),
5845 status: *status,
5846 }));
5847 }
5848 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5849 for path in changed_paths.into_iter() {
5850 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5851 changed_path_statuses
5852 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5853 }
5854 }
5855 changed_path_statuses
5856 })
5857 .await;
5858
5859 this.update(&mut cx, |this, cx| {
5860 if this.snapshot.stash_entries != stash_entries {
5861 cx.emit(RepositoryEvent::StashEntriesChanged);
5862 this.snapshot.stash_entries = stash_entries;
5863 }
5864
5865 if !changed_path_statuses.is_empty() {
5866 cx.emit(RepositoryEvent::StatusesChanged);
5867 this.snapshot
5868 .statuses_by_path
5869 .edit(changed_path_statuses, ());
5870 this.snapshot.scan_id += 1;
5871 }
5872
5873 if let Some(updates_tx) = updates_tx {
5874 updates_tx
5875 .unbounded_send(DownstreamUpdate::UpdateRepository(
5876 this.snapshot.clone(),
5877 ))
5878 .ok();
5879 }
5880 })
5881 },
5882 );
5883 }
5884
5885     /// Returns information about the currently running git command, if any, including when it started.
5886 pub fn current_job(&self) -> Option<JobInfo> {
5887 self.active_jobs.values().next().cloned()
5888 }
5889
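    /// Enqueues a no-op job and returns a receiver that resolves once every
    /// job enqueued before it has finished running.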
5890 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5891 self.send_job(None, |_, _| async {})
5892 }
5893
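    /// Runs the given async operation while recording a pending git operation
    /// for each path, then marks those pending operations as finished, skipped
    /// (on cancellation), or errored according to the outcome.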
5894 fn spawn_job_with_tracking<AsyncFn>(
5895 &mut self,
5896 paths: Vec<RepoPath>,
5897 git_status: pending_op::GitStatus,
5898 cx: &mut Context<Self>,
5899 f: AsyncFn,
5900 ) -> Task<Result<()>>
5901 where
5902 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5903 {
5904 let ids = self.new_pending_ops_for_paths(paths, git_status);
5905
5906 cx.spawn(async move |this, cx| {
5907 let (job_status, result) = match f(this.clone(), cx).await {
5908 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5909 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5910 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5911 };
5912
5913 this.update(cx, |this, _| {
5914 let mut edits = Vec::with_capacity(ids.len());
5915 for (id, entry) in ids {
5916 if let Some(mut ops) = this
5917 .pending_ops
5918 .get(&PathKey(entry.as_ref().clone()), ())
5919 .cloned()
5920 {
5921 if let Some(op) = ops.op_by_id_mut(id) {
5922 op.job_status = job_status;
5923 }
5924 edits.push(sum_tree::Edit::Insert(ops));
5925 }
5926 }
5927 this.pending_ops.edit(edits, ());
5928 })?;
5929
5930 result
5931 })
5932 }
5933
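    /// Records a new pending operation in the running state for each path and
    /// returns the assigned ids paired with their paths.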
5934 fn new_pending_ops_for_paths(
5935 &mut self,
5936 paths: Vec<RepoPath>,
5937 git_status: pending_op::GitStatus,
5938 ) -> Vec<(PendingOpId, RepoPath)> {
5939 let mut edits = Vec::with_capacity(paths.len());
5940 let mut ids = Vec::with_capacity(paths.len());
5941 for path in paths {
5942 let mut ops = self
5943 .pending_ops
5944 .get(&PathKey(path.as_ref().clone()), ())
5945 .cloned()
5946 .unwrap_or_else(|| PendingOps::new(&path));
5947 let id = ops.max_id() + 1;
5948 ops.ops.push(PendingOp {
5949 id,
5950 git_status,
5951 job_status: pending_op::JobStatus::Running,
5952 });
5953 edits.push(sum_tree::Edit::Insert(ops));
5954 ids.push((id, path));
5955 }
5956 self.pending_ops.edit(edits, ());
5957 ids
5958 }
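
    /// Returns the URL of the `upstream` remote, falling back to `origin`.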
5959 pub fn default_remote_url(&self) -> Option<String> {
5960 self.remote_upstream_url
5961 .clone()
5962 .or(self.remote_origin_url.clone())
5963 }
5964}
5965
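/// Builds a permalink to a file inside a crate that was unpacked into the Cargo
/// registry source directory, using the crate's `.cargo_vcs_info.json` and
/// `Cargo.toml` to locate the upstream repository and the exact commit.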
5966fn get_permalink_in_rust_registry_src(
5967 provider_registry: Arc<GitHostingProviderRegistry>,
5968 path: PathBuf,
5969 selection: Range<u32>,
5970) -> Result<url::Url> {
5971 #[derive(Deserialize)]
5972 struct CargoVcsGit {
5973 sha1: String,
5974 }
5975
5976 #[derive(Deserialize)]
5977 struct CargoVcsInfo {
5978 git: CargoVcsGit,
5979 path_in_vcs: String,
5980 }
5981
5982 #[derive(Deserialize)]
5983 struct CargoPackage {
5984 repository: String,
5985 }
5986
5987 #[derive(Deserialize)]
5988 struct CargoToml {
5989 package: CargoPackage,
5990 }
5991
5992 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5993 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5994 Some((dir, json))
5995 }) else {
5996 bail!("No .cargo_vcs_info.json found in parent directories")
5997 };
5998 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5999 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6000 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6001 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6002 .context("parsing package.repository field of manifest")?;
6003 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6004 let permalink = provider.build_permalink(
6005 remote,
6006 BuildPermalinkParams::new(
6007 &cargo_vcs_info.git.sha1,
6008 &RepoPath::from_rel_path(
6009 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6010 ),
6011 Some(selection),
6012 ),
6013 );
6014 Ok(permalink)
6015}
6016
6017fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6018 let Some(blame) = blame else {
6019 return proto::BlameBufferResponse {
6020 blame_response: None,
6021 };
6022 };
6023
6024 let entries = blame
6025 .entries
6026 .into_iter()
6027 .map(|entry| proto::BlameEntry {
6028 sha: entry.sha.as_bytes().into(),
6029 start_line: entry.range.start,
6030 end_line: entry.range.end,
6031 original_line_number: entry.original_line_number,
6032 author: entry.author,
6033 author_mail: entry.author_mail,
6034 author_time: entry.author_time,
6035 author_tz: entry.author_tz,
6036 committer: entry.committer_name,
6037 committer_mail: entry.committer_email,
6038 committer_time: entry.committer_time,
6039 committer_tz: entry.committer_tz,
6040 summary: entry.summary,
6041 previous: entry.previous,
6042 filename: entry.filename,
6043 })
6044 .collect::<Vec<_>>();
6045
6046 let messages = blame
6047 .messages
6048 .into_iter()
6049 .map(|(oid, message)| proto::CommitMessage {
6050 oid: oid.as_bytes().into(),
6051 message,
6052 })
6053 .collect::<Vec<_>>();
6054
6055 proto::BlameBufferResponse {
6056 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6057 }
6058}
6059
6060fn deserialize_blame_buffer_response(
6061 response: proto::BlameBufferResponse,
6062) -> Option<git::blame::Blame> {
6063 let response = response.blame_response?;
6064 let entries = response
6065 .entries
6066 .into_iter()
6067 .filter_map(|entry| {
6068 Some(git::blame::BlameEntry {
6069 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6070 range: entry.start_line..entry.end_line,
6071 original_line_number: entry.original_line_number,
6072 committer_name: entry.committer,
6073 committer_time: entry.committer_time,
6074 committer_tz: entry.committer_tz,
6075 committer_email: entry.committer_mail,
6076 author: entry.author,
6077 author_mail: entry.author_mail,
6078 author_time: entry.author_time,
6079 author_tz: entry.author_tz,
6080 summary: entry.summary,
6081 previous: entry.previous,
6082 filename: entry.filename,
6083 })
6084 })
6085 .collect::<Vec<_>>();
6086
6087 let messages = response
6088 .messages
6089 .into_iter()
6090 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6091 .collect::<HashMap<_, _>>();
6092
6093 Some(Blame { entries, messages })
6094}
6095
6096fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6097 proto::Branch {
6098 is_head: branch.is_head,
6099 ref_name: branch.ref_name.to_string(),
6100 unix_timestamp: branch
6101 .most_recent_commit
6102 .as_ref()
6103 .map(|commit| commit.commit_timestamp as u64),
6104 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6105 ref_name: upstream.ref_name.to_string(),
6106 tracking: upstream
6107 .tracking
6108 .status()
6109 .map(|upstream| proto::UpstreamTracking {
6110 ahead: upstream.ahead as u64,
6111 behind: upstream.behind as u64,
6112 }),
6113 }),
6114 most_recent_commit: branch
6115 .most_recent_commit
6116 .as_ref()
6117 .map(|commit| proto::CommitSummary {
6118 sha: commit.sha.to_string(),
6119 subject: commit.subject.to_string(),
6120 commit_timestamp: commit.commit_timestamp,
6121 author_name: commit.author_name.to_string(),
6122 }),
6123 }
6124}
6125
6126fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6127 proto::Worktree {
6128 path: worktree.path.to_string_lossy().to_string(),
6129 ref_name: worktree.ref_name.to_string(),
6130 sha: worktree.sha.to_string(),
6131 }
6132}
6133
6134fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6135 git::repository::Worktree {
6136 path: PathBuf::from(proto.path.clone()),
6137 ref_name: proto.ref_name.clone().into(),
6138 sha: proto.sha.clone().into(),
6139 }
6140}
6141
6142fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6143 git::repository::Branch {
6144 is_head: proto.is_head,
6145 ref_name: proto.ref_name.clone().into(),
6146 upstream: proto
6147 .upstream
6148 .as_ref()
6149 .map(|upstream| git::repository::Upstream {
6150 ref_name: upstream.ref_name.to_string().into(),
6151 tracking: upstream
6152 .tracking
6153 .as_ref()
6154 .map(|tracking| {
6155 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6156 ahead: tracking.ahead as u32,
6157 behind: tracking.behind as u32,
6158 })
6159 })
6160 .unwrap_or(git::repository::UpstreamTracking::Gone),
6161 }),
6162 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6163 git::repository::CommitSummary {
6164 sha: commit.sha.to_string().into(),
6165 subject: commit.subject.to_string().into(),
6166 commit_timestamp: commit.commit_timestamp,
6167 author_name: commit.author_name.to_string().into(),
6168 has_parent: true,
6169 }
6170 }),
6171 }
6172}
6173
6174fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6175 proto::GitCommitDetails {
6176 sha: commit.sha.to_string(),
6177 message: commit.message.to_string(),
6178 commit_timestamp: commit.commit_timestamp,
6179 author_email: commit.author_email.to_string(),
6180 author_name: commit.author_name.to_string(),
6181 }
6182}
6183
6184fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6185 CommitDetails {
6186 sha: proto.sha.clone().into(),
6187 message: proto.message.clone().into(),
6188 commit_timestamp: proto.commit_timestamp,
6189 author_email: proto.author_email.clone().into(),
6190 author_name: proto.author_name.clone().into(),
6191 }
6192}
6193
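/// Builds a fresh snapshot of the repository's branch, head commit, statuses,
/// stash, merge state, and remote URLs, and returns it along with events
/// describing what changed relative to the previous snapshot.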
6194async fn compute_snapshot(
6195 id: RepositoryId,
6196 work_directory_abs_path: Arc<Path>,
6197 prev_snapshot: RepositorySnapshot,
6198 backend: Arc<dyn GitRepository>,
6199) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6200 let mut events = Vec::new();
6201 let branches = backend.branches().await?;
6202 let branch = branches.into_iter().find(|branch| branch.is_head);
6203 let statuses = backend
6204 .status(&[RepoPath::from_rel_path(
6205 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6206 )])
6207 .await?;
6208 let stash_entries = backend.stash_entries().await?;
6209 let statuses_by_path = SumTree::from_iter(
6210 statuses
6211 .entries
6212 .iter()
6213 .map(|(repo_path, status)| StatusEntry {
6214 repo_path: repo_path.clone(),
6215 status: *status,
6216 }),
6217 (),
6218 );
6219 let (merge_details, merge_heads_changed) =
6220 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6221 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6222
6223 if merge_heads_changed {
6224 events.push(RepositoryEvent::MergeHeadsChanged);
6225 }
6226
6227 if statuses_by_path != prev_snapshot.statuses_by_path {
6228 events.push(RepositoryEvent::StatusesChanged)
6229 }
6230
6231     // Load the head commit directly; this is useful when `branch` is None, e.g. in a detached HEAD state.
6232 let head_commit = match backend.head_sha().await {
6233 Some(head_sha) => backend.show(head_sha).await.log_err(),
6234 None => None,
6235 };
6236
6237 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6238 events.push(RepositoryEvent::BranchChanged);
6239 }
6240
6241 let remote_origin_url = backend.remote_url("origin").await;
6242 let remote_upstream_url = backend.remote_url("upstream").await;
6243
6244 let snapshot = RepositorySnapshot {
6245 id,
6246 statuses_by_path,
6247 work_directory_abs_path,
6248 path_style: prev_snapshot.path_style,
6249 scan_id: prev_snapshot.scan_id + 1,
6250 branch,
6251 head_commit,
6252 merge: merge_details,
6253 remote_origin_url,
6254 remote_upstream_url,
6255 stash_entries,
6256 };
6257
6258 Ok((snapshot, events))
6259}
6260
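/// Converts a protobuf git file status into a `FileStatus`, falling back to
/// the legacy simple status code when no detailed variant is present.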
6261fn status_from_proto(
6262 simple_status: i32,
6263 status: Option<proto::GitFileStatus>,
6264) -> anyhow::Result<FileStatus> {
6265 use proto::git_file_status::Variant;
6266
6267 let Some(variant) = status.and_then(|status| status.variant) else {
6268 let code = proto::GitStatus::from_i32(simple_status)
6269 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6270 let result = match code {
6271 proto::GitStatus::Added => TrackedStatus {
6272 worktree_status: StatusCode::Added,
6273 index_status: StatusCode::Unmodified,
6274 }
6275 .into(),
6276 proto::GitStatus::Modified => TrackedStatus {
6277 worktree_status: StatusCode::Modified,
6278 index_status: StatusCode::Unmodified,
6279 }
6280 .into(),
6281 proto::GitStatus::Conflict => UnmergedStatus {
6282 first_head: UnmergedStatusCode::Updated,
6283 second_head: UnmergedStatusCode::Updated,
6284 }
6285 .into(),
6286 proto::GitStatus::Deleted => TrackedStatus {
6287 worktree_status: StatusCode::Deleted,
6288 index_status: StatusCode::Unmodified,
6289 }
6290 .into(),
6291 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6292 };
6293 return Ok(result);
6294 };
6295
6296 let result = match variant {
6297 Variant::Untracked(_) => FileStatus::Untracked,
6298 Variant::Ignored(_) => FileStatus::Ignored,
6299 Variant::Unmerged(unmerged) => {
6300 let [first_head, second_head] =
6301 [unmerged.first_head, unmerged.second_head].map(|head| {
6302 let code = proto::GitStatus::from_i32(head)
6303 .with_context(|| format!("Invalid git status code: {head}"))?;
6304 let result = match code {
6305 proto::GitStatus::Added => UnmergedStatusCode::Added,
6306 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6307 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6308 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6309 };
6310 Ok(result)
6311 });
6312 let [first_head, second_head] = [first_head?, second_head?];
6313 UnmergedStatus {
6314 first_head,
6315 second_head,
6316 }
6317 .into()
6318 }
6319 Variant::Tracked(tracked) => {
6320 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6321 .map(|status| {
6322 let code = proto::GitStatus::from_i32(status)
6323 .with_context(|| format!("Invalid git status code: {status}"))?;
6324 let result = match code {
6325 proto::GitStatus::Modified => StatusCode::Modified,
6326 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6327 proto::GitStatus::Added => StatusCode::Added,
6328 proto::GitStatus::Deleted => StatusCode::Deleted,
6329 proto::GitStatus::Renamed => StatusCode::Renamed,
6330 proto::GitStatus::Copied => StatusCode::Copied,
6331 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6332 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6333 };
6334 Ok(result)
6335 });
6336 let [index_status, worktree_status] = [index_status?, worktree_status?];
6337 TrackedStatus {
6338 index_status,
6339 worktree_status,
6340 }
6341 .into()
6342 }
6343 };
6344 Ok(result)
6345}
6346
6347fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6348 use proto::git_file_status::{Tracked, Unmerged, Variant};
6349
6350 let variant = match status {
6351 FileStatus::Untracked => Variant::Untracked(Default::default()),
6352 FileStatus::Ignored => Variant::Ignored(Default::default()),
6353 FileStatus::Unmerged(UnmergedStatus {
6354 first_head,
6355 second_head,
6356 }) => Variant::Unmerged(Unmerged {
6357 first_head: unmerged_status_to_proto(first_head),
6358 second_head: unmerged_status_to_proto(second_head),
6359 }),
6360 FileStatus::Tracked(TrackedStatus {
6361 index_status,
6362 worktree_status,
6363 }) => Variant::Tracked(Tracked {
6364 index_status: tracked_status_to_proto(index_status),
6365 worktree_status: tracked_status_to_proto(worktree_status),
6366 }),
6367 };
6368 proto::GitFileStatus {
6369 variant: Some(variant),
6370 }
6371}
6372
6373fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6374 match code {
6375 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6376 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6377 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6378 }
6379}
6380
6381fn tracked_status_to_proto(code: StatusCode) -> i32 {
6382 match code {
6383 StatusCode::Added => proto::GitStatus::Added as _,
6384 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6385 StatusCode::Modified => proto::GitStatus::Modified as _,
6386 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6387 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6388 StatusCode::Copied => proto::GitStatus::Copied as _,
6389 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6390 }
6391}