1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
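/// Tracks the git repositories discovered in a project's worktrees, along with
/// per-buffer diff and merge-conflict state, and forwards repository updates to
/// any downstream collaborators.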
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
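/// Describes which base texts of a buffer's diffs changed: the index text, the
/// HEAD text, or both.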
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
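/// The kind of diff tracked for a buffer: unstaged (working copy vs. index) or
/// uncommitted (working copy vs. HEAD).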
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
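/// Whether this store manages local repositories directly or mirrors a remote
/// project's repositories via an upstream client, along with any downstream
/// client currently being replicated to.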
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
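/// A checkpoint of every repository in the store, keyed by the repository's
/// working directory path.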
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
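/// The git status of a single path within a repository.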
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
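/// Identifies a repository within a project, both locally and over the wire.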
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
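/// State of any in-progress merge: conflicted paths, the merge message (if one
/// has been prepared), and the merge heads.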
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
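/// An immutable view of a repository's state: path statuses, current branch,
/// head commit, merge details, remote URLs, and stash entries.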
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
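/// A handle to a single git repository: its latest [`RepositorySnapshot`] plus
/// a queue of git jobs that run against either a local backend or an upstream
/// client.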
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
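/// Emitted by a [`Repository`] when part of its snapshot changes.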
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
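/// Emitted by the [`GitStore`] as repositories are added, removed, or updated.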
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
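/// A unit of git work queued on a repository, run with access to its
/// [`RepositoryState`].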
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
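/// Identifies a class of queued git job so that redundant work of the same kind
/// can be coalesced.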
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
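    /// Creates a store that manages repositories found in local worktrees.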
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
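    /// Creates a store that mirrors the repositories of a remote (upstream)
    /// project.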
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
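    /// Registers the RPC handlers for git-related requests and messages.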
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
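    /// Starts replicating repository state to a downstream client for the given
    /// shared project.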
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
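    /// Returns a diff of the buffer's contents against its index (staged) text,
    /// creating and loading it if necessary.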
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
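    /// Builds a diff of the buffer's contents against the blob with the given
    /// OID (or with no base text if `oid` is `None`), attaching the unstaged
    /// diff as the secondary diff.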
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
700 let content = match oid {
701 None => None,
702 Some(oid) => Some(
703 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
704 .await?,
705 ),
706 };
707 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
708
709 buffer_diff
710 .update(cx, |buffer_diff, cx| {
711 buffer_diff.set_base_text(
712 content.map(|s| s.as_str().into()),
713 buffer_snapshot.language().cloned(),
714 buffer_snapshot.text,
715 cx,
716 )
717 })?
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 })?;
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
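    /// Returns a diff of the buffer's contents against its HEAD text, creating
    /// and loading it if necessary. The unstaged diff is attached as the
    /// secondary diff.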
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language_changed = true;
825 diff_state.language = language;
826 diff_state.language_registry = language_registry;
827
828 match kind {
829 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
830 DiffKind::Uncommitted => {
831 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
832 diff
833 } else {
834 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
835 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
836 unstaged_diff
837 };
838
839 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
840 diff_state.uncommitted_diff = Some(diff.downgrade())
841 }
842 }
843
844 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
845 let rx = diff_state.wait_for_recalculation();
846
847 anyhow::Ok(async move {
848 if let Some(rx) = rx {
849 rx.await;
850 }
851 Ok(diff)
852 })
853 })
854 })??
855 .await
856 }
857
858 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
859 let diff_state = self.diffs.get(&buffer_id)?;
860 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
861 }
862
863 pub fn get_uncommitted_diff(
864 &self,
865 buffer_id: BufferId,
866 cx: &App,
867 ) -> Option<Entity<BufferDiff>> {
868 let diff_state = self.diffs.get(&buffer_id)?;
869 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
870 }
871
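    /// Returns the set of merge-conflict regions for the buffer, creating it
    /// and scheduling a reparse of conflict markers if necessary.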
872 pub fn open_conflict_set(
873 &mut self,
874 buffer: Entity<Buffer>,
875 cx: &mut Context<Self>,
876 ) -> Entity<ConflictSet> {
877 log::debug!("open conflict set");
878 let buffer_id = buffer.read(cx).remote_id();
879
880 if let Some(git_state) = self.diffs.get(&buffer_id)
881 && let Some(conflict_set) = git_state
882 .read(cx)
883 .conflict_set
884 .as_ref()
885 .and_then(|weak| weak.upgrade())
886 {
887 let conflict_set = conflict_set;
888 let buffer_snapshot = buffer.read(cx).text_snapshot();
889
890 git_state.update(cx, |state, cx| {
891 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
892 });
893
894 return conflict_set;
895 }
896
897 let is_unmerged = self
898 .repository_and_path_for_buffer_id(buffer_id, cx)
899 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
900 let git_store = cx.weak_entity();
901 let buffer_git_state = self
902 .diffs
903 .entry(buffer_id)
904 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
905 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
906
907 self._subscriptions
908 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
909 cx.emit(GitStoreEvent::ConflictsUpdated);
910 }));
911
912 buffer_git_state.update(cx, |state, cx| {
913 state.conflict_set = Some(conflict_set.downgrade());
914 let buffer_snapshot = buffer.read(cx).text_snapshot();
915 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
916 });
917
918 conflict_set
919 }
920
921 pub fn project_path_git_status(
922 &self,
923 project_path: &ProjectPath,
924 cx: &App,
925 ) -> Option<FileStatus> {
926 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
927 Some(repo.read(cx).status_for_path(&repo_path)?.status)
928 }
929
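    /// Captures a checkpoint of every repository in the store, keyed by working
    /// directory, so their combined git state can later be restored or compared.
    ///
    /// Illustrative sketch only (marked `ignore`, so it is not compiled as a
    /// doctest); it assumes an async context with a `&mut AsyncApp` named `cx`
    /// and an existing `git_store: Entity<GitStore>`:
    ///
    /// ```ignore
    /// // Snapshot the git state of every repository in the store.
    /// let checkpoint = git_store.update(cx, |store, cx| store.checkpoint(cx))?.await?;
    /// // ...mutate the working trees...
    /// // Roll all repositories back to the captured state.
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```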
930 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
931 let mut work_directory_abs_paths = Vec::new();
932 let mut checkpoints = Vec::new();
933 for repository in self.repositories.values() {
934 repository.update(cx, |repository, _| {
935 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
936 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
937 });
938 }
939
940 cx.background_executor().spawn(async move {
941 let checkpoints = future::try_join_all(checkpoints).await?;
942 Ok(GitStoreCheckpoint {
943 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
944 .into_iter()
945 .zip(checkpoints)
946 .collect(),
947 })
948 })
949 }
950
951 pub fn restore_checkpoint(
952 &self,
953 checkpoint: GitStoreCheckpoint,
954 cx: &mut App,
955 ) -> Task<Result<()>> {
956 let repositories_by_work_dir_abs_path = self
957 .repositories
958 .values()
959 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
960 .collect::<HashMap<_, _>>();
961
962 let mut tasks = Vec::new();
963 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
964 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
965 let restore = repository.update(cx, |repository, _| {
966 repository.restore_checkpoint(checkpoint)
967 });
968 tasks.push(async move { restore.await? });
969 }
970 }
971 cx.background_spawn(async move {
972 future::try_join_all(tasks).await?;
973 Ok(())
974 })
975 }
976
977 /// Compares two checkpoints, returning true if they are equal.
978 pub fn compare_checkpoints(
979 &self,
980 left: GitStoreCheckpoint,
981 mut right: GitStoreCheckpoint,
982 cx: &mut App,
983 ) -> Task<Result<bool>> {
984 let repositories_by_work_dir_abs_path = self
985 .repositories
986 .values()
987 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
988 .collect::<HashMap<_, _>>();
989
990 let mut tasks = Vec::new();
991 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
992 if let Some(right_checkpoint) = right
993 .checkpoints_by_work_dir_abs_path
994 .remove(&work_dir_abs_path)
995 {
996 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
997 {
998 let compare = repository.update(cx, |repository, _| {
999 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1000 });
1001
1002 tasks.push(async move { compare.await? });
1003 }
1004 } else {
1005 return Task::ready(Ok(false));
1006 }
1007 }
1008 cx.background_spawn(async move {
1009 Ok(future::try_join_all(tasks)
1010 .await?
1011 .into_iter()
1012 .all(|result| result))
1013 })
1014 }
1015
1016 /// Blames a buffer.
1017 pub fn blame_buffer(
1018 &self,
1019 buffer: &Entity<Buffer>,
1020 version: Option<clock::Global>,
1021 cx: &mut Context<Self>,
1022 ) -> Task<Result<Option<Blame>>> {
1023 let buffer = buffer.read(cx);
1024 let Some((repo, repo_path)) =
1025 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1026 else {
1027 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1028 };
1029 let content = match &version {
1030 Some(version) => buffer.rope_for_version(version),
1031 None => buffer.as_rope().clone(),
1032 };
1033 let line_ending = buffer.line_ending();
1034 let version = version.unwrap_or(buffer.version());
1035 let buffer_id = buffer.remote_id();
1036
1037 let repo = repo.downgrade();
1038 cx.spawn(async move |_, cx| {
1039 let repository_state = repo
1040 .update(cx, |repo, _| repo.repository_state.clone())?
1041 .await
1042 .map_err(|err| anyhow::anyhow!(err))?;
1043 match repository_state {
1044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1045 .blame(repo_path.clone(), content, line_ending)
1046 .await
1047 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1048 .map(Some),
1049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1050 let response = client
1051 .request(proto::BlameBuffer {
1052 project_id: project_id.to_proto(),
1053 buffer_id: buffer_id.into(),
1054 version: serialize_version(&version),
1055 })
1056 .await?;
1057 Ok(deserialize_blame_buffer_response(response))
1058 }
1059 }
1060 })
1061 }
1062
1063 pub fn file_history(
1064 &self,
1065 repo: &Entity<Repository>,
1066 path: RepoPath,
1067 cx: &mut App,
1068 ) -> Task<Result<git::repository::FileHistory>> {
1069 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1070
1071 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1072 }
1073
1074 pub fn file_history_paginated(
1075 &self,
1076 repo: &Entity<Repository>,
1077 path: RepoPath,
1078 skip: usize,
1079 limit: Option<usize>,
1080 cx: &mut App,
1081 ) -> Task<Result<git::repository::FileHistory>> {
1082 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1083
1084 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1085 }
1086
1087 pub fn get_permalink_to_line(
1088 &self,
1089 buffer: &Entity<Buffer>,
1090 selection: Range<u32>,
1091 cx: &mut App,
1092 ) -> Task<Result<url::Url>> {
1093 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1094 return Task::ready(Err(anyhow!("buffer has no file")));
1095 };
1096
1097 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1098 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1099 cx,
1100 ) else {
1101 // If we're not in a Git repo, check whether this is a Rust source
1102 // file in the Cargo registry (presumably opened with go-to-definition
1103 // from a normal Rust file). If so, we can put together a permalink
1104 // using crate metadata.
1105 if buffer
1106 .read(cx)
1107 .language()
1108 .is_none_or(|lang| lang.name() != "Rust".into())
1109 {
1110 return Task::ready(Err(anyhow!("no permalink available")));
1111 }
1112 let file_path = file.worktree.read(cx).absolutize(&file.path);
1113 return cx.spawn(async move |cx| {
1114 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1115 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1116 .context("no permalink available")
1117 });
1118 };
1119
1120 let buffer_id = buffer.read(cx).remote_id();
1121 let branch = repo.read(cx).branch.clone();
1122 let remote = branch
1123 .as_ref()
1124 .and_then(|b| b.upstream.as_ref())
1125 .and_then(|b| b.remote_name())
1126 .unwrap_or("origin")
1127 .to_string();
1128
1129 let rx = repo.update(cx, |repo, _| {
1130 repo.send_job(None, move |state, cx| async move {
1131 match state {
1132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1133 let origin_url = backend
1134 .remote_url(&remote)
1135 .await
1136 .with_context(|| format!("remote \"{remote}\" not found"))?;
1137
1138 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1139
1140 let provider_registry =
1141 cx.update(GitHostingProviderRegistry::default_global)?;
1142
1143 let (provider, remote) =
1144 parse_git_remote_url(provider_registry, &origin_url)
1145 .context("parsing Git remote URL")?;
1146
1147 Ok(provider.build_permalink(
1148 remote,
1149 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1150 ))
1151 }
1152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1153 let response = client
1154 .request(proto::GetPermalinkToLine {
1155 project_id: project_id.to_proto(),
1156 buffer_id: buffer_id.into(),
1157 selection: Some(proto::Range {
1158 start: selection.start as u64,
1159 end: selection.end as u64,
1160 }),
1161 })
1162 .await?;
1163
1164 url::Url::parse(&response.permalink).context("failed to parse permalink")
1165 }
1166 }
1167 })
1168 });
1169 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1170 }
1171
1172 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1173 match &self.state {
1174 GitStoreState::Local {
1175 downstream: downstream_client,
1176 ..
1177 } => downstream_client
1178 .as_ref()
1179 .map(|state| (state.client.clone(), state.project_id)),
1180 GitStoreState::Remote {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client.clone(),
1184 }
1185 }
1186
1187 fn upstream_client(&self) -> Option<AnyProtoClient> {
1188 match &self.state {
1189 GitStoreState::Local { .. } => None,
1190 GitStoreState::Remote {
1191 upstream_client, ..
1192 } => Some(upstream_client.clone()),
1193 }
1194 }
1195
1196 fn on_worktree_store_event(
1197 &mut self,
1198 worktree_store: Entity<WorktreeStore>,
1199 event: &WorktreeStoreEvent,
1200 cx: &mut Context<Self>,
1201 ) {
1202 let GitStoreState::Local {
1203 project_environment,
1204 downstream,
1205 next_repository_id,
1206 fs,
1207 } = &self.state
1208 else {
1209 return;
1210 };
1211
1212 match event {
1213 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1214 if let Some(worktree) = self
1215 .worktree_store
1216 .read(cx)
1217 .worktree_for_id(*worktree_id, cx)
1218 {
1219 let paths_by_git_repo =
1220 self.process_updated_entries(&worktree, updated_entries, cx);
1221 let downstream = downstream
1222 .as_ref()
1223 .map(|downstream| downstream.updates_tx.clone());
1224 cx.spawn(async move |_, cx| {
1225 let paths_by_git_repo = paths_by_git_repo.await;
1226 for (repo, paths) in paths_by_git_repo {
1227 repo.update(cx, |repo, cx| {
1228 repo.paths_changed(paths, downstream.clone(), cx);
1229 })
1230 .ok();
1231 }
1232 })
1233 .detach();
1234 }
1235 }
1236 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1237 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1238 else {
1239 return;
1240 };
1241 if !worktree.read(cx).is_visible() {
1242 log::debug!(
1243 "not adding repositories for local worktree {:?} because it's not visible",
1244 worktree.read(cx).abs_path()
1245 );
1246 return;
1247 }
1248 self.update_repositories_from_worktree(
1249 *worktree_id,
1250 project_environment.clone(),
1251 next_repository_id.clone(),
1252 downstream
1253 .as_ref()
1254 .map(|downstream| downstream.updates_tx.clone()),
1255 changed_repos.clone(),
1256 fs.clone(),
1257 cx,
1258 );
1259 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1260 }
1261 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1262 let repos_without_worktree: Vec<RepositoryId> = self
1263 .worktree_ids
1264 .iter_mut()
1265 .filter_map(|(repo_id, worktree_ids)| {
1266 worktree_ids.remove(worktree_id);
1267 if worktree_ids.is_empty() {
1268 Some(*repo_id)
1269 } else {
1270 None
1271 }
1272 })
1273 .collect();
1274 let is_active_repo_removed = repos_without_worktree
1275 .iter()
1276 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1277
1278 for repo_id in repos_without_worktree {
1279 self.repositories.remove(&repo_id);
1280 self.worktree_ids.remove(&repo_id);
1281 if let Some(updates_tx) =
1282 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1283 {
1284 updates_tx
1285 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1286 .ok();
1287 }
1288 }
1289
1290 if is_active_repo_removed {
1291 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1292 self.active_repo_id = Some(repo_id);
1293 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1294 } else {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 }
1299 }
1300 _ => {}
1301 }
1302 }
1303 fn on_repository_event(
1304 &mut self,
1305 repo: Entity<Repository>,
1306 event: &RepositoryEvent,
1307 cx: &mut Context<Self>,
1308 ) {
1309 let id = repo.read(cx).id;
1310 let repo_snapshot = repo.read(cx).snapshot.clone();
1311 for (buffer_id, diff) in self.diffs.iter() {
1312 if let Some((buffer_repo, repo_path)) =
1313 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1314 && buffer_repo == repo
1315 {
1316 diff.update(cx, |diff, cx| {
1317 if let Some(conflict_set) = &diff.conflict_set {
1318 let conflict_status_changed =
1319 conflict_set.update(cx, |conflict_set, cx| {
1320 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1321 conflict_set.set_has_conflict(has_conflict, cx)
1322 })?;
1323 if conflict_status_changed {
1324 let buffer_store = self.buffer_store.read(cx);
1325 if let Some(buffer) = buffer_store.get(*buffer_id) {
1326 let _ = diff
1327 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1328 }
1329 }
1330 }
1331 anyhow::Ok(())
1332 })
1333 .ok();
1334 }
1335 }
1336 cx.emit(GitStoreEvent::RepositoryUpdated(
1337 id,
1338 event.clone(),
1339 self.active_repo_id == Some(id),
1340 ))
1341 }
1342
1343 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1344 cx.emit(GitStoreEvent::JobsUpdated)
1345 }
1346
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1348 fn update_repositories_from_worktree(
1349 &mut self,
1350 worktree_id: WorktreeId,
1351 project_environment: Entity<ProjectEnvironment>,
1352 next_repository_id: Arc<AtomicU64>,
1353 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1354 updated_git_repositories: UpdatedGitRepositoriesSet,
1355 fs: Arc<dyn Fs>,
1356 cx: &mut Context<Self>,
1357 ) {
1358 let mut removed_ids = Vec::new();
1359 for update in updated_git_repositories.iter() {
1360 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1361 let existing_work_directory_abs_path =
1362 repo.read(cx).work_directory_abs_path.clone();
1363 Some(&existing_work_directory_abs_path)
1364 == update.old_work_directory_abs_path.as_ref()
1365 || Some(&existing_work_directory_abs_path)
1366 == update.new_work_directory_abs_path.as_ref()
1367 }) {
1368 let repo_id = *id;
1369 if let Some(new_work_directory_abs_path) =
1370 update.new_work_directory_abs_path.clone()
1371 {
1372 self.worktree_ids
1373 .entry(repo_id)
1374 .or_insert_with(HashSet::new)
1375 .insert(worktree_id);
1376 existing.update(cx, |existing, cx| {
1377 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1378 existing.schedule_scan(updates_tx.clone(), cx);
1379 });
1380 } else {
1381 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1382 worktree_ids.remove(&worktree_id);
1383 if worktree_ids.is_empty() {
1384 removed_ids.push(repo_id);
1385 }
1386 }
1387 }
1388 } else if let UpdatedGitRepository {
1389 new_work_directory_abs_path: Some(work_directory_abs_path),
1390 dot_git_abs_path: Some(dot_git_abs_path),
1391 repository_dir_abs_path: Some(_repository_dir_abs_path),
1392 common_dir_abs_path: Some(_common_dir_abs_path),
1393 ..
1394 } = update
1395 {
1396 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1397 let git_store = cx.weak_entity();
1398 let repo = cx.new(|cx| {
1399 let mut repo = Repository::local(
1400 id,
1401 work_directory_abs_path.clone(),
1402 dot_git_abs_path.clone(),
1403 project_environment.downgrade(),
1404 fs.clone(),
1405 git_store,
1406 cx,
1407 );
1408 if let Some(updates_tx) = updates_tx.as_ref() {
1409 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1410 updates_tx
1411 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1412 .ok();
1413 }
1414 repo.schedule_scan(updates_tx.clone(), cx);
1415 repo
1416 });
1417 self._subscriptions
1418 .push(cx.subscribe(&repo, Self::on_repository_event));
1419 self._subscriptions
1420 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1421 self.repositories.insert(id, repo);
1422 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1423 cx.emit(GitStoreEvent::RepositoryAdded);
1424 self.active_repo_id.get_or_insert_with(|| {
1425 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1426 id
1427 });
1428 }
1429 }
1430
1431 for id in removed_ids {
1432 if self.active_repo_id == Some(id) {
1433 self.active_repo_id = None;
1434 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1435 }
1436 self.repositories.remove(&id);
1437 if let Some(updates_tx) = updates_tx.as_ref() {
1438 updates_tx
1439 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1440 .ok();
1441 }
1442 }
1443 }
1444
1445 fn on_buffer_store_event(
1446 &mut self,
1447 _: Entity<BufferStore>,
1448 event: &BufferStoreEvent,
1449 cx: &mut Context<Self>,
1450 ) {
1451 match event {
1452 BufferStoreEvent::BufferAdded(buffer) => {
1453 cx.subscribe(buffer, |this, buffer, event, cx| {
1454 if let BufferEvent::LanguageChanged(_) = event {
1455 let buffer_id = buffer.read(cx).remote_id();
1456 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1457 diff_state.update(cx, |diff_state, cx| {
1458 diff_state.buffer_language_changed(buffer, cx);
1459 });
1460 }
1461 }
1462 })
1463 .detach();
1464 }
1465 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1466 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1467 diffs.remove(buffer_id);
1468 }
1469 }
1470 BufferStoreEvent::BufferDropped(buffer_id) => {
1471 self.diffs.remove(buffer_id);
1472 for diffs in self.shared_diffs.values_mut() {
1473 diffs.remove(buffer_id);
1474 }
1475 }
1476 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1481 let buffer_id = buffer.read(cx).remote_id();
1482 let diff_state = self.diffs.get(&buffer_id);
1483 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1484
1485 if let Some(diff_state) = diff_state
1486 && let Some((repo, repo_path)) = repo
1487 {
1488 let buffer = buffer.clone();
1489 let diff_state = diff_state.clone();
1490
1491 cx.spawn(async move |_git_store, cx| {
1492 async {
1493 let diff_bases_change = repo
1494 .update(cx, |repo, cx| {
1495 repo.load_committed_text(buffer_id, repo_path, cx)
1496 })?
1497 .await?;
1498
1499 diff_state.update(cx, |diff_state, cx| {
1500 let buffer_snapshot = buffer.read(cx).text_snapshot();
1501 diff_state.diff_bases_changed(
1502 buffer_snapshot,
1503 Some(diff_bases_change),
1504 cx,
1505 );
1506 })
1507 }
1508 .await
1509 .log_err();
1510 })
1511 .detach();
1512 }
1513 }
1514 _ => {}
1515 }
1516 }
1517
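    /// Recalculates the diffs and reparses conflict markers for the given
    /// buffers, returning a future that resolves once all recalculations have
    /// completed.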
1518 pub fn recalculate_buffer_diffs(
1519 &mut self,
1520 buffers: Vec<Entity<Buffer>>,
1521 cx: &mut Context<Self>,
1522 ) -> impl Future<Output = ()> + use<> {
1523 let mut futures = Vec::new();
1524 for buffer in buffers {
1525 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1526 let buffer = buffer.read(cx).text_snapshot();
1527 diff_state.update(cx, |diff_state, cx| {
1528 diff_state.recalculate_diffs(buffer.clone(), cx);
1529 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1530 });
1531 futures.push(diff_state.update(cx, |diff_state, cx| {
1532 diff_state
1533 .reparse_conflict_markers(buffer, cx)
1534 .map(|_| {})
1535 .boxed()
1536 }));
1537 }
1538 }
1539 async move {
1540 futures::future::join_all(futures).await;
1541 }
1542 }
1543
1544 fn on_buffer_diff_event(
1545 &mut self,
1546 diff: Entity<buffer_diff::BufferDiff>,
1547 event: &BufferDiffEvent,
1548 cx: &mut Context<Self>,
1549 ) {
1550 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1551 let buffer_id = diff.read(cx).buffer_id;
1552 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1553 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1554 diff_state.hunk_staging_operation_count += 1;
1555 diff_state.hunk_staging_operation_count
1556 });
1557 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1558 let recv = repo.update(cx, |repo, cx| {
1559 log::debug!("hunks changed for {}", path.as_unix_str());
1560 repo.spawn_set_index_text_job(
1561 path,
1562 new_index_text.as_ref().map(|rope| rope.to_string()),
1563 Some(hunk_staging_operation_count),
1564 cx,
1565 )
1566 });
1567 let diff = diff.downgrade();
1568 cx.spawn(async move |this, cx| {
1569 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1570 diff.update(cx, |diff, cx| {
1571 diff.clear_pending_hunks(cx);
1572 })
1573 .ok();
1574 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1575 .ok();
1576 }
1577 })
1578 .detach();
1579 }
1580 }
1581 }
1582 }
1583
1584 fn local_worktree_git_repos_changed(
1585 &mut self,
1586 worktree: Entity<Worktree>,
1587 changed_repos: &UpdatedGitRepositoriesSet,
1588 cx: &mut Context<Self>,
1589 ) {
1590 log::debug!("local worktree repos changed");
1591 debug_assert!(worktree.read(cx).is_local());
1592
1593 for repository in self.repositories.values() {
1594 repository.update(cx, |repository, cx| {
1595 let repo_abs_path = &repository.work_directory_abs_path;
1596 if changed_repos.iter().any(|update| {
1597 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1598 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 }) {
1600 repository.reload_buffer_diff_bases(cx);
1601 }
1602 });
1603 }
1604 }
1605
1606 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1607 &self.repositories
1608 }
1609
1610 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1611 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1612 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1613 Some(status.status)
1614 }
1615
1616 pub fn repository_and_path_for_buffer_id(
1617 &self,
1618 buffer_id: BufferId,
1619 cx: &App,
1620 ) -> Option<(Entity<Repository>, RepoPath)> {
1621 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1622 let project_path = buffer.read(cx).project_path(cx)?;
1623 self.repository_and_path_for_project_path(&project_path, cx)
1624 }
1625
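    /// Returns the repository containing the given project path, along with the
    /// path relative to that repository's working directory. When repositories
    /// are nested, the innermost containing repository wins.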
1626 pub fn repository_and_path_for_project_path(
1627 &self,
1628 path: &ProjectPath,
1629 cx: &App,
1630 ) -> Option<(Entity<Repository>, RepoPath)> {
1631 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1632 self.repositories
1633 .values()
1634 .filter_map(|repo| {
1635 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1636 Some((repo.clone(), repo_path))
1637 })
1638 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1639 }
1640
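    /// Initializes a new git repository at the given path, either locally or by
    /// forwarding the request to the upstream project.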
1641 pub fn git_init(
1642 &self,
1643 path: Arc<Path>,
1644 fallback_branch_name: String,
1645 cx: &App,
1646 ) -> Task<Result<()>> {
1647 match &self.state {
1648 GitStoreState::Local { fs, .. } => {
1649 let fs = fs.clone();
1650 cx.background_executor()
1651 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1652 }
1653 GitStoreState::Remote {
1654 upstream_client,
1655 upstream_project_id: project_id,
1656 ..
1657 } => {
1658 let client = upstream_client.clone();
1659 let project_id = *project_id;
1660 cx.background_executor().spawn(async move {
1661 client
1662 .request(proto::GitInit {
                            project_id,
1664 abs_path: path.to_string_lossy().into_owned(),
1665 fallback_branch_name,
1666 })
1667 .await?;
1668 Ok(())
1669 })
1670 }
1671 }
1672 }
1673
1674 pub fn git_clone(
1675 &self,
1676 repo: String,
1677 path: impl Into<Arc<std::path::Path>>,
1678 cx: &App,
1679 ) -> Task<Result<()>> {
1680 let path = path.into();
1681 match &self.state {
1682 GitStoreState::Local { fs, .. } => {
1683 let fs = fs.clone();
1684 cx.background_executor()
1685 .spawn(async move { fs.git_clone(&repo, &path).await })
1686 }
1687 GitStoreState::Remote {
1688 upstream_client,
1689 upstream_project_id,
1690 ..
1691 } => {
1692 if upstream_client.is_via_collab() {
1693 return Task::ready(Err(anyhow!(
1694 "Git Clone isn't supported for project guests"
1695 )));
1696 }
1697 let request = upstream_client.request(proto::GitClone {
1698 project_id: *upstream_project_id,
1699 abs_path: path.to_string_lossy().into_owned(),
1700 remote_repo: repo,
1701 });
1702
1703 cx.background_spawn(async move {
1704 let result = request.await?;
1705
1706 match result.success {
1707 true => Ok(()),
1708 false => Err(anyhow!("Git Clone failed")),
1709 }
1710 })
1711 }
1712 }
1713 }
1714
1715 async fn handle_update_repository(
1716 this: Entity<Self>,
1717 envelope: TypedEnvelope<proto::UpdateRepository>,
1718 mut cx: AsyncApp,
1719 ) -> Result<()> {
1720 this.update(&mut cx, |this, cx| {
1721 let path_style = this.worktree_store.read(cx).path_style();
1722 let mut update = envelope.payload;
1723
1724 let id = RepositoryId::from_proto(update.id);
1725 let client = this.upstream_client().context("no upstream client")?;
1726
1727 let mut repo_subscription = None;
1728 let repo = this.repositories.entry(id).or_insert_with(|| {
1729 let git_store = cx.weak_entity();
1730 let repo = cx.new(|cx| {
1731 Repository::remote(
1732 id,
1733 Path::new(&update.abs_path).into(),
1734 path_style,
1735 ProjectId(update.project_id),
1736 client,
1737 git_store,
1738 cx,
1739 )
1740 });
1741 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1742 cx.emit(GitStoreEvent::RepositoryAdded);
1743 repo
1744 });
1745 this._subscriptions.extend(repo_subscription);
1746
1747 repo.update(cx, {
1748 let update = update.clone();
1749 |repo, cx| repo.apply_remote_update(update, cx)
1750 })?;
1751
1752 this.active_repo_id.get_or_insert_with(|| {
1753 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1754 id
1755 });
1756
1757 if let Some((client, project_id)) = this.downstream_client() {
1758 update.project_id = project_id.to_proto();
1759 client.send(update).log_err();
1760 }
1761 Ok(())
1762 })?
1763 }
1764
1765 async fn handle_remove_repository(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::RemoveRepository>,
1768 mut cx: AsyncApp,
1769 ) -> Result<()> {
1770 this.update(&mut cx, |this, cx| {
1771 let mut update = envelope.payload;
1772 let id = RepositoryId::from_proto(update.id);
1773 this.repositories.remove(&id);
1774 if let Some((client, project_id)) = this.downstream_client() {
1775 update.project_id = project_id.to_proto();
1776 client.send(update).log_err();
1777 }
1778 if this.active_repo_id == Some(id) {
1779 this.active_repo_id = None;
1780 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1781 }
1782 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1783 })
1784 }
1785
1786 async fn handle_git_init(
1787 this: Entity<Self>,
1788 envelope: TypedEnvelope<proto::GitInit>,
1789 cx: AsyncApp,
1790 ) -> Result<proto::Ack> {
1791 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1792 let name = envelope.payload.fallback_branch_name;
1793 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1794 .await?;
1795
1796 Ok(proto::Ack {})
1797 }
1798
1799 async fn handle_git_clone(
1800 this: Entity<Self>,
1801 envelope: TypedEnvelope<proto::GitClone>,
1802 cx: AsyncApp,
1803 ) -> Result<proto::GitCloneResponse> {
1804 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1805 let repo_name = envelope.payload.remote_repo;
1806 let result = cx
1807 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1808 .await;
1809
1810 Ok(proto::GitCloneResponse {
1811 success: result.is_ok(),
1812 })
1813 }
1814
1815 async fn handle_fetch(
1816 this: Entity<Self>,
1817 envelope: TypedEnvelope<proto::Fetch>,
1818 mut cx: AsyncApp,
1819 ) -> Result<proto::RemoteMessageResponse> {
1820 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1821 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1822 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1823 let askpass_id = envelope.payload.askpass_id;
1824
1825 let askpass = make_remote_delegate(
1826 this,
1827 envelope.payload.project_id,
1828 repository_id,
1829 askpass_id,
1830 &mut cx,
1831 );
1832
1833 let remote_output = repository_handle
1834 .update(&mut cx, |repository_handle, cx| {
1835 repository_handle.fetch(fetch_options, askpass, cx)
1836 })?
1837 .await??;
1838
1839 Ok(proto::RemoteMessageResponse {
1840 stdout: remote_output.stdout,
1841 stderr: remote_output.stderr,
1842 })
1843 }
1844
1845 async fn handle_push(
1846 this: Entity<Self>,
1847 envelope: TypedEnvelope<proto::Push>,
1848 mut cx: AsyncApp,
1849 ) -> Result<proto::RemoteMessageResponse> {
1850 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1851 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1852
1853 let askpass_id = envelope.payload.askpass_id;
1854 let askpass = make_remote_delegate(
1855 this,
1856 envelope.payload.project_id,
1857 repository_id,
1858 askpass_id,
1859 &mut cx,
1860 );
1861
1862 let options = envelope
1863 .payload
1864 .options
1865 .as_ref()
1866 .map(|_| match envelope.payload.options() {
1867 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1868 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1869 });
1870
1871 let branch_name = envelope.payload.branch_name.into();
1872 let remote_branch_name = envelope.payload.remote_branch_name.into();
1873 let remote_name = envelope.payload.remote_name.into();
1874
1875 let remote_output = repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.push(
1878 branch_name,
1879 remote_branch_name,
1880 remote_name,
1881 options,
1882 askpass,
1883 cx,
1884 )
1885 })?
1886 .await??;
1887 Ok(proto::RemoteMessageResponse {
1888 stdout: remote_output.stdout,
1889 stderr: remote_output.stderr,
1890 })
1891 }
1892
1893 async fn handle_pull(
1894 this: Entity<Self>,
1895 envelope: TypedEnvelope<proto::Pull>,
1896 mut cx: AsyncApp,
1897 ) -> Result<proto::RemoteMessageResponse> {
1898 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1899 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1900 let askpass_id = envelope.payload.askpass_id;
1901 let askpass = make_remote_delegate(
1902 this,
1903 envelope.payload.project_id,
1904 repository_id,
1905 askpass_id,
1906 &mut cx,
1907 );
1908
1909 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1910 let remote_name = envelope.payload.remote_name.into();
1911 let rebase = envelope.payload.rebase;
1912
1913 let remote_message = repository_handle
1914 .update(&mut cx, |repository_handle, cx| {
1915 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1916 })?
1917 .await??;
1918
1919 Ok(proto::RemoteMessageResponse {
1920 stdout: remote_message.stdout,
1921 stderr: remote_message.stderr,
1922 })
1923 }
1924
1925 async fn handle_stage(
1926 this: Entity<Self>,
1927 envelope: TypedEnvelope<proto::Stage>,
1928 mut cx: AsyncApp,
1929 ) -> Result<proto::Ack> {
1930 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1931 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1932
1933 let entries = envelope
1934 .payload
1935 .paths
1936 .into_iter()
1937 .map(|path| RepoPath::new(&path))
1938 .collect::<Result<Vec<_>>>()?;
1939
1940 repository_handle
1941 .update(&mut cx, |repository_handle, cx| {
1942 repository_handle.stage_entries(entries, cx)
1943 })?
1944 .await?;
1945 Ok(proto::Ack {})
1946 }
1947
1948 async fn handle_unstage(
1949 this: Entity<Self>,
1950 envelope: TypedEnvelope<proto::Unstage>,
1951 mut cx: AsyncApp,
1952 ) -> Result<proto::Ack> {
1953 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1954 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1955
1956 let entries = envelope
1957 .payload
1958 .paths
1959 .into_iter()
1960 .map(|path| RepoPath::new(&path))
1961 .collect::<Result<Vec<_>>>()?;
1962
1963 repository_handle
1964 .update(&mut cx, |repository_handle, cx| {
1965 repository_handle.unstage_entries(entries, cx)
1966 })?
1967 .await?;
1968
1969 Ok(proto::Ack {})
1970 }
1971
1972 async fn handle_stash(
1973 this: Entity<Self>,
1974 envelope: TypedEnvelope<proto::Stash>,
1975 mut cx: AsyncApp,
1976 ) -> Result<proto::Ack> {
1977 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1978 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1979
1980 let entries = envelope
1981 .payload
1982 .paths
1983 .into_iter()
1984 .map(|path| RepoPath::new(&path))
1985 .collect::<Result<Vec<_>>>()?;
1986
1987 repository_handle
1988 .update(&mut cx, |repository_handle, cx| {
1989 repository_handle.stash_entries(entries, cx)
1990 })?
1991 .await?;
1992
1993 Ok(proto::Ack {})
1994 }
1995
1996 async fn handle_stash_pop(
1997 this: Entity<Self>,
1998 envelope: TypedEnvelope<proto::StashPop>,
1999 mut cx: AsyncApp,
2000 ) -> Result<proto::Ack> {
2001 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2002 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2003 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2004
2005 repository_handle
2006 .update(&mut cx, |repository_handle, cx| {
2007 repository_handle.stash_pop(stash_index, cx)
2008 })?
2009 .await?;
2010
2011 Ok(proto::Ack {})
2012 }
2013
2014 async fn handle_stash_apply(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::StashApply>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::Ack> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2022
2023 repository_handle
2024 .update(&mut cx, |repository_handle, cx| {
2025 repository_handle.stash_apply(stash_index, cx)
2026 })?
2027 .await?;
2028
2029 Ok(proto::Ack {})
2030 }
2031
2032 async fn handle_stash_drop(
2033 this: Entity<Self>,
2034 envelope: TypedEnvelope<proto::StashDrop>,
2035 mut cx: AsyncApp,
2036 ) -> Result<proto::Ack> {
2037 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2038 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2039 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2040
2041 repository_handle
2042 .update(&mut cx, |repository_handle, cx| {
2043 repository_handle.stash_drop(stash_index, cx)
2044 })?
2045 .await??;
2046
2047 Ok(proto::Ack {})
2048 }
2049
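/// Sets the staged (index) contents of a single path to the text supplied in the request.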
2050 async fn handle_set_index_text(
2051 this: Entity<Self>,
2052 envelope: TypedEnvelope<proto::SetIndexText>,
2053 mut cx: AsyncApp,
2054 ) -> Result<proto::Ack> {
2055 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2056 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2057 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2058
2059 repository_handle
2060 .update(&mut cx, |repository_handle, cx| {
2061 repository_handle.spawn_set_index_text_job(
2062 repo_path,
2063 envelope.payload.text,
2064 None,
2065 cx,
2066 )
2067 })?
2068 .await??;
2069 Ok(proto::Ack {})
2070 }
2071
2072 async fn handle_run_hook(
2073 this: Entity<Self>,
2074 envelope: TypedEnvelope<proto::RunGitHook>,
2075 mut cx: AsyncApp,
2076 ) -> Result<proto::Ack> {
2077 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2078 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2079 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2080 repository_handle
2081 .update(&mut cx, |repository_handle, cx| {
2082 repository_handle.run_hook(hook, cx)
2083 })?
2084 .await??;
2085 Ok(proto::Ack {})
2086 }
2087
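/// Creates a commit with the given message and amend/signoff options; the author is overridden
/// only when both a name and an email are provided, and askpass prompts are forwarded to the downstream client.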
2088 async fn handle_commit(
2089 this: Entity<Self>,
2090 envelope: TypedEnvelope<proto::Commit>,
2091 mut cx: AsyncApp,
2092 ) -> Result<proto::Ack> {
2093 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2094 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2095 let askpass_id = envelope.payload.askpass_id;
2096
2097 let askpass = make_remote_delegate(
2098 this,
2099 envelope.payload.project_id,
2100 repository_id,
2101 askpass_id,
2102 &mut cx,
2103 );
2104
2105 let message = SharedString::from(envelope.payload.message);
2106 let name = envelope.payload.name.map(SharedString::from);
2107 let email = envelope.payload.email.map(SharedString::from);
2108 let options = envelope.payload.options.unwrap_or_default();
2109
2110 repository_handle
2111 .update(&mut cx, |repository_handle, cx| {
2112 repository_handle.commit(
2113 message,
2114 name.zip(email),
2115 CommitOptions {
2116 amend: options.amend,
2117 signoff: options.signoff,
2118 },
2119 askpass,
2120 cx,
2121 )
2122 })?
2123 .await??;
2124 Ok(proto::Ack {})
2125 }
2126
2127 async fn handle_get_remotes(
2128 this: Entity<Self>,
2129 envelope: TypedEnvelope<proto::GetRemotes>,
2130 mut cx: AsyncApp,
2131 ) -> Result<proto::GetRemotesResponse> {
2132 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2133 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2134
2135 let branch_name = envelope.payload.branch_name;
2136 let is_push = envelope.payload.is_push;
2137
2138 let remotes = repository_handle
2139 .update(&mut cx, |repository_handle, _| {
2140 repository_handle.get_remotes(branch_name, is_push)
2141 })?
2142 .await??;
2143
2144 Ok(proto::GetRemotesResponse {
2145 remotes: remotes
2146 .into_iter()
2147 .map(|remote| proto::get_remotes_response::Remote {
2148 name: remote.name.to_string(),
2149 })
2150 .collect::<Vec<_>>(),
2151 })
2152 }
2153
2154 async fn handle_get_worktrees(
2155 this: Entity<Self>,
2156 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2157 mut cx: AsyncApp,
2158 ) -> Result<proto::GitWorktreesResponse> {
2159 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2160 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2161
2162 let worktrees = repository_handle
2163 .update(&mut cx, |repository_handle, _| {
2164 repository_handle.worktrees()
2165 })?
2166 .await??;
2167
2168 Ok(proto::GitWorktreesResponse {
2169 worktrees: worktrees
2170 .into_iter()
2171 .map(|worktree| worktree_to_proto(&worktree))
2172 .collect::<Vec<_>>(),
2173 })
2174 }
2175
2176 async fn handle_create_worktree(
2177 this: Entity<Self>,
2178 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2179 mut cx: AsyncApp,
2180 ) -> Result<proto::Ack> {
2181 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2182 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2183 let directory = PathBuf::from(envelope.payload.directory);
2184 let name = envelope.payload.name;
2185 let commit = envelope.payload.commit;
2186
2187 repository_handle
2188 .update(&mut cx, |repository_handle, _| {
2189 repository_handle.create_worktree(name, directory, commit)
2190 })?
2191 .await??;
2192
2193 Ok(proto::Ack {})
2194 }
2195
2196 async fn handle_get_branches(
2197 this: Entity<Self>,
2198 envelope: TypedEnvelope<proto::GitGetBranches>,
2199 mut cx: AsyncApp,
2200 ) -> Result<proto::GitBranchesResponse> {
2201 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2202 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2203
2204 let branches = repository_handle
2205 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2206 .await??;
2207
2208 Ok(proto::GitBranchesResponse {
2209 branches: branches
2210 .into_iter()
2211 .map(|branch| branch_to_proto(&branch))
2212 .collect::<Vec<_>>(),
2213 })
2214 }

2215 async fn handle_get_default_branch(
2216 this: Entity<Self>,
2217 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2218 mut cx: AsyncApp,
2219 ) -> Result<proto::GetDefaultBranchResponse> {
2220 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2221 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2222
2223 let branch = repository_handle
2224 .update(&mut cx, |repository_handle, _| {
2225 repository_handle.default_branch()
2226 })?
2227 .await??
2228 .map(Into::into);
2229
2230 Ok(proto::GetDefaultBranchResponse { branch })
2231 }

2232 async fn handle_create_branch(
2233 this: Entity<Self>,
2234 envelope: TypedEnvelope<proto::GitCreateBranch>,
2235 mut cx: AsyncApp,
2236 ) -> Result<proto::Ack> {
2237 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2238 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2239 let branch_name = envelope.payload.branch_name;
2240
2241 repository_handle
2242 .update(&mut cx, |repository_handle, _| {
2243 repository_handle.create_branch(branch_name, None)
2244 })?
2245 .await??;
2246
2247 Ok(proto::Ack {})
2248 }
2249
2250 async fn handle_change_branch(
2251 this: Entity<Self>,
2252 envelope: TypedEnvelope<proto::GitChangeBranch>,
2253 mut cx: AsyncApp,
2254 ) -> Result<proto::Ack> {
2255 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2256 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2257 let branch_name = envelope.payload.branch_name;
2258
2259 repository_handle
2260 .update(&mut cx, |repository_handle, _| {
2261 repository_handle.change_branch(branch_name)
2262 })?
2263 .await??;
2264
2265 Ok(proto::Ack {})
2266 }
2267
2268 async fn handle_rename_branch(
2269 this: Entity<Self>,
2270 envelope: TypedEnvelope<proto::GitRenameBranch>,
2271 mut cx: AsyncApp,
2272 ) -> Result<proto::Ack> {
2273 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2274 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2275 let branch = envelope.payload.branch;
2276 let new_name = envelope.payload.new_name;
2277
2278 repository_handle
2279 .update(&mut cx, |repository_handle, _| {
2280 repository_handle.rename_branch(branch, new_name)
2281 })?
2282 .await??;
2283
2284 Ok(proto::Ack {})
2285 }
2286
2287 async fn handle_create_remote(
2288 this: Entity<Self>,
2289 envelope: TypedEnvelope<proto::GitCreateRemote>,
2290 mut cx: AsyncApp,
2291 ) -> Result<proto::Ack> {
2292 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2293 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2294 let remote_name = envelope.payload.remote_name;
2295 let remote_url = envelope.payload.remote_url;
2296
2297 repository_handle
2298 .update(&mut cx, |repository_handle, _| {
2299 repository_handle.create_remote(remote_name, remote_url)
2300 })?
2301 .await??;
2302
2303 Ok(proto::Ack {})
2304 }
2305
2306 async fn handle_delete_branch(
2307 this: Entity<Self>,
2308 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2309 mut cx: AsyncApp,
2310 ) -> Result<proto::Ack> {
2311 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2312 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2313 let branch_name = envelope.payload.branch_name;
2314
2315 repository_handle
2316 .update(&mut cx, |repository_handle, _| {
2317 repository_handle.delete_branch(branch_name)
2318 })?
2319 .await??;
2320
2321 Ok(proto::Ack {})
2322 }
2323
2324 async fn handle_remove_remote(
2325 this: Entity<Self>,
2326 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2327 mut cx: AsyncApp,
2328 ) -> Result<proto::Ack> {
2329 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2330 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2331 let remote_name = envelope.payload.remote_name;
2332
2333 repository_handle
2334 .update(&mut cx, |repository_handle, _| {
2335 repository_handle.remove_remote(remote_name)
2336 })?
2337 .await??;
2338
2339 Ok(proto::Ack {})
2340 }
2341
2342 async fn handle_show(
2343 this: Entity<Self>,
2344 envelope: TypedEnvelope<proto::GitShow>,
2345 mut cx: AsyncApp,
2346 ) -> Result<proto::GitCommitDetails> {
2347 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2348 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2349
2350 let commit = repository_handle
2351 .update(&mut cx, |repository_handle, _| {
2352 repository_handle.show(envelope.payload.commit)
2353 })?
2354 .await??;
2355 Ok(proto::GitCommitDetails {
2356 sha: commit.sha.into(),
2357 message: commit.message.into(),
2358 commit_timestamp: commit.commit_timestamp,
2359 author_email: commit.author_email.into(),
2360 author_name: commit.author_name.into(),
2361 })
2362 }
2363
2364 async fn handle_load_commit_diff(
2365 this: Entity<Self>,
2366 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2367 mut cx: AsyncApp,
2368 ) -> Result<proto::LoadCommitDiffResponse> {
2369 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2370 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2371
2372 let commit_diff = repository_handle
2373 .update(&mut cx, |repository_handle, _| {
2374 repository_handle.load_commit_diff(envelope.payload.commit)
2375 })?
2376 .await??;
2377 Ok(proto::LoadCommitDiffResponse {
2378 files: commit_diff
2379 .files
2380 .into_iter()
2381 .map(|file| proto::CommitFile {
2382 path: file.path.to_proto(),
2383 old_text: file.old_text,
2384 new_text: file.new_text,
2385 })
2386 .collect(),
2387 })
2388 }
2389
2390 async fn handle_file_history(
2391 this: Entity<Self>,
2392 envelope: TypedEnvelope<proto::GitFileHistory>,
2393 mut cx: AsyncApp,
2394 ) -> Result<proto::GitFileHistoryResponse> {
2395 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2396 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2397 let path = RepoPath::from_proto(&envelope.payload.path)?;
2398 let skip = envelope.payload.skip as usize;
2399 let limit = envelope.payload.limit.map(|l| l as usize);
2400
2401 let file_history = repository_handle
2402 .update(&mut cx, |repository_handle, _| {
2403 repository_handle.file_history_paginated(path, skip, limit)
2404 })?
2405 .await??;
2406
2407 Ok(proto::GitFileHistoryResponse {
2408 entries: file_history
2409 .entries
2410 .into_iter()
2411 .map(|entry| proto::FileHistoryEntry {
2412 sha: entry.sha.to_string(),
2413 subject: entry.subject.to_string(),
2414 message: entry.message.to_string(),
2415 commit_timestamp: entry.commit_timestamp,
2416 author_name: entry.author_name.to_string(),
2417 author_email: entry.author_email.to_string(),
2418 })
2419 .collect(),
2420 path: file_history.path.to_proto(),
2421 })
2422 }
2423
2424 async fn handle_reset(
2425 this: Entity<Self>,
2426 envelope: TypedEnvelope<proto::GitReset>,
2427 mut cx: AsyncApp,
2428 ) -> Result<proto::Ack> {
2429 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2430 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2431
2432 let mode = match envelope.payload.mode() {
2433 git_reset::ResetMode::Soft => ResetMode::Soft,
2434 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2435 };
2436
2437 repository_handle
2438 .update(&mut cx, |repository_handle, cx| {
2439 repository_handle.reset(envelope.payload.commit, mode, cx)
2440 })?
2441 .await??;
2442 Ok(proto::Ack {})
2443 }
2444
2445 async fn handle_checkout_files(
2446 this: Entity<Self>,
2447 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2448 mut cx: AsyncApp,
2449 ) -> Result<proto::Ack> {
2450 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2451 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2452 let paths = envelope
2453 .payload
2454 .paths
2455 .iter()
2456 .map(|s| RepoPath::from_proto(s))
2457 .collect::<Result<Vec<_>>>()?;
2458
2459 repository_handle
2460 .update(&mut cx, |repository_handle, cx| {
2461 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2462 })?
2463 .await?;
2464 Ok(proto::Ack {})
2465 }
2466
2467 async fn handle_open_commit_message_buffer(
2468 this: Entity<Self>,
2469 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2470 mut cx: AsyncApp,
2471 ) -> Result<proto::OpenBufferResponse> {
2472 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2473 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2474 let buffer = repository
2475 .update(&mut cx, |repository, cx| {
2476 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2477 })?
2478 .await?;
2479
2480 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2481 this.update(&mut cx, |this, cx| {
2482 this.buffer_store.update(cx, |buffer_store, cx| {
2483 buffer_store
2484 .create_buffer_for_peer(
2485 &buffer,
2486 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2487 cx,
2488 )
2489 .detach_and_log_err(cx);
2490 })
2491 })?;
2492
2493 Ok(proto::OpenBufferResponse {
2494 buffer_id: buffer_id.to_proto(),
2495 })
2496 }
2497
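/// Answers an askpass prompt for an in-flight remote operation: temporarily takes the delegate
/// registered under `askpass_id`, asks it for the password, re-registers it, and returns the response.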
2498 async fn handle_askpass(
2499 this: Entity<Self>,
2500 envelope: TypedEnvelope<proto::AskPassRequest>,
2501 mut cx: AsyncApp,
2502 ) -> Result<proto::AskPassResponse> {
2503 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2504 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2505
2506 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2507 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2508 debug_panic!("no askpass found");
2509 anyhow::bail!("no askpass found");
2510 };
2511
2512 let response = askpass
2513 .ask_password(envelope.payload.prompt)
2514 .await
2515 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2516
2517 delegates
2518 .lock()
2519 .insert(envelope.payload.askpass_id, askpass);
2520
2521 // Note: the askpass response is decrypted here and sent back to the requester in plain text over the RPC connection.
2522 Ok(proto::AskPassResponse {
2523 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2524 })
2525 }
2526
2527 async fn handle_check_for_pushed_commits(
2528 this: Entity<Self>,
2529 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2530 mut cx: AsyncApp,
2531 ) -> Result<proto::CheckForPushedCommitsResponse> {
2532 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2533 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2534
2535 let branches = repository_handle
2536 .update(&mut cx, |repository_handle, _| {
2537 repository_handle.check_for_pushed_commits()
2538 })?
2539 .await??;
2540 Ok(proto::CheckForPushedCommitsResponse {
2541 pushed_to: branches
2542 .into_iter()
2543 .map(|commit| commit.to_string())
2544 .collect(),
2545 })
2546 }
2547
2548 async fn handle_git_diff(
2549 this: Entity<Self>,
2550 envelope: TypedEnvelope<proto::GitDiff>,
2551 mut cx: AsyncApp,
2552 ) -> Result<proto::GitDiffResponse> {
2553 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2554 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2555 let diff_type = match envelope.payload.diff_type() {
2556 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2557 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2558 };
2559
2560 let mut diff = repository_handle
2561 .update(&mut cx, |repository_handle, cx| {
2562 repository_handle.diff(diff_type, cx)
2563 })?
2564 .await??;
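// Cap the diff we send back over RPC at ONE_MB characters.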
2565 const ONE_MB: usize = 1_000_000;
2566 if diff.len() > ONE_MB {
2567 diff = diff.chars().take(ONE_MB).collect()
2568 }
2569
2570 Ok(proto::GitDiffResponse { diff })
2571 }
2572
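/// Computes a tree-level diff between `base` and `head`, either directly or relative to their
/// merge base when `is_merge` is set, and serializes each entry's status along with the old blob
/// OID for modified and deleted files.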
2573 async fn handle_tree_diff(
2574 this: Entity<Self>,
2575 request: TypedEnvelope<proto::GetTreeDiff>,
2576 mut cx: AsyncApp,
2577 ) -> Result<proto::GetTreeDiffResponse> {
2578 let repository_id = RepositoryId(request.payload.repository_id);
2579 let diff_type = if request.payload.is_merge {
2580 DiffTreeType::MergeBase {
2581 base: request.payload.base.into(),
2582 head: request.payload.head.into(),
2583 }
2584 } else {
2585 DiffTreeType::Since {
2586 base: request.payload.base.into(),
2587 head: request.payload.head.into(),
2588 }
2589 };
2590
2591 let diff = this
2592 .update(&mut cx, |this, cx| {
2593 let repository = this.repositories().get(&repository_id)?;
2594 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2595 })?
2596 .context("missing repository")?
2597 .await??;
2598
2599 Ok(proto::GetTreeDiffResponse {
2600 entries: diff
2601 .entries
2602 .into_iter()
2603 .map(|(path, status)| proto::TreeDiffStatus {
2604 path: path.as_ref().to_proto(),
2605 status: match status {
2606 TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2607 TreeDiffStatus::Modified { .. } => {
2608 proto::tree_diff_status::Status::Modified.into()
2609 }
2610 TreeDiffStatus::Deleted { .. } => {
2611 proto::tree_diff_status::Status::Deleted.into()
2612 }
2613 },
2614 oid: match status {
2615 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2616 Some(old.to_string())
2617 }
2618 TreeDiffStatus::Added => None,
2619 },
2620 })
2621 .collect(),
2622 })
2623 }
2624
2625 async fn handle_get_blob_content(
2626 this: Entity<Self>,
2627 request: TypedEnvelope<proto::GetBlobContent>,
2628 mut cx: AsyncApp,
2629 ) -> Result<proto::GetBlobContentResponse> {
2630 let oid = git::Oid::from_str(&request.payload.oid)?;
2631 let repository_id = RepositoryId(request.payload.repository_id);
2632 let content = this
2633 .update(&mut cx, |this, cx| {
2634 let repository = this.repositories().get(&repository_id)?;
2635 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2636 })?
2637 .context("missing repository")?
2638 .await?;
2639 Ok(proto::GetBlobContentResponse { content })
2640 }
2641
2642 async fn handle_open_unstaged_diff(
2643 this: Entity<Self>,
2644 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2645 mut cx: AsyncApp,
2646 ) -> Result<proto::OpenUnstagedDiffResponse> {
2647 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2648 let diff = this
2649 .update(&mut cx, |this, cx| {
2650 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2651 Some(this.open_unstaged_diff(buffer, cx))
2652 })?
2653 .context("missing buffer")?
2654 .await?;
2655 this.update(&mut cx, |this, _| {
2656 let shared_diffs = this
2657 .shared_diffs
2658 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2659 .or_default();
2660 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2661 })?;
2662 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
2663 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2664 }
2665
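/// Opens the uncommitted diff for a buffer on behalf of a peer, records it in `shared_diffs`, and
/// replies with the committed and staged base texts; `IndexMatchesHead` avoids resending the staged
/// text when it is identical to HEAD.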
2666 async fn handle_open_uncommitted_diff(
2667 this: Entity<Self>,
2668 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2669 mut cx: AsyncApp,
2670 ) -> Result<proto::OpenUncommittedDiffResponse> {
2671 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2672 let diff = this
2673 .update(&mut cx, |this, cx| {
2674 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2675 Some(this.open_uncommitted_diff(buffer, cx))
2676 })?
2677 .context("missing buffer")?
2678 .await?;
2679 this.update(&mut cx, |this, _| {
2680 let shared_diffs = this
2681 .shared_diffs
2682 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2683 .or_default();
2684 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2685 })?;
2686 diff.read_with(&cx, |diff, cx| {
2687 use proto::open_uncommitted_diff_response::Mode;
2688
2689 let unstaged_diff = diff.secondary_diff();
2690 let index_snapshot = unstaged_diff.and_then(|diff| {
2691 let diff = diff.read(cx);
2692 diff.base_text_exists().then(|| diff.base_text(cx))
2693 });
2694
2695 let mode;
2696 let staged_text;
2697 let committed_text;
2698 if diff.base_text_exists() {
2699 let committed_snapshot = diff.base_text(cx);
2700 committed_text = Some(committed_snapshot.text());
2701 if let Some(index_text) = index_snapshot {
2702 if index_text.remote_id() == committed_snapshot.remote_id() {
2703 mode = Mode::IndexMatchesHead;
2704 staged_text = None;
2705 } else {
2706 mode = Mode::IndexAndHead;
2707 staged_text = Some(index_text.text());
2708 }
2709 } else {
2710 mode = Mode::IndexAndHead;
2711 staged_text = None;
2712 }
2713 } else {
2714 mode = Mode::IndexAndHead;
2715 committed_text = None;
2716 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2717 }
2718
2719 proto::OpenUncommittedDiffResponse {
2720 committed_text,
2721 staged_text,
2722 mode: mode.into(),
2723 }
2724 })
2725 }
2726
2727 async fn handle_update_diff_bases(
2728 this: Entity<Self>,
2729 request: TypedEnvelope<proto::UpdateDiffBases>,
2730 mut cx: AsyncApp,
2731 ) -> Result<()> {
2732 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2733 this.update(&mut cx, |this, cx| {
2734 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2735 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2736 {
2737 let buffer = buffer.read(cx).text_snapshot();
2738 diff_state.update(cx, |diff_state, cx| {
2739 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2740 })
2741 }
2742 })
2743 }
2744
2745 async fn handle_blame_buffer(
2746 this: Entity<Self>,
2747 envelope: TypedEnvelope<proto::BlameBuffer>,
2748 mut cx: AsyncApp,
2749 ) -> Result<proto::BlameBufferResponse> {
2750 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2751 let version = deserialize_version(&envelope.payload.version);
2752 let buffer = this.read_with(&cx, |this, cx| {
2753 this.buffer_store.read(cx).get_existing(buffer_id)
2754 })??;
2755 buffer
2756 .update(&mut cx, |buffer, _| {
2757 buffer.wait_for_version(version.clone())
2758 })?
2759 .await?;
2760 let blame = this
2761 .update(&mut cx, |this, cx| {
2762 this.blame_buffer(&buffer, Some(version), cx)
2763 })?
2764 .await?;
2765 Ok(serialize_blame_buffer_response(blame))
2766 }
2767
2768 async fn handle_get_permalink_to_line(
2769 this: Entity<Self>,
2770 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2771 mut cx: AsyncApp,
2772 ) -> Result<proto::GetPermalinkToLineResponse> {
2773 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2774 // let version = deserialize_version(&envelope.payload.version);
2775 let selection = {
2776 let proto_selection = envelope
2777 .payload
2778 .selection
2779 .context("request contained no selection to get a permalink for")?;
2780 proto_selection.start as u32..proto_selection.end as u32
2781 };
2782 let buffer = this.read_with(&cx, |this, cx| {
2783 this.buffer_store.read(cx).get_existing(buffer_id)
2784 })??;
2785 let permalink = this
2786 .update(&mut cx, |this, cx| {
2787 this.get_permalink_to_line(&buffer, selection, cx)
2788 })?
2789 .await?;
2790 Ok(proto::GetPermalinkToLineResponse {
2791 permalink: permalink.to_string(),
2792 })
2793 }
2794
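/// Looks up the repository referenced by an incoming request, failing if it no longer exists in the store.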
2795 fn repository_for_request(
2796 this: &Entity<Self>,
2797 id: RepositoryId,
2798 cx: &mut AsyncApp,
2799 ) -> Result<Entity<Repository>> {
2800 this.read_with(cx, |this, _| {
2801 this.repositories
2802 .get(&id)
2803 .context("missing repository handle")
2804 .cloned()
2805 })?
2806 }
2807
2808 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2809 self.repositories
2810 .iter()
2811 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2812 .collect()
2813 }
2814
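/// Partitions a batch of updated worktree entries among this store's repositories on a background
/// task, assigning each absolute path to the innermost repository whose work directory contains it.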
2815 fn process_updated_entries(
2816 &self,
2817 worktree: &Entity<Worktree>,
2818 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2819 cx: &mut App,
2820 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2821 let path_style = worktree.read(cx).path_style();
2822 let mut repo_paths = self
2823 .repositories
2824 .values()
2825 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2826 .collect::<Vec<_>>();
2827 let mut entries: Vec<_> = updated_entries
2828 .iter()
2829 .map(|(path, _, _)| path.clone())
2830 .collect();
2831 entries.sort();
2832 let worktree = worktree.read(cx);
2833
2834 let entries = entries
2835 .into_iter()
2836 .map(|path| worktree.absolutize(&path))
2837 .collect::<Arc<[_]>>();
2838
2839 let executor = cx.background_executor().clone();
2840 cx.background_executor().spawn(async move {
2841 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2842 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2843 let mut tasks = FuturesOrdered::new();
2844 for (repo_path, repo) in repo_paths.into_iter().rev() {
2845 let entries = entries.clone();
2846 let task = executor.spawn(async move {
2847 // Find all updated paths that fall under this repository's work directory
2848 let mut ix = entries.partition_point(|path| path < &*repo_path);
2849 if ix == entries.len() {
2850 return None;
2851 };
2852
2853 let mut paths = Vec::new();
2854 // All paths prefixed by a given repo constitute a contiguous range, since `entries` is sorted.
2855 while let Some(path) = entries.get(ix)
2856 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2857 &repo_path, path, path_style,
2858 )
2859 {
2860 paths.push((repo_path, ix));
2861 ix += 1;
2862 }
2863 if paths.is_empty() {
2864 None
2865 } else {
2866 Some((repo, paths))
2867 }
2868 });
2869 tasks.push_back(task);
2870 }
2871
2872 // Filter out the "duplicate" entries that were matched by multiple distinct repos.
2873 let mut path_was_used = vec![false; entries.len()];
2874 let tasks = tasks.collect::<Vec<_>>().await;
2875 // The tasks were queued for repositories in reverse path order, so deeper (more specific) work directories are seen first.
2876 // We always want to assign a path to its innermost repository.
2877 for t in tasks {
2878 let Some((repo, paths)) = t else {
2879 continue;
2880 };
2881 let entry = paths_by_git_repo.entry(repo).or_default();
2882 for (repo_path, ix) in paths {
2883 if path_was_used[ix] {
2884 continue;
2885 }
2886 path_was_used[ix] = true;
2887 entry.push(repo_path);
2888 }
2889 }
2890
2891 paths_by_git_repo
2892 })
2893 }
2894}
2895
2896impl BufferGitState {
2897 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2898 Self {
2899 unstaged_diff: Default::default(),
2900 uncommitted_diff: Default::default(),
2901 recalculate_diff_task: Default::default(),
2902 language: Default::default(),
2903 language_registry: Default::default(),
2904 recalculating_tx: postage::watch::channel_with(false).0,
2905 hunk_staging_operation_count: 0,
2906 hunk_staging_operation_count_as_of_write: 0,
2907 head_text: Default::default(),
2908 index_text: Default::default(),
2909 head_changed: Default::default(),
2910 index_changed: Default::default(),
2911 language_changed: Default::default(),
2912 conflict_updated_futures: Default::default(),
2913 conflict_set: Default::default(),
2914 reparse_conflict_markers_task: Default::default(),
2915 }
2916 }
2917
2918 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2919 self.language = buffer.read(cx).language().cloned();
2920 self.language_changed = true;
2921 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2922 }
2923
2924 fn reparse_conflict_markers(
2925 &mut self,
2926 buffer: text::BufferSnapshot,
2927 cx: &mut Context<Self>,
2928 ) -> oneshot::Receiver<()> {
2929 let (tx, rx) = oneshot::channel();
2930
2931 let Some(conflict_set) = self
2932 .conflict_set
2933 .as_ref()
2934 .and_then(|conflict_set| conflict_set.upgrade())
2935 else {
2936 return rx;
2937 };
2938
2939 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2940 if conflict_set.has_conflict {
2941 Some(conflict_set.snapshot())
2942 } else {
2943 None
2944 }
2945 });
2946
2947 if let Some(old_snapshot) = old_snapshot {
2948 self.conflict_updated_futures.push(tx);
2949 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2950 let (snapshot, changed_range) = cx
2951 .background_spawn(async move {
2952 let new_snapshot = ConflictSet::parse(&buffer);
2953 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2954 (new_snapshot, changed_range)
2955 })
2956 .await;
2957 this.update(cx, |this, cx| {
2958 if let Some(conflict_set) = &this.conflict_set {
2959 conflict_set
2960 .update(cx, |conflict_set, cx| {
2961 conflict_set.set_snapshot(snapshot, changed_range, cx);
2962 })
2963 .ok();
2964 }
2965 let futures = std::mem::take(&mut this.conflict_updated_futures);
2966 for tx in futures {
2967 tx.send(()).ok();
2968 }
2969 })
2970 }))
2971 }
2972
2973 rx
2974 }
2975
2976 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2977 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2978 }
2979
2980 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2981 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2982 }
2983
2984 fn handle_base_texts_updated(
2985 &mut self,
2986 buffer: text::BufferSnapshot,
2987 message: proto::UpdateDiffBases,
2988 cx: &mut Context<Self>,
2989 ) {
2990 use proto::update_diff_bases::Mode;
2991
2992 let Some(mode) = Mode::from_i32(message.mode) else {
2993 return;
2994 };
2995
2996 let diff_bases_change = match mode {
2997 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2998 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2999 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3000 Mode::IndexAndHead => DiffBasesChange::SetEach {
3001 index: message.staged_text,
3002 head: message.committed_text,
3003 },
3004 };
3005
3006 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3007 }
3008
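/// Returns a future that resolves once the in-flight diff recalculation finishes, or `None` if no
/// recalculation is currently running.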
3009 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3010 if *self.recalculating_tx.borrow() {
3011 let mut rx = self.recalculating_tx.subscribe();
3012 Some(async move {
3013 loop {
3014 let is_recalculating = rx.recv().await;
3015 if is_recalculating != Some(true) {
3016 break;
3017 }
3018 }
3019 })
3020 } else {
3021 None
3022 }
3023 }
3024
3025 fn diff_bases_changed(
3026 &mut self,
3027 buffer: text::BufferSnapshot,
3028 diff_bases_change: Option<DiffBasesChange>,
3029 cx: &mut Context<Self>,
3030 ) {
3031 match diff_bases_change {
3032 Some(DiffBasesChange::SetIndex(index)) => {
3033 self.index_text = index.map(|mut index| {
3034 text::LineEnding::normalize(&mut index);
3035 Arc::from(index.as_str())
3036 });
3037 self.index_changed = true;
3038 }
3039 Some(DiffBasesChange::SetHead(head)) => {
3040 self.head_text = head.map(|mut head| {
3041 text::LineEnding::normalize(&mut head);
3042 Arc::from(head.as_str())
3043 });
3044 self.head_changed = true;
3045 }
3046 Some(DiffBasesChange::SetBoth(text)) => {
3047 let text = text.map(|mut text| {
3048 text::LineEnding::normalize(&mut text);
3049 Arc::from(text.as_str())
3050 });
3051 self.head_text = text.clone();
3052 self.index_text = text;
3053 self.head_changed = true;
3054 self.index_changed = true;
3055 }
3056 Some(DiffBasesChange::SetEach { index, head }) => {
3057 self.index_text = index.map(|mut index| {
3058 text::LineEnding::normalize(&mut index);
3059 Arc::from(index.as_str())
3060 });
3061 self.index_changed = true;
3062 self.head_text = head.map(|mut head| {
3063 text::LineEnding::normalize(&mut head);
3064 Arc::from(head.as_str())
3065 });
3066 self.head_changed = true;
3067 }
3068 None => {}
3069 }
3070
3071 self.recalculate_diffs(buffer, cx)
3072 }
3073
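/// Recomputes the unstaged and uncommitted diffs against the cached index and HEAD texts, reusing
/// the unstaged diff when the index matches HEAD, and bailing out if new hunk staging operations
/// arrived while the recalculation was in flight.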
3074 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3075 *self.recalculating_tx.borrow_mut() = true;
3076
3077 let language = self.language.clone();
3078 let language_registry = self.language_registry.clone();
3079 let unstaged_diff = self.unstaged_diff();
3080 let uncommitted_diff = self.uncommitted_diff();
3081 let head = self.head_text.clone();
3082 let index = self.index_text.clone();
3083 let index_changed = self.index_changed;
3084 let head_changed = self.head_changed;
3085 let language_changed = self.language_changed;
3086 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3087 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3088 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3089 (None, None) => true,
3090 _ => false,
3091 };
3092 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3093 log::debug!(
3094 "start recalculating diffs for buffer {}",
3095 buffer.remote_id()
3096 );
3097
3098 let mut new_unstaged_diff = None;
3099 if let Some(unstaged_diff) = &unstaged_diff {
3100 new_unstaged_diff = Some(
3101 cx.update(|cx| {
3102 unstaged_diff.read(cx).update_diff(
3103 buffer.clone(),
3104 index,
3105 index_changed,
3106 language.clone(),
3107 cx,
3108 )
3109 })?
3110 .await,
3111 );
3112 }
3113
3114 // Dropping BufferDiff can be expensive, so yield back to the event loop
3115 // for a bit
3116 yield_now().await;
3117
3118 let mut new_uncommitted_diff = None;
3119 if let Some(uncommitted_diff) = &uncommitted_diff {
3120 new_uncommitted_diff = if index_matches_head {
3121 new_unstaged_diff.clone()
3122 } else {
3123 Some(
3124 cx.update(|cx| {
3125 uncommitted_diff.read(cx).update_diff(
3126 buffer.clone(),
3127 head,
3128 head_changed,
3129 language.clone(),
3130 cx,
3131 )
3132 })?
3133 .await,
3134 )
3135 }
3136 }
3137
3138 // Dropping BufferDiff can be expensive, so yield back to the event loop
3139 // for a bit
3140 yield_now().await;
3141
3142 let cancel = this.update(cx, |this, _| {
3143 // This checks whether all pending stage/unstage operations
3144 // have quiesced (i.e. both the corresponding write and the
3145 // read of that write have completed). If not, then we cancel
3146 // this recalculation attempt to avoid invalidating pending
3147 // state too quickly; another recalculation will come along
3148 // later and clear the pending state once the state of the index has settled.
3149 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3150 *this.recalculating_tx.borrow_mut() = false;
3151 true
3152 } else {
3153 false
3154 }
3155 })?;
3156 if cancel {
3157 log::debug!(
3158 concat!(
3159 "aborting recalculating diffs for buffer {}",
3160 "due to subsequent hunk operations",
3161 ),
3162 buffer.remote_id()
3163 );
3164 return Ok(());
3165 }
3166
3167 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3168 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3169 {
3170 let task = unstaged_diff.update(cx, |diff, cx| {
3171 if language_changed {
3172 diff.language_changed(language.clone(), language_registry.clone(), cx);
3173 }
3174 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3175 })?;
3176 Some(task.await)
3177 } else {
3178 None
3179 };
3180
3181 yield_now().await;
3182
3183 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3184 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3185 {
3186 uncommitted_diff
3187 .update(cx, |diff, cx| {
3188 if language_changed {
3189 diff.language_changed(language, language_registry, cx);
3190 }
3191 diff.set_snapshot_with_secondary(
3192 new_uncommitted_diff,
3193 &buffer,
3194 unstaged_changed_range.flatten(),
3195 true,
3196 cx,
3197 )
3198 })?
3199 .await;
3200 }
3201
3202 log::debug!(
3203 "finished recalculating diffs for buffer {}",
3204 buffer.remote_id()
3205 );
3206
3207 if let Some(this) = this.upgrade() {
3208 this.update(cx, |this, _| {
3209 this.index_changed = false;
3210 this.head_changed = false;
3211 this.language_changed = false;
3212 *this.recalculating_tx.borrow_mut() = false;
3213 })?;
3214 }
3215
3216 Ok(())
3217 }));
3218 }
3219}
3220
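/// Builds an askpass delegate that forwards password prompts to the downstream client as
/// `AskPassRequest`s and zeroizes each response once it has been handed off.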
3221fn make_remote_delegate(
3222 this: Entity<GitStore>,
3223 project_id: u64,
3224 repository_id: RepositoryId,
3225 askpass_id: u64,
3226 cx: &mut AsyncApp,
3227) -> AskPassDelegate {
3228 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3229 this.update(cx, |this, cx| {
3230 let Some((client, _)) = this.downstream_client() else {
3231 return;
3232 };
3233 let response = client.request(proto::AskPassRequest {
3234 project_id,
3235 repository_id: repository_id.to_proto(),
3236 askpass_id,
3237 prompt,
3238 });
3239 cx.spawn(async move |_, _| {
3240 let mut response = response.await?.response;
3241 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3242 .ok();
3243 response.zeroize();
3244 anyhow::Ok(())
3245 })
3246 .detach_and_log_err(cx);
3247 })
3248 .log_err();
3249 })
3250}
3251
3252impl RepositoryId {
3253 pub fn to_proto(self) -> u64 {
3254 self.0
3255 }
3256
3257 pub fn from_proto(id: u64) -> Self {
3258 RepositoryId(id)
3259 }
3260}
3261
3262impl RepositorySnapshot {
3263 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3264 Self {
3265 id,
3266 statuses_by_path: Default::default(),
3267 work_directory_abs_path,
3268 branch: None,
3269 head_commit: None,
3270 scan_id: 0,
3271 merge: Default::default(),
3272 remote_origin_url: None,
3273 remote_upstream_url: None,
3274 stash_entries: Default::default(),
3275 path_style,
3276 }
3277 }
3278
3279 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3280 proto::UpdateRepository {
3281 branch_summary: self.branch.as_ref().map(branch_to_proto),
3282 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3283 updated_statuses: self
3284 .statuses_by_path
3285 .iter()
3286 .map(|entry| entry.to_proto())
3287 .collect(),
3288 removed_statuses: Default::default(),
3289 current_merge_conflicts: self
3290 .merge
3291 .conflicted_paths
3292 .iter()
3293 .map(|repo_path| repo_path.to_proto())
3294 .collect(),
3295 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3296 project_id,
3297 id: self.id.to_proto(),
3298 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3299 entry_ids: vec![self.id.to_proto()],
3300 scan_id: self.scan_id,
3301 is_last_update: true,
3302 stash_entries: self
3303 .stash_entries
3304 .entries
3305 .iter()
3306 .map(stash_to_proto)
3307 .collect(),
3308 remote_upstream_url: self.remote_upstream_url.clone(),
3309 remote_origin_url: self.remote_origin_url.clone(),
3310 }
3311 }
3312
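/// Builds an incremental `UpdateRepository` message by walking the old and new status trees in
/// lockstep: added or changed entries go into `updated_statuses`, and paths that no longer have a
/// status go into `removed_statuses`.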
3313 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3314 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3315 let mut removed_statuses: Vec<String> = Vec::new();
3316
3317 let mut new_statuses = self.statuses_by_path.iter().peekable();
3318 let mut old_statuses = old.statuses_by_path.iter().peekable();
3319
3320 let mut current_new_entry = new_statuses.next();
3321 let mut current_old_entry = old_statuses.next();
3322 loop {
3323 match (current_new_entry, current_old_entry) {
3324 (Some(new_entry), Some(old_entry)) => {
3325 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3326 Ordering::Less => {
3327 updated_statuses.push(new_entry.to_proto());
3328 current_new_entry = new_statuses.next();
3329 }
3330 Ordering::Equal => {
3331 if new_entry.status != old_entry.status {
3332 updated_statuses.push(new_entry.to_proto());
3333 }
3334 current_old_entry = old_statuses.next();
3335 current_new_entry = new_statuses.next();
3336 }
3337 Ordering::Greater => {
3338 removed_statuses.push(old_entry.repo_path.to_proto());
3339 current_old_entry = old_statuses.next();
3340 }
3341 }
3342 }
3343 (None, Some(old_entry)) => {
3344 removed_statuses.push(old_entry.repo_path.to_proto());
3345 current_old_entry = old_statuses.next();
3346 }
3347 (Some(new_entry), None) => {
3348 updated_statuses.push(new_entry.to_proto());
3349 current_new_entry = new_statuses.next();
3350 }
3351 (None, None) => break,
3352 }
3353 }
3354
3355 proto::UpdateRepository {
3356 branch_summary: self.branch.as_ref().map(branch_to_proto),
3357 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3358 updated_statuses,
3359 removed_statuses,
3360 current_merge_conflicts: self
3361 .merge
3362 .conflicted_paths
3363 .iter()
3364 .map(|path| path.to_proto())
3365 .collect(),
3366 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3367 project_id,
3368 id: self.id.to_proto(),
3369 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3370 entry_ids: vec![],
3371 scan_id: self.scan_id,
3372 is_last_update: true,
3373 stash_entries: self
3374 .stash_entries
3375 .entries
3376 .iter()
3377 .map(stash_to_proto)
3378 .collect(),
3379 remote_upstream_url: self.remote_upstream_url.clone(),
3380 remote_origin_url: self.remote_origin_url.clone(),
3381 }
3382 }
3383
3384 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3385 self.statuses_by_path.iter().cloned()
3386 }
3387
3388 pub fn status_summary(&self) -> GitSummary {
3389 self.statuses_by_path.summary().item_summary
3390 }
3391
3392 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3393 self.statuses_by_path
3394 .get(&PathKey(path.as_ref().clone()), ())
3395 .cloned()
3396 }
3397
3398 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3399 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3400 }
3401
3402 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3403 self.path_style
3404 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3405 .unwrap()
3406 .into()
3407 }
3408
3409 #[inline]
3410 fn abs_path_to_repo_path_inner(
3411 work_directory_abs_path: &Path,
3412 abs_path: &Path,
3413 path_style: PathStyle,
3414 ) -> Option<RepoPath> {
3415 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3416 Some(RepoPath::from_rel_path(&rel_path))
3417 }
3418
3419 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3420 self.merge.conflicted_paths.contains(repo_path)
3421 }
3422
3423 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3424 let had_conflict_on_last_merge_head_change =
3425 self.merge.conflicted_paths.contains(repo_path);
3426 let has_conflict_currently = self
3427 .status_for_path(repo_path)
3428 .is_some_and(|entry| entry.status.is_conflicted());
3429 had_conflict_on_last_merge_head_change || has_conflict_currently
3430 }
3431
3432 /// This is the name that will be displayed in the repository selector for this repository.
3433 pub fn display_name(&self) -> SharedString {
3434 self.work_directory_abs_path
3435 .file_name()
3436 .unwrap_or_default()
3437 .to_string_lossy()
3438 .to_string()
3439 .into()
3440 }
3441}
3442
3443pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3444 proto::StashEntry {
3445 oid: entry.oid.as_bytes().to_vec(),
3446 message: entry.message.clone(),
3447 branch: entry.branch.clone(),
3448 index: entry.index as u64,
3449 timestamp: entry.timestamp,
3450 }
3451}
3452
3453pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3454 Ok(StashEntry {
3455 oid: Oid::from_bytes(&entry.oid)?,
3456 message: entry.message.clone(),
3457 index: entry.index as usize,
3458 branch: entry.branch.clone(),
3459 timestamp: entry.timestamp,
3460 })
3461}
3462
3463impl MergeDetails {
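/// Loads the repository's merge state: the merge message, the in-progress operation heads
/// (MERGE_HEAD, CHERRY_PICK_HEAD, etc.), and the set of conflicted paths. Returns the details
/// together with whether the merge heads changed.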
3464 async fn load(
3465 backend: &Arc<dyn GitRepository>,
3466 status: &SumTree<StatusEntry>,
3467 prev_snapshot: &RepositorySnapshot,
3468 ) -> Result<(MergeDetails, bool)> {
3469 log::debug!("load merge details");
3470 let message = backend.merge_message().await;
3471 let heads = backend
3472 .revparse_batch(vec![
3473 "MERGE_HEAD".into(),
3474 "CHERRY_PICK_HEAD".into(),
3475 "REBASE_HEAD".into(),
3476 "REVERT_HEAD".into(),
3477 "APPLY_HEAD".into(),
3478 ])
3479 .await
3480 .log_err()
3481 .unwrap_or_default()
3482 .into_iter()
3483 .map(|opt| opt.map(SharedString::from))
3484 .collect::<Vec<_>>();
3485 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3486 let conflicted_paths = if merge_heads_changed {
3487 let current_conflicted_paths = TreeSet::from_ordered_entries(
3488 status
3489 .iter()
3490 .filter(|entry| entry.status.is_conflicted())
3491 .map(|entry| entry.repo_path.clone()),
3492 );
3493
3494 // It can happen that we run a scan while a lengthy merge is in progress
3495 // that will eventually result in conflicts, but before those conflicts
3496 // are reported by `git status`. Since for the moment we only care about
3497 // the merge heads state for the purposes of tracking conflicts, don't update
3498 // this state until we see some conflicts.
3499 if heads.iter().any(Option::is_some)
3500 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3501 && current_conflicted_paths.is_empty()
3502 {
3503 log::debug!("not updating merge heads because no conflicts found");
3504 return Ok((
3505 MergeDetails {
3506 message: message.map(SharedString::from),
3507 ..prev_snapshot.merge.clone()
3508 },
3509 false,
3510 ));
3511 }
3512
3513 current_conflicted_paths
3514 } else {
3515 prev_snapshot.merge.conflicted_paths.clone()
3516 };
3517 let details = MergeDetails {
3518 conflicted_paths,
3519 message: message.map(SharedString::from),
3520 heads,
3521 };
3522 Ok((details, merge_heads_changed))
3523 }
3524}
3525
3526impl Repository {
3527 pub fn snapshot(&self) -> RepositorySnapshot {
3528 self.snapshot.clone()
3529 }
3530
3531 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3532 self.pending_ops.iter().cloned()
3533 }
3534
3535 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3536 self.pending_ops.summary().clone()
3537 }
3538
3539 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3540 self.pending_ops
3541 .get(&PathKey(path.as_ref().clone()), ())
3542 .cloned()
3543 }
3544
3545 fn local(
3546 id: RepositoryId,
3547 work_directory_abs_path: Arc<Path>,
3548 dot_git_abs_path: Arc<Path>,
3549 project_environment: WeakEntity<ProjectEnvironment>,
3550 fs: Arc<dyn Fs>,
3551 git_store: WeakEntity<GitStore>,
3552 cx: &mut Context<Self>,
3553 ) -> Self {
3554 let snapshot =
3555 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3556 let state = cx
3557 .spawn(async move |_, cx| {
3558 LocalRepositoryState::new(
3559 work_directory_abs_path,
3560 dot_git_abs_path,
3561 project_environment,
3562 fs,
3563 cx,
3564 )
3565 .await
3566 .map_err(|err| err.to_string())
3567 })
3568 .shared();
3569 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3570 let state = cx
3571 .spawn(async move |_, _| {
3572 let state = state.await?;
3573 Ok(RepositoryState::Local(state))
3574 })
3575 .shared();
3576
3577 Repository {
3578 this: cx.weak_entity(),
3579 git_store,
3580 snapshot,
3581 pending_ops: Default::default(),
3582 repository_state: state,
3583 commit_message_buffer: None,
3584 askpass_delegates: Default::default(),
3585 paths_needing_status_update: Default::default(),
3586 latest_askpass_id: 0,
3587 job_sender,
3588 job_id: 0,
3589 active_jobs: Default::default(),
3590 }
3591 }
3592
3593 fn remote(
3594 id: RepositoryId,
3595 work_directory_abs_path: Arc<Path>,
3596 path_style: PathStyle,
3597 project_id: ProjectId,
3598 client: AnyProtoClient,
3599 git_store: WeakEntity<GitStore>,
3600 cx: &mut Context<Self>,
3601 ) -> Self {
3602 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3603 let repository_state = RemoteRepositoryState { project_id, client };
3604 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3605 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3606 Self {
3607 this: cx.weak_entity(),
3608 snapshot,
3609 commit_message_buffer: None,
3610 git_store,
3611 pending_ops: Default::default(),
3612 paths_needing_status_update: Default::default(),
3613 job_sender,
3614 repository_state,
3615 askpass_delegates: Default::default(),
3616 latest_askpass_id: 0,
3617 active_jobs: Default::default(),
3618 job_id: 0,
3619 }
3620 }
3621
3622 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3623 self.git_store.upgrade()
3624 }
3625
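/// Reloads the index and HEAD base texts for every open buffer that belongs to this repository,
/// determines which bases actually changed, forwards the new bases to any downstream client, and
/// kicks off diff recalculation for the affected buffers.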
3626 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3627 let this = cx.weak_entity();
3628 let git_store = self.git_store.clone();
3629 let _ = self.send_keyed_job(
3630 Some(GitJobKey::ReloadBufferDiffBases),
3631 None,
3632 |state, mut cx| async move {
3633 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3634 log::error!("tried to recompute diffs for a non-local repository");
3635 return Ok(());
3636 };
3637
3638 let Some(this) = this.upgrade() else {
3639 return Ok(());
3640 };
3641
3642 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3643 git_store.update(cx, |git_store, cx| {
3644 git_store
3645 .diffs
3646 .iter()
3647 .filter_map(|(buffer_id, diff_state)| {
3648 let buffer_store = git_store.buffer_store.read(cx);
3649 let buffer = buffer_store.get(*buffer_id)?;
3650 let file = File::from_dyn(buffer.read(cx).file())?;
3651 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3652 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3653 log::debug!(
3654 "start reload diff bases for repo path {}",
3655 repo_path.as_unix_str()
3656 );
3657 diff_state.update(cx, |diff_state, _| {
3658 let has_unstaged_diff = diff_state
3659 .unstaged_diff
3660 .as_ref()
3661 .is_some_and(|diff| diff.is_upgradable());
3662 let has_uncommitted_diff = diff_state
3663 .uncommitted_diff
3664 .as_ref()
3665 .is_some_and(|set| set.is_upgradable());
3666
3667 Some((
3668 buffer,
3669 repo_path,
3670 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3671 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3672 ))
3673 })
3674 })
3675 .collect::<Vec<_>>()
3676 })
3677 })??;
3678
3679 let buffer_diff_base_changes = cx
3680 .background_spawn(async move {
3681 let mut changes = Vec::new();
3682 for (buffer, repo_path, current_index_text, current_head_text) in
3683 &repo_diff_state_updates
3684 {
3685 let index_text = if current_index_text.is_some() {
3686 backend.load_index_text(repo_path.clone()).await
3687 } else {
3688 None
3689 };
3690 let head_text = if current_head_text.is_some() {
3691 backend.load_committed_text(repo_path.clone()).await
3692 } else {
3693 None
3694 };
3695
3696 let change =
3697 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3698 (Some(current_index), Some(current_head)) => {
3699 let index_changed =
3700 index_text.as_deref() != current_index.as_deref();
3701 let head_changed =
3702 head_text.as_deref() != current_head.as_deref();
3703 if index_changed && head_changed {
3704 if index_text == head_text {
3705 Some(DiffBasesChange::SetBoth(head_text))
3706 } else {
3707 Some(DiffBasesChange::SetEach {
3708 index: index_text,
3709 head: head_text,
3710 })
3711 }
3712 } else if index_changed {
3713 Some(DiffBasesChange::SetIndex(index_text))
3714 } else if head_changed {
3715 Some(DiffBasesChange::SetHead(head_text))
3716 } else {
3717 None
3718 }
3719 }
3720 (Some(current_index), None) => {
3721 let index_changed =
3722 index_text.as_deref() != current_index.as_deref();
3723 index_changed
3724 .then_some(DiffBasesChange::SetIndex(index_text))
3725 }
3726 (None, Some(current_head)) => {
3727 let head_changed =
3728 head_text.as_deref() != current_head.as_deref();
3729 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3730 }
3731 (None, None) => None,
3732 };
3733
3734 changes.push((buffer.clone(), change))
3735 }
3736 changes
3737 })
3738 .await;
3739
3740 git_store.update(&mut cx, |git_store, cx| {
3741 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3742 let buffer_snapshot = buffer.read(cx).text_snapshot();
3743 let buffer_id = buffer_snapshot.remote_id();
3744 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3745 continue;
3746 };
3747
3748 let downstream_client = git_store.downstream_client();
3749 diff_state.update(cx, |diff_state, cx| {
3750 use proto::update_diff_bases::Mode;
3751
3752 if let Some((diff_bases_change, (client, project_id))) =
3753 diff_bases_change.clone().zip(downstream_client)
3754 {
3755 let (staged_text, committed_text, mode) = match diff_bases_change {
3756 DiffBasesChange::SetIndex(index) => {
3757 (index, None, Mode::IndexOnly)
3758 }
3759 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3760 DiffBasesChange::SetEach { index, head } => {
3761 (index, head, Mode::IndexAndHead)
3762 }
3763 DiffBasesChange::SetBoth(text) => {
3764 (None, text, Mode::IndexMatchesHead)
3765 }
3766 };
3767 client
3768 .send(proto::UpdateDiffBases {
3769 project_id: project_id.to_proto(),
3770 buffer_id: buffer_id.to_proto(),
3771 staged_text,
3772 committed_text,
3773 mode: mode as i32,
3774 })
3775 .log_err();
3776 }
3777
3778 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3779 });
3780 }
3781 })
3782 },
3783 );
3784 }
3785
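/// Enqueues a job on this repository's sequential git worker and returns a
/// receiver for its result. When a status message is supplied, the job is
/// recorded in `active_jobs` (with its start time) for the duration of the run.
///
/// A minimal usage sketch (illustrative only, not compiled as a doctest;
/// assumes the caller already has `&mut Repository` and can await the receiver):
///
/// ```ignore
/// let rx = repository.send_job(Some("git branch --list".into()), |state, _cx| async move {
///     match state {
///         RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend.branches().await,
///         RepositoryState::Remote(_) => anyhow::bail!("requires a local repository"),
///     }
/// });
/// let branches = rx.await??;
/// ```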
3786 pub fn send_job<F, Fut, R>(
3787 &mut self,
3788 status: Option<SharedString>,
3789 job: F,
3790 ) -> oneshot::Receiver<R>
3791 where
3792 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3793 Fut: Future<Output = R> + 'static,
3794 R: Send + 'static,
3795 {
3796 self.send_keyed_job(None, status, job)
3797 }
3798
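/// Like `send_job`, but tags the job with an optional key. The worker coalesces
/// keyed jobs: if a newer job with the same key is already waiting in the queue,
/// the older one is skipped, so only the most recently enqueued job for a given
/// key actually runs.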
3799 fn send_keyed_job<F, Fut, R>(
3800 &mut self,
3801 key: Option<GitJobKey>,
3802 status: Option<SharedString>,
3803 job: F,
3804 ) -> oneshot::Receiver<R>
3805 where
3806 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3807 Fut: Future<Output = R> + 'static,
3808 R: Send + 'static,
3809 {
3810 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3811 let job_id = post_inc(&mut self.job_id);
3812 let this = self.this.clone();
3813 self.job_sender
3814 .unbounded_send(GitJob {
3815 key,
3816 job: Box::new(move |state, cx: &mut AsyncApp| {
3817 let job = job(state, cx.clone());
3818 cx.spawn(async move |cx| {
3819 if let Some(s) = status.clone() {
3820 this.update(cx, |this, cx| {
3821 this.active_jobs.insert(
3822 job_id,
3823 JobInfo {
3824 start: Instant::now(),
3825 message: s.clone(),
3826 },
3827 );
3828
3829 cx.notify();
3830 })
3831 .ok();
3832 }
3833 let result = job.await;
3834
3835 this.update(cx, |this, cx| {
3836 this.active_jobs.remove(&job_id);
3837 cx.notify();
3838 })
3839 .ok();
3840
3841 result_tx.send(result).ok();
3842 })
3843 }),
3844 })
3845 .ok();
3846 result_rx
3847 }
3848
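/// Marks this repository as the `GitStore`'s active repository and emits
/// `ActiveRepositoryChanged`.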
3849 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3850 let Some(git_store) = self.git_store.upgrade() else {
3851 return;
3852 };
3853 let entity = cx.entity();
3854 git_store.update(cx, |git_store, cx| {
3855 let Some((&id, _)) = git_store
3856 .repositories
3857 .iter()
3858 .find(|(_, handle)| *handle == &entity)
3859 else {
3860 return;
3861 };
3862 git_store.active_repo_id = Some(id);
3863 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3864 });
3865 }
3866
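/// Returns the status entries from the most recent scan, without consulting the
/// git backend.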
3867 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3868 self.snapshot.status()
3869 }
3870
3871 pub fn cached_stash(&self) -> GitStash {
3872 self.snapshot.stash_entries.clone()
3873 }
3874
3875 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3876 let git_store = self.git_store.upgrade()?;
3877 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3878 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3879 let abs_path = SanitizedPath::new(&abs_path);
3880 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3881 Some(ProjectPath {
3882 worktree_id: worktree.read(cx).id(),
3883 path: relative_path,
3884 })
3885 }
3886
3887 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3888 let git_store = self.git_store.upgrade()?;
3889 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3890 let abs_path = worktree_store.absolutize(path, cx)?;
3891 self.snapshot.abs_path_to_repo_path(&abs_path)
3892 }
3893
3894 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3895 other
3896 .read(cx)
3897 .snapshot
3898 .work_directory_abs_path
3899 .starts_with(&self.snapshot.work_directory_abs_path)
3900 }
3901
3902 pub fn open_commit_buffer(
3903 &mut self,
3904 languages: Option<Arc<LanguageRegistry>>,
3905 buffer_store: Entity<BufferStore>,
3906 cx: &mut Context<Self>,
3907 ) -> Task<Result<Entity<Buffer>>> {
3908 let id = self.id;
3909 if let Some(buffer) = self.commit_message_buffer.clone() {
3910 return Task::ready(Ok(buffer));
3911 }
3912 let this = cx.weak_entity();
3913
3914 let rx = self.send_job(None, move |state, mut cx| async move {
3915 let Some(this) = this.upgrade() else {
3916 bail!("git store was dropped");
3917 };
3918 match state {
3919 RepositoryState::Local(..) => {
3920 this.update(&mut cx, |_, cx| {
3921 Self::open_local_commit_buffer(languages, buffer_store, cx)
3922 })?
3923 .await
3924 }
3925 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3926 let request = client.request(proto::OpenCommitMessageBuffer {
3927 project_id: project_id.0,
3928 repository_id: id.to_proto(),
3929 });
3930 let response = request.await.context("requesting to open commit buffer")?;
3931 let buffer_id = BufferId::new(response.buffer_id)?;
3932 let buffer = buffer_store
3933 .update(&mut cx, |buffer_store, cx| {
3934 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3935 })?
3936 .await?;
3937 if let Some(language_registry) = languages {
3938 let git_commit_language =
3939 language_registry.language_for_name("Git Commit").await?;
3940 buffer.update(&mut cx, |buffer, cx| {
3941 buffer.set_language(Some(git_commit_language), cx);
3942 })?;
3943 }
3944 this.update(&mut cx, |this, _| {
3945 this.commit_message_buffer = Some(buffer.clone());
3946 })?;
3947 Ok(buffer)
3948 }
3949 }
3950 });
3951
3952 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3953 }
3954
3955 fn open_local_commit_buffer(
3956 language_registry: Option<Arc<LanguageRegistry>>,
3957 buffer_store: Entity<BufferStore>,
3958 cx: &mut Context<Self>,
3959 ) -> Task<Result<Entity<Buffer>>> {
3960 cx.spawn(async move |repository, cx| {
3961 let buffer = buffer_store
3962 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3963 .await?;
3964
3965 if let Some(language_registry) = language_registry {
3966 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3967 buffer.update(cx, |buffer, cx| {
3968 buffer.set_language(Some(git_commit_language), cx);
3969 })?;
3970 }
3971
3972 repository.update(cx, |repository, _| {
3973 repository.commit_message_buffer = Some(buffer.clone());
3974 })?;
3975 Ok(buffer)
3976 })
3977 }
3978
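/// Restores the given paths to their contents at `commit`, tracking the affected
/// paths as pending revert operations while the checkout runs.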
3979 pub fn checkout_files(
3980 &mut self,
3981 commit: &str,
3982 paths: Vec<RepoPath>,
3983 cx: &mut Context<Self>,
3984 ) -> Task<Result<()>> {
3985 let commit = commit.to_string();
3986 let id = self.id;
3987
3988 self.spawn_job_with_tracking(
3989 paths.clone(),
3990 pending_op::GitStatus::Reverted,
3991 cx,
3992 async move |this, cx| {
3993 this.update(cx, |this, _cx| {
3994 this.send_job(
3995 Some(format!("git checkout {}", commit).into()),
3996 move |git_repo, _| async move {
3997 match git_repo {
3998 RepositoryState::Local(LocalRepositoryState {
3999 backend,
4000 environment,
4001 ..
4002 }) => {
4003 backend
4004 .checkout_files(commit, paths, environment.clone())
4005 .await
4006 }
4007 RepositoryState::Remote(RemoteRepositoryState {
4008 project_id,
4009 client,
4010 }) => {
4011 client
4012 .request(proto::GitCheckoutFiles {
4013 project_id: project_id.0,
4014 repository_id: id.to_proto(),
4015 commit,
4016 paths: paths
4017 .into_iter()
4018 .map(|p| p.to_proto())
4019 .collect(),
4020 })
4021 .await?;
4022
4023 Ok(())
4024 }
4025 }
4026 },
4027 )
4028 })?
4029 .await?
4030 },
4031 )
4032 }
4033
4034 pub fn reset(
4035 &mut self,
4036 commit: String,
4037 reset_mode: ResetMode,
4038 _cx: &mut App,
4039 ) -> oneshot::Receiver<Result<()>> {
4040 let id = self.id;
4041
4042 self.send_job(None, move |git_repo, _| async move {
4043 match git_repo {
4044 RepositoryState::Local(LocalRepositoryState {
4045 backend,
4046 environment,
4047 ..
4048 }) => backend.reset(commit, reset_mode, environment).await,
4049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4050 client
4051 .request(proto::GitReset {
4052 project_id: project_id.0,
4053 repository_id: id.to_proto(),
4054 commit,
4055 mode: match reset_mode {
4056 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4057 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4058 },
4059 })
4060 .await?;
4061
4062 Ok(())
4063 }
4064 }
4065 })
4066 }
4067
4068 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4069 let id = self.id;
4070 self.send_job(None, move |git_repo, _cx| async move {
4071 match git_repo {
4072 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4073 backend.show(commit).await
4074 }
4075 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4076 let resp = client
4077 .request(proto::GitShow {
4078 project_id: project_id.0,
4079 repository_id: id.to_proto(),
4080 commit,
4081 })
4082 .await?;
4083
4084 Ok(CommitDetails {
4085 sha: resp.sha.into(),
4086 message: resp.message.into(),
4087 commit_timestamp: resp.commit_timestamp,
4088 author_email: resp.author_email.into(),
4089 author_name: resp.author_name.into(),
4090 })
4091 }
4092 }
4093 })
4094 }
4095
4096 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4097 let id = self.id;
4098 self.send_job(None, move |git_repo, cx| async move {
4099 match git_repo {
4100 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4101 backend.load_commit(commit, cx).await
4102 }
4103 RepositoryState::Remote(RemoteRepositoryState {
4104 client, project_id, ..
4105 }) => {
4106 let response = client
4107 .request(proto::LoadCommitDiff {
4108 project_id: project_id.0,
4109 repository_id: id.to_proto(),
4110 commit,
4111 })
4112 .await?;
4113 Ok(CommitDiff {
4114 files: response
4115 .files
4116 .into_iter()
4117 .map(|file| {
4118 Ok(CommitFile {
4119 path: RepoPath::from_proto(&file.path)?,
4120 old_text: file.old_text,
4121 new_text: file.new_text,
4122 })
4123 })
4124 .collect::<Result<Vec<_>>>()?,
4125 })
4126 }
4127 }
4128 })
4129 }
4130
4131 pub fn file_history(
4132 &mut self,
4133 path: RepoPath,
4134 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4135 self.file_history_paginated(path, 0, None)
4136 }
4137
4138 pub fn file_history_paginated(
4139 &mut self,
4140 path: RepoPath,
4141 skip: usize,
4142 limit: Option<usize>,
4143 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4144 let id = self.id;
4145 self.send_job(None, move |git_repo, _cx| async move {
4146 match git_repo {
4147 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4148 backend.file_history_paginated(path, skip, limit).await
4149 }
4150 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4151 let response = client
4152 .request(proto::GitFileHistory {
4153 project_id: project_id.0,
4154 repository_id: id.to_proto(),
4155 path: path.to_proto(),
4156 skip: skip as u64,
4157 limit: limit.map(|l| l as u64),
4158 })
4159 .await?;
4160 Ok(git::repository::FileHistory {
4161 entries: response
4162 .entries
4163 .into_iter()
4164 .map(|entry| git::repository::FileHistoryEntry {
4165 sha: entry.sha.into(),
4166 subject: entry.subject.into(),
4167 message: entry.message.into(),
4168 commit_timestamp: entry.commit_timestamp,
4169 author_name: entry.author_name.into(),
4170 author_email: entry.author_email.into(),
4171 })
4172 .collect(),
4173 path: RepoPath::from_proto(&response.path)?,
4174 })
4175 }
4176 }
4177 })
4178 }
4179
4180 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4181 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4182 }
4183
4184 fn save_buffers<'a>(
4185 &self,
4186 entries: impl IntoIterator<Item = &'a RepoPath>,
4187 cx: &mut Context<Self>,
4188 ) -> Vec<Task<anyhow::Result<()>>> {
4189 let mut save_futures = Vec::new();
4190 if let Some(buffer_store) = self.buffer_store(cx) {
4191 buffer_store.update(cx, |buffer_store, cx| {
4192 for path in entries {
4193 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4194 continue;
4195 };
4196 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4197 && buffer
4198 .read(cx)
4199 .file()
4200 .is_some_and(|file| file.disk_state().exists())
4201 && buffer.read(cx).has_unsaved_edits()
4202 {
4203 save_futures.push(buffer_store.save_buffer(buffer, cx));
4204 }
4205 }
4206 })
4207 }
4208 save_futures
4209 }
4210
4211 pub fn stage_entries(
4212 &mut self,
4213 entries: Vec<RepoPath>,
4214 cx: &mut Context<Self>,
4215 ) -> Task<anyhow::Result<()>> {
4216 self.stage_or_unstage_entries(true, entries, cx)
4217 }
4218
4219 pub fn unstage_entries(
4220 &mut self,
4221 entries: Vec<RepoPath>,
4222 cx: &mut Context<Self>,
4223 ) -> Task<anyhow::Result<()>> {
4224 self.stage_or_unstage_entries(false, entries, cx)
4225 }
4226
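/// Saves any unsaved buffers for the given paths, optimistically stages or
/// unstages all hunks in their open uncommitted diffs, then updates the index
/// (`git add` / `git reset`); if the index write fails, the pending hunk state
/// is cleared so the optimistic UI rolls back.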
4227 fn stage_or_unstage_entries(
4228 &mut self,
4229 stage: bool,
4230 entries: Vec<RepoPath>,
4231 cx: &mut Context<Self>,
4232 ) -> Task<anyhow::Result<()>> {
4233 if entries.is_empty() {
4234 return Task::ready(Ok(()));
4235 }
4236 let Some(git_store) = self.git_store.upgrade() else {
4237 return Task::ready(Ok(()));
4238 };
4239 let id = self.id;
4240 let save_tasks = self.save_buffers(&entries, cx);
4241 let paths = entries
4242 .iter()
4243 .map(|p| p.as_unix_str())
4244 .collect::<Vec<_>>()
4245 .join(" ");
4246 let status = if stage {
4247 format!("git add {paths}")
4248 } else {
4249 format!("git reset {paths}")
4250 };
4251 let job_key = GitJobKey::WriteIndex(entries.clone());
4252
4253 self.spawn_job_with_tracking(
4254 entries.clone(),
4255 if stage {
4256 pending_op::GitStatus::Staged
4257 } else {
4258 pending_op::GitStatus::Unstaged
4259 },
4260 cx,
4261 async move |this, cx| {
4262 for save_task in save_tasks {
4263 save_task.await?;
4264 }
4265
4266 this.update(cx, |this, cx| {
4267 let weak_this = cx.weak_entity();
4268 this.send_keyed_job(
4269 Some(job_key),
4270 Some(status.into()),
4271 move |git_repo, mut cx| async move {
4272 let hunk_staging_operation_counts = weak_this
4273 .update(&mut cx, |this, cx| {
4274 let mut hunk_staging_operation_counts = HashMap::default();
4275 for path in &entries {
4276 let Some(project_path) =
4277 this.repo_path_to_project_path(path, cx)
4278 else {
4279 continue;
4280 };
4281 let Some(buffer) = git_store
4282 .read(cx)
4283 .buffer_store
4284 .read(cx)
4285 .get_by_path(&project_path)
4286 else {
4287 continue;
4288 };
4289 let Some(diff_state) = git_store
4290 .read(cx)
4291 .diffs
4292 .get(&buffer.read(cx).remote_id())
4293 .cloned()
4294 else {
4295 continue;
4296 };
4297 let Some(uncommitted_diff) =
4298 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4299 |uncommitted_diff| uncommitted_diff.upgrade(),
4300 )
4301 else {
4302 continue;
4303 };
4304 let buffer_snapshot = buffer.read(cx).text_snapshot();
4305 let file_exists = buffer
4306 .read(cx)
4307 .file()
4308 .is_some_and(|file| file.disk_state().exists());
4309 let hunk_staging_operation_count =
4310 diff_state.update(cx, |diff_state, cx| {
4311 uncommitted_diff.update(
4312 cx,
4313 |uncommitted_diff, cx| {
4314 uncommitted_diff
4315 .stage_or_unstage_all_hunks(
4316 stage,
4317 &buffer_snapshot,
4318 file_exists,
4319 cx,
4320 );
4321 },
4322 );
4323
4324 diff_state.hunk_staging_operation_count += 1;
4325 diff_state.hunk_staging_operation_count
4326 });
4327 hunk_staging_operation_counts.insert(
4328 diff_state.downgrade(),
4329 hunk_staging_operation_count,
4330 );
4331 }
4332 hunk_staging_operation_counts
4333 })
4334 .unwrap_or_default();
4335
4336 let result = match git_repo {
4337 RepositoryState::Local(LocalRepositoryState {
4338 backend,
4339 environment,
4340 ..
4341 }) => {
4342 if stage {
4343 backend.stage_paths(entries, environment.clone()).await
4344 } else {
4345 backend.unstage_paths(entries, environment.clone()).await
4346 }
4347 }
4348 RepositoryState::Remote(RemoteRepositoryState {
4349 project_id,
4350 client,
4351 }) => {
4352 if stage {
4353 client
4354 .request(proto::Stage {
4355 project_id: project_id.0,
4356 repository_id: id.to_proto(),
4357 paths: entries
4358 .into_iter()
4359 .map(|repo_path| repo_path.to_proto())
4360 .collect(),
4361 })
4362 .await
4363 .context("sending stage request")
4364 .map(|_| ())
4365 } else {
4366 client
4367 .request(proto::Unstage {
4368 project_id: project_id.0,
4369 repository_id: id.to_proto(),
4370 paths: entries
4371 .into_iter()
4372 .map(|repo_path| repo_path.to_proto())
4373 .collect(),
4374 })
4375 .await
4376 .context("sending unstage request")
4377 .map(|_| ())
4378 }
4379 }
4380 };
4381
4382 for (diff_state, hunk_staging_operation_count) in
4383 hunk_staging_operation_counts
4384 {
4385 diff_state
4386 .update(&mut cx, |diff_state, cx| {
4387 if result.is_ok() {
4388 diff_state.hunk_staging_operation_count_as_of_write =
4389 hunk_staging_operation_count;
4390 } else if let Some(uncommitted_diff) =
4391 &diff_state.uncommitted_diff
4392 {
4393 uncommitted_diff
4394 .update(cx, |uncommitted_diff, cx| {
4395 uncommitted_diff.clear_pending_hunks(cx);
4396 })
4397 .ok();
4398 }
4399 })
4400 .ok();
4401 }
4402
4403 result
4404 },
4405 )
4406 })?
4407 .await?
4408 },
4409 )
4410 }
4411
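/// Stages every status entry that is not already staged or in the process of
/// being staged.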
4412 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4413 let to_stage = self
4414 .cached_status()
4415 .filter_map(|entry| {
4416 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4417 if ops.staging() || ops.staged() {
4418 None
4419 } else {
4420 Some(entry.repo_path)
4421 }
4422 } else if entry.status.staging().is_fully_staged() {
4423 None
4424 } else {
4425 Some(entry.repo_path)
4426 }
4427 })
4428 .collect();
4429 self.stage_or_unstage_entries(true, to_stage, cx)
4430 }
4431
4432 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4433 let to_unstage = self
4434 .cached_status()
4435 .filter_map(|entry| {
4436 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4437 if !ops.staging() && !ops.staged() {
4438 None
4439 } else {
4440 Some(entry.repo_path)
4441 }
4442 } else if entry.status.staging().is_fully_unstaged() {
4443 None
4444 } else {
4445 Some(entry.repo_path)
4446 }
4447 })
4448 .collect();
4449 self.stage_or_unstage_entries(false, to_unstage, cx)
4450 }
4451
4452 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4453 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4454
4455 self.stash_entries(to_stash, cx)
4456 }
4457
4458 pub fn stash_entries(
4459 &mut self,
4460 entries: Vec<RepoPath>,
4461 cx: &mut Context<Self>,
4462 ) -> Task<anyhow::Result<()>> {
4463 let id = self.id;
4464
4465 cx.spawn(async move |this, cx| {
4466 this.update(cx, |this, _| {
4467 this.send_job(None, move |git_repo, _cx| async move {
4468 match git_repo {
4469 RepositoryState::Local(LocalRepositoryState {
4470 backend,
4471 environment,
4472 ..
4473 }) => backend.stash_paths(entries, environment).await,
4474 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4475 client
4476 .request(proto::Stash {
4477 project_id: project_id.0,
4478 repository_id: id.to_proto(),
4479 paths: entries
4480 .into_iter()
4481 .map(|repo_path| repo_path.to_proto())
4482 .collect(),
4483 })
4484 .await
4485 .context("sending stash request")?;
4486 Ok(())
4487 }
4488 }
4489 })
4490 })?
4491 .await??;
4492 Ok(())
4493 })
4494 }
4495
4496 pub fn stash_pop(
4497 &mut self,
4498 index: Option<usize>,
4499 cx: &mut Context<Self>,
4500 ) -> Task<anyhow::Result<()>> {
4501 let id = self.id;
4502 cx.spawn(async move |this, cx| {
4503 this.update(cx, |this, _| {
4504 this.send_job(None, move |git_repo, _cx| async move {
4505 match git_repo {
4506 RepositoryState::Local(LocalRepositoryState {
4507 backend,
4508 environment,
4509 ..
4510 }) => backend.stash_pop(index, environment).await,
4511 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4512 client
4513 .request(proto::StashPop {
4514 project_id: project_id.0,
4515 repository_id: id.to_proto(),
4516 stash_index: index.map(|i| i as u64),
4517 })
4518 .await
4519 .context("sending stash pop request")?;
4520 Ok(())
4521 }
4522 }
4523 })
4524 })?
4525 .await??;
4526 Ok(())
4527 })
4528 }
4529
4530 pub fn stash_apply(
4531 &mut self,
4532 index: Option<usize>,
4533 cx: &mut Context<Self>,
4534 ) -> Task<anyhow::Result<()>> {
4535 let id = self.id;
4536 cx.spawn(async move |this, cx| {
4537 this.update(cx, |this, _| {
4538 this.send_job(None, move |git_repo, _cx| async move {
4539 match git_repo {
4540 RepositoryState::Local(LocalRepositoryState {
4541 backend,
4542 environment,
4543 ..
4544 }) => backend.stash_apply(index, environment).await,
4545 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4546 client
4547 .request(proto::StashApply {
4548 project_id: project_id.0,
4549 repository_id: id.to_proto(),
4550 stash_index: index.map(|i| i as u64),
4551 })
4552 .await
4553 .context("sending stash apply request")?;
4554 Ok(())
4555 }
4556 }
4557 })
4558 })?
4559 .await??;
4560 Ok(())
4561 })
4562 }
4563
4564 pub fn stash_drop(
4565 &mut self,
4566 index: Option<usize>,
4567 cx: &mut Context<Self>,
4568 ) -> oneshot::Receiver<anyhow::Result<()>> {
4569 let id = self.id;
4570 let updates_tx = self
4571 .git_store()
4572 .and_then(|git_store| match &git_store.read(cx).state {
4573 GitStoreState::Local { downstream, .. } => downstream
4574 .as_ref()
4575 .map(|downstream| downstream.updates_tx.clone()),
4576 _ => None,
4577 });
4578 let this = cx.weak_entity();
4579 self.send_job(None, move |git_repo, mut cx| async move {
4580 match git_repo {
4581 RepositoryState::Local(LocalRepositoryState {
4582 backend,
4583 environment,
4584 ..
4585 }) => {
4586 // TODO: it would be nice if the stash list refreshed automatically instead of being patched into the snapshot by hand here.
4587 let result = backend.stash_drop(index, environment).await;
4588 if result.is_ok()
4589 && let Ok(stash_entries) = backend.stash_entries().await
4590 {
4591 let snapshot = this.update(&mut cx, |this, cx| {
4592 this.snapshot.stash_entries = stash_entries;
4593 cx.emit(RepositoryEvent::StashEntriesChanged);
4594 this.snapshot.clone()
4595 })?;
4596 if let Some(updates_tx) = updates_tx {
4597 updates_tx
4598 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4599 .ok();
4600 }
4601 }
4602
4603 result
4604 }
4605 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4606 client
4607 .request(proto::StashDrop {
4608 project_id: project_id.0,
4609 repository_id: id.to_proto(),
4610 stash_index: index.map(|i| i as u64),
4611 })
4612 .await
4613 .context("sending stash drop request")?;
4614 Ok(())
4615 }
4616 }
4617 })
4618 }
4619
4620 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4621 let id = self.id;
4622 self.send_job(
4623 Some(format!("git hook {}", hook.as_str()).into()),
4624 move |git_repo, _cx| async move {
4625 match git_repo {
4626 RepositoryState::Local(LocalRepositoryState {
4627 backend,
4628 environment,
4629 ..
4630 }) => backend.run_hook(hook, environment.clone()).await,
4631 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4632 client
4633 .request(proto::RunGitHook {
4634 project_id: project_id.0,
4635 repository_id: id.to_proto(),
4636 hook: hook.to_proto(),
4637 })
4638 .await?;
4639
4640 Ok(())
4641 }
4642 }
4643 },
4644 )
4645 }
4646
4647 pub fn commit(
4648 &mut self,
4649 message: SharedString,
4650 name_and_email: Option<(SharedString, SharedString)>,
4651 options: CommitOptions,
4652 askpass: AskPassDelegate,
4653 cx: &mut App,
4654 ) -> oneshot::Receiver<Result<()>> {
4655 let id = self.id;
4656 let askpass_delegates = self.askpass_delegates.clone();
4657 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4658
4659 let rx = self.run_hook(RunHook::PreCommit, cx);
4660
4661 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4662 rx.await??;
4663
4664 match git_repo {
4665 RepositoryState::Local(LocalRepositoryState {
4666 backend,
4667 environment,
4668 ..
4669 }) => {
4670 backend
4671 .commit(message, name_and_email, options, askpass, environment)
4672 .await
4673 }
4674 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4675 askpass_delegates.lock().insert(askpass_id, askpass);
4676 let _defer = util::defer(|| {
4677 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4678 debug_assert!(askpass_delegate.is_some());
4679 });
4680 let (name, email) = name_and_email.unzip();
4681 client
4682 .request(proto::Commit {
4683 project_id: project_id.0,
4684 repository_id: id.to_proto(),
4685 message: String::from(message),
4686 name: name.map(String::from),
4687 email: email.map(String::from),
4688 options: Some(proto::commit::CommitOptions {
4689 amend: options.amend,
4690 signoff: options.signoff,
4691 }),
4692 askpass_id,
4693 })
4694 .await
4695 .context("sending commit request")?;
4696
4697 Ok(())
4698 }
4699 }
4700 })
4701 }
4702
4703 pub fn fetch(
4704 &mut self,
4705 fetch_options: FetchOptions,
4706 askpass: AskPassDelegate,
4707 _cx: &mut App,
4708 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4709 let askpass_delegates = self.askpass_delegates.clone();
4710 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4711 let id = self.id;
4712
4713 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4714 match git_repo {
4715 RepositoryState::Local(LocalRepositoryState {
4716 backend,
4717 environment,
4718 ..
4719 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4720 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4721 askpass_delegates.lock().insert(askpass_id, askpass);
4722 let _defer = util::defer(|| {
4723 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4724 debug_assert!(askpass_delegate.is_some());
4725 });
4726
4727 let response = client
4728 .request(proto::Fetch {
4729 project_id: project_id.0,
4730 repository_id: id.to_proto(),
4731 askpass_id,
4732 remote: fetch_options.to_proto(),
4733 })
4734 .await
4735 .context("sending fetch request")?;
4736
4737 Ok(RemoteCommandOutput {
4738 stdout: response.stdout,
4739 stderr: response.stderr,
4740 })
4741 }
4742 }
4743 })
4744 }
4745
4746 pub fn push(
4747 &mut self,
4748 branch: SharedString,
4749 remote_branch: SharedString,
4750 remote: SharedString,
4751 options: Option<PushOptions>,
4752 askpass: AskPassDelegate,
4753 cx: &mut Context<Self>,
4754 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4755 let askpass_delegates = self.askpass_delegates.clone();
4756 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4757 let id = self.id;
4758
4759 let args = options
4760 .map(|option| match option {
4761 PushOptions::SetUpstream => " --set-upstream",
4762 PushOptions::Force => " --force-with-lease",
4763 })
4764 .unwrap_or("");
4765
4766 let updates_tx = self
4767 .git_store()
4768 .and_then(|git_store| match &git_store.read(cx).state {
4769 GitStoreState::Local { downstream, .. } => downstream
4770 .as_ref()
4771 .map(|downstream| downstream.updates_tx.clone()),
4772 _ => None,
4773 });
4774
4775 let this = cx.weak_entity();
4776 self.send_job(
4777 Some(format!("git push{} {} {}:{}", args, remote, branch, remote_branch).into()),
4778 move |git_repo, mut cx| async move {
4779 match git_repo {
4780 RepositoryState::Local(LocalRepositoryState {
4781 backend,
4782 environment,
4783 ..
4784 }) => {
4785 let result = backend
4786 .push(
4787 branch.to_string(),
4788 remote_branch.to_string(),
4789 remote.to_string(),
4790 options,
4791 askpass,
4792 environment.clone(),
4793 cx.clone(),
4794 )
4795 .await;
4796 // TODO: it would be nice if the branch summary refreshed automatically after a push instead of being patched into the snapshot by hand here.
4797 if result.is_ok() {
4798 let branches = backend.branches().await?;
4799 let branch = branches.into_iter().find(|branch| branch.is_head);
4800 log::info!("head branch after push is {branch:?}");
4801 let snapshot = this.update(&mut cx, |this, cx| {
4802 this.snapshot.branch = branch;
4803 cx.emit(RepositoryEvent::BranchChanged);
4804 this.snapshot.clone()
4805 })?;
4806 if let Some(updates_tx) = updates_tx {
4807 updates_tx
4808 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4809 .ok();
4810 }
4811 }
4812 result
4813 }
4814 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4815 askpass_delegates.lock().insert(askpass_id, askpass);
4816 let _defer = util::defer(|| {
4817 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4818 debug_assert!(askpass_delegate.is_some());
4819 });
4820 let response = client
4821 .request(proto::Push {
4822 project_id: project_id.0,
4823 repository_id: id.to_proto(),
4824 askpass_id,
4825 branch_name: branch.to_string(),
4826 remote_branch_name: remote_branch.to_string(),
4827 remote_name: remote.to_string(),
4828 options: options.map(|options| match options {
4829 PushOptions::Force => proto::push::PushOptions::Force,
4830 PushOptions::SetUpstream => {
4831 proto::push::PushOptions::SetUpstream
4832 }
4833 }
4834 as i32),
4835 })
4836 .await
4837 .context("sending push request")?;
4838
4839 Ok(RemoteCommandOutput {
4840 stdout: response.stdout,
4841 stderr: response.stderr,
4842 })
4843 }
4844 }
4845 },
4846 )
4847 }
4848
4849 pub fn pull(
4850 &mut self,
4851 branch: Option<SharedString>,
4852 remote: SharedString,
4853 rebase: bool,
4854 askpass: AskPassDelegate,
4855 _cx: &mut App,
4856 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4857 let askpass_delegates = self.askpass_delegates.clone();
4858 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4859 let id = self.id;
4860
4861 let mut status = "git pull".to_string();
4862 if rebase {
4863 status.push_str(" --rebase");
4864 }
4865 status.push_str(&format!(" {}", remote));
4866 if let Some(b) = &branch {
4867 status.push_str(&format!(" {}", b));
4868 }
4869
4870 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4871 match git_repo {
4872 RepositoryState::Local(LocalRepositoryState {
4873 backend,
4874 environment,
4875 ..
4876 }) => {
4877 backend
4878 .pull(
4879 branch.as_ref().map(|b| b.to_string()),
4880 remote.to_string(),
4881 rebase,
4882 askpass,
4883 environment.clone(),
4884 cx,
4885 )
4886 .await
4887 }
4888 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4889 askpass_delegates.lock().insert(askpass_id, askpass);
4890 let _defer = util::defer(|| {
4891 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4892 debug_assert!(askpass_delegate.is_some());
4893 });
4894 let response = client
4895 .request(proto::Pull {
4896 project_id: project_id.0,
4897 repository_id: id.to_proto(),
4898 askpass_id,
4899 rebase,
4900 branch_name: branch.as_ref().map(|b| b.to_string()),
4901 remote_name: remote.to_string(),
4902 })
4903 .await
4904 .context("sending pull request")?;
4905
4906 Ok(RemoteCommandOutput {
4907 stdout: response.stdout,
4908 stderr: response.stderr,
4909 })
4910 }
4911 }
4912 })
4913 }
4914
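/// Queues a write of `content` to the index entry for `path`, keyed so that
/// redundant writes for the same path are coalesced, and records the
/// hunk-staging generation once the write lands.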
4915 fn spawn_set_index_text_job(
4916 &mut self,
4917 path: RepoPath,
4918 content: Option<String>,
4919 hunk_staging_operation_count: Option<usize>,
4920 cx: &mut Context<Self>,
4921 ) -> oneshot::Receiver<anyhow::Result<()>> {
4922 let id = self.id;
4923 let this = cx.weak_entity();
4924 let git_store = self.git_store.clone();
4925 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4926 self.send_keyed_job(
4927 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4928 None,
4929 move |git_repo, mut cx| async move {
4930 log::debug!(
4931 "start updating index text for {}",
4932 path.as_unix_str()
4933 );
4934
4935 match git_repo {
4936 RepositoryState::Local(LocalRepositoryState {
4937 fs,
4938 backend,
4939 environment,
4940 ..
4941 }) => {
4942 let executable = match fs.metadata(&abs_path).await {
4943 Ok(Some(meta)) => meta.is_executable,
4944 Ok(None) => false,
4945 Err(_err) => false,
4946 };
4947 backend
4948 .set_index_text(path.clone(), content, environment.clone(), executable)
4949 .await?;
4950 }
4951 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4952 client
4953 .request(proto::SetIndexText {
4954 project_id: project_id.0,
4955 repository_id: id.to_proto(),
4956 path: path.to_proto(),
4957 text: content,
4958 })
4959 .await?;
4960 }
4961 }
4962 log::debug!(
4963 "finish updating index text for {}",
4964 path.as_unix_str()
4965 );
4966
4967 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4968 let project_path = this
4969 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4970 .ok()
4971 .flatten();
4972 git_store.update(&mut cx, |git_store, cx| {
4973 let buffer_id = git_store
4974 .buffer_store
4975 .read(cx)
4976 .get_by_path(&project_path?)?
4977 .read(cx)
4978 .remote_id();
4979 let diff_state = git_store.diffs.get(&buffer_id)?;
4980 diff_state.update(cx, |diff_state, _| {
4981 diff_state.hunk_staging_operation_count_as_of_write =
4982 hunk_staging_operation_count;
4983 });
4984 Some(())
4985 })?;
4986 }
4987 Ok(())
4988 },
4989 )
4990 }
4991
4992 pub fn create_remote(
4993 &mut self,
4994 remote_name: String,
4995 remote_url: String,
4996 ) -> oneshot::Receiver<Result<()>> {
4997 let id = self.id;
4998 self.send_job(
4999 Some(format!("git remote add {remote_name} {remote_url}").into()),
5000 move |repo, _cx| async move {
5001 match repo {
5002 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5003 backend.create_remote(remote_name, remote_url).await
5004 }
5005 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5006 client
5007 .request(proto::GitCreateRemote {
5008 project_id: project_id.0,
5009 repository_id: id.to_proto(),
5010 remote_name,
5011 remote_url,
5012 })
5013 .await?;
5014
5015 Ok(())
5016 }
5017 }
5018 },
5019 )
5020 }
5021
5022 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5023 let id = self.id;
5024 self.send_job(
5025 Some(format!("git remote remove {remote_name}").into()),
5026 move |repo, _cx| async move {
5027 match repo {
5028 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5029 backend.remove_remote(remote_name).await
5030 }
5031 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5032 client
5033 .request(proto::GitRemoveRemote {
5034 project_id: project_id.0,
5035 repository_id: id.to_proto(),
5036 remote_name,
5037 })
5038 .await?;
5039
5040 Ok(())
5041 }
5042 }
5043 },
5044 )
5045 }
5046
5047 pub fn get_remotes(
5048 &mut self,
5049 branch_name: Option<String>,
5050 is_push: bool,
5051 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5052 let id = self.id;
5053 self.send_job(None, move |repo, _cx| async move {
5054 match repo {
5055 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5056 let remote = if let Some(branch_name) = branch_name {
5057 if is_push {
5058 backend.get_push_remote(branch_name).await?
5059 } else {
5060 backend.get_branch_remote(branch_name).await?
5061 }
5062 } else {
5063 None
5064 };
5065
5066 match remote {
5067 Some(remote) => Ok(vec![remote]),
5068 None => backend.get_all_remotes().await,
5069 }
5070 }
5071 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5072 let response = client
5073 .request(proto::GetRemotes {
5074 project_id: project_id.0,
5075 repository_id: id.to_proto(),
5076 branch_name,
5077 is_push,
5078 })
5079 .await?;
5080
5081 let remotes = response
5082 .remotes
5083 .into_iter()
5084 .map(|remotes| Remote {
5085 name: remotes.name.into(),
5086 })
5087 .collect();
5088
5089 Ok(remotes)
5090 }
5091 }
5092 })
5093 }
5094
5095 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5096 let id = self.id;
5097 self.send_job(None, move |repo, _| async move {
5098 match repo {
5099 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5100 backend.branches().await
5101 }
5102 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5103 let response = client
5104 .request(proto::GitGetBranches {
5105 project_id: project_id.0,
5106 repository_id: id.to_proto(),
5107 })
5108 .await?;
5109
5110 let branches = response
5111 .branches
5112 .into_iter()
5113 .map(|branch| proto_to_branch(&branch))
5114 .collect();
5115
5116 Ok(branches)
5117 }
5118 }
5119 })
5120 }
5121
5122 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5123 let id = self.id;
5124 self.send_job(None, move |repo, _| async move {
5125 match repo {
5126 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5127 backend.worktrees().await
5128 }
5129 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5130 let response = client
5131 .request(proto::GitGetWorktrees {
5132 project_id: project_id.0,
5133 repository_id: id.to_proto(),
5134 })
5135 .await?;
5136
5137 let worktrees = response
5138 .worktrees
5139 .into_iter()
5140 .map(|worktree| proto_to_worktree(&worktree))
5141 .collect();
5142
5143 Ok(worktrees)
5144 }
5145 }
5146 })
5147 }
5148
5149 pub fn create_worktree(
5150 &mut self,
5151 name: String,
5152 path: PathBuf,
5153 commit: Option<String>,
5154 ) -> oneshot::Receiver<Result<()>> {
5155 let id = self.id;
5156 self.send_job(
5157 Some("git worktree add".into()),
5158 move |repo, _cx| async move {
5159 match repo {
5160 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5161 backend.create_worktree(name, path, commit).await
5162 }
5163 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5164 client
5165 .request(proto::GitCreateWorktree {
5166 project_id: project_id.0,
5167 repository_id: id.to_proto(),
5168 name,
5169 directory: path.to_string_lossy().to_string(),
5170 commit,
5171 })
5172 .await?;
5173
5174 Ok(())
5175 }
5176 }
5177 },
5178 )
5179 }
5180
5181 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5182 let id = self.id;
5183 self.send_job(None, move |repo, _| async move {
5184 match repo {
5185 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5186 backend.default_branch().await
5187 }
5188 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5189 let response = client
5190 .request(proto::GetDefaultBranch {
5191 project_id: project_id.0,
5192 repository_id: id.to_proto(),
5193 })
5194 .await?;
5195
5196 anyhow::Ok(response.branch.map(SharedString::from))
5197 }
5198 }
5199 })
5200 }
5201
5202 pub fn diff_tree(
5203 &mut self,
5204 diff_type: DiffTreeType,
5205 _cx: &App,
5206 ) -> oneshot::Receiver<Result<TreeDiff>> {
5207 let repository_id = self.snapshot.id;
5208 self.send_job(None, move |repo, _cx| async move {
5209 match repo {
5210 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5211 backend.diff_tree(diff_type).await
5212 }
5213 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5214 let response = client
5215 .request(proto::GetTreeDiff {
5216 project_id: project_id.0,
5217 repository_id: repository_id.0,
5218 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5219 base: diff_type.base().to_string(),
5220 head: diff_type.head().to_string(),
5221 })
5222 .await?;
5223
5224 let entries = response
5225 .entries
5226 .into_iter()
5227 .filter_map(|entry| {
5228 let status = match entry.status() {
5229 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5230 proto::tree_diff_status::Status::Modified => {
5231 TreeDiffStatus::Modified {
5232 old: git::Oid::from_str(
5233 &entry.oid.context("missing oid").log_err()?,
5234 )
5235 .log_err()?,
5236 }
5237 }
5238 proto::tree_diff_status::Status::Deleted => {
5239 TreeDiffStatus::Deleted {
5240 old: git::Oid::from_str(
5241 &entry.oid.context("missing oid").log_err()?,
5242 )
5243 .log_err()?,
5244 }
5245 }
5246 };
5247 Some((
5248 RepoPath::from_rel_path(
5249 &RelPath::from_proto(&entry.path).log_err()?,
5250 ),
5251 status,
5252 ))
5253 })
5254 .collect();
5255
5256 Ok(TreeDiff { entries })
5257 }
5258 }
5259 })
5260 }
5261
5262 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5263 let id = self.id;
5264 self.send_job(None, move |repo, _cx| async move {
5265 match repo {
5266 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5267 backend.diff(diff_type).await
5268 }
5269 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5270 let response = client
5271 .request(proto::GitDiff {
5272 project_id: project_id.0,
5273 repository_id: id.to_proto(),
5274 diff_type: match diff_type {
5275 DiffType::HeadToIndex => {
5276 proto::git_diff::DiffType::HeadToIndex.into()
5277 }
5278 DiffType::HeadToWorktree => {
5279 proto::git_diff::DiffType::HeadToWorktree.into()
5280 }
5281 },
5282 })
5283 .await?;
5284
5285 Ok(response.diff)
5286 }
5287 }
5288 })
5289 }
5290
5291 pub fn create_branch(
5292 &mut self,
5293 branch_name: String,
5294 base_branch: Option<String>,
5295 ) -> oneshot::Receiver<Result<()>> {
5296 let id = self.id;
5297 let status_msg = if let Some(ref base) = base_branch {
5298 format!("git switch -c {branch_name} {base}").into()
5299 } else {
5300 format!("git switch -c {branch_name}").into()
5301 };
5302 self.send_job(Some(status_msg), move |repo, _cx| async move {
5303 match repo {
5304 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5305 backend.create_branch(branch_name, base_branch).await
5306 }
5307 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5308 client
5309 .request(proto::GitCreateBranch {
5310 project_id: project_id.0,
5311 repository_id: id.to_proto(),
5312 branch_name,
5313 })
5314 .await?;
5315
5316 Ok(())
5317 }
5318 }
5319 })
5320 }
5321
5322 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5323 let id = self.id;
5324 self.send_job(
5325 Some(format!("git switch {branch_name}").into()),
5326 move |repo, _cx| async move {
5327 match repo {
5328 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5329 backend.change_branch(branch_name).await
5330 }
5331 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5332 client
5333 .request(proto::GitChangeBranch {
5334 project_id: project_id.0,
5335 repository_id: id.to_proto(),
5336 branch_name,
5337 })
5338 .await?;
5339
5340 Ok(())
5341 }
5342 }
5343 },
5344 )
5345 }
5346
5347 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5348 let id = self.id;
5349 self.send_job(
5350 Some(format!("git branch -d {branch_name}").into()),
5351 move |repo, _cx| async move {
5352 match repo {
5353 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5354 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5355 client
5356 .request(proto::GitDeleteBranch {
5357 project_id: project_id.0,
5358 repository_id: id.to_proto(),
5359 branch_name,
5360 })
5361 .await?;
5362
5363 Ok(())
5364 }
5365 }
5366 },
5367 )
5368 }
5369
5370 pub fn rename_branch(
5371 &mut self,
5372 branch: String,
5373 new_name: String,
5374 ) -> oneshot::Receiver<Result<()>> {
5375 let id = self.id;
5376 self.send_job(
5377 Some(format!("git branch -m {branch} {new_name}").into()),
5378 move |repo, _cx| async move {
5379 match repo {
5380 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5381 backend.rename_branch(branch, new_name).await
5382 }
5383 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5384 client
5385 .request(proto::GitRenameBranch {
5386 project_id: project_id.0,
5387 repository_id: id.to_proto(),
5388 branch,
5389 new_name,
5390 })
5391 .await?;
5392
5393 Ok(())
5394 }
5395 }
5396 },
5397 )
5398 }
5399
5400 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5401 let id = self.id;
5402 self.send_job(None, move |repo, _cx| async move {
5403 match repo {
5404 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5405 backend.check_for_pushed_commit().await
5406 }
5407 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5408 let response = client
5409 .request(proto::CheckForPushedCommits {
5410 project_id: project_id.0,
5411 repository_id: id.to_proto(),
5412 })
5413 .await?;
5414
5415 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5416
5417 Ok(branches)
5418 }
5419 }
5420 })
5421 }
5422
5423 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5424 self.send_job(None, |repo, _cx| async move {
5425 match repo {
5426 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5427 backend.checkpoint().await
5428 }
5429 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5430 }
5431 })
5432 }
5433
5434 pub fn restore_checkpoint(
5435 &mut self,
5436 checkpoint: GitRepositoryCheckpoint,
5437 ) -> oneshot::Receiver<Result<()>> {
5438 self.send_job(None, move |repo, _cx| async move {
5439 match repo {
5440 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5441 backend.restore_checkpoint(checkpoint).await
5442 }
5443 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5444 }
5445 })
5446 }
5447
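/// Applies a `proto::UpdateRepository` message received from the remote side to
/// the local snapshot, emitting the appropriate repository events for any
/// branch, stash, or status changes.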
5448 pub(crate) fn apply_remote_update(
5449 &mut self,
5450 update: proto::UpdateRepository,
5451 cx: &mut Context<Self>,
5452 ) -> Result<()> {
5453 let conflicted_paths = TreeSet::from_ordered_entries(
5454 update
5455 .current_merge_conflicts
5456 .into_iter()
5457 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5458 );
5459 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5460 let new_head_commit = update
5461 .head_commit_details
5462 .as_ref()
5463 .map(proto_to_commit_details);
5464 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5465 cx.emit(RepositoryEvent::BranchChanged)
5466 }
5467 self.snapshot.branch = new_branch;
5468 self.snapshot.head_commit = new_head_commit;
5469
5470 self.snapshot.merge.conflicted_paths = conflicted_paths;
5471 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5472 let new_stash_entries = GitStash {
5473 entries: update
5474 .stash_entries
5475 .iter()
5476 .filter_map(|entry| proto_to_stash(entry).ok())
5477 .collect(),
5478 };
5479 if self.snapshot.stash_entries != new_stash_entries {
5480 cx.emit(RepositoryEvent::StashEntriesChanged)
5481 }
5482 self.snapshot.stash_entries = new_stash_entries;
5483 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5484 self.snapshot.remote_origin_url = update.remote_origin_url;
5485
5486 let edits = update
5487 .removed_statuses
5488 .into_iter()
5489 .filter_map(|path| {
5490 Some(sum_tree::Edit::Remove(PathKey(
5491 RelPath::from_proto(&path).log_err()?,
5492 )))
5493 })
5494 .chain(
5495 update
5496 .updated_statuses
5497 .into_iter()
5498 .filter_map(|updated_status| {
5499 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5500 }),
5501 )
5502 .collect::<Vec<_>>();
5503 if !edits.is_empty() {
5504 cx.emit(RepositoryEvent::StatusesChanged);
5505 }
5506 self.snapshot.statuses_by_path.edit(edits, ());
5507 if update.is_last_update {
5508 self.snapshot.scan_id = update.scan_id;
5509 }
5510 self.clear_pending_ops(cx);
5511 Ok(())
5512 }
5513
5514 pub fn compare_checkpoints(
5515 &mut self,
5516 left: GitRepositoryCheckpoint,
5517 right: GitRepositoryCheckpoint,
5518 ) -> oneshot::Receiver<Result<bool>> {
5519 self.send_job(None, move |repo, _cx| async move {
5520 match repo {
5521 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5522 backend.compare_checkpoints(left, right).await
5523 }
5524 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5525 }
5526 })
5527 }
5528
5529 pub fn diff_checkpoints(
5530 &mut self,
5531 base_checkpoint: GitRepositoryCheckpoint,
5532 target_checkpoint: GitRepositoryCheckpoint,
5533 ) -> oneshot::Receiver<Result<String>> {
5534 self.send_job(None, move |repo, _cx| async move {
5535 match repo {
5536 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5537 backend
5538 .diff_checkpoints(base_checkpoint, target_checkpoint)
5539 .await
5540 }
5541 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5542 }
5543 })
5544 }
5545
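/// Drops pending operations that are no longer running, emitting
/// `PendingOpsChanged` when the set changes.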
5546 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5547 let updated = SumTree::from_iter(
5548 self.pending_ops.iter().filter_map(|ops| {
5549 let inner_ops: Vec<PendingOp> =
5550 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5551 if inner_ops.is_empty() {
5552 None
5553 } else {
5554 Some(PendingOps {
5555 repo_path: ops.repo_path.clone(),
5556 ops: inner_ops,
5557 })
5558 }
5559 }),
5560 (),
5561 );
5562
5563 if updated != self.pending_ops {
5564 cx.emit(RepositoryEvent::PendingOpsChanged {
5565 pending_ops: self.pending_ops.clone(),
5566 })
5567 }
5568
5569 self.pending_ops = updated;
5570 }
5571
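/// Schedules a full git status scan on the worker (coalesced under
/// `GitJobKey::ReloadGitState`), replacing the snapshot with the freshly
/// computed one and forwarding it downstream.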
5572 fn schedule_scan(
5573 &mut self,
5574 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5575 cx: &mut Context<Self>,
5576 ) {
5577 let this = cx.weak_entity();
5578 let _ = self.send_keyed_job(
5579 Some(GitJobKey::ReloadGitState),
5580 None,
5581 |state, mut cx| async move {
5582 log::debug!("run scheduled git status scan");
5583
5584 let Some(this) = this.upgrade() else {
5585 return Ok(());
5586 };
5587 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5588 bail!("not a local repository")
5589 };
5590 let (snapshot, events) = this
5591 .update(&mut cx, |this, _| {
5592 this.paths_needing_status_update.clear();
5593 compute_snapshot(
5594 this.id,
5595 this.work_directory_abs_path.clone(),
5596 this.snapshot.clone(),
5597 backend.clone(),
5598 )
5599 })?
5600 .await?;
5601 this.update(&mut cx, |this, cx| {
5602 this.snapshot = snapshot.clone();
5603 this.clear_pending_ops(cx);
5604 for event in events {
5605 cx.emit(event);
5606 }
5607 })?;
5608 if let Some(updates_tx) = updates_tx {
5609 updates_tx
5610 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5611 .ok();
5612 }
5613 Ok(())
5614 },
5615 );
5616 }
5617
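/// Spawns the background worker that executes git jobs for a local repository
/// one at a time. Queued jobs that share a key are deduplicated so only the most
/// recently enqueued one runs; git hosting providers are registered once the
/// backend is ready.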
5618 fn spawn_local_git_worker(
5619 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5620 cx: &mut Context<Self>,
5621 ) -> mpsc::UnboundedSender<GitJob> {
5622 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5623
5624 cx.spawn(async move |_, cx| {
5625 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5626 if let Some(git_hosting_provider_registry) =
5627 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5628 {
5629 git_hosting_providers::register_additional_providers(
5630 git_hosting_provider_registry,
5631 state.backend.clone(),
5632 )
5633 .await;
5634 }
5635 let state = RepositoryState::Local(state);
5636 let mut jobs = VecDeque::new();
5637 loop {
5638 while let Ok(Some(next_job)) = job_rx.try_next() {
5639 jobs.push_back(next_job);
5640 }
5641
5642 if let Some(job) = jobs.pop_front() {
5643 if let Some(current_key) = &job.key
5644 && jobs
5645 .iter()
5646 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5647 {
5648 continue;
5649 }
5650 (job.job)(state.clone(), cx).await;
5651 } else if let Some(job) = job_rx.next().await {
5652 jobs.push_back(job);
5653 } else {
5654 break;
5655 }
5656 }
5657 anyhow::Ok(())
5658 })
5659 .detach_and_log_err(cx);
5660
5661 job_tx
5662 }
5663
5664 fn spawn_remote_git_worker(
5665 state: RemoteRepositoryState,
5666 cx: &mut Context<Self>,
5667 ) -> mpsc::UnboundedSender<GitJob> {
5668 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5669
5670 cx.spawn(async move |_, cx| {
5671 let state = RepositoryState::Remote(state);
5672 let mut jobs = VecDeque::new();
5673 loop {
5674 while let Ok(Some(next_job)) = job_rx.try_next() {
5675 jobs.push_back(next_job);
5676 }
5677
5678 if let Some(job) = jobs.pop_front() {
5679 if let Some(current_key) = &job.key
5680 && jobs
5681 .iter()
5682 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5683 {
5684 continue;
5685 }
5686 (job.job)(state.clone(), cx).await;
5687 } else if let Some(job) = job_rx.next().await {
5688 jobs.push_back(job);
5689 } else {
5690 break;
5691 }
5692 }
5693 anyhow::Ok(())
5694 })
5695 .detach_and_log_err(cx);
5696
5697 job_tx
5698 }
5699
5700 fn load_staged_text(
5701 &mut self,
5702 buffer_id: BufferId,
5703 repo_path: RepoPath,
5704 cx: &App,
5705 ) -> Task<Result<Option<String>>> {
5706 let rx = self.send_job(None, move |state, _| async move {
5707 match state {
5708 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5709 anyhow::Ok(backend.load_index_text(repo_path).await)
5710 }
5711 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5712 let response = client
5713 .request(proto::OpenUnstagedDiff {
5714 project_id: project_id.to_proto(),
5715 buffer_id: buffer_id.to_proto(),
5716 })
5717 .await?;
5718 Ok(response.staged_text)
5719 }
5720 }
5721 });
5722 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5723 }
5724
5725 fn load_committed_text(
5726 &mut self,
5727 buffer_id: BufferId,
5728 repo_path: RepoPath,
5729 cx: &App,
5730 ) -> Task<Result<DiffBasesChange>> {
5731 let rx = self.send_job(None, move |state, _| async move {
5732 match state {
5733 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5734 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5735 let staged_text = backend.load_index_text(repo_path).await;
5736 let diff_bases_change = if committed_text == staged_text {
5737 DiffBasesChange::SetBoth(committed_text)
5738 } else {
5739 DiffBasesChange::SetEach {
5740 index: staged_text,
5741 head: committed_text,
5742 }
5743 };
5744 anyhow::Ok(diff_bases_change)
5745 }
5746 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5747 use proto::open_uncommitted_diff_response::Mode;
5748
5749 let response = client
5750 .request(proto::OpenUncommittedDiff {
5751 project_id: project_id.to_proto(),
5752 buffer_id: buffer_id.to_proto(),
5753 })
5754 .await?;
5755 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5756 let bases = match mode {
5757 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5758 Mode::IndexAndHead => DiffBasesChange::SetEach {
5759 head: response.committed_text,
5760 index: response.staged_text,
5761 },
5762 };
5763 Ok(bases)
5764 }
5765 }
5766 });
5767
5768 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5769 }
5770
5771 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5772 let repository_id = self.snapshot.id;
5773 let rx = self.send_job(None, move |state, _| async move {
5774 match state {
5775 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5776 backend.load_blob_content(oid).await
5777 }
5778 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5779 let response = client
5780 .request(proto::GetBlobContent {
5781 project_id: project_id.to_proto(),
5782 repository_id: repository_id.0,
5783 oid: oid.to_string(),
5784 })
5785 .await?;
5786 Ok(response.content)
5787 }
5788 }
5789 });
5790 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5791 }
5792
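    /// Notes that the given paths may have changed and schedules a keyed
    /// `RefreshStatuses` job that re-queries `git status` (and the stash) for
    /// them, applies only the entries whose status actually changed, and
    /// forwards the updated snapshot downstream when a sender is provided.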
5793 fn paths_changed(
5794 &mut self,
5795 paths: Vec<RepoPath>,
5796 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5797 cx: &mut Context<Self>,
5798 ) {
5799 self.paths_needing_status_update.extend(paths);
5800
5801 let this = cx.weak_entity();
5802 let _ = self.send_keyed_job(
5803 Some(GitJobKey::RefreshStatuses),
5804 None,
5805 |state, mut cx| async move {
5806 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5807 (
5808 this.snapshot.clone(),
5809 mem::take(&mut this.paths_needing_status_update),
5810 )
5811 })?;
5812 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5813 bail!("not a local repository")
5814 };
5815
5816 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5817 if paths.is_empty() {
5818 return Ok(());
5819 }
5820 let statuses = backend.status(&paths).await?;
5821 let stash_entries = backend.stash_entries().await?;
5822
5823 let changed_path_statuses = cx
5824 .background_spawn(async move {
5825 let mut changed_path_statuses = Vec::new();
5826 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5827 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5828
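                        // Keep only the entries whose status differs from the
                        // previous snapshot; unchanged entries are skipped.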
5829 for (repo_path, status) in &*statuses.entries {
5830 changed_paths.remove(repo_path);
5831 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5832 && cursor.item().is_some_and(|entry| entry.status == *status)
5833 {
5834 continue;
5835 }
5836
5837 changed_path_statuses.push(Edit::Insert(StatusEntry {
5838 repo_path: repo_path.clone(),
5839 status: *status,
5840 }));
5841 }
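                        // Any requested path that did not show up in the fresh
                        // status output no longer has a status, so remove its
                        // previous entry if one exists.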
5842 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5843 for path in changed_paths.into_iter() {
5844 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5845 changed_path_statuses
5846 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5847 }
5848 }
5849 changed_path_statuses
5850 })
5851 .await;
5852
5853 this.update(&mut cx, |this, cx| {
5854 if this.snapshot.stash_entries != stash_entries {
5855 cx.emit(RepositoryEvent::StashEntriesChanged);
5856 this.snapshot.stash_entries = stash_entries;
5857 }
5858
5859 if !changed_path_statuses.is_empty() {
5860 cx.emit(RepositoryEvent::StatusesChanged);
5861 this.snapshot
5862 .statuses_by_path
5863 .edit(changed_path_statuses, ());
5864 this.snapshot.scan_id += 1;
5865 }
5866
5867 if let Some(updates_tx) = updates_tx {
5868 updates_tx
5869 .unbounded_send(DownstreamUpdate::UpdateRepository(
5870 this.snapshot.clone(),
5871 ))
5872 .ok();
5873 }
5874 })
5875 },
5876 );
5877 }
5878
    /// Returns a currently running git command and when it started, if any.
5880 pub fn current_job(&self) -> Option<JobInfo> {
5881 self.active_jobs.values().next().cloned()
5882 }
5883
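    /// Enqueues a no-op job and returns a receiver that resolves once all jobs
    /// enqueued before it have been processed.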
5884 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5885 self.send_job(None, |_, _| async {})
5886 }
5887
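    /// Runs `f` while tracking it as a pending operation for the given paths:
    /// a [`PendingOp`] with the given git status is recorded up front, and its
    /// job status is updated to finished, skipped (on cancellation), or errored
    /// once the future completes.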
5888 fn spawn_job_with_tracking<AsyncFn>(
5889 &mut self,
5890 paths: Vec<RepoPath>,
5891 git_status: pending_op::GitStatus,
5892 cx: &mut Context<Self>,
5893 f: AsyncFn,
5894 ) -> Task<Result<()>>
5895 where
5896 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5897 {
5898 let ids = self.new_pending_ops_for_paths(paths, git_status);
5899
5900 cx.spawn(async move |this, cx| {
5901 let (job_status, result) = match f(this.clone(), cx).await {
5902 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5903 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5904 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5905 };
5906
5907 this.update(cx, |this, _| {
5908 let mut edits = Vec::with_capacity(ids.len());
5909 for (id, entry) in ids {
5910 if let Some(mut ops) = this
5911 .pending_ops
5912 .get(&PathKey(entry.as_ref().clone()), ())
5913 .cloned()
5914 {
5915 if let Some(op) = ops.op_by_id_mut(id) {
5916 op.job_status = job_status;
5917 }
5918 edits.push(sum_tree::Edit::Insert(ops));
5919 }
5920 }
5921 this.pending_ops.edit(edits, ());
5922 })?;
5923
5924 result
5925 })
5926 }
5927
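    /// Registers a new [`PendingOp`] in the `Running` state for each of the
    /// given paths and returns the op id recorded for each path.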
5928 fn new_pending_ops_for_paths(
5929 &mut self,
5930 paths: Vec<RepoPath>,
5931 git_status: pending_op::GitStatus,
5932 ) -> Vec<(PendingOpId, RepoPath)> {
5933 let mut edits = Vec::with_capacity(paths.len());
5934 let mut ids = Vec::with_capacity(paths.len());
5935 for path in paths {
5936 let mut ops = self
5937 .pending_ops
5938 .get(&PathKey(path.as_ref().clone()), ())
5939 .cloned()
5940 .unwrap_or_else(|| PendingOps::new(&path));
5941 let id = ops.max_id() + 1;
5942 ops.ops.push(PendingOp {
5943 id,
5944 git_status,
5945 job_status: pending_op::JobStatus::Running,
5946 });
5947 edits.push(sum_tree::Edit::Insert(ops));
5948 ids.push((id, path));
5949 }
5950 self.pending_ops.edit(edits, ());
5951 ids
    }

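    /// Returns the URL of the repository's default remote, preferring
    /// "upstream" over "origin".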
5953 pub fn default_remote_url(&self) -> Option<String> {
5954 self.remote_upstream_url
5955 .clone()
5956 .or(self.remote_origin_url.clone())
5957 }
5958}
5959
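/// Builds a permalink for a file that lives inside a crate in the Rust registry
/// source directory, by locating the crate's `.cargo_vcs_info.json` and
/// `Cargo.toml`, resolving the upstream repository and commit SHA from them,
/// and mapping the local path back into that repository.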
5960fn get_permalink_in_rust_registry_src(
5961 provider_registry: Arc<GitHostingProviderRegistry>,
5962 path: PathBuf,
5963 selection: Range<u32>,
5964) -> Result<url::Url> {
5965 #[derive(Deserialize)]
5966 struct CargoVcsGit {
5967 sha1: String,
5968 }
5969
5970 #[derive(Deserialize)]
5971 struct CargoVcsInfo {
5972 git: CargoVcsGit,
5973 path_in_vcs: String,
5974 }
5975
5976 #[derive(Deserialize)]
5977 struct CargoPackage {
5978 repository: String,
5979 }
5980
5981 #[derive(Deserialize)]
5982 struct CargoToml {
5983 package: CargoPackage,
5984 }
5985
5986 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5987 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5988 Some((dir, json))
5989 }) else {
5990 bail!("No .cargo_vcs_info.json found in parent directories")
5991 };
5992 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5993 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5994 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5995 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5996 .context("parsing package.repository field of manifest")?;
5997 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5998 let permalink = provider.build_permalink(
5999 remote,
6000 BuildPermalinkParams::new(
6001 &cargo_vcs_info.git.sha1,
6002 &RepoPath::from_rel_path(
6003 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6004 ),
6005 Some(selection),
6006 ),
6007 );
6008 Ok(permalink)
6009}
6010
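/// Converts a [`git::blame::Blame`] into its protobuf representation, or an
/// empty response when no blame is available.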
6011fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6012 let Some(blame) = blame else {
6013 return proto::BlameBufferResponse {
6014 blame_response: None,
6015 };
6016 };
6017
6018 let entries = blame
6019 .entries
6020 .into_iter()
6021 .map(|entry| proto::BlameEntry {
6022 sha: entry.sha.as_bytes().into(),
6023 start_line: entry.range.start,
6024 end_line: entry.range.end,
6025 original_line_number: entry.original_line_number,
6026 author: entry.author,
6027 author_mail: entry.author_mail,
6028 author_time: entry.author_time,
6029 author_tz: entry.author_tz,
6030 committer: entry.committer_name,
6031 committer_mail: entry.committer_email,
6032 committer_time: entry.committer_time,
6033 committer_tz: entry.committer_tz,
6034 summary: entry.summary,
6035 previous: entry.previous,
6036 filename: entry.filename,
6037 })
6038 .collect::<Vec<_>>();
6039
6040 let messages = blame
6041 .messages
6042 .into_iter()
6043 .map(|(oid, message)| proto::CommitMessage {
6044 oid: oid.as_bytes().into(),
6045 message,
6046 })
6047 .collect::<Vec<_>>();
6048
6049 proto::BlameBufferResponse {
6050 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6051 }
6052}
6053
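/// Reconstructs a [`git::blame::Blame`] from its protobuf representation,
/// dropping any entries or commit messages whose object ids fail to parse.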
6054fn deserialize_blame_buffer_response(
6055 response: proto::BlameBufferResponse,
6056) -> Option<git::blame::Blame> {
6057 let response = response.blame_response?;
6058 let entries = response
6059 .entries
6060 .into_iter()
6061 .filter_map(|entry| {
6062 Some(git::blame::BlameEntry {
6063 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6064 range: entry.start_line..entry.end_line,
6065 original_line_number: entry.original_line_number,
6066 committer_name: entry.committer,
6067 committer_time: entry.committer_time,
6068 committer_tz: entry.committer_tz,
6069 committer_email: entry.committer_mail,
6070 author: entry.author,
6071 author_mail: entry.author_mail,
6072 author_time: entry.author_time,
6073 author_tz: entry.author_tz,
6074 summary: entry.summary,
6075 previous: entry.previous,
6076 filename: entry.filename,
6077 })
6078 })
6079 .collect::<Vec<_>>();
6080
6081 let messages = response
6082 .messages
6083 .into_iter()
6084 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6085 .collect::<HashMap<_, _>>();
6086
6087 Some(Blame { entries, messages })
6088}
6089
6090fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6091 proto::Branch {
6092 is_head: branch.is_head,
6093 ref_name: branch.ref_name.to_string(),
6094 unix_timestamp: branch
6095 .most_recent_commit
6096 .as_ref()
6097 .map(|commit| commit.commit_timestamp as u64),
6098 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6099 ref_name: upstream.ref_name.to_string(),
6100 tracking: upstream
6101 .tracking
6102 .status()
6103 .map(|upstream| proto::UpstreamTracking {
6104 ahead: upstream.ahead as u64,
6105 behind: upstream.behind as u64,
6106 }),
6107 }),
6108 most_recent_commit: branch
6109 .most_recent_commit
6110 .as_ref()
6111 .map(|commit| proto::CommitSummary {
6112 sha: commit.sha.to_string(),
6113 subject: commit.subject.to_string(),
6114 commit_timestamp: commit.commit_timestamp,
6115 author_name: commit.author_name.to_string(),
6116 }),
6117 }
6118}
6119
6120fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6121 proto::Worktree {
6122 path: worktree.path.to_string_lossy().to_string(),
6123 ref_name: worktree.ref_name.to_string(),
6124 sha: worktree.sha.to_string(),
6125 }
6126}
6127
6128fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6129 git::repository::Worktree {
6130 path: PathBuf::from(proto.path.clone()),
6131 ref_name: proto.ref_name.clone().into(),
6132 sha: proto.sha.clone().into(),
6133 }
6134}
6135
6136fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6137 git::repository::Branch {
6138 is_head: proto.is_head,
6139 ref_name: proto.ref_name.clone().into(),
6140 upstream: proto
6141 .upstream
6142 .as_ref()
6143 .map(|upstream| git::repository::Upstream {
6144 ref_name: upstream.ref_name.to_string().into(),
6145 tracking: upstream
6146 .tracking
6147 .as_ref()
6148 .map(|tracking| {
6149 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6150 ahead: tracking.ahead as u32,
6151 behind: tracking.behind as u32,
6152 })
6153 })
6154 .unwrap_or(git::repository::UpstreamTracking::Gone),
6155 }),
6156 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6157 git::repository::CommitSummary {
6158 sha: commit.sha.to_string().into(),
6159 subject: commit.subject.to_string().into(),
6160 commit_timestamp: commit.commit_timestamp,
6161 author_name: commit.author_name.to_string().into(),
6162 has_parent: true,
6163 }
6164 }),
6165 }
6166}
6167
6168fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6169 proto::GitCommitDetails {
6170 sha: commit.sha.to_string(),
6171 message: commit.message.to_string(),
6172 commit_timestamp: commit.commit_timestamp,
6173 author_email: commit.author_email.to_string(),
6174 author_name: commit.author_name.to_string(),
6175 }
6176}
6177
6178fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6179 CommitDetails {
6180 sha: proto.sha.clone().into(),
6181 message: proto.message.clone().into(),
6182 commit_timestamp: proto.commit_timestamp,
6183 author_email: proto.author_email.clone().into(),
6184 author_name: proto.author_name.clone().into(),
6185 }
6186}
6187
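/// Recomputes a full [`RepositorySnapshot`] from the git backend (branches,
/// statuses, stash entries, merge details, head commit, and remote URLs) and
/// returns it along with the [`RepositoryEvent`]s describing what changed
/// relative to the previous snapshot.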
6188async fn compute_snapshot(
6189 id: RepositoryId,
6190 work_directory_abs_path: Arc<Path>,
6191 prev_snapshot: RepositorySnapshot,
6192 backend: Arc<dyn GitRepository>,
6193) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6194 let mut events = Vec::new();
6195 let branches = backend.branches().await?;
6196 let branch = branches.into_iter().find(|branch| branch.is_head);
6197 let statuses = backend
6198 .status(&[RepoPath::from_rel_path(
6199 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6200 )])
6201 .await?;
6202 let stash_entries = backend.stash_entries().await?;
6203 let statuses_by_path = SumTree::from_iter(
6204 statuses
6205 .entries
6206 .iter()
6207 .map(|(repo_path, status)| StatusEntry {
6208 repo_path: repo_path.clone(),
6209 status: *status,
6210 }),
6211 (),
6212 );
6213 let (merge_details, merge_heads_changed) =
6214 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6215 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6216
6217 if merge_heads_changed {
6218 events.push(RepositoryEvent::MergeHeadsChanged);
6219 }
6220
6221 if statuses_by_path != prev_snapshot.statuses_by_path {
6222 events.push(RepositoryEvent::StatusesChanged)
6223 }
6224
    // Load the head commit directly from the HEAD sha; this is what callers rely
    // on when `branch` is None, e.g. in a detached HEAD state.
6226 let head_commit = match backend.head_sha().await {
6227 Some(head_sha) => backend.show(head_sha).await.log_err(),
6228 None => None,
6229 };
6230
6231 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6232 events.push(RepositoryEvent::BranchChanged);
6233 }
6234
6235 let remote_origin_url = backend.remote_url("origin").await;
6236 let remote_upstream_url = backend.remote_url("upstream").await;
6237
6238 let snapshot = RepositorySnapshot {
6239 id,
6240 statuses_by_path,
6241 work_directory_abs_path,
6242 path_style: prev_snapshot.path_style,
6243 scan_id: prev_snapshot.scan_id + 1,
6244 branch,
6245 head_commit,
6246 merge: merge_details,
6247 remote_origin_url,
6248 remote_upstream_url,
6249 stash_entries,
6250 };
6251
6252 Ok((snapshot, events))
6253}
6254
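/// Converts a protobuf file status into a [`FileStatus`], falling back to the
/// plain `simple_status` code when no structured variant is present.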
6255fn status_from_proto(
6256 simple_status: i32,
6257 status: Option<proto::GitFileStatus>,
6258) -> anyhow::Result<FileStatus> {
6259 use proto::git_file_status::Variant;
6260
6261 let Some(variant) = status.and_then(|status| status.variant) else {
6262 let code = proto::GitStatus::from_i32(simple_status)
6263 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6264 let result = match code {
6265 proto::GitStatus::Added => TrackedStatus {
6266 worktree_status: StatusCode::Added,
6267 index_status: StatusCode::Unmodified,
6268 }
6269 .into(),
6270 proto::GitStatus::Modified => TrackedStatus {
6271 worktree_status: StatusCode::Modified,
6272 index_status: StatusCode::Unmodified,
6273 }
6274 .into(),
6275 proto::GitStatus::Conflict => UnmergedStatus {
6276 first_head: UnmergedStatusCode::Updated,
6277 second_head: UnmergedStatusCode::Updated,
6278 }
6279 .into(),
6280 proto::GitStatus::Deleted => TrackedStatus {
6281 worktree_status: StatusCode::Deleted,
6282 index_status: StatusCode::Unmodified,
6283 }
6284 .into(),
6285 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6286 };
6287 return Ok(result);
6288 };
6289
6290 let result = match variant {
6291 Variant::Untracked(_) => FileStatus::Untracked,
6292 Variant::Ignored(_) => FileStatus::Ignored,
6293 Variant::Unmerged(unmerged) => {
6294 let [first_head, second_head] =
6295 [unmerged.first_head, unmerged.second_head].map(|head| {
6296 let code = proto::GitStatus::from_i32(head)
6297 .with_context(|| format!("Invalid git status code: {head}"))?;
6298 let result = match code {
6299 proto::GitStatus::Added => UnmergedStatusCode::Added,
6300 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6301 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6302 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6303 };
6304 Ok(result)
6305 });
6306 let [first_head, second_head] = [first_head?, second_head?];
6307 UnmergedStatus {
6308 first_head,
6309 second_head,
6310 }
6311 .into()
6312 }
6313 Variant::Tracked(tracked) => {
6314 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6315 .map(|status| {
6316 let code = proto::GitStatus::from_i32(status)
6317 .with_context(|| format!("Invalid git status code: {status}"))?;
6318 let result = match code {
6319 proto::GitStatus::Modified => StatusCode::Modified,
6320 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6321 proto::GitStatus::Added => StatusCode::Added,
6322 proto::GitStatus::Deleted => StatusCode::Deleted,
6323 proto::GitStatus::Renamed => StatusCode::Renamed,
6324 proto::GitStatus::Copied => StatusCode::Copied,
6325 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6326 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6327 };
6328 Ok(result)
6329 });
6330 let [index_status, worktree_status] = [index_status?, worktree_status?];
6331 TrackedStatus {
6332 index_status,
6333 worktree_status,
6334 }
6335 .into()
6336 }
6337 };
6338 Ok(result)
6339}
6340
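/// Converts a [`FileStatus`] into its structured protobuf representation.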
6341fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6342 use proto::git_file_status::{Tracked, Unmerged, Variant};
6343
6344 let variant = match status {
6345 FileStatus::Untracked => Variant::Untracked(Default::default()),
6346 FileStatus::Ignored => Variant::Ignored(Default::default()),
6347 FileStatus::Unmerged(UnmergedStatus {
6348 first_head,
6349 second_head,
6350 }) => Variant::Unmerged(Unmerged {
6351 first_head: unmerged_status_to_proto(first_head),
6352 second_head: unmerged_status_to_proto(second_head),
6353 }),
6354 FileStatus::Tracked(TrackedStatus {
6355 index_status,
6356 worktree_status,
6357 }) => Variant::Tracked(Tracked {
6358 index_status: tracked_status_to_proto(index_status),
6359 worktree_status: tracked_status_to_proto(worktree_status),
6360 }),
6361 };
6362 proto::GitFileStatus {
6363 variant: Some(variant),
6364 }
6365}
6366
6367fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6368 match code {
6369 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6370 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6371 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6372 }
6373}
6374
6375fn tracked_status_to_proto(code: StatusCode) -> i32 {
6376 match code {
6377 StatusCode::Added => proto::GitStatus::Added as _,
6378 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6379 StatusCode::Modified => proto::GitStatus::Modified as _,
6380 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6381 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6382 StatusCode::Copied => proto::GitStatus::Copied as _,
6383 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6384 }
6385}