1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
127 /// text do disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
700 let content = match oid {
701 None => None,
702 Some(oid) => Some(
703 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
704 .await?,
705 ),
706 };
707 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
708
709 buffer_diff
710 .update(cx, |buffer_diff, cx| {
711 buffer_diff.set_base_text(
712 content.map(|s| s.as_str().into()),
713 buffer_snapshot.language().cloned(),
714 buffer_snapshot.text,
715 cx,
716 )
717 })?
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 })?;
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language_changed = true;
825 diff_state.language = language;
826 diff_state.language_registry = language_registry;
827
828 match kind {
829 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
830 DiffKind::Uncommitted => {
831 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
832 diff
833 } else {
834 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
835 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
836 unstaged_diff
837 };
838
839 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
840 diff_state.uncommitted_diff = Some(diff.downgrade())
841 }
842 }
843
844 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
845 let rx = diff_state.wait_for_recalculation();
846
847 anyhow::Ok(async move {
848 if let Some(rx) = rx {
849 rx.await;
850 }
851 Ok(diff)
852 })
853 })
854 })??
855 .await
856 }
857
858 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
859 let diff_state = self.diffs.get(&buffer_id)?;
860 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
861 }
862
863 pub fn get_uncommitted_diff(
864 &self,
865 buffer_id: BufferId,
866 cx: &App,
867 ) -> Option<Entity<BufferDiff>> {
868 let diff_state = self.diffs.get(&buffer_id)?;
869 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
870 }
871
872 pub fn open_conflict_set(
873 &mut self,
874 buffer: Entity<Buffer>,
875 cx: &mut Context<Self>,
876 ) -> Entity<ConflictSet> {
877 log::debug!("open conflict set");
878 let buffer_id = buffer.read(cx).remote_id();
879
880 if let Some(git_state) = self.diffs.get(&buffer_id)
881 && let Some(conflict_set) = git_state
882 .read(cx)
883 .conflict_set
884 .as_ref()
885 .and_then(|weak| weak.upgrade())
886 {
887 let conflict_set = conflict_set;
888 let buffer_snapshot = buffer.read(cx).text_snapshot();
889
890 git_state.update(cx, |state, cx| {
891 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
892 });
893
894 return conflict_set;
895 }
896
897 let is_unmerged = self
898 .repository_and_path_for_buffer_id(buffer_id, cx)
899 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
900 let git_store = cx.weak_entity();
901 let buffer_git_state = self
902 .diffs
903 .entry(buffer_id)
904 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
905 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
906
907 self._subscriptions
908 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
909 cx.emit(GitStoreEvent::ConflictsUpdated);
910 }));
911
912 buffer_git_state.update(cx, |state, cx| {
913 state.conflict_set = Some(conflict_set.downgrade());
914 let buffer_snapshot = buffer.read(cx).text_snapshot();
915 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
916 });
917
918 conflict_set
919 }
920
921 pub fn project_path_git_status(
922 &self,
923 project_path: &ProjectPath,
924 cx: &App,
925 ) -> Option<FileStatus> {
926 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
927 Some(repo.read(cx).status_for_path(&repo_path)?.status)
928 }
929
930 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
931 let mut work_directory_abs_paths = Vec::new();
932 let mut checkpoints = Vec::new();
933 for repository in self.repositories.values() {
934 repository.update(cx, |repository, _| {
935 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
936 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
937 });
938 }
939
940 cx.background_executor().spawn(async move {
941 let checkpoints = future::try_join_all(checkpoints).await?;
942 Ok(GitStoreCheckpoint {
943 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
944 .into_iter()
945 .zip(checkpoints)
946 .collect(),
947 })
948 })
949 }
950
951 pub fn restore_checkpoint(
952 &self,
953 checkpoint: GitStoreCheckpoint,
954 cx: &mut App,
955 ) -> Task<Result<()>> {
956 let repositories_by_work_dir_abs_path = self
957 .repositories
958 .values()
959 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
960 .collect::<HashMap<_, _>>();
961
962 let mut tasks = Vec::new();
963 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
964 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
965 let restore = repository.update(cx, |repository, _| {
966 repository.restore_checkpoint(checkpoint)
967 });
968 tasks.push(async move { restore.await? });
969 }
970 }
971 cx.background_spawn(async move {
972 future::try_join_all(tasks).await?;
973 Ok(())
974 })
975 }
976
977 /// Compares two checkpoints, returning true if they are equal.
978 pub fn compare_checkpoints(
979 &self,
980 left: GitStoreCheckpoint,
981 mut right: GitStoreCheckpoint,
982 cx: &mut App,
983 ) -> Task<Result<bool>> {
984 let repositories_by_work_dir_abs_path = self
985 .repositories
986 .values()
987 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
988 .collect::<HashMap<_, _>>();
989
990 let mut tasks = Vec::new();
991 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
992 if let Some(right_checkpoint) = right
993 .checkpoints_by_work_dir_abs_path
994 .remove(&work_dir_abs_path)
995 {
996 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
997 {
998 let compare = repository.update(cx, |repository, _| {
999 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1000 });
1001
1002 tasks.push(async move { compare.await? });
1003 }
1004 } else {
1005 return Task::ready(Ok(false));
1006 }
1007 }
1008 cx.background_spawn(async move {
1009 Ok(future::try_join_all(tasks)
1010 .await?
1011 .into_iter()
1012 .all(|result| result))
1013 })
1014 }
1015
1016 /// Blames a buffer.
1017 pub fn blame_buffer(
1018 &self,
1019 buffer: &Entity<Buffer>,
1020 version: Option<clock::Global>,
1021 cx: &mut Context<Self>,
1022 ) -> Task<Result<Option<Blame>>> {
1023 let buffer = buffer.read(cx);
1024 let Some((repo, repo_path)) =
1025 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1026 else {
1027 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1028 };
1029 let content = match &version {
1030 Some(version) => buffer.rope_for_version(version),
1031 None => buffer.as_rope().clone(),
1032 };
1033 let line_ending = buffer.line_ending();
1034 let version = version.unwrap_or(buffer.version());
1035 let buffer_id = buffer.remote_id();
1036
1037 let repo = repo.downgrade();
1038 cx.spawn(async move |_, cx| {
1039 let repository_state = repo
1040 .update(cx, |repo, _| repo.repository_state.clone())?
1041 .await
1042 .map_err(|err| anyhow::anyhow!(err))?;
1043 match repository_state {
1044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1045 .blame(repo_path.clone(), content, line_ending)
1046 .await
1047 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1048 .map(Some),
1049 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1050 let response = client
1051 .request(proto::BlameBuffer {
1052 project_id: project_id.to_proto(),
1053 buffer_id: buffer_id.into(),
1054 version: serialize_version(&version),
1055 })
1056 .await?;
1057 Ok(deserialize_blame_buffer_response(response))
1058 }
1059 }
1060 })
1061 }
1062
1063 pub fn file_history(
1064 &self,
1065 repo: &Entity<Repository>,
1066 path: RepoPath,
1067 cx: &mut App,
1068 ) -> Task<Result<git::repository::FileHistory>> {
1069 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1070
1071 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1072 }
1073
1074 pub fn file_history_paginated(
1075 &self,
1076 repo: &Entity<Repository>,
1077 path: RepoPath,
1078 skip: usize,
1079 limit: Option<usize>,
1080 cx: &mut App,
1081 ) -> Task<Result<git::repository::FileHistory>> {
1082 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1083
1084 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1085 }
1086
1087 pub fn get_permalink_to_line(
1088 &self,
1089 buffer: &Entity<Buffer>,
1090 selection: Range<u32>,
1091 cx: &mut App,
1092 ) -> Task<Result<url::Url>> {
1093 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1094 return Task::ready(Err(anyhow!("buffer has no file")));
1095 };
1096
1097 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1098 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1099 cx,
1100 ) else {
1101 // If we're not in a Git repo, check whether this is a Rust source
1102 // file in the Cargo registry (presumably opened with go-to-definition
1103 // from a normal Rust file). If so, we can put together a permalink
1104 // using crate metadata.
1105 if buffer
1106 .read(cx)
1107 .language()
1108 .is_none_or(|lang| lang.name() != "Rust".into())
1109 {
1110 return Task::ready(Err(anyhow!("no permalink available")));
1111 }
1112 let file_path = file.worktree.read(cx).absolutize(&file.path);
1113 return cx.spawn(async move |cx| {
1114 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1115 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1116 .context("no permalink available")
1117 });
1118 };
1119
1120 let buffer_id = buffer.read(cx).remote_id();
1121 let branch = repo.read(cx).branch.clone();
1122 let remote = branch
1123 .as_ref()
1124 .and_then(|b| b.upstream.as_ref())
1125 .and_then(|b| b.remote_name())
1126 .unwrap_or("origin")
1127 .to_string();
1128
1129 let rx = repo.update(cx, |repo, _| {
1130 repo.send_job(None, move |state, cx| async move {
1131 match state {
1132 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1133 let origin_url = backend
1134 .remote_url(&remote)
1135 .await
1136 .with_context(|| format!("remote \"{remote}\" not found"))?;
1137
1138 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1139
1140 let provider_registry =
1141 cx.update(GitHostingProviderRegistry::default_global)?;
1142
1143 let (provider, remote) =
1144 parse_git_remote_url(provider_registry, &origin_url)
1145 .context("parsing Git remote URL")?;
1146
1147 Ok(provider.build_permalink(
1148 remote,
1149 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1150 ))
1151 }
1152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1153 let response = client
1154 .request(proto::GetPermalinkToLine {
1155 project_id: project_id.to_proto(),
1156 buffer_id: buffer_id.into(),
1157 selection: Some(proto::Range {
1158 start: selection.start as u64,
1159 end: selection.end as u64,
1160 }),
1161 })
1162 .await?;
1163
1164 url::Url::parse(&response.permalink).context("failed to parse permalink")
1165 }
1166 }
1167 })
1168 });
1169 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1170 }
1171
1172 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1173 match &self.state {
1174 GitStoreState::Local {
1175 downstream: downstream_client,
1176 ..
1177 } => downstream_client
1178 .as_ref()
1179 .map(|state| (state.client.clone(), state.project_id)),
1180 GitStoreState::Remote {
1181 downstream: downstream_client,
1182 ..
1183 } => downstream_client.clone(),
1184 }
1185 }
1186
1187 fn upstream_client(&self) -> Option<AnyProtoClient> {
1188 match &self.state {
1189 GitStoreState::Local { .. } => None,
1190 GitStoreState::Remote {
1191 upstream_client, ..
1192 } => Some(upstream_client.clone()),
1193 }
1194 }
1195
1196 fn on_worktree_store_event(
1197 &mut self,
1198 worktree_store: Entity<WorktreeStore>,
1199 event: &WorktreeStoreEvent,
1200 cx: &mut Context<Self>,
1201 ) {
1202 let GitStoreState::Local {
1203 project_environment,
1204 downstream,
1205 next_repository_id,
1206 fs,
1207 } = &self.state
1208 else {
1209 return;
1210 };
1211
1212 match event {
1213 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1214 if let Some(worktree) = self
1215 .worktree_store
1216 .read(cx)
1217 .worktree_for_id(*worktree_id, cx)
1218 {
1219 let paths_by_git_repo =
1220 self.process_updated_entries(&worktree, updated_entries, cx);
1221 let downstream = downstream
1222 .as_ref()
1223 .map(|downstream| downstream.updates_tx.clone());
1224 cx.spawn(async move |_, cx| {
1225 let paths_by_git_repo = paths_by_git_repo.await;
1226 for (repo, paths) in paths_by_git_repo {
1227 repo.update(cx, |repo, cx| {
1228 repo.paths_changed(paths, downstream.clone(), cx);
1229 })
1230 .ok();
1231 }
1232 })
1233 .detach();
1234 }
1235 }
1236 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1237 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1238 else {
1239 return;
1240 };
1241 if !worktree.read(cx).is_visible() {
1242 log::debug!(
1243 "not adding repositories for local worktree {:?} because it's not visible",
1244 worktree.read(cx).abs_path()
1245 );
1246 return;
1247 }
1248 self.update_repositories_from_worktree(
1249 *worktree_id,
1250 project_environment.clone(),
1251 next_repository_id.clone(),
1252 downstream
1253 .as_ref()
1254 .map(|downstream| downstream.updates_tx.clone()),
1255 changed_repos.clone(),
1256 fs.clone(),
1257 cx,
1258 );
1259 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1260 }
1261 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1262 let repos_without_worktree: Vec<RepositoryId> = self
1263 .worktree_ids
1264 .iter_mut()
1265 .filter_map(|(repo_id, worktree_ids)| {
1266 worktree_ids.remove(worktree_id);
1267 if worktree_ids.is_empty() {
1268 Some(*repo_id)
1269 } else {
1270 None
1271 }
1272 })
1273 .collect();
1274 let is_active_repo_removed = repos_without_worktree
1275 .iter()
1276 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1277
1278 for repo_id in repos_without_worktree {
1279 self.repositories.remove(&repo_id);
1280 self.worktree_ids.remove(&repo_id);
1281 if let Some(updates_tx) =
1282 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1283 {
1284 updates_tx
1285 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1286 .ok();
1287 }
1288 }
1289
1290 if is_active_repo_removed {
1291 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1292 self.active_repo_id = Some(repo_id);
1293 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1294 } else {
1295 self.active_repo_id = None;
1296 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1297 }
1298 }
1299 }
1300 _ => {}
1301 }
1302 }
1303 fn on_repository_event(
1304 &mut self,
1305 repo: Entity<Repository>,
1306 event: &RepositoryEvent,
1307 cx: &mut Context<Self>,
1308 ) {
1309 let id = repo.read(cx).id;
1310 let repo_snapshot = repo.read(cx).snapshot.clone();
1311 for (buffer_id, diff) in self.diffs.iter() {
1312 if let Some((buffer_repo, repo_path)) =
1313 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1314 && buffer_repo == repo
1315 {
1316 diff.update(cx, |diff, cx| {
1317 if let Some(conflict_set) = &diff.conflict_set {
1318 let conflict_status_changed =
1319 conflict_set.update(cx, |conflict_set, cx| {
1320 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1321 conflict_set.set_has_conflict(has_conflict, cx)
1322 })?;
1323 if conflict_status_changed {
1324 let buffer_store = self.buffer_store.read(cx);
1325 if let Some(buffer) = buffer_store.get(*buffer_id) {
1326 let _ = diff
1327 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1328 }
1329 }
1330 }
1331 anyhow::Ok(())
1332 })
1333 .ok();
1334 }
1335 }
1336 cx.emit(GitStoreEvent::RepositoryUpdated(
1337 id,
1338 event.clone(),
1339 self.active_repo_id == Some(id),
1340 ))
1341 }
1342
1343 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1344 cx.emit(GitStoreEvent::JobsUpdated)
1345 }
1346
1347 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1348 fn update_repositories_from_worktree(
1349 &mut self,
1350 worktree_id: WorktreeId,
1351 project_environment: Entity<ProjectEnvironment>,
1352 next_repository_id: Arc<AtomicU64>,
1353 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1354 updated_git_repositories: UpdatedGitRepositoriesSet,
1355 fs: Arc<dyn Fs>,
1356 cx: &mut Context<Self>,
1357 ) {
1358 let mut removed_ids = Vec::new();
1359 for update in updated_git_repositories.iter() {
1360 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1361 let existing_work_directory_abs_path =
1362 repo.read(cx).work_directory_abs_path.clone();
1363 Some(&existing_work_directory_abs_path)
1364 == update.old_work_directory_abs_path.as_ref()
1365 || Some(&existing_work_directory_abs_path)
1366 == update.new_work_directory_abs_path.as_ref()
1367 }) {
1368 let repo_id = *id;
1369 if let Some(new_work_directory_abs_path) =
1370 update.new_work_directory_abs_path.clone()
1371 {
1372 self.worktree_ids
1373 .entry(repo_id)
1374 .or_insert_with(HashSet::new)
1375 .insert(worktree_id);
1376 existing.update(cx, |existing, cx| {
1377 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1378 existing.schedule_scan(updates_tx.clone(), cx);
1379 });
1380 } else {
1381 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1382 worktree_ids.remove(&worktree_id);
1383 if worktree_ids.is_empty() {
1384 removed_ids.push(repo_id);
1385 }
1386 }
1387 }
1388 } else if let UpdatedGitRepository {
1389 new_work_directory_abs_path: Some(work_directory_abs_path),
1390 dot_git_abs_path: Some(dot_git_abs_path),
1391 repository_dir_abs_path: Some(_repository_dir_abs_path),
1392 common_dir_abs_path: Some(_common_dir_abs_path),
1393 ..
1394 } = update
1395 {
1396 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1397 let git_store = cx.weak_entity();
1398 let repo = cx.new(|cx| {
1399 let mut repo = Repository::local(
1400 id,
1401 work_directory_abs_path.clone(),
1402 dot_git_abs_path.clone(),
1403 project_environment.downgrade(),
1404 fs.clone(),
1405 git_store,
1406 cx,
1407 );
1408 if let Some(updates_tx) = updates_tx.as_ref() {
1409 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1410 updates_tx
1411 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1412 .ok();
1413 }
1414 repo.schedule_scan(updates_tx.clone(), cx);
1415 repo
1416 });
1417 self._subscriptions
1418 .push(cx.subscribe(&repo, Self::on_repository_event));
1419 self._subscriptions
1420 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1421 self.repositories.insert(id, repo);
1422 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1423 cx.emit(GitStoreEvent::RepositoryAdded);
1424 self.active_repo_id.get_or_insert_with(|| {
1425 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1426 id
1427 });
1428 }
1429 }
1430
1431 for id in removed_ids {
1432 if self.active_repo_id == Some(id) {
1433 self.active_repo_id = None;
1434 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1435 }
1436 self.repositories.remove(&id);
1437 if let Some(updates_tx) = updates_tx.as_ref() {
1438 updates_tx
1439 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1440 .ok();
1441 }
1442 }
1443 }
1444
1445 fn on_buffer_store_event(
1446 &mut self,
1447 _: Entity<BufferStore>,
1448 event: &BufferStoreEvent,
1449 cx: &mut Context<Self>,
1450 ) {
1451 match event {
1452 BufferStoreEvent::BufferAdded(buffer) => {
1453 cx.subscribe(buffer, |this, buffer, event, cx| {
1454 if let BufferEvent::LanguageChanged(_) = event {
1455 let buffer_id = buffer.read(cx).remote_id();
1456 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1457 diff_state.update(cx, |diff_state, cx| {
1458 diff_state.buffer_language_changed(buffer, cx);
1459 });
1460 }
1461 }
1462 })
1463 .detach();
1464 }
1465 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1466 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1467 diffs.remove(buffer_id);
1468 }
1469 }
1470 BufferStoreEvent::BufferDropped(buffer_id) => {
1471 self.diffs.remove(buffer_id);
1472 for diffs in self.shared_diffs.values_mut() {
1473 diffs.remove(buffer_id);
1474 }
1475 }
1476 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1477 // Whenever a buffer's file path changes, it's possible that the
1478 // new path is actually a path that is being tracked by a git
1479 // repository. In that case, we'll want to update the buffer's
1480 // `BufferDiffState`, in case it already has one.
1481 let buffer_id = buffer.read(cx).remote_id();
1482 let diff_state = self.diffs.get(&buffer_id);
1483 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1484
1485 if let Some(diff_state) = diff_state
1486 && let Some((repo, repo_path)) = repo
1487 {
1488 let buffer = buffer.clone();
1489 let diff_state = diff_state.clone();
1490
1491 cx.spawn(async move |_git_store, cx| {
1492 async {
1493 let diff_bases_change = repo
1494 .update(cx, |repo, cx| {
1495 repo.load_committed_text(buffer_id, repo_path, cx)
1496 })?
1497 .await?;
1498
1499 diff_state.update(cx, |diff_state, cx| {
1500 let buffer_snapshot = buffer.read(cx).text_snapshot();
1501 diff_state.diff_bases_changed(
1502 buffer_snapshot,
1503 Some(diff_bases_change),
1504 cx,
1505 );
1506 })
1507 }
1508 .await
1509 .log_err();
1510 })
1511 .detach();
1512 }
1513 }
1514 _ => {}
1515 }
1516 }
1517
1518 pub fn recalculate_buffer_diffs(
1519 &mut self,
1520 buffers: Vec<Entity<Buffer>>,
1521 cx: &mut Context<Self>,
1522 ) -> impl Future<Output = ()> + use<> {
1523 let mut futures = Vec::new();
1524 for buffer in buffers {
1525 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1526 let buffer = buffer.read(cx).text_snapshot();
1527 diff_state.update(cx, |diff_state, cx| {
1528 diff_state.recalculate_diffs(buffer.clone(), cx);
1529 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1530 });
1531 futures.push(diff_state.update(cx, |diff_state, cx| {
1532 diff_state
1533 .reparse_conflict_markers(buffer, cx)
1534 .map(|_| {})
1535 .boxed()
1536 }));
1537 }
1538 }
1539 async move {
1540 futures::future::join_all(futures).await;
1541 }
1542 }
1543
1544 fn on_buffer_diff_event(
1545 &mut self,
1546 diff: Entity<buffer_diff::BufferDiff>,
1547 event: &BufferDiffEvent,
1548 cx: &mut Context<Self>,
1549 ) {
1550 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1551 let buffer_id = diff.read(cx).buffer_id;
1552 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1553 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1554 diff_state.hunk_staging_operation_count += 1;
1555 diff_state.hunk_staging_operation_count
1556 });
1557 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1558 let recv = repo.update(cx, |repo, cx| {
1559 log::debug!("hunks changed for {}", path.as_unix_str());
1560 repo.spawn_set_index_text_job(
1561 path,
1562 new_index_text.as_ref().map(|rope| rope.to_string()),
1563 Some(hunk_staging_operation_count),
1564 cx,
1565 )
1566 });
1567 let diff = diff.downgrade();
1568 cx.spawn(async move |this, cx| {
1569 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1570 diff.update(cx, |diff, cx| {
1571 diff.clear_pending_hunks(cx);
1572 })
1573 .ok();
1574 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1575 .ok();
1576 }
1577 })
1578 .detach();
1579 }
1580 }
1581 }
1582 }
1583
1584 fn local_worktree_git_repos_changed(
1585 &mut self,
1586 worktree: Entity<Worktree>,
1587 changed_repos: &UpdatedGitRepositoriesSet,
1588 cx: &mut Context<Self>,
1589 ) {
1590 log::debug!("local worktree repos changed");
1591 debug_assert!(worktree.read(cx).is_local());
1592
1593 for repository in self.repositories.values() {
1594 repository.update(cx, |repository, cx| {
1595 let repo_abs_path = &repository.work_directory_abs_path;
1596 if changed_repos.iter().any(|update| {
1597 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1598 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 }) {
1600 repository.reload_buffer_diff_bases(cx);
1601 }
1602 });
1603 }
1604 }
1605
1606 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1607 &self.repositories
1608 }
1609
1610 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1611 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1612 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1613 Some(status.status)
1614 }
1615
1616 pub fn repository_and_path_for_buffer_id(
1617 &self,
1618 buffer_id: BufferId,
1619 cx: &App,
1620 ) -> Option<(Entity<Repository>, RepoPath)> {
1621 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1622 let project_path = buffer.read(cx).project_path(cx)?;
1623 self.repository_and_path_for_project_path(&project_path, cx)
1624 }
1625
1626 pub fn repository_and_path_for_project_path(
1627 &self,
1628 path: &ProjectPath,
1629 cx: &App,
1630 ) -> Option<(Entity<Repository>, RepoPath)> {
1631 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1632 self.repositories
1633 .values()
1634 .filter_map(|repo| {
1635 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1636 Some((repo.clone(), repo_path))
1637 })
1638 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1639 }
1640
1641 pub fn git_init(
1642 &self,
1643 path: Arc<Path>,
1644 fallback_branch_name: String,
1645 cx: &App,
1646 ) -> Task<Result<()>> {
1647 match &self.state {
1648 GitStoreState::Local { fs, .. } => {
1649 let fs = fs.clone();
1650 cx.background_executor()
1651 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1652 }
1653 GitStoreState::Remote {
1654 upstream_client,
1655 upstream_project_id: project_id,
1656 ..
1657 } => {
1658 let client = upstream_client.clone();
1659 let project_id = *project_id;
1660 cx.background_executor().spawn(async move {
1661 client
1662 .request(proto::GitInit {
1663 project_id: project_id,
1664 abs_path: path.to_string_lossy().into_owned(),
1665 fallback_branch_name,
1666 })
1667 .await?;
1668 Ok(())
1669 })
1670 }
1671 }
1672 }
1673
1674 pub fn git_clone(
1675 &self,
1676 repo: String,
1677 path: impl Into<Arc<std::path::Path>>,
1678 cx: &App,
1679 ) -> Task<Result<()>> {
1680 let path = path.into();
1681 match &self.state {
1682 GitStoreState::Local { fs, .. } => {
1683 let fs = fs.clone();
1684 cx.background_executor()
1685 .spawn(async move { fs.git_clone(&repo, &path).await })
1686 }
1687 GitStoreState::Remote {
1688 upstream_client,
1689 upstream_project_id,
1690 ..
1691 } => {
1692 if upstream_client.is_via_collab() {
1693 return Task::ready(Err(anyhow!(
1694 "Git Clone isn't supported for project guests"
1695 )));
1696 }
1697 let request = upstream_client.request(proto::GitClone {
1698 project_id: *upstream_project_id,
1699 abs_path: path.to_string_lossy().into_owned(),
1700 remote_repo: repo,
1701 });
1702
1703 cx.background_spawn(async move {
1704 let result = request.await?;
1705
1706 match result.success {
1707 true => Ok(()),
1708 false => Err(anyhow!("Git Clone failed")),
1709 }
1710 })
1711 }
1712 }
1713 }
1714
1715 async fn handle_update_repository(
1716 this: Entity<Self>,
1717 envelope: TypedEnvelope<proto::UpdateRepository>,
1718 mut cx: AsyncApp,
1719 ) -> Result<()> {
1720 this.update(&mut cx, |this, cx| {
1721 let path_style = this.worktree_store.read(cx).path_style();
1722 let mut update = envelope.payload;
1723
1724 let id = RepositoryId::from_proto(update.id);
1725 let client = this.upstream_client().context("no upstream client")?;
1726
1727 let mut repo_subscription = None;
1728 let repo = this.repositories.entry(id).or_insert_with(|| {
1729 let git_store = cx.weak_entity();
1730 let repo = cx.new(|cx| {
1731 Repository::remote(
1732 id,
1733 Path::new(&update.abs_path).into(),
1734 path_style,
1735 ProjectId(update.project_id),
1736 client,
1737 git_store,
1738 cx,
1739 )
1740 });
1741 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1742 cx.emit(GitStoreEvent::RepositoryAdded);
1743 repo
1744 });
1745 this._subscriptions.extend(repo_subscription);
1746
1747 repo.update(cx, {
1748 let update = update.clone();
1749 |repo, cx| repo.apply_remote_update(update, cx)
1750 })?;
1751
1752 this.active_repo_id.get_or_insert_with(|| {
1753 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1754 id
1755 });
1756
1757 if let Some((client, project_id)) = this.downstream_client() {
1758 update.project_id = project_id.to_proto();
1759 client.send(update).log_err();
1760 }
1761 Ok(())
1762 })?
1763 }
1764
1765 async fn handle_remove_repository(
1766 this: Entity<Self>,
1767 envelope: TypedEnvelope<proto::RemoveRepository>,
1768 mut cx: AsyncApp,
1769 ) -> Result<()> {
1770 this.update(&mut cx, |this, cx| {
1771 let mut update = envelope.payload;
1772 let id = RepositoryId::from_proto(update.id);
1773 this.repositories.remove(&id);
1774 if let Some((client, project_id)) = this.downstream_client() {
1775 update.project_id = project_id.to_proto();
1776 client.send(update).log_err();
1777 }
1778 if this.active_repo_id == Some(id) {
1779 this.active_repo_id = None;
1780 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1781 }
1782 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1783 })
1784 }
1785
1786 async fn handle_git_init(
1787 this: Entity<Self>,
1788 envelope: TypedEnvelope<proto::GitInit>,
1789 cx: AsyncApp,
1790 ) -> Result<proto::Ack> {
1791 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1792 let name = envelope.payload.fallback_branch_name;
1793 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1794 .await?;
1795
1796 Ok(proto::Ack {})
1797 }
1798
1799 async fn handle_git_clone(
1800 this: Entity<Self>,
1801 envelope: TypedEnvelope<proto::GitClone>,
1802 cx: AsyncApp,
1803 ) -> Result<proto::GitCloneResponse> {
1804 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1805 let repo_name = envelope.payload.remote_repo;
1806 let result = cx
1807 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1808 .await;
1809
1810 Ok(proto::GitCloneResponse {
1811 success: result.is_ok(),
1812 })
1813 }
1814
1815 async fn handle_fetch(
1816 this: Entity<Self>,
1817 envelope: TypedEnvelope<proto::Fetch>,
1818 mut cx: AsyncApp,
1819 ) -> Result<proto::RemoteMessageResponse> {
1820 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1821 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1822 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1823 let askpass_id = envelope.payload.askpass_id;
1824
1825 let askpass = make_remote_delegate(
1826 this,
1827 envelope.payload.project_id,
1828 repository_id,
1829 askpass_id,
1830 &mut cx,
1831 );
1832
1833 let remote_output = repository_handle
1834 .update(&mut cx, |repository_handle, cx| {
1835 repository_handle.fetch(fetch_options, askpass, cx)
1836 })?
1837 .await??;
1838
1839 Ok(proto::RemoteMessageResponse {
1840 stdout: remote_output.stdout,
1841 stderr: remote_output.stderr,
1842 })
1843 }
1844
1845 async fn handle_push(
1846 this: Entity<Self>,
1847 envelope: TypedEnvelope<proto::Push>,
1848 mut cx: AsyncApp,
1849 ) -> Result<proto::RemoteMessageResponse> {
1850 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1851 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1852
1853 let askpass_id = envelope.payload.askpass_id;
1854 let askpass = make_remote_delegate(
1855 this,
1856 envelope.payload.project_id,
1857 repository_id,
1858 askpass_id,
1859 &mut cx,
1860 );
1861
1862 let options = envelope
1863 .payload
1864 .options
1865 .as_ref()
1866 .map(|_| match envelope.payload.options() {
1867 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1868 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1869 });
1870
1871 let branch_name = envelope.payload.branch_name.into();
1872 let remote_name = envelope.payload.remote_name.into();
1873
1874 let remote_output = repository_handle
1875 .update(&mut cx, |repository_handle, cx| {
1876 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1877 })?
1878 .await??;
1879 Ok(proto::RemoteMessageResponse {
1880 stdout: remote_output.stdout,
1881 stderr: remote_output.stderr,
1882 })
1883 }
1884
1885 async fn handle_pull(
1886 this: Entity<Self>,
1887 envelope: TypedEnvelope<proto::Pull>,
1888 mut cx: AsyncApp,
1889 ) -> Result<proto::RemoteMessageResponse> {
1890 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1891 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1892 let askpass_id = envelope.payload.askpass_id;
1893 let askpass = make_remote_delegate(
1894 this,
1895 envelope.payload.project_id,
1896 repository_id,
1897 askpass_id,
1898 &mut cx,
1899 );
1900
1901 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1902 let remote_name = envelope.payload.remote_name.into();
1903 let rebase = envelope.payload.rebase;
1904
1905 let remote_message = repository_handle
1906 .update(&mut cx, |repository_handle, cx| {
1907 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1908 })?
1909 .await??;
1910
1911 Ok(proto::RemoteMessageResponse {
1912 stdout: remote_message.stdout,
1913 stderr: remote_message.stderr,
1914 })
1915 }
1916
1917 async fn handle_stage(
1918 this: Entity<Self>,
1919 envelope: TypedEnvelope<proto::Stage>,
1920 mut cx: AsyncApp,
1921 ) -> Result<proto::Ack> {
1922 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1923 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1924
1925 let entries = envelope
1926 .payload
1927 .paths
1928 .into_iter()
1929 .map(|path| RepoPath::new(&path))
1930 .collect::<Result<Vec<_>>>()?;
1931
1932 repository_handle
1933 .update(&mut cx, |repository_handle, cx| {
1934 repository_handle.stage_entries(entries, cx)
1935 })?
1936 .await?;
1937 Ok(proto::Ack {})
1938 }
1939
1940 async fn handle_unstage(
1941 this: Entity<Self>,
1942 envelope: TypedEnvelope<proto::Unstage>,
1943 mut cx: AsyncApp,
1944 ) -> Result<proto::Ack> {
1945 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1946 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1947
1948 let entries = envelope
1949 .payload
1950 .paths
1951 .into_iter()
1952 .map(|path| RepoPath::new(&path))
1953 .collect::<Result<Vec<_>>>()?;
1954
1955 repository_handle
1956 .update(&mut cx, |repository_handle, cx| {
1957 repository_handle.unstage_entries(entries, cx)
1958 })?
1959 .await?;
1960
1961 Ok(proto::Ack {})
1962 }
1963
1964 async fn handle_stash(
1965 this: Entity<Self>,
1966 envelope: TypedEnvelope<proto::Stash>,
1967 mut cx: AsyncApp,
1968 ) -> Result<proto::Ack> {
1969 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1970 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1971
1972 let entries = envelope
1973 .payload
1974 .paths
1975 .into_iter()
1976 .map(|path| RepoPath::new(&path))
1977 .collect::<Result<Vec<_>>>()?;
1978
1979 repository_handle
1980 .update(&mut cx, |repository_handle, cx| {
1981 repository_handle.stash_entries(entries, cx)
1982 })?
1983 .await?;
1984
1985 Ok(proto::Ack {})
1986 }
1987
1988 async fn handle_stash_pop(
1989 this: Entity<Self>,
1990 envelope: TypedEnvelope<proto::StashPop>,
1991 mut cx: AsyncApp,
1992 ) -> Result<proto::Ack> {
1993 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1994 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1995 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1996
1997 repository_handle
1998 .update(&mut cx, |repository_handle, cx| {
1999 repository_handle.stash_pop(stash_index, cx)
2000 })?
2001 .await?;
2002
2003 Ok(proto::Ack {})
2004 }
2005
2006 async fn handle_stash_apply(
2007 this: Entity<Self>,
2008 envelope: TypedEnvelope<proto::StashApply>,
2009 mut cx: AsyncApp,
2010 ) -> Result<proto::Ack> {
2011 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2012 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2013 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2014
2015 repository_handle
2016 .update(&mut cx, |repository_handle, cx| {
2017 repository_handle.stash_apply(stash_index, cx)
2018 })?
2019 .await?;
2020
2021 Ok(proto::Ack {})
2022 }
2023
2024 async fn handle_stash_drop(
2025 this: Entity<Self>,
2026 envelope: TypedEnvelope<proto::StashDrop>,
2027 mut cx: AsyncApp,
2028 ) -> Result<proto::Ack> {
2029 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2030 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2031 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2032
2033 repository_handle
2034 .update(&mut cx, |repository_handle, cx| {
2035 repository_handle.stash_drop(stash_index, cx)
2036 })?
2037 .await??;
2038
2039 Ok(proto::Ack {})
2040 }
2041
2042 async fn handle_set_index_text(
2043 this: Entity<Self>,
2044 envelope: TypedEnvelope<proto::SetIndexText>,
2045 mut cx: AsyncApp,
2046 ) -> Result<proto::Ack> {
2047 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2048 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2049 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2050
2051 repository_handle
2052 .update(&mut cx, |repository_handle, cx| {
2053 repository_handle.spawn_set_index_text_job(
2054 repo_path,
2055 envelope.payload.text,
2056 None,
2057 cx,
2058 )
2059 })?
2060 .await??;
2061 Ok(proto::Ack {})
2062 }
2063
2064 async fn handle_run_hook(
2065 this: Entity<Self>,
2066 envelope: TypedEnvelope<proto::RunGitHook>,
2067 mut cx: AsyncApp,
2068 ) -> Result<proto::Ack> {
2069 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2070 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2071 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2072 repository_handle
2073 .update(&mut cx, |repository_handle, cx| {
2074 repository_handle.run_hook(hook, cx)
2075 })?
2076 .await??;
2077 Ok(proto::Ack {})
2078 }
2079
2080 async fn handle_commit(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::Commit>,
2083 mut cx: AsyncApp,
2084 ) -> Result<proto::Ack> {
2085 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2086 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2087 let askpass_id = envelope.payload.askpass_id;
2088
2089 let askpass = make_remote_delegate(
2090 this,
2091 envelope.payload.project_id,
2092 repository_id,
2093 askpass_id,
2094 &mut cx,
2095 );
2096
2097 let message = SharedString::from(envelope.payload.message);
2098 let name = envelope.payload.name.map(SharedString::from);
2099 let email = envelope.payload.email.map(SharedString::from);
2100 let options = envelope.payload.options.unwrap_or_default();
2101
2102 repository_handle
2103 .update(&mut cx, |repository_handle, cx| {
2104 repository_handle.commit(
2105 message,
2106 name.zip(email),
2107 CommitOptions {
2108 amend: options.amend,
2109 signoff: options.signoff,
2110 },
2111 askpass,
2112 cx,
2113 )
2114 })?
2115 .await??;
2116 Ok(proto::Ack {})
2117 }
2118
2119 async fn handle_get_remotes(
2120 this: Entity<Self>,
2121 envelope: TypedEnvelope<proto::GetRemotes>,
2122 mut cx: AsyncApp,
2123 ) -> Result<proto::GetRemotesResponse> {
2124 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2125 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2126
2127 let branch_name = envelope.payload.branch_name;
2128 let is_push = envelope.payload.is_push;
2129
2130 let remotes = repository_handle
2131 .update(&mut cx, |repository_handle, _| {
2132 repository_handle.get_remotes(branch_name, is_push)
2133 })?
2134 .await??;
2135
2136 Ok(proto::GetRemotesResponse {
2137 remotes: remotes
2138 .into_iter()
2139 .map(|remotes| proto::get_remotes_response::Remote {
2140 name: remotes.name.to_string(),
2141 })
2142 .collect::<Vec<_>>(),
2143 })
2144 }
2145
2146 async fn handle_get_worktrees(
2147 this: Entity<Self>,
2148 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2149 mut cx: AsyncApp,
2150 ) -> Result<proto::GitWorktreesResponse> {
2151 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2152 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2153
2154 let worktrees = repository_handle
2155 .update(&mut cx, |repository_handle, _| {
2156 repository_handle.worktrees()
2157 })?
2158 .await??;
2159
2160 Ok(proto::GitWorktreesResponse {
2161 worktrees: worktrees
2162 .into_iter()
2163 .map(|worktree| worktree_to_proto(&worktree))
2164 .collect::<Vec<_>>(),
2165 })
2166 }
2167
2168 async fn handle_create_worktree(
2169 this: Entity<Self>,
2170 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2171 mut cx: AsyncApp,
2172 ) -> Result<proto::Ack> {
2173 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2174 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2175 let directory = PathBuf::from(envelope.payload.directory);
2176 let name = envelope.payload.name;
2177 let commit = envelope.payload.commit;
2178
2179 repository_handle
2180 .update(&mut cx, |repository_handle, _| {
2181 repository_handle.create_worktree(name, directory, commit)
2182 })?
2183 .await??;
2184
2185 Ok(proto::Ack {})
2186 }
2187
2188 async fn handle_get_branches(
2189 this: Entity<Self>,
2190 envelope: TypedEnvelope<proto::GitGetBranches>,
2191 mut cx: AsyncApp,
2192 ) -> Result<proto::GitBranchesResponse> {
2193 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2194 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2195
2196 let branches = repository_handle
2197 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2198 .await??;
2199
2200 Ok(proto::GitBranchesResponse {
2201 branches: branches
2202 .into_iter()
2203 .map(|branch| branch_to_proto(&branch))
2204 .collect::<Vec<_>>(),
2205 })
2206 }
2207 async fn handle_get_default_branch(
2208 this: Entity<Self>,
2209 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2210 mut cx: AsyncApp,
2211 ) -> Result<proto::GetDefaultBranchResponse> {
2212 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2213 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2214
2215 let branch = repository_handle
2216 .update(&mut cx, |repository_handle, _| {
2217 repository_handle.default_branch()
2218 })?
2219 .await??
2220 .map(Into::into);
2221
2222 Ok(proto::GetDefaultBranchResponse { branch })
2223 }
2224 async fn handle_create_branch(
2225 this: Entity<Self>,
2226 envelope: TypedEnvelope<proto::GitCreateBranch>,
2227 mut cx: AsyncApp,
2228 ) -> Result<proto::Ack> {
2229 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2230 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2231 let branch_name = envelope.payload.branch_name;
2232
2233 repository_handle
2234 .update(&mut cx, |repository_handle, _| {
2235 repository_handle.create_branch(branch_name, None)
2236 })?
2237 .await??;
2238
2239 Ok(proto::Ack {})
2240 }
2241
2242 async fn handle_change_branch(
2243 this: Entity<Self>,
2244 envelope: TypedEnvelope<proto::GitChangeBranch>,
2245 mut cx: AsyncApp,
2246 ) -> Result<proto::Ack> {
2247 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2248 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2249 let branch_name = envelope.payload.branch_name;
2250
2251 repository_handle
2252 .update(&mut cx, |repository_handle, _| {
2253 repository_handle.change_branch(branch_name)
2254 })?
2255 .await??;
2256
2257 Ok(proto::Ack {})
2258 }
2259
2260 async fn handle_rename_branch(
2261 this: Entity<Self>,
2262 envelope: TypedEnvelope<proto::GitRenameBranch>,
2263 mut cx: AsyncApp,
2264 ) -> Result<proto::Ack> {
2265 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2266 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2267 let branch = envelope.payload.branch;
2268 let new_name = envelope.payload.new_name;
2269
2270 repository_handle
2271 .update(&mut cx, |repository_handle, _| {
2272 repository_handle.rename_branch(branch, new_name)
2273 })?
2274 .await??;
2275
2276 Ok(proto::Ack {})
2277 }
2278
2279 async fn handle_create_remote(
2280 this: Entity<Self>,
2281 envelope: TypedEnvelope<proto::GitCreateRemote>,
2282 mut cx: AsyncApp,
2283 ) -> Result<proto::Ack> {
2284 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2285 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2286 let remote_name = envelope.payload.remote_name;
2287 let remote_url = envelope.payload.remote_url;
2288
2289 repository_handle
2290 .update(&mut cx, |repository_handle, _| {
2291 repository_handle.create_remote(remote_name, remote_url)
2292 })?
2293 .await??;
2294
2295 Ok(proto::Ack {})
2296 }
2297
2298 async fn handle_delete_branch(
2299 this: Entity<Self>,
2300 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2301 mut cx: AsyncApp,
2302 ) -> Result<proto::Ack> {
2303 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2304 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2305 let branch_name = envelope.payload.branch_name;
2306
2307 repository_handle
2308 .update(&mut cx, |repository_handle, _| {
2309 repository_handle.delete_branch(branch_name)
2310 })?
2311 .await??;
2312
2313 Ok(proto::Ack {})
2314 }
2315
2316 async fn handle_remove_remote(
2317 this: Entity<Self>,
2318 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2319 mut cx: AsyncApp,
2320 ) -> Result<proto::Ack> {
2321 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2322 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2323 let remote_name = envelope.payload.remote_name;
2324
2325 repository_handle
2326 .update(&mut cx, |repository_handle, _| {
2327 repository_handle.remove_remote(remote_name)
2328 })?
2329 .await??;
2330
2331 Ok(proto::Ack {})
2332 }
2333
2334 async fn handle_show(
2335 this: Entity<Self>,
2336 envelope: TypedEnvelope<proto::GitShow>,
2337 mut cx: AsyncApp,
2338 ) -> Result<proto::GitCommitDetails> {
2339 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2340 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2341
2342 let commit = repository_handle
2343 .update(&mut cx, |repository_handle, _| {
2344 repository_handle.show(envelope.payload.commit)
2345 })?
2346 .await??;
2347 Ok(proto::GitCommitDetails {
2348 sha: commit.sha.into(),
2349 message: commit.message.into(),
2350 commit_timestamp: commit.commit_timestamp,
2351 author_email: commit.author_email.into(),
2352 author_name: commit.author_name.into(),
2353 })
2354 }
2355
2356 async fn handle_load_commit_diff(
2357 this: Entity<Self>,
2358 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2359 mut cx: AsyncApp,
2360 ) -> Result<proto::LoadCommitDiffResponse> {
2361 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2362 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2363
2364 let commit_diff = repository_handle
2365 .update(&mut cx, |repository_handle, _| {
2366 repository_handle.load_commit_diff(envelope.payload.commit)
2367 })?
2368 .await??;
2369 Ok(proto::LoadCommitDiffResponse {
2370 files: commit_diff
2371 .files
2372 .into_iter()
2373 .map(|file| proto::CommitFile {
2374 path: file.path.to_proto(),
2375 old_text: file.old_text,
2376 new_text: file.new_text,
2377 })
2378 .collect(),
2379 })
2380 }
2381
2382 async fn handle_file_history(
2383 this: Entity<Self>,
2384 envelope: TypedEnvelope<proto::GitFileHistory>,
2385 mut cx: AsyncApp,
2386 ) -> Result<proto::GitFileHistoryResponse> {
2387 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2388 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2389 let path = RepoPath::from_proto(&envelope.payload.path)?;
2390 let skip = envelope.payload.skip as usize;
2391 let limit = envelope.payload.limit.map(|l| l as usize);
2392
2393 let file_history = repository_handle
2394 .update(&mut cx, |repository_handle, _| {
2395 repository_handle.file_history_paginated(path, skip, limit)
2396 })?
2397 .await??;
2398
2399 Ok(proto::GitFileHistoryResponse {
2400 entries: file_history
2401 .entries
2402 .into_iter()
2403 .map(|entry| proto::FileHistoryEntry {
2404 sha: entry.sha.to_string(),
2405 subject: entry.subject.to_string(),
2406 message: entry.message.to_string(),
2407 commit_timestamp: entry.commit_timestamp,
2408 author_name: entry.author_name.to_string(),
2409 author_email: entry.author_email.to_string(),
2410 })
2411 .collect(),
2412 path: file_history.path.to_proto(),
2413 })
2414 }
2415
2416 async fn handle_reset(
2417 this: Entity<Self>,
2418 envelope: TypedEnvelope<proto::GitReset>,
2419 mut cx: AsyncApp,
2420 ) -> Result<proto::Ack> {
2421 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2422 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2423
2424 let mode = match envelope.payload.mode() {
2425 git_reset::ResetMode::Soft => ResetMode::Soft,
2426 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2427 };
2428
2429 repository_handle
2430 .update(&mut cx, |repository_handle, cx| {
2431 repository_handle.reset(envelope.payload.commit, mode, cx)
2432 })?
2433 .await??;
2434 Ok(proto::Ack {})
2435 }
2436
2437 async fn handle_checkout_files(
2438 this: Entity<Self>,
2439 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2440 mut cx: AsyncApp,
2441 ) -> Result<proto::Ack> {
2442 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2443 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2444 let paths = envelope
2445 .payload
2446 .paths
2447 .iter()
2448 .map(|s| RepoPath::from_proto(s))
2449 .collect::<Result<Vec<_>>>()?;
2450
2451 repository_handle
2452 .update(&mut cx, |repository_handle, cx| {
2453 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2454 })?
2455 .await?;
2456 Ok(proto::Ack {})
2457 }
2458
2459 async fn handle_open_commit_message_buffer(
2460 this: Entity<Self>,
2461 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2462 mut cx: AsyncApp,
2463 ) -> Result<proto::OpenBufferResponse> {
2464 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2465 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2466 let buffer = repository
2467 .update(&mut cx, |repository, cx| {
2468 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2469 })?
2470 .await?;
2471
2472 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2473 this.update(&mut cx, |this, cx| {
2474 this.buffer_store.update(cx, |buffer_store, cx| {
2475 buffer_store
2476 .create_buffer_for_peer(
2477 &buffer,
2478 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2479 cx,
2480 )
2481 .detach_and_log_err(cx);
2482 })
2483 })?;
2484
2485 Ok(proto::OpenBufferResponse {
2486 buffer_id: buffer_id.to_proto(),
2487 })
2488 }
2489
2490 async fn handle_askpass(
2491 this: Entity<Self>,
2492 envelope: TypedEnvelope<proto::AskPassRequest>,
2493 mut cx: AsyncApp,
2494 ) -> Result<proto::AskPassResponse> {
2495 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2496 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2497
2498 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2499 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2500 debug_panic!("no askpass found");
2501 anyhow::bail!("no askpass found");
2502 };
2503
2504 let response = askpass
2505 .ask_password(envelope.payload.prompt)
2506 .await
2507 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2508
2509 delegates
2510 .lock()
2511 .insert(envelope.payload.askpass_id, askpass);
2512
2513 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2514 Ok(proto::AskPassResponse {
2515 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2516 })
2517 }
2518
2519 async fn handle_check_for_pushed_commits(
2520 this: Entity<Self>,
2521 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2522 mut cx: AsyncApp,
2523 ) -> Result<proto::CheckForPushedCommitsResponse> {
2524 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2525 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2526
2527 let branches = repository_handle
2528 .update(&mut cx, |repository_handle, _| {
2529 repository_handle.check_for_pushed_commits()
2530 })?
2531 .await??;
2532 Ok(proto::CheckForPushedCommitsResponse {
2533 pushed_to: branches
2534 .into_iter()
2535 .map(|commit| commit.to_string())
2536 .collect(),
2537 })
2538 }
2539
2540 async fn handle_git_diff(
2541 this: Entity<Self>,
2542 envelope: TypedEnvelope<proto::GitDiff>,
2543 mut cx: AsyncApp,
2544 ) -> Result<proto::GitDiffResponse> {
2545 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2546 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2547 let diff_type = match envelope.payload.diff_type() {
2548 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2549 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2550 };
2551
2552 let mut diff = repository_handle
2553 .update(&mut cx, |repository_handle, cx| {
2554 repository_handle.diff(diff_type, cx)
2555 })?
2556 .await??;
2557 const ONE_MB: usize = 1_000_000;
2558 if diff.len() > ONE_MB {
2559 diff = diff.chars().take(ONE_MB).collect()
2560 }
2561
2562 Ok(proto::GitDiffResponse { diff })
2563 }
2564
2565 async fn handle_tree_diff(
2566 this: Entity<Self>,
2567 request: TypedEnvelope<proto::GetTreeDiff>,
2568 mut cx: AsyncApp,
2569 ) -> Result<proto::GetTreeDiffResponse> {
2570 let repository_id = RepositoryId(request.payload.repository_id);
2571 let diff_type = if request.payload.is_merge {
2572 DiffTreeType::MergeBase {
2573 base: request.payload.base.into(),
2574 head: request.payload.head.into(),
2575 }
2576 } else {
2577 DiffTreeType::Since {
2578 base: request.payload.base.into(),
2579 head: request.payload.head.into(),
2580 }
2581 };
2582
2583 let diff = this
2584 .update(&mut cx, |this, cx| {
2585 let repository = this.repositories().get(&repository_id)?;
2586 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2587 })?
2588 .context("missing repository")?
2589 .await??;
2590
2591 Ok(proto::GetTreeDiffResponse {
2592 entries: diff
2593 .entries
2594 .into_iter()
2595 .map(|(path, status)| proto::TreeDiffStatus {
2596 path: path.as_ref().to_proto(),
2597 status: match status {
2598 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2599 TreeDiffStatus::Modified { .. } => {
2600 proto::tree_diff_status::Status::Modified.into()
2601 }
2602 TreeDiffStatus::Deleted { .. } => {
2603 proto::tree_diff_status::Status::Deleted.into()
2604 }
2605 },
2606 oid: match status {
2607 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2608 Some(old.to_string())
2609 }
2610 TreeDiffStatus::Added => None,
2611 },
2612 })
2613 .collect(),
2614 })
2615 }
2616
2617 async fn handle_get_blob_content(
2618 this: Entity<Self>,
2619 request: TypedEnvelope<proto::GetBlobContent>,
2620 mut cx: AsyncApp,
2621 ) -> Result<proto::GetBlobContentResponse> {
2622 let oid = git::Oid::from_str(&request.payload.oid)?;
2623 let repository_id = RepositoryId(request.payload.repository_id);
2624 let content = this
2625 .update(&mut cx, |this, cx| {
2626 let repository = this.repositories().get(&repository_id)?;
2627 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2628 })?
2629 .context("missing repository")?
2630 .await?;
2631 Ok(proto::GetBlobContentResponse { content })
2632 }
2633
2634 async fn handle_open_unstaged_diff(
2635 this: Entity<Self>,
2636 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2637 mut cx: AsyncApp,
2638 ) -> Result<proto::OpenUnstagedDiffResponse> {
2639 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2640 let diff = this
2641 .update(&mut cx, |this, cx| {
2642 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2643 Some(this.open_unstaged_diff(buffer, cx))
2644 })?
2645 .context("missing buffer")?
2646 .await?;
2647 this.update(&mut cx, |this, _| {
2648 let shared_diffs = this
2649 .shared_diffs
2650 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2651 .or_default();
2652 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2653 })?;
2654 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
2655 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2656 }
2657
2658 async fn handle_open_uncommitted_diff(
2659 this: Entity<Self>,
2660 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2661 mut cx: AsyncApp,
2662 ) -> Result<proto::OpenUncommittedDiffResponse> {
2663 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2664 let diff = this
2665 .update(&mut cx, |this, cx| {
2666 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2667 Some(this.open_uncommitted_diff(buffer, cx))
2668 })?
2669 .context("missing buffer")?
2670 .await?;
2671 this.update(&mut cx, |this, _| {
2672 let shared_diffs = this
2673 .shared_diffs
2674 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2675 .or_default();
2676 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2677 })?;
2678 diff.read_with(&cx, |diff, cx| {
2679 use proto::open_uncommitted_diff_response::Mode;
2680
2681 let unstaged_diff = diff.secondary_diff();
2682 let index_snapshot = unstaged_diff.and_then(|diff| {
2683 let diff = diff.read(cx);
2684 diff.base_text_exists().then(|| diff.base_text(cx))
2685 });
2686
2687 let mode;
2688 let staged_text;
2689 let committed_text;
2690 if diff.base_text_exists() {
2691 let committed_snapshot = diff.base_text(cx);
2692 committed_text = Some(committed_snapshot.text());
2693 if let Some(index_text) = index_snapshot {
2694 if index_text.remote_id() == committed_snapshot.remote_id() {
2695 mode = Mode::IndexMatchesHead;
2696 staged_text = None;
2697 } else {
2698 mode = Mode::IndexAndHead;
2699 staged_text = Some(index_text.text());
2700 }
2701 } else {
2702 mode = Mode::IndexAndHead;
2703 staged_text = None;
2704 }
2705 } else {
2706 mode = Mode::IndexAndHead;
2707 committed_text = None;
2708 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2709 }
2710
2711 proto::OpenUncommittedDiffResponse {
2712 committed_text,
2713 staged_text,
2714 mode: mode.into(),
2715 }
2716 })
2717 }
2718
2719 async fn handle_update_diff_bases(
2720 this: Entity<Self>,
2721 request: TypedEnvelope<proto::UpdateDiffBases>,
2722 mut cx: AsyncApp,
2723 ) -> Result<()> {
2724 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2725 this.update(&mut cx, |this, cx| {
2726 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2727 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2728 {
2729 let buffer = buffer.read(cx).text_snapshot();
2730 diff_state.update(cx, |diff_state, cx| {
2731 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2732 })
2733 }
2734 })
2735 }
2736
2737 async fn handle_blame_buffer(
2738 this: Entity<Self>,
2739 envelope: TypedEnvelope<proto::BlameBuffer>,
2740 mut cx: AsyncApp,
2741 ) -> Result<proto::BlameBufferResponse> {
2742 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2743 let version = deserialize_version(&envelope.payload.version);
2744 let buffer = this.read_with(&cx, |this, cx| {
2745 this.buffer_store.read(cx).get_existing(buffer_id)
2746 })??;
2747 buffer
2748 .update(&mut cx, |buffer, _| {
2749 buffer.wait_for_version(version.clone())
2750 })?
2751 .await?;
2752 let blame = this
2753 .update(&mut cx, |this, cx| {
2754 this.blame_buffer(&buffer, Some(version), cx)
2755 })?
2756 .await?;
2757 Ok(serialize_blame_buffer_response(blame))
2758 }
2759
2760 async fn handle_get_permalink_to_line(
2761 this: Entity<Self>,
2762 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2763 mut cx: AsyncApp,
2764 ) -> Result<proto::GetPermalinkToLineResponse> {
2765 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2766 // let version = deserialize_version(&envelope.payload.version);
2767 let selection = {
2768 let proto_selection = envelope
2769 .payload
2770 .selection
2771 .context("no selection to get permalink for defined")?;
2772 proto_selection.start as u32..proto_selection.end as u32
2773 };
2774 let buffer = this.read_with(&cx, |this, cx| {
2775 this.buffer_store.read(cx).get_existing(buffer_id)
2776 })??;
2777 let permalink = this
2778 .update(&mut cx, |this, cx| {
2779 this.get_permalink_to_line(&buffer, selection, cx)
2780 })?
2781 .await?;
2782 Ok(proto::GetPermalinkToLineResponse {
2783 permalink: permalink.to_string(),
2784 })
2785 }
2786
2787 fn repository_for_request(
2788 this: &Entity<Self>,
2789 id: RepositoryId,
2790 cx: &mut AsyncApp,
2791 ) -> Result<Entity<Repository>> {
2792 this.read_with(cx, |this, _| {
2793 this.repositories
2794 .get(&id)
2795 .context("missing repository handle")
2796 .cloned()
2797 })?
2798 }
2799
2800 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2801 self.repositories
2802 .iter()
2803 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2804 .collect()
2805 }
2806
2807 fn process_updated_entries(
2808 &self,
2809 worktree: &Entity<Worktree>,
2810 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2811 cx: &mut App,
2812 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2813 let path_style = worktree.read(cx).path_style();
2814 let mut repo_paths = self
2815 .repositories
2816 .values()
2817 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2818 .collect::<Vec<_>>();
2819 let mut entries: Vec<_> = updated_entries
2820 .iter()
2821 .map(|(path, _, _)| path.clone())
2822 .collect();
2823 entries.sort();
2824 let worktree = worktree.read(cx);
2825
2826 let entries = entries
2827 .into_iter()
2828 .map(|path| worktree.absolutize(&path))
2829 .collect::<Arc<[_]>>();
2830
2831 let executor = cx.background_executor().clone();
2832 cx.background_executor().spawn(async move {
2833 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2834 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2835 let mut tasks = FuturesOrdered::new();
2836 for (repo_path, repo) in repo_paths.into_iter().rev() {
2837 let entries = entries.clone();
2838 let task = executor.spawn(async move {
2839 // Find all repository paths that belong to this repo
2840 let mut ix = entries.partition_point(|path| path < &*repo_path);
2841 if ix == entries.len() {
2842 return None;
2843 };
2844
2845 let mut paths = Vec::new();
2846 // All paths prefixed by a given repo will constitute a continuous range.
2847 while let Some(path) = entries.get(ix)
2848 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2849 &repo_path, path, path_style,
2850 )
2851 {
2852 paths.push((repo_path, ix));
2853 ix += 1;
2854 }
2855 if paths.is_empty() {
2856 None
2857 } else {
2858 Some((repo, paths))
2859 }
2860 });
2861 tasks.push_back(task);
2862 }
2863
2864 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2865 let mut path_was_used = vec![false; entries.len()];
2866 let tasks = tasks.collect::<Vec<_>>().await;
2867 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2868 // We always want to assign a path to it's innermost repository.
2869 for t in tasks {
2870 let Some((repo, paths)) = t else {
2871 continue;
2872 };
2873 let entry = paths_by_git_repo.entry(repo).or_default();
2874 for (repo_path, ix) in paths {
2875 if path_was_used[ix] {
2876 continue;
2877 }
2878 path_was_used[ix] = true;
2879 entry.push(repo_path);
2880 }
2881 }
2882
2883 paths_by_git_repo
2884 })
2885 }
2886}
2887
2888impl BufferGitState {
2889 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2890 Self {
2891 unstaged_diff: Default::default(),
2892 uncommitted_diff: Default::default(),
2893 recalculate_diff_task: Default::default(),
2894 language: Default::default(),
2895 language_registry: Default::default(),
2896 recalculating_tx: postage::watch::channel_with(false).0,
2897 hunk_staging_operation_count: 0,
2898 hunk_staging_operation_count_as_of_write: 0,
2899 head_text: Default::default(),
2900 index_text: Default::default(),
2901 head_changed: Default::default(),
2902 index_changed: Default::default(),
2903 language_changed: Default::default(),
2904 conflict_updated_futures: Default::default(),
2905 conflict_set: Default::default(),
2906 reparse_conflict_markers_task: Default::default(),
2907 }
2908 }
2909
2910 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2911 self.language = buffer.read(cx).language().cloned();
2912 self.language_changed = true;
2913 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2914 }
2915
2916 fn reparse_conflict_markers(
2917 &mut self,
2918 buffer: text::BufferSnapshot,
2919 cx: &mut Context<Self>,
2920 ) -> oneshot::Receiver<()> {
2921 let (tx, rx) = oneshot::channel();
2922
2923 let Some(conflict_set) = self
2924 .conflict_set
2925 .as_ref()
2926 .and_then(|conflict_set| conflict_set.upgrade())
2927 else {
2928 return rx;
2929 };
2930
2931 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2932 if conflict_set.has_conflict {
2933 Some(conflict_set.snapshot())
2934 } else {
2935 None
2936 }
2937 });
2938
2939 if let Some(old_snapshot) = old_snapshot {
2940 self.conflict_updated_futures.push(tx);
2941 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2942 let (snapshot, changed_range) = cx
2943 .background_spawn(async move {
2944 let new_snapshot = ConflictSet::parse(&buffer);
2945 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2946 (new_snapshot, changed_range)
2947 })
2948 .await;
2949 this.update(cx, |this, cx| {
2950 if let Some(conflict_set) = &this.conflict_set {
2951 conflict_set
2952 .update(cx, |conflict_set, cx| {
2953 conflict_set.set_snapshot(snapshot, changed_range, cx);
2954 })
2955 .ok();
2956 }
2957 let futures = std::mem::take(&mut this.conflict_updated_futures);
2958 for tx in futures {
2959 tx.send(()).ok();
2960 }
2961 })
2962 }))
2963 }
2964
2965 rx
2966 }
2967
2968 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2969 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2970 }
2971
2972 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2973 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2974 }
2975
2976 fn handle_base_texts_updated(
2977 &mut self,
2978 buffer: text::BufferSnapshot,
2979 message: proto::UpdateDiffBases,
2980 cx: &mut Context<Self>,
2981 ) {
2982 use proto::update_diff_bases::Mode;
2983
2984 let Some(mode) = Mode::from_i32(message.mode) else {
2985 return;
2986 };
2987
2988 let diff_bases_change = match mode {
2989 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2990 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2991 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2992 Mode::IndexAndHead => DiffBasesChange::SetEach {
2993 index: message.staged_text,
2994 head: message.committed_text,
2995 },
2996 };
2997
2998 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2999 }
3000
3001 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3002 if *self.recalculating_tx.borrow() {
3003 let mut rx = self.recalculating_tx.subscribe();
3004 Some(async move {
3005 loop {
3006 let is_recalculating = rx.recv().await;
3007 if is_recalculating != Some(true) {
3008 break;
3009 }
3010 }
3011 })
3012 } else {
3013 None
3014 }
3015 }
3016
3017 fn diff_bases_changed(
3018 &mut self,
3019 buffer: text::BufferSnapshot,
3020 diff_bases_change: Option<DiffBasesChange>,
3021 cx: &mut Context<Self>,
3022 ) {
3023 match diff_bases_change {
3024 Some(DiffBasesChange::SetIndex(index)) => {
3025 self.index_text = index.map(|mut index| {
3026 text::LineEnding::normalize(&mut index);
3027 Arc::from(index.as_str())
3028 });
3029 self.index_changed = true;
3030 }
3031 Some(DiffBasesChange::SetHead(head)) => {
3032 self.head_text = head.map(|mut head| {
3033 text::LineEnding::normalize(&mut head);
3034 Arc::from(head.as_str())
3035 });
3036 self.head_changed = true;
3037 }
3038 Some(DiffBasesChange::SetBoth(text)) => {
3039 let text = text.map(|mut text| {
3040 text::LineEnding::normalize(&mut text);
3041 Arc::from(text.as_str())
3042 });
3043 self.head_text = text.clone();
3044 self.index_text = text;
3045 self.head_changed = true;
3046 self.index_changed = true;
3047 }
3048 Some(DiffBasesChange::SetEach { index, head }) => {
3049 self.index_text = index.map(|mut index| {
3050 text::LineEnding::normalize(&mut index);
3051 Arc::from(index.as_str())
3052 });
3053 self.index_changed = true;
3054 self.head_text = head.map(|mut head| {
3055 text::LineEnding::normalize(&mut head);
3056 Arc::from(head.as_str())
3057 });
3058 self.head_changed = true;
3059 }
3060 None => {}
3061 }
3062
3063 self.recalculate_diffs(buffer, cx)
3064 }
3065
3066 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3067 *self.recalculating_tx.borrow_mut() = true;
3068
3069 let language = self.language.clone();
3070 let language_registry = self.language_registry.clone();
3071 let unstaged_diff = self.unstaged_diff();
3072 let uncommitted_diff = self.uncommitted_diff();
3073 let head = self.head_text.clone();
3074 let index = self.index_text.clone();
3075 let index_changed = self.index_changed;
3076 let head_changed = self.head_changed;
3077 let language_changed = self.language_changed;
3078 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3079 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3080 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3081 (None, None) => true,
3082 _ => false,
3083 };
3084 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3085 log::debug!(
3086 "start recalculating diffs for buffer {}",
3087 buffer.remote_id()
3088 );
3089
3090 let mut new_unstaged_diff = None;
3091 if let Some(unstaged_diff) = &unstaged_diff {
3092 new_unstaged_diff = Some(
3093 cx.update(|cx| {
3094 unstaged_diff.read(cx).update_diff(
3095 buffer.clone(),
3096 index,
3097 index_changed,
3098 language.clone(),
3099 cx,
3100 )
3101 })?
3102 .await,
3103 );
3104 }
3105
3106 // Dropping BufferDiff can be expensive, so yield back to the event loop
3107 // for a bit
3108 yield_now().await;
3109
3110 let mut new_uncommitted_diff = None;
3111 if let Some(uncommitted_diff) = &uncommitted_diff {
3112 new_uncommitted_diff = if index_matches_head {
3113 new_unstaged_diff.clone()
3114 } else {
3115 Some(
3116 cx.update(|cx| {
3117 uncommitted_diff.read(cx).update_diff(
3118 buffer.clone(),
3119 head,
3120 head_changed,
3121 language.clone(),
3122 cx,
3123 )
3124 })?
3125 .await,
3126 )
3127 }
3128 }
3129
3130 // Dropping BufferDiff can be expensive, so yield back to the event loop
3131 // for a bit
3132 yield_now().await;
3133
3134 let cancel = this.update(cx, |this, _| {
3135 // This checks whether all pending stage/unstage operations
3136 // have quiesced (i.e. both the corresponding write and the
3137 // read of that write have completed). If not, then we cancel
3138 // this recalculation attempt to avoid invalidating pending
3139 // state too quickly; another recalculation will come along
3140 // later and clear the pending state once the state of the index has settled.
3141 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3142 *this.recalculating_tx.borrow_mut() = false;
3143 true
3144 } else {
3145 false
3146 }
3147 })?;
3148 if cancel {
3149 log::debug!(
3150 concat!(
3151 "aborting recalculating diffs for buffer {}",
3152 "due to subsequent hunk operations",
3153 ),
3154 buffer.remote_id()
3155 );
3156 return Ok(());
3157 }
3158
3159 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3160 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3161 {
3162 let task = unstaged_diff.update(cx, |diff, cx| {
3163 if language_changed {
3164 diff.language_changed(language.clone(), language_registry.clone(), cx);
3165 }
3166 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3167 })?;
3168 Some(task.await)
3169 } else {
3170 None
3171 };
3172
3173 yield_now().await;
3174
3175 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3176 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3177 {
3178 uncommitted_diff
3179 .update(cx, |diff, cx| {
3180 if language_changed {
3181 diff.language_changed(language, language_registry, cx);
3182 }
3183 diff.set_snapshot_with_secondary(
3184 new_uncommitted_diff,
3185 &buffer,
3186 unstaged_changed_range.flatten(),
3187 true,
3188 cx,
3189 )
3190 })?
3191 .await;
3192 }
3193
3194 log::debug!(
3195 "finished recalculating diffs for buffer {}",
3196 buffer.remote_id()
3197 );
3198
3199 if let Some(this) = this.upgrade() {
3200 this.update(cx, |this, _| {
3201 this.index_changed = false;
3202 this.head_changed = false;
3203 this.language_changed = false;
3204 *this.recalculating_tx.borrow_mut() = false;
3205 })?;
3206 }
3207
3208 Ok(())
3209 }));
3210 }
3211}
3212
3213fn make_remote_delegate(
3214 this: Entity<GitStore>,
3215 project_id: u64,
3216 repository_id: RepositoryId,
3217 askpass_id: u64,
3218 cx: &mut AsyncApp,
3219) -> AskPassDelegate {
3220 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3221 this.update(cx, |this, cx| {
3222 let Some((client, _)) = this.downstream_client() else {
3223 return;
3224 };
3225 let response = client.request(proto::AskPassRequest {
3226 project_id,
3227 repository_id: repository_id.to_proto(),
3228 askpass_id,
3229 prompt,
3230 });
3231 cx.spawn(async move |_, _| {
3232 let mut response = response.await?.response;
3233 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3234 .ok();
3235 response.zeroize();
3236 anyhow::Ok(())
3237 })
3238 .detach_and_log_err(cx);
3239 })
3240 .log_err();
3241 })
3242}
3243
3244impl RepositoryId {
3245 pub fn to_proto(self) -> u64 {
3246 self.0
3247 }
3248
3249 pub fn from_proto(id: u64) -> Self {
3250 RepositoryId(id)
3251 }
3252}
3253
3254impl RepositorySnapshot {
3255 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3256 Self {
3257 id,
3258 statuses_by_path: Default::default(),
3259 work_directory_abs_path,
3260 branch: None,
3261 head_commit: None,
3262 scan_id: 0,
3263 merge: Default::default(),
3264 remote_origin_url: None,
3265 remote_upstream_url: None,
3266 stash_entries: Default::default(),
3267 path_style,
3268 }
3269 }
3270
3271 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3272 proto::UpdateRepository {
3273 branch_summary: self.branch.as_ref().map(branch_to_proto),
3274 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3275 updated_statuses: self
3276 .statuses_by_path
3277 .iter()
3278 .map(|entry| entry.to_proto())
3279 .collect(),
3280 removed_statuses: Default::default(),
3281 current_merge_conflicts: self
3282 .merge
3283 .conflicted_paths
3284 .iter()
3285 .map(|repo_path| repo_path.to_proto())
3286 .collect(),
3287 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3288 project_id,
3289 id: self.id.to_proto(),
3290 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3291 entry_ids: vec![self.id.to_proto()],
3292 scan_id: self.scan_id,
3293 is_last_update: true,
3294 stash_entries: self
3295 .stash_entries
3296 .entries
3297 .iter()
3298 .map(stash_to_proto)
3299 .collect(),
3300 remote_upstream_url: self.remote_upstream_url.clone(),
3301 remote_origin_url: self.remote_origin_url.clone(),
3302 }
3303 }
3304
3305 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3306 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3307 let mut removed_statuses: Vec<String> = Vec::new();
3308
3309 let mut new_statuses = self.statuses_by_path.iter().peekable();
3310 let mut old_statuses = old.statuses_by_path.iter().peekable();
3311
3312 let mut current_new_entry = new_statuses.next();
3313 let mut current_old_entry = old_statuses.next();
3314 loop {
3315 match (current_new_entry, current_old_entry) {
3316 (Some(new_entry), Some(old_entry)) => {
3317 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3318 Ordering::Less => {
3319 updated_statuses.push(new_entry.to_proto());
3320 current_new_entry = new_statuses.next();
3321 }
3322 Ordering::Equal => {
3323 if new_entry.status != old_entry.status {
3324 updated_statuses.push(new_entry.to_proto());
3325 }
3326 current_old_entry = old_statuses.next();
3327 current_new_entry = new_statuses.next();
3328 }
3329 Ordering::Greater => {
3330 removed_statuses.push(old_entry.repo_path.to_proto());
3331 current_old_entry = old_statuses.next();
3332 }
3333 }
3334 }
3335 (None, Some(old_entry)) => {
3336 removed_statuses.push(old_entry.repo_path.to_proto());
3337 current_old_entry = old_statuses.next();
3338 }
3339 (Some(new_entry), None) => {
3340 updated_statuses.push(new_entry.to_proto());
3341 current_new_entry = new_statuses.next();
3342 }
3343 (None, None) => break,
3344 }
3345 }
3346
3347 proto::UpdateRepository {
3348 branch_summary: self.branch.as_ref().map(branch_to_proto),
3349 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3350 updated_statuses,
3351 removed_statuses,
3352 current_merge_conflicts: self
3353 .merge
3354 .conflicted_paths
3355 .iter()
3356 .map(|path| path.to_proto())
3357 .collect(),
3358 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3359 project_id,
3360 id: self.id.to_proto(),
3361 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3362 entry_ids: vec![],
3363 scan_id: self.scan_id,
3364 is_last_update: true,
3365 stash_entries: self
3366 .stash_entries
3367 .entries
3368 .iter()
3369 .map(stash_to_proto)
3370 .collect(),
3371 remote_upstream_url: self.remote_upstream_url.clone(),
3372 remote_origin_url: self.remote_origin_url.clone(),
3373 }
3374 }
3375
3376 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3377 self.statuses_by_path.iter().cloned()
3378 }
3379
3380 pub fn status_summary(&self) -> GitSummary {
3381 self.statuses_by_path.summary().item_summary
3382 }
3383
3384 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3385 self.statuses_by_path
3386 .get(&PathKey(path.as_ref().clone()), ())
3387 .cloned()
3388 }
3389
3390 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3391 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3392 }
3393
3394 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3395 self.path_style
3396 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3397 .unwrap()
3398 .into()
3399 }
3400
3401 #[inline]
3402 fn abs_path_to_repo_path_inner(
3403 work_directory_abs_path: &Path,
3404 abs_path: &Path,
3405 path_style: PathStyle,
3406 ) -> Option<RepoPath> {
3407 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3408 Some(RepoPath::from_rel_path(&rel_path))
3409 }
3410
3411 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3412 self.merge.conflicted_paths.contains(repo_path)
3413 }
3414
3415 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3416 let had_conflict_on_last_merge_head_change =
3417 self.merge.conflicted_paths.contains(repo_path);
3418 let has_conflict_currently = self
3419 .status_for_path(repo_path)
3420 .is_some_and(|entry| entry.status.is_conflicted());
3421 had_conflict_on_last_merge_head_change || has_conflict_currently
3422 }
3423
3424 /// This is the name that will be displayed in the repository selector for this repository.
3425 pub fn display_name(&self) -> SharedString {
3426 self.work_directory_abs_path
3427 .file_name()
3428 .unwrap_or_default()
3429 .to_string_lossy()
3430 .to_string()
3431 .into()
3432 }
3433}
3434
3435pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3436 proto::StashEntry {
3437 oid: entry.oid.as_bytes().to_vec(),
3438 message: entry.message.clone(),
3439 branch: entry.branch.clone(),
3440 index: entry.index as u64,
3441 timestamp: entry.timestamp,
3442 }
3443}
3444
3445pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3446 Ok(StashEntry {
3447 oid: Oid::from_bytes(&entry.oid)?,
3448 message: entry.message.clone(),
3449 index: entry.index as usize,
3450 branch: entry.branch.clone(),
3451 timestamp: entry.timestamp,
3452 })
3453}
3454
3455impl MergeDetails {
3456 async fn load(
3457 backend: &Arc<dyn GitRepository>,
3458 status: &SumTree<StatusEntry>,
3459 prev_snapshot: &RepositorySnapshot,
3460 ) -> Result<(MergeDetails, bool)> {
3461 log::debug!("load merge details");
3462 let message = backend.merge_message().await;
3463 let heads = backend
3464 .revparse_batch(vec![
3465 "MERGE_HEAD".into(),
3466 "CHERRY_PICK_HEAD".into(),
3467 "REBASE_HEAD".into(),
3468 "REVERT_HEAD".into(),
3469 "APPLY_HEAD".into(),
3470 ])
3471 .await
3472 .log_err()
3473 .unwrap_or_default()
3474 .into_iter()
3475 .map(|opt| opt.map(SharedString::from))
3476 .collect::<Vec<_>>();
3477 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3478 let conflicted_paths = if merge_heads_changed {
3479 let current_conflicted_paths = TreeSet::from_ordered_entries(
3480 status
3481 .iter()
3482 .filter(|entry| entry.status.is_conflicted())
3483 .map(|entry| entry.repo_path.clone()),
3484 );
3485
3486 // It can happen that we run a scan while a lengthy merge is in progress
3487 // that will eventually result in conflicts, but before those conflicts
3488 // are reported by `git status`. Since for the moment we only care about
3489 // the merge heads state for the purposes of tracking conflicts, don't update
3490 // this state until we see some conflicts.
3491 if heads.iter().any(Option::is_some)
3492 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3493 && current_conflicted_paths.is_empty()
3494 {
3495 log::debug!("not updating merge heads because no conflicts found");
3496 return Ok((
3497 MergeDetails {
3498 message: message.map(SharedString::from),
3499 ..prev_snapshot.merge.clone()
3500 },
3501 false,
3502 ));
3503 }
3504
3505 current_conflicted_paths
3506 } else {
3507 prev_snapshot.merge.conflicted_paths.clone()
3508 };
3509 let details = MergeDetails {
3510 conflicted_paths,
3511 message: message.map(SharedString::from),
3512 heads,
3513 };
3514 Ok((details, merge_heads_changed))
3515 }
3516}
3517
3518impl Repository {
3519 pub fn snapshot(&self) -> RepositorySnapshot {
3520 self.snapshot.clone()
3521 }
3522
3523 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3524 self.pending_ops.iter().cloned()
3525 }
3526
3527 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3528 self.pending_ops.summary().clone()
3529 }
3530
3531 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3532 self.pending_ops
3533 .get(&PathKey(path.as_ref().clone()), ())
3534 .cloned()
3535 }
3536
3537 fn local(
3538 id: RepositoryId,
3539 work_directory_abs_path: Arc<Path>,
3540 dot_git_abs_path: Arc<Path>,
3541 project_environment: WeakEntity<ProjectEnvironment>,
3542 fs: Arc<dyn Fs>,
3543 git_store: WeakEntity<GitStore>,
3544 cx: &mut Context<Self>,
3545 ) -> Self {
3546 let snapshot =
3547 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3548 let state = cx
3549 .spawn(async move |_, cx| {
3550 LocalRepositoryState::new(
3551 work_directory_abs_path,
3552 dot_git_abs_path,
3553 project_environment,
3554 fs,
3555 cx,
3556 )
3557 .await
3558 .map_err(|err| err.to_string())
3559 })
3560 .shared();
3561 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3562 let state = cx
3563 .spawn(async move |_, _| {
3564 let state = state.await?;
3565 Ok(RepositoryState::Local(state))
3566 })
3567 .shared();
3568
3569 Repository {
3570 this: cx.weak_entity(),
3571 git_store,
3572 snapshot,
3573 pending_ops: Default::default(),
3574 repository_state: state,
3575 commit_message_buffer: None,
3576 askpass_delegates: Default::default(),
3577 paths_needing_status_update: Default::default(),
3578 latest_askpass_id: 0,
3579 job_sender,
3580 job_id: 0,
3581 active_jobs: Default::default(),
3582 }
3583 }
3584
3585 fn remote(
3586 id: RepositoryId,
3587 work_directory_abs_path: Arc<Path>,
3588 path_style: PathStyle,
3589 project_id: ProjectId,
3590 client: AnyProtoClient,
3591 git_store: WeakEntity<GitStore>,
3592 cx: &mut Context<Self>,
3593 ) -> Self {
3594 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3595 let repository_state = RemoteRepositoryState { project_id, client };
3596 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3597 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3598 Self {
3599 this: cx.weak_entity(),
3600 snapshot,
3601 commit_message_buffer: None,
3602 git_store,
3603 pending_ops: Default::default(),
3604 paths_needing_status_update: Default::default(),
3605 job_sender,
3606 repository_state,
3607 askpass_delegates: Default::default(),
3608 latest_askpass_id: 0,
3609 active_jobs: Default::default(),
3610 job_id: 0,
3611 }
3612 }
3613
3614 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3615 self.git_store.upgrade()
3616 }
3617
3618 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3619 let this = cx.weak_entity();
3620 let git_store = self.git_store.clone();
3621 let _ = self.send_keyed_job(
3622 Some(GitJobKey::ReloadBufferDiffBases),
3623 None,
3624 |state, mut cx| async move {
3625 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3626 log::error!("tried to recompute diffs for a non-local repository");
3627 return Ok(());
3628 };
3629
3630 let Some(this) = this.upgrade() else {
3631 return Ok(());
3632 };
3633
3634 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3635 git_store.update(cx, |git_store, cx| {
3636 git_store
3637 .diffs
3638 .iter()
3639 .filter_map(|(buffer_id, diff_state)| {
3640 let buffer_store = git_store.buffer_store.read(cx);
3641 let buffer = buffer_store.get(*buffer_id)?;
3642 let file = File::from_dyn(buffer.read(cx).file())?;
3643 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3644 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3645 log::debug!(
3646 "start reload diff bases for repo path {}",
3647 repo_path.as_unix_str()
3648 );
3649 diff_state.update(cx, |diff_state, _| {
3650 let has_unstaged_diff = diff_state
3651 .unstaged_diff
3652 .as_ref()
3653 .is_some_and(|diff| diff.is_upgradable());
3654 let has_uncommitted_diff = diff_state
3655 .uncommitted_diff
3656 .as_ref()
3657 .is_some_and(|set| set.is_upgradable());
3658
3659 Some((
3660 buffer,
3661 repo_path,
3662 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3663 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3664 ))
3665 })
3666 })
3667 .collect::<Vec<_>>()
3668 })
3669 })??;
3670
3671 let buffer_diff_base_changes = cx
3672 .background_spawn(async move {
3673 let mut changes = Vec::new();
3674 for (buffer, repo_path, current_index_text, current_head_text) in
3675 &repo_diff_state_updates
3676 {
3677 let index_text = if current_index_text.is_some() {
3678 backend.load_index_text(repo_path.clone()).await
3679 } else {
3680 None
3681 };
3682 let head_text = if current_head_text.is_some() {
3683 backend.load_committed_text(repo_path.clone()).await
3684 } else {
3685 None
3686 };
3687
3688 let change =
3689 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3690 (Some(current_index), Some(current_head)) => {
3691 let index_changed =
3692 index_text.as_deref() != current_index.as_deref();
3693 let head_changed =
3694 head_text.as_deref() != current_head.as_deref();
3695 if index_changed && head_changed {
3696 if index_text == head_text {
3697 Some(DiffBasesChange::SetBoth(head_text))
3698 } else {
3699 Some(DiffBasesChange::SetEach {
3700 index: index_text,
3701 head: head_text,
3702 })
3703 }
3704 } else if index_changed {
3705 Some(DiffBasesChange::SetIndex(index_text))
3706 } else if head_changed {
3707 Some(DiffBasesChange::SetHead(head_text))
3708 } else {
3709 None
3710 }
3711 }
3712 (Some(current_index), None) => {
3713 let index_changed =
3714 index_text.as_deref() != current_index.as_deref();
3715 index_changed
3716 .then_some(DiffBasesChange::SetIndex(index_text))
3717 }
3718 (None, Some(current_head)) => {
3719 let head_changed =
3720 head_text.as_deref() != current_head.as_deref();
3721 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3722 }
3723 (None, None) => None,
3724 };
3725
3726 changes.push((buffer.clone(), change))
3727 }
3728 changes
3729 })
3730 .await;
3731
3732 git_store.update(&mut cx, |git_store, cx| {
3733 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3734 let buffer_snapshot = buffer.read(cx).text_snapshot();
3735 let buffer_id = buffer_snapshot.remote_id();
3736 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3737 continue;
3738 };
3739
3740 let downstream_client = git_store.downstream_client();
3741 diff_state.update(cx, |diff_state, cx| {
3742 use proto::update_diff_bases::Mode;
3743
3744 if let Some((diff_bases_change, (client, project_id))) =
3745 diff_bases_change.clone().zip(downstream_client)
3746 {
3747 let (staged_text, committed_text, mode) = match diff_bases_change {
3748 DiffBasesChange::SetIndex(index) => {
3749 (index, None, Mode::IndexOnly)
3750 }
3751 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3752 DiffBasesChange::SetEach { index, head } => {
3753 (index, head, Mode::IndexAndHead)
3754 }
3755 DiffBasesChange::SetBoth(text) => {
3756 (None, text, Mode::IndexMatchesHead)
3757 }
3758 };
3759 client
3760 .send(proto::UpdateDiffBases {
3761 project_id: project_id.to_proto(),
3762 buffer_id: buffer_id.to_proto(),
3763 staged_text,
3764 committed_text,
3765 mode: mode as i32,
3766 })
3767 .log_err();
3768 }
3769
3770 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3771 });
3772 }
3773 })
3774 },
3775 );
3776 }
3777
3778 pub fn send_job<F, Fut, R>(
3779 &mut self,
3780 status: Option<SharedString>,
3781 job: F,
3782 ) -> oneshot::Receiver<R>
3783 where
3784 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3785 Fut: Future<Output = R> + 'static,
3786 R: Send + 'static,
3787 {
3788 self.send_keyed_job(None, status, job)
3789 }
3790
3791 fn send_keyed_job<F, Fut, R>(
3792 &mut self,
3793 key: Option<GitJobKey>,
3794 status: Option<SharedString>,
3795 job: F,
3796 ) -> oneshot::Receiver<R>
3797 where
3798 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3799 Fut: Future<Output = R> + 'static,
3800 R: Send + 'static,
3801 {
3802 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3803 let job_id = post_inc(&mut self.job_id);
3804 let this = self.this.clone();
3805 self.job_sender
3806 .unbounded_send(GitJob {
3807 key,
3808 job: Box::new(move |state, cx: &mut AsyncApp| {
3809 let job = job(state, cx.clone());
3810 cx.spawn(async move |cx| {
3811 if let Some(s) = status.clone() {
3812 this.update(cx, |this, cx| {
3813 this.active_jobs.insert(
3814 job_id,
3815 JobInfo {
3816 start: Instant::now(),
3817 message: s.clone(),
3818 },
3819 );
3820
3821 cx.notify();
3822 })
3823 .ok();
3824 }
3825 let result = job.await;
3826
3827 this.update(cx, |this, cx| {
3828 this.active_jobs.remove(&job_id);
3829 cx.notify();
3830 })
3831 .ok();
3832
3833 result_tx.send(result).ok();
3834 })
3835 }),
3836 })
3837 .ok();
3838 result_rx
3839 }
3840
3841 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3842 let Some(git_store) = self.git_store.upgrade() else {
3843 return;
3844 };
3845 let entity = cx.entity();
3846 git_store.update(cx, |git_store, cx| {
3847 let Some((&id, _)) = git_store
3848 .repositories
3849 .iter()
3850 .find(|(_, handle)| *handle == &entity)
3851 else {
3852 return;
3853 };
3854 git_store.active_repo_id = Some(id);
3855 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3856 });
3857 }
3858
3859 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3860 self.snapshot.status()
3861 }
3862
3863 pub fn cached_stash(&self) -> GitStash {
3864 self.snapshot.stash_entries.clone()
3865 }
3866
3867 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3868 let git_store = self.git_store.upgrade()?;
3869 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3870 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3871 let abs_path = SanitizedPath::new(&abs_path);
3872 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3873 Some(ProjectPath {
3874 worktree_id: worktree.read(cx).id(),
3875 path: relative_path,
3876 })
3877 }
3878
3879 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3880 let git_store = self.git_store.upgrade()?;
3881 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3882 let abs_path = worktree_store.absolutize(path, cx)?;
3883 self.snapshot.abs_path_to_repo_path(&abs_path)
3884 }
3885
3886 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3887 other
3888 .read(cx)
3889 .snapshot
3890 .work_directory_abs_path
3891 .starts_with(&self.snapshot.work_directory_abs_path)
3892 }
3893
3894 pub fn open_commit_buffer(
3895 &mut self,
3896 languages: Option<Arc<LanguageRegistry>>,
3897 buffer_store: Entity<BufferStore>,
3898 cx: &mut Context<Self>,
3899 ) -> Task<Result<Entity<Buffer>>> {
3900 let id = self.id;
3901 if let Some(buffer) = self.commit_message_buffer.clone() {
3902 return Task::ready(Ok(buffer));
3903 }
3904 let this = cx.weak_entity();
3905
3906 let rx = self.send_job(None, move |state, mut cx| async move {
3907 let Some(this) = this.upgrade() else {
3908 bail!("git store was dropped");
3909 };
3910 match state {
3911 RepositoryState::Local(..) => {
3912 this.update(&mut cx, |_, cx| {
3913 Self::open_local_commit_buffer(languages, buffer_store, cx)
3914 })?
3915 .await
3916 }
3917 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3918 let request = client.request(proto::OpenCommitMessageBuffer {
3919 project_id: project_id.0,
3920 repository_id: id.to_proto(),
3921 });
3922 let response = request.await.context("requesting to open commit buffer")?;
3923 let buffer_id = BufferId::new(response.buffer_id)?;
3924 let buffer = buffer_store
3925 .update(&mut cx, |buffer_store, cx| {
3926 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3927 })?
3928 .await?;
3929 if let Some(language_registry) = languages {
3930 let git_commit_language =
3931 language_registry.language_for_name("Git Commit").await?;
3932 buffer.update(&mut cx, |buffer, cx| {
3933 buffer.set_language(Some(git_commit_language), cx);
3934 })?;
3935 }
3936 this.update(&mut cx, |this, _| {
3937 this.commit_message_buffer = Some(buffer.clone());
3938 })?;
3939 Ok(buffer)
3940 }
3941 }
3942 });
3943
3944 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3945 }
3946
3947 fn open_local_commit_buffer(
3948 language_registry: Option<Arc<LanguageRegistry>>,
3949 buffer_store: Entity<BufferStore>,
3950 cx: &mut Context<Self>,
3951 ) -> Task<Result<Entity<Buffer>>> {
3952 cx.spawn(async move |repository, cx| {
3953 let buffer = buffer_store
3954 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3955 .await?;
3956
3957 if let Some(language_registry) = language_registry {
3958 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3959 buffer.update(cx, |buffer, cx| {
3960 buffer.set_language(Some(git_commit_language), cx);
3961 })?;
3962 }
3963
3964 repository.update(cx, |repository, _| {
3965 repository.commit_message_buffer = Some(buffer.clone());
3966 })?;
3967 Ok(buffer)
3968 })
3969 }
3970
3971 pub fn checkout_files(
3972 &mut self,
3973 commit: &str,
3974 paths: Vec<RepoPath>,
3975 cx: &mut Context<Self>,
3976 ) -> Task<Result<()>> {
3977 let commit = commit.to_string();
3978 let id = self.id;
3979
3980 self.spawn_job_with_tracking(
3981 paths.clone(),
3982 pending_op::GitStatus::Reverted,
3983 cx,
3984 async move |this, cx| {
3985 this.update(cx, |this, _cx| {
3986 this.send_job(
3987 Some(format!("git checkout {}", commit).into()),
3988 move |git_repo, _| async move {
3989 match git_repo {
3990 RepositoryState::Local(LocalRepositoryState {
3991 backend,
3992 environment,
3993 ..
3994 }) => {
3995 backend
3996 .checkout_files(commit, paths, environment.clone())
3997 .await
3998 }
3999 RepositoryState::Remote(RemoteRepositoryState {
4000 project_id,
4001 client,
4002 }) => {
4003 client
4004 .request(proto::GitCheckoutFiles {
4005 project_id: project_id.0,
4006 repository_id: id.to_proto(),
4007 commit,
4008 paths: paths
4009 .into_iter()
4010 .map(|p| p.to_proto())
4011 .collect(),
4012 })
4013 .await?;
4014
4015 Ok(())
4016 }
4017 }
4018 },
4019 )
4020 })?
4021 .await?
4022 },
4023 )
4024 }
4025
4026 pub fn reset(
4027 &mut self,
4028 commit: String,
4029 reset_mode: ResetMode,
4030 _cx: &mut App,
4031 ) -> oneshot::Receiver<Result<()>> {
4032 let id = self.id;
4033
4034 self.send_job(None, move |git_repo, _| async move {
4035 match git_repo {
4036 RepositoryState::Local(LocalRepositoryState {
4037 backend,
4038 environment,
4039 ..
4040 }) => backend.reset(commit, reset_mode, environment).await,
4041 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4042 client
4043 .request(proto::GitReset {
4044 project_id: project_id.0,
4045 repository_id: id.to_proto(),
4046 commit,
4047 mode: match reset_mode {
4048 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4049 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4050 },
4051 })
4052 .await?;
4053
4054 Ok(())
4055 }
4056 }
4057 })
4058 }
4059
4060 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4061 let id = self.id;
4062 self.send_job(None, move |git_repo, _cx| async move {
4063 match git_repo {
4064 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4065 backend.show(commit).await
4066 }
4067 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4068 let resp = client
4069 .request(proto::GitShow {
4070 project_id: project_id.0,
4071 repository_id: id.to_proto(),
4072 commit,
4073 })
4074 .await?;
4075
4076 Ok(CommitDetails {
4077 sha: resp.sha.into(),
4078 message: resp.message.into(),
4079 commit_timestamp: resp.commit_timestamp,
4080 author_email: resp.author_email.into(),
4081 author_name: resp.author_name.into(),
4082 })
4083 }
4084 }
4085 })
4086 }
4087
4088 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4089 let id = self.id;
4090 self.send_job(None, move |git_repo, cx| async move {
4091 match git_repo {
4092 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4093 backend.load_commit(commit, cx).await
4094 }
4095 RepositoryState::Remote(RemoteRepositoryState {
4096 client, project_id, ..
4097 }) => {
4098 let response = client
4099 .request(proto::LoadCommitDiff {
4100 project_id: project_id.0,
4101 repository_id: id.to_proto(),
4102 commit,
4103 })
4104 .await?;
4105 Ok(CommitDiff {
4106 files: response
4107 .files
4108 .into_iter()
4109 .map(|file| {
4110 Ok(CommitFile {
4111 path: RepoPath::from_proto(&file.path)?,
4112 old_text: file.old_text,
4113 new_text: file.new_text,
4114 })
4115 })
4116 .collect::<Result<Vec<_>>>()?,
4117 })
4118 }
4119 }
4120 })
4121 }
4122
4123 pub fn file_history(
4124 &mut self,
4125 path: RepoPath,
4126 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4127 self.file_history_paginated(path, 0, None)
4128 }
4129
4130 pub fn file_history_paginated(
4131 &mut self,
4132 path: RepoPath,
4133 skip: usize,
4134 limit: Option<usize>,
4135 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4136 let id = self.id;
4137 self.send_job(None, move |git_repo, _cx| async move {
4138 match git_repo {
4139 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4140 backend.file_history_paginated(path, skip, limit).await
4141 }
4142 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4143 let response = client
4144 .request(proto::GitFileHistory {
4145 project_id: project_id.0,
4146 repository_id: id.to_proto(),
4147 path: path.to_proto(),
4148 skip: skip as u64,
4149 limit: limit.map(|l| l as u64),
4150 })
4151 .await?;
4152 Ok(git::repository::FileHistory {
4153 entries: response
4154 .entries
4155 .into_iter()
4156 .map(|entry| git::repository::FileHistoryEntry {
4157 sha: entry.sha.into(),
4158 subject: entry.subject.into(),
4159 message: entry.message.into(),
4160 commit_timestamp: entry.commit_timestamp,
4161 author_name: entry.author_name.into(),
4162 author_email: entry.author_email.into(),
4163 })
4164 .collect(),
4165 path: RepoPath::from_proto(&response.path)?,
4166 })
4167 }
4168 }
4169 })
4170 }
4171
4172 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4173 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4174 }
4175
4176 fn save_buffers<'a>(
4177 &self,
4178 entries: impl IntoIterator<Item = &'a RepoPath>,
4179 cx: &mut Context<Self>,
4180 ) -> Vec<Task<anyhow::Result<()>>> {
4181 let mut save_futures = Vec::new();
4182 if let Some(buffer_store) = self.buffer_store(cx) {
4183 buffer_store.update(cx, |buffer_store, cx| {
4184 for path in entries {
4185 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4186 continue;
4187 };
4188 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4189 && buffer
4190 .read(cx)
4191 .file()
4192 .is_some_and(|file| file.disk_state().exists())
4193 && buffer.read(cx).has_unsaved_edits()
4194 {
4195 save_futures.push(buffer_store.save_buffer(buffer, cx));
4196 }
4197 }
4198 })
4199 }
4200 save_futures
4201 }
4202
4203 pub fn stage_entries(
4204 &mut self,
4205 entries: Vec<RepoPath>,
4206 cx: &mut Context<Self>,
4207 ) -> Task<anyhow::Result<()>> {
4208 self.stage_or_unstage_entries(true, entries, cx)
4209 }
4210
4211 pub fn unstage_entries(
4212 &mut self,
4213 entries: Vec<RepoPath>,
4214 cx: &mut Context<Self>,
4215 ) -> Task<anyhow::Result<()>> {
4216 self.stage_or_unstage_entries(false, entries, cx)
4217 }
4218
4219 fn stage_or_unstage_entries(
4220 &mut self,
4221 stage: bool,
4222 entries: Vec<RepoPath>,
4223 cx: &mut Context<Self>,
4224 ) -> Task<anyhow::Result<()>> {
4225 if entries.is_empty() {
4226 return Task::ready(Ok(()));
4227 }
4228 let Some(git_store) = self.git_store.upgrade() else {
4229 return Task::ready(Ok(()));
4230 };
4231 let id = self.id;
4232 let save_tasks = self.save_buffers(&entries, cx);
4233 let paths = entries
4234 .iter()
4235 .map(|p| p.as_unix_str())
4236 .collect::<Vec<_>>()
4237 .join(" ");
4238 let status = if stage {
4239 format!("git add {paths}")
4240 } else {
4241 format!("git reset {paths}")
4242 };
4243 let job_key = GitJobKey::WriteIndex(entries.clone());
4244
4245 self.spawn_job_with_tracking(
4246 entries.clone(),
4247 if stage {
4248 pending_op::GitStatus::Staged
4249 } else {
4250 pending_op::GitStatus::Unstaged
4251 },
4252 cx,
4253 async move |this, cx| {
4254 for save_task in save_tasks {
4255 save_task.await?;
4256 }
4257
4258 this.update(cx, |this, cx| {
4259 let weak_this = cx.weak_entity();
4260 this.send_keyed_job(
4261 Some(job_key),
4262 Some(status.into()),
4263 move |git_repo, mut cx| async move {
4264 let hunk_staging_operation_counts = weak_this
4265 .update(&mut cx, |this, cx| {
4266 let mut hunk_staging_operation_counts = HashMap::default();
4267 for path in &entries {
4268 let Some(project_path) =
4269 this.repo_path_to_project_path(path, cx)
4270 else {
4271 continue;
4272 };
4273 let Some(buffer) = git_store
4274 .read(cx)
4275 .buffer_store
4276 .read(cx)
4277 .get_by_path(&project_path)
4278 else {
4279 continue;
4280 };
4281 let Some(diff_state) = git_store
4282 .read(cx)
4283 .diffs
4284 .get(&buffer.read(cx).remote_id())
4285 .cloned()
4286 else {
4287 continue;
4288 };
4289 let Some(uncommitted_diff) =
4290 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4291 |uncommitted_diff| uncommitted_diff.upgrade(),
4292 )
4293 else {
4294 continue;
4295 };
4296 let buffer_snapshot = buffer.read(cx).text_snapshot();
4297 let file_exists = buffer
4298 .read(cx)
4299 .file()
4300 .is_some_and(|file| file.disk_state().exists());
4301 let hunk_staging_operation_count =
4302 diff_state.update(cx, |diff_state, cx| {
4303 uncommitted_diff.update(
4304 cx,
4305 |uncommitted_diff, cx| {
4306 uncommitted_diff
4307 .stage_or_unstage_all_hunks(
4308 stage,
4309 &buffer_snapshot,
4310 file_exists,
4311 cx,
4312 );
4313 },
4314 );
4315
4316 diff_state.hunk_staging_operation_count += 1;
4317 diff_state.hunk_staging_operation_count
4318 });
4319 hunk_staging_operation_counts.insert(
4320 diff_state.downgrade(),
4321 hunk_staging_operation_count,
4322 );
4323 }
4324 hunk_staging_operation_counts
4325 })
4326 .unwrap_or_default();
4327
4328 let result = match git_repo {
4329 RepositoryState::Local(LocalRepositoryState {
4330 backend,
4331 environment,
4332 ..
4333 }) => {
4334 if stage {
4335 backend.stage_paths(entries, environment.clone()).await
4336 } else {
4337 backend.unstage_paths(entries, environment.clone()).await
4338 }
4339 }
4340 RepositoryState::Remote(RemoteRepositoryState {
4341 project_id,
4342 client,
4343 }) => {
4344 if stage {
4345 client
4346 .request(proto::Stage {
4347 project_id: project_id.0,
4348 repository_id: id.to_proto(),
4349 paths: entries
4350 .into_iter()
4351 .map(|repo_path| repo_path.to_proto())
4352 .collect(),
4353 })
4354 .await
4355 .context("sending stage request")
4356 .map(|_| ())
4357 } else {
4358 client
4359 .request(proto::Unstage {
4360 project_id: project_id.0,
4361 repository_id: id.to_proto(),
4362 paths: entries
4363 .into_iter()
4364 .map(|repo_path| repo_path.to_proto())
4365 .collect(),
4366 })
4367 .await
4368 .context("sending unstage request")
4369 .map(|_| ())
4370 }
4371 }
4372 };
4373
4374 for (diff_state, hunk_staging_operation_count) in
4375 hunk_staging_operation_counts
4376 {
4377 diff_state
4378 .update(&mut cx, |diff_state, cx| {
4379 if result.is_ok() {
4380 diff_state.hunk_staging_operation_count_as_of_write =
4381 hunk_staging_operation_count;
4382 } else if let Some(uncommitted_diff) =
4383 &diff_state.uncommitted_diff
4384 {
4385 uncommitted_diff
4386 .update(cx, |uncommitted_diff, cx| {
4387 uncommitted_diff.clear_pending_hunks(cx);
4388 })
4389 .ok();
4390 }
4391 })
4392 .ok();
4393 }
4394
4395 result
4396 },
4397 )
4398 })?
4399 .await?
4400 },
4401 )
4402 }
4403
4404 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4405 let to_stage = self
4406 .cached_status()
4407 .filter_map(|entry| {
4408 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4409 if ops.staging() || ops.staged() {
4410 None
4411 } else {
4412 Some(entry.repo_path)
4413 }
4414 } else if entry.status.staging().is_fully_staged() {
4415 None
4416 } else {
4417 Some(entry.repo_path)
4418 }
4419 })
4420 .collect();
4421 self.stage_or_unstage_entries(true, to_stage, cx)
4422 }
4423
4424 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4425 let to_unstage = self
4426 .cached_status()
4427 .filter_map(|entry| {
4428 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4429 if !ops.staging() && !ops.staged() {
4430 None
4431 } else {
4432 Some(entry.repo_path)
4433 }
4434 } else if entry.status.staging().is_fully_unstaged() {
4435 None
4436 } else {
4437 Some(entry.repo_path)
4438 }
4439 })
4440 .collect();
4441 self.stage_or_unstage_entries(false, to_unstage, cx)
4442 }
4443
4444 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4445 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4446
4447 self.stash_entries(to_stash, cx)
4448 }
4449
4450 pub fn stash_entries(
4451 &mut self,
4452 entries: Vec<RepoPath>,
4453 cx: &mut Context<Self>,
4454 ) -> Task<anyhow::Result<()>> {
4455 let id = self.id;
4456
4457 cx.spawn(async move |this, cx| {
4458 this.update(cx, |this, _| {
4459 this.send_job(None, move |git_repo, _cx| async move {
4460 match git_repo {
4461 RepositoryState::Local(LocalRepositoryState {
4462 backend,
4463 environment,
4464 ..
4465 }) => backend.stash_paths(entries, environment).await,
4466 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4467 client
4468 .request(proto::Stash {
4469 project_id: project_id.0,
4470 repository_id: id.to_proto(),
4471 paths: entries
4472 .into_iter()
4473 .map(|repo_path| repo_path.to_proto())
4474 .collect(),
4475 })
4476 .await
4477 .context("sending stash request")?;
4478 Ok(())
4479 }
4480 }
4481 })
4482 })?
4483 .await??;
4484 Ok(())
4485 })
4486 }
4487
4488 pub fn stash_pop(
4489 &mut self,
4490 index: Option<usize>,
4491 cx: &mut Context<Self>,
4492 ) -> Task<anyhow::Result<()>> {
4493 let id = self.id;
4494 cx.spawn(async move |this, cx| {
4495 this.update(cx, |this, _| {
4496 this.send_job(None, move |git_repo, _cx| async move {
4497 match git_repo {
4498 RepositoryState::Local(LocalRepositoryState {
4499 backend,
4500 environment,
4501 ..
4502 }) => backend.stash_pop(index, environment).await,
4503 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4504 client
4505 .request(proto::StashPop {
4506 project_id: project_id.0,
4507 repository_id: id.to_proto(),
4508 stash_index: index.map(|i| i as u64),
4509 })
4510 .await
4511 .context("sending stash pop request")?;
4512 Ok(())
4513 }
4514 }
4515 })
4516 })?
4517 .await??;
4518 Ok(())
4519 })
4520 }
4521
4522 pub fn stash_apply(
4523 &mut self,
4524 index: Option<usize>,
4525 cx: &mut Context<Self>,
4526 ) -> Task<anyhow::Result<()>> {
4527 let id = self.id;
4528 cx.spawn(async move |this, cx| {
4529 this.update(cx, |this, _| {
4530 this.send_job(None, move |git_repo, _cx| async move {
4531 match git_repo {
4532 RepositoryState::Local(LocalRepositoryState {
4533 backend,
4534 environment,
4535 ..
4536 }) => backend.stash_apply(index, environment).await,
4537 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4538 client
4539 .request(proto::StashApply {
4540 project_id: project_id.0,
4541 repository_id: id.to_proto(),
4542 stash_index: index.map(|i| i as u64),
4543 })
4544 .await
4545 .context("sending stash apply request")?;
4546 Ok(())
4547 }
4548 }
4549 })
4550 })?
4551 .await??;
4552 Ok(())
4553 })
4554 }
4555
4556 pub fn stash_drop(
4557 &mut self,
4558 index: Option<usize>,
4559 cx: &mut Context<Self>,
4560 ) -> oneshot::Receiver<anyhow::Result<()>> {
4561 let id = self.id;
4562 let updates_tx = self
4563 .git_store()
4564 .and_then(|git_store| match &git_store.read(cx).state {
4565 GitStoreState::Local { downstream, .. } => downstream
4566 .as_ref()
4567 .map(|downstream| downstream.updates_tx.clone()),
4568 _ => None,
4569 });
4570 let this = cx.weak_entity();
4571 self.send_job(None, move |git_repo, mut cx| async move {
4572 match git_repo {
4573 RepositoryState::Local(LocalRepositoryState {
4574 backend,
4575 environment,
4576 ..
4577 }) => {
4578 // TODO would be nice to not have to do this manually
4579 let result = backend.stash_drop(index, environment).await;
4580 if result.is_ok()
4581 && let Ok(stash_entries) = backend.stash_entries().await
4582 {
4583 let snapshot = this.update(&mut cx, |this, cx| {
4584 this.snapshot.stash_entries = stash_entries;
4585 cx.emit(RepositoryEvent::StashEntriesChanged);
4586 this.snapshot.clone()
4587 })?;
4588 if let Some(updates_tx) = updates_tx {
4589 updates_tx
4590 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4591 .ok();
4592 }
4593 }
4594
4595 result
4596 }
4597 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4598 client
4599 .request(proto::StashDrop {
4600 project_id: project_id.0,
4601 repository_id: id.to_proto(),
4602 stash_index: index.map(|i| i as u64),
4603 })
4604 .await
4605 .context("sending stash pop request")?;
4606 Ok(())
4607 }
4608 }
4609 })
4610 }
4611
4612 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4613 let id = self.id;
4614 self.send_job(
4615 Some(format!("git hook {}", hook.as_str()).into()),
4616 move |git_repo, _cx| async move {
4617 match git_repo {
4618 RepositoryState::Local(LocalRepositoryState {
4619 backend,
4620 environment,
4621 ..
4622 }) => backend.run_hook(hook, environment.clone()).await,
4623 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4624 client
4625 .request(proto::RunGitHook {
4626 project_id: project_id.0,
4627 repository_id: id.to_proto(),
4628 hook: hook.to_proto(),
4629 })
4630 .await?;
4631
4632 Ok(())
4633 }
4634 }
4635 },
4636 )
4637 }
4638
4639 pub fn commit(
4640 &mut self,
4641 message: SharedString,
4642 name_and_email: Option<(SharedString, SharedString)>,
4643 options: CommitOptions,
4644 askpass: AskPassDelegate,
4645 cx: &mut App,
4646 ) -> oneshot::Receiver<Result<()>> {
4647 let id = self.id;
4648 let askpass_delegates = self.askpass_delegates.clone();
4649 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4650
4651 let rx = self.run_hook(RunHook::PreCommit, cx);
4652
4653 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4654 rx.await??;
4655
4656 match git_repo {
4657 RepositoryState::Local(LocalRepositoryState {
4658 backend,
4659 environment,
4660 ..
4661 }) => {
4662 backend
4663 .commit(message, name_and_email, options, askpass, environment)
4664 .await
4665 }
4666 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4667 askpass_delegates.lock().insert(askpass_id, askpass);
4668 let _defer = util::defer(|| {
4669 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4670 debug_assert!(askpass_delegate.is_some());
4671 });
4672 let (name, email) = name_and_email.unzip();
4673 client
4674 .request(proto::Commit {
4675 project_id: project_id.0,
4676 repository_id: id.to_proto(),
4677 message: String::from(message),
4678 name: name.map(String::from),
4679 email: email.map(String::from),
4680 options: Some(proto::commit::CommitOptions {
4681 amend: options.amend,
4682 signoff: options.signoff,
4683 }),
4684 askpass_id,
4685 })
4686 .await
4687 .context("sending commit request")?;
4688
4689 Ok(())
4690 }
4691 }
4692 })
4693 }
4694
4695 pub fn fetch(
4696 &mut self,
4697 fetch_options: FetchOptions,
4698 askpass: AskPassDelegate,
4699 _cx: &mut App,
4700 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4701 let askpass_delegates = self.askpass_delegates.clone();
4702 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4703 let id = self.id;
4704
4705 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4706 match git_repo {
4707 RepositoryState::Local(LocalRepositoryState {
4708 backend,
4709 environment,
4710 ..
4711 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4712 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4713 askpass_delegates.lock().insert(askpass_id, askpass);
4714 let _defer = util::defer(|| {
4715 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4716 debug_assert!(askpass_delegate.is_some());
4717 });
4718
4719 let response = client
4720 .request(proto::Fetch {
4721 project_id: project_id.0,
4722 repository_id: id.to_proto(),
4723 askpass_id,
4724 remote: fetch_options.to_proto(),
4725 })
4726 .await
4727 .context("sending fetch request")?;
4728
4729 Ok(RemoteCommandOutput {
4730 stdout: response.stdout,
4731 stderr: response.stderr,
4732 })
4733 }
4734 }
4735 })
4736 }
4737
4738 pub fn push(
4739 &mut self,
4740 branch: SharedString,
4741 remote: SharedString,
4742 options: Option<PushOptions>,
4743 askpass: AskPassDelegate,
4744 cx: &mut Context<Self>,
4745 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4746 let askpass_delegates = self.askpass_delegates.clone();
4747 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4748 let id = self.id;
4749
4750 let args = options
4751 .map(|option| match option {
4752 PushOptions::SetUpstream => " --set-upstream",
4753 PushOptions::Force => " --force-with-lease",
4754 })
4755 .unwrap_or("");
4756
4757 let updates_tx = self
4758 .git_store()
4759 .and_then(|git_store| match &git_store.read(cx).state {
4760 GitStoreState::Local { downstream, .. } => downstream
4761 .as_ref()
4762 .map(|downstream| downstream.updates_tx.clone()),
4763 _ => None,
4764 });
4765
4766 let this = cx.weak_entity();
4767 self.send_job(
4768 Some(format!("git push {} {} {}", args, remote, branch).into()),
4769 move |git_repo, mut cx| async move {
4770 match git_repo {
4771 RepositoryState::Local(LocalRepositoryState {
4772 backend,
4773 environment,
4774 ..
4775 }) => {
4776 let result = backend
4777 .push(
4778 branch.to_string(),
4779 remote.to_string(),
4780 options,
4781 askpass,
4782 environment.clone(),
4783 cx.clone(),
4784 )
4785 .await;
4786 // TODO would be nice to not have to do this manually
4787 if result.is_ok() {
4788 let branches = backend.branches().await?;
4789 let branch = branches.into_iter().find(|branch| branch.is_head);
4790 log::info!("head branch after scan is {branch:?}");
4791 let snapshot = this.update(&mut cx, |this, cx| {
4792 this.snapshot.branch = branch;
4793 cx.emit(RepositoryEvent::BranchChanged);
4794 this.snapshot.clone()
4795 })?;
4796 if let Some(updates_tx) = updates_tx {
4797 updates_tx
4798 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4799 .ok();
4800 }
4801 }
4802 result
4803 }
4804 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4805 askpass_delegates.lock().insert(askpass_id, askpass);
4806 let _defer = util::defer(|| {
4807 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4808 debug_assert!(askpass_delegate.is_some());
4809 });
4810 let response = client
4811 .request(proto::Push {
4812 project_id: project_id.0,
4813 repository_id: id.to_proto(),
4814 askpass_id,
4815 branch_name: branch.to_string(),
4816 remote_name: remote.to_string(),
4817 options: options.map(|options| match options {
4818 PushOptions::Force => proto::push::PushOptions::Force,
4819 PushOptions::SetUpstream => {
4820 proto::push::PushOptions::SetUpstream
4821 }
4822 }
4823 as i32),
4824 })
4825 .await
4826 .context("sending push request")?;
4827
4828 Ok(RemoteCommandOutput {
4829 stdout: response.stdout,
4830 stderr: response.stderr,
4831 })
4832 }
4833 }
4834 },
4835 )
4836 }
4837
4838 pub fn pull(
4839 &mut self,
4840 branch: Option<SharedString>,
4841 remote: SharedString,
4842 rebase: bool,
4843 askpass: AskPassDelegate,
4844 _cx: &mut App,
4845 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4846 let askpass_delegates = self.askpass_delegates.clone();
4847 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4848 let id = self.id;
4849
4850 let mut status = "git pull".to_string();
4851 if rebase {
4852 status.push_str(" --rebase");
4853 }
4854 status.push_str(&format!(" {}", remote));
4855 if let Some(b) = &branch {
4856 status.push_str(&format!(" {}", b));
4857 }
4858
4859 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4860 match git_repo {
4861 RepositoryState::Local(LocalRepositoryState {
4862 backend,
4863 environment,
4864 ..
4865 }) => {
4866 backend
4867 .pull(
4868 branch.as_ref().map(|b| b.to_string()),
4869 remote.to_string(),
4870 rebase,
4871 askpass,
4872 environment.clone(),
4873 cx,
4874 )
4875 .await
4876 }
4877 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4878 askpass_delegates.lock().insert(askpass_id, askpass);
4879 let _defer = util::defer(|| {
4880 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4881 debug_assert!(askpass_delegate.is_some());
4882 });
4883 let response = client
4884 .request(proto::Pull {
4885 project_id: project_id.0,
4886 repository_id: id.to_proto(),
4887 askpass_id,
4888 rebase,
4889 branch_name: branch.as_ref().map(|b| b.to_string()),
4890 remote_name: remote.to_string(),
4891 })
4892 .await
4893 .context("sending pull request")?;
4894
4895 Ok(RemoteCommandOutput {
4896 stdout: response.stdout,
4897 stderr: response.stderr,
4898 })
4899 }
4900 }
4901 })
4902 }
4903
4904 fn spawn_set_index_text_job(
4905 &mut self,
4906 path: RepoPath,
4907 content: Option<String>,
4908 hunk_staging_operation_count: Option<usize>,
4909 cx: &mut Context<Self>,
4910 ) -> oneshot::Receiver<anyhow::Result<()>> {
4911 let id = self.id;
4912 let this = cx.weak_entity();
4913 let git_store = self.git_store.clone();
4914 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4915 self.send_keyed_job(
4916 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4917 None,
4918 move |git_repo, mut cx| async move {
4919 log::debug!(
4920 "start updating index text for buffer {}",
4921 path.as_unix_str()
4922 );
4923
4924 match git_repo {
4925 RepositoryState::Local(LocalRepositoryState {
4926 fs,
4927 backend,
4928 environment,
4929 ..
4930 }) => {
4931 let executable = match fs.metadata(&abs_path).await {
4932 Ok(Some(meta)) => meta.is_executable,
4933 Ok(None) => false,
4934 Err(_err) => false,
4935 };
4936 backend
4937 .set_index_text(path.clone(), content, environment.clone(), executable)
4938 .await?;
4939 }
4940 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4941 client
4942 .request(proto::SetIndexText {
4943 project_id: project_id.0,
4944 repository_id: id.to_proto(),
4945 path: path.to_proto(),
4946 text: content,
4947 })
4948 .await?;
4949 }
4950 }
4951 log::debug!(
4952 "finish updating index text for buffer {}",
4953 path.as_unix_str()
4954 );
4955
4956 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4957 let project_path = this
4958 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4959 .ok()
4960 .flatten();
4961 git_store.update(&mut cx, |git_store, cx| {
4962 let buffer_id = git_store
4963 .buffer_store
4964 .read(cx)
4965 .get_by_path(&project_path?)?
4966 .read(cx)
4967 .remote_id();
4968 let diff_state = git_store.diffs.get(&buffer_id)?;
4969 diff_state.update(cx, |diff_state, _| {
4970 diff_state.hunk_staging_operation_count_as_of_write =
4971 hunk_staging_operation_count;
4972 });
4973 Some(())
4974 })?;
4975 }
4976 Ok(())
4977 },
4978 )
4979 }
4980
4981 pub fn create_remote(
4982 &mut self,
4983 remote_name: String,
4984 remote_url: String,
4985 ) -> oneshot::Receiver<Result<()>> {
4986 let id = self.id;
4987 self.send_job(
4988 Some(format!("git remote add {remote_name} {remote_url}").into()),
4989 move |repo, _cx| async move {
4990 match repo {
4991 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4992 backend.create_remote(remote_name, remote_url).await
4993 }
4994 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4995 client
4996 .request(proto::GitCreateRemote {
4997 project_id: project_id.0,
4998 repository_id: id.to_proto(),
4999 remote_name,
5000 remote_url,
5001 })
5002 .await?;
5003
5004 Ok(())
5005 }
5006 }
5007 },
5008 )
5009 }
5010
5011 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5012 let id = self.id;
5013 self.send_job(
5014 Some(format!("git remove remote {remote_name}").into()),
5015 move |repo, _cx| async move {
5016 match repo {
5017 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5018 backend.remove_remote(remote_name).await
5019 }
5020 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5021 client
5022 .request(proto::GitRemoveRemote {
5023 project_id: project_id.0,
5024 repository_id: id.to_proto(),
5025 remote_name,
5026 })
5027 .await?;
5028
5029 Ok(())
5030 }
5031 }
5032 },
5033 )
5034 }
5035
5036 pub fn get_remotes(
5037 &mut self,
5038 branch_name: Option<String>,
5039 is_push: bool,
5040 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5041 let id = self.id;
5042 self.send_job(None, move |repo, _cx| async move {
5043 match repo {
5044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5045 let remote = if let Some(branch_name) = branch_name {
5046 if is_push {
5047 backend.get_push_remote(branch_name).await?
5048 } else {
5049 backend.get_branch_remote(branch_name).await?
5050 }
5051 } else {
5052 None
5053 };
5054
5055 match remote {
5056 Some(remote) => Ok(vec![remote]),
5057 None => backend.get_all_remotes().await,
5058 }
5059 }
5060 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5061 let response = client
5062 .request(proto::GetRemotes {
5063 project_id: project_id.0,
5064 repository_id: id.to_proto(),
5065 branch_name,
5066 is_push,
5067 })
5068 .await?;
5069
5070 let remotes = response
5071 .remotes
5072 .into_iter()
5073 .map(|remotes| Remote {
5074 name: remotes.name.into(),
5075 })
5076 .collect();
5077
5078 Ok(remotes)
5079 }
5080 }
5081 })
5082 }
5083
5084 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5085 let id = self.id;
5086 self.send_job(None, move |repo, _| async move {
5087 match repo {
5088 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5089 backend.branches().await
5090 }
5091 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5092 let response = client
5093 .request(proto::GitGetBranches {
5094 project_id: project_id.0,
5095 repository_id: id.to_proto(),
5096 })
5097 .await?;
5098
5099 let branches = response
5100 .branches
5101 .into_iter()
5102 .map(|branch| proto_to_branch(&branch))
5103 .collect();
5104
5105 Ok(branches)
5106 }
5107 }
5108 })
5109 }
5110
5111 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5112 let id = self.id;
5113 self.send_job(None, move |repo, _| async move {
5114 match repo {
5115 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5116 backend.worktrees().await
5117 }
5118 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5119 let response = client
5120 .request(proto::GitGetWorktrees {
5121 project_id: project_id.0,
5122 repository_id: id.to_proto(),
5123 })
5124 .await?;
5125
5126 let worktrees = response
5127 .worktrees
5128 .into_iter()
5129 .map(|worktree| proto_to_worktree(&worktree))
5130 .collect();
5131
5132 Ok(worktrees)
5133 }
5134 }
5135 })
5136 }
5137
5138 pub fn create_worktree(
5139 &mut self,
5140 name: String,
5141 path: PathBuf,
5142 commit: Option<String>,
5143 ) -> oneshot::Receiver<Result<()>> {
5144 let id = self.id;
5145 self.send_job(
5146 Some("git worktree add".into()),
5147 move |repo, _cx| async move {
5148 match repo {
5149 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5150 backend.create_worktree(name, path, commit).await
5151 }
5152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5153 client
5154 .request(proto::GitCreateWorktree {
5155 project_id: project_id.0,
5156 repository_id: id.to_proto(),
5157 name,
5158 directory: path.to_string_lossy().to_string(),
5159 commit,
5160 })
5161 .await?;
5162
5163 Ok(())
5164 }
5165 }
5166 },
5167 )
5168 }
5169
5170 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5171 let id = self.id;
5172 self.send_job(None, move |repo, _| async move {
5173 match repo {
5174 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5175 backend.default_branch().await
5176 }
5177 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5178 let response = client
5179 .request(proto::GetDefaultBranch {
5180 project_id: project_id.0,
5181 repository_id: id.to_proto(),
5182 })
5183 .await?;
5184
5185 anyhow::Ok(response.branch.map(SharedString::from))
5186 }
5187 }
5188 })
5189 }
5190
5191 pub fn diff_tree(
5192 &mut self,
5193 diff_type: DiffTreeType,
5194 _cx: &App,
5195 ) -> oneshot::Receiver<Result<TreeDiff>> {
5196 let repository_id = self.snapshot.id;
5197 self.send_job(None, move |repo, _cx| async move {
5198 match repo {
5199 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5200 backend.diff_tree(diff_type).await
5201 }
5202 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5203 let response = client
5204 .request(proto::GetTreeDiff {
5205 project_id: project_id.0,
5206 repository_id: repository_id.0,
5207 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5208 base: diff_type.base().to_string(),
5209 head: diff_type.head().to_string(),
5210 })
5211 .await?;
5212
5213 let entries = response
5214 .entries
5215 .into_iter()
5216 .filter_map(|entry| {
5217 let status = match entry.status() {
5218 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5219 proto::tree_diff_status::Status::Modified => {
5220 TreeDiffStatus::Modified {
5221 old: git::Oid::from_str(
5222 &entry.oid.context("missing oid").log_err()?,
5223 )
5224 .log_err()?,
5225 }
5226 }
5227 proto::tree_diff_status::Status::Deleted => {
5228 TreeDiffStatus::Deleted {
5229 old: git::Oid::from_str(
5230 &entry.oid.context("missing oid").log_err()?,
5231 )
5232 .log_err()?,
5233 }
5234 }
5235 };
5236 Some((
5237 RepoPath::from_rel_path(
5238 &RelPath::from_proto(&entry.path).log_err()?,
5239 ),
5240 status,
5241 ))
5242 })
5243 .collect();
5244
5245 Ok(TreeDiff { entries })
5246 }
5247 }
5248 })
5249 }
5250
5251 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5252 let id = self.id;
5253 self.send_job(None, move |repo, _cx| async move {
5254 match repo {
5255 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5256 backend.diff(diff_type).await
5257 }
5258 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5259 let response = client
5260 .request(proto::GitDiff {
5261 project_id: project_id.0,
5262 repository_id: id.to_proto(),
5263 diff_type: match diff_type {
5264 DiffType::HeadToIndex => {
5265 proto::git_diff::DiffType::HeadToIndex.into()
5266 }
5267 DiffType::HeadToWorktree => {
5268 proto::git_diff::DiffType::HeadToWorktree.into()
5269 }
5270 },
5271 })
5272 .await?;
5273
5274 Ok(response.diff)
5275 }
5276 }
5277 })
5278 }
5279
5280 pub fn create_branch(
5281 &mut self,
5282 branch_name: String,
5283 base_branch: Option<String>,
5284 ) -> oneshot::Receiver<Result<()>> {
5285 let id = self.id;
5286 let status_msg = if let Some(ref base) = base_branch {
5287 format!("git switch -c {branch_name} {base}").into()
5288 } else {
5289 format!("git switch -c {branch_name}").into()
5290 };
5291 self.send_job(Some(status_msg), move |repo, _cx| async move {
5292 match repo {
5293 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5294 backend.create_branch(branch_name, base_branch).await
5295 }
5296 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5297 client
5298 .request(proto::GitCreateBranch {
5299 project_id: project_id.0,
5300 repository_id: id.to_proto(),
5301 branch_name,
5302 })
5303 .await?;
5304
5305 Ok(())
5306 }
5307 }
5308 })
5309 }
5310
5311 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5312 let id = self.id;
5313 self.send_job(
5314 Some(format!("git switch {branch_name}").into()),
5315 move |repo, _cx| async move {
5316 match repo {
5317 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5318 backend.change_branch(branch_name).await
5319 }
5320 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5321 client
5322 .request(proto::GitChangeBranch {
5323 project_id: project_id.0,
5324 repository_id: id.to_proto(),
5325 branch_name,
5326 })
5327 .await?;
5328
5329 Ok(())
5330 }
5331 }
5332 },
5333 )
5334 }
5335
5336 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5337 let id = self.id;
5338 self.send_job(
5339 Some(format!("git branch -d {branch_name}").into()),
5340 move |repo, _cx| async move {
5341 match repo {
5342 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5343 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5344 client
5345 .request(proto::GitDeleteBranch {
5346 project_id: project_id.0,
5347 repository_id: id.to_proto(),
5348 branch_name,
5349 })
5350 .await?;
5351
5352 Ok(())
5353 }
5354 }
5355 },
5356 )
5357 }
5358
5359 pub fn rename_branch(
5360 &mut self,
5361 branch: String,
5362 new_name: String,
5363 ) -> oneshot::Receiver<Result<()>> {
5364 let id = self.id;
5365 self.send_job(
5366 Some(format!("git branch -m {branch} {new_name}").into()),
5367 move |repo, _cx| async move {
5368 match repo {
5369 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5370 backend.rename_branch(branch, new_name).await
5371 }
5372 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5373 client
5374 .request(proto::GitRenameBranch {
5375 project_id: project_id.0,
5376 repository_id: id.to_proto(),
5377 branch,
5378 new_name,
5379 })
5380 .await?;
5381
5382 Ok(())
5383 }
5384 }
5385 },
5386 )
5387 }
5388
5389 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5390 let id = self.id;
5391 self.send_job(None, move |repo, _cx| async move {
5392 match repo {
5393 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5394 backend.check_for_pushed_commit().await
5395 }
5396 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5397 let response = client
5398 .request(proto::CheckForPushedCommits {
5399 project_id: project_id.0,
5400 repository_id: id.to_proto(),
5401 })
5402 .await?;
5403
5404 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5405
5406 Ok(branches)
5407 }
5408 }
5409 })
5410 }
5411
5412 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5413 self.send_job(None, |repo, _cx| async move {
5414 match repo {
5415 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5416 backend.checkpoint().await
5417 }
5418 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5419 }
5420 })
5421 }
5422
5423 pub fn restore_checkpoint(
5424 &mut self,
5425 checkpoint: GitRepositoryCheckpoint,
5426 ) -> oneshot::Receiver<Result<()>> {
5427 self.send_job(None, move |repo, _cx| async move {
5428 match repo {
5429 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5430 backend.restore_checkpoint(checkpoint).await
5431 }
5432 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5433 }
5434 })
5435 }
5436
5437 pub(crate) fn apply_remote_update(
5438 &mut self,
5439 update: proto::UpdateRepository,
5440 cx: &mut Context<Self>,
5441 ) -> Result<()> {
5442 let conflicted_paths = TreeSet::from_ordered_entries(
5443 update
5444 .current_merge_conflicts
5445 .into_iter()
5446 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5447 );
5448 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5449 let new_head_commit = update
5450 .head_commit_details
5451 .as_ref()
5452 .map(proto_to_commit_details);
5453 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5454 cx.emit(RepositoryEvent::BranchChanged)
5455 }
5456 self.snapshot.branch = new_branch;
5457 self.snapshot.head_commit = new_head_commit;
5458
5459 self.snapshot.merge.conflicted_paths = conflicted_paths;
5460 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5461 let new_stash_entries = GitStash {
5462 entries: update
5463 .stash_entries
5464 .iter()
5465 .filter_map(|entry| proto_to_stash(entry).ok())
5466 .collect(),
5467 };
5468 if self.snapshot.stash_entries != new_stash_entries {
5469 cx.emit(RepositoryEvent::StashEntriesChanged)
5470 }
5471 self.snapshot.stash_entries = new_stash_entries;
5472 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5473 self.snapshot.remote_origin_url = update.remote_origin_url;
5474
5475 let edits = update
5476 .removed_statuses
5477 .into_iter()
5478 .filter_map(|path| {
5479 Some(sum_tree::Edit::Remove(PathKey(
5480 RelPath::from_proto(&path).log_err()?,
5481 )))
5482 })
5483 .chain(
5484 update
5485 .updated_statuses
5486 .into_iter()
5487 .filter_map(|updated_status| {
5488 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5489 }),
5490 )
5491 .collect::<Vec<_>>();
5492 if !edits.is_empty() {
5493 cx.emit(RepositoryEvent::StatusesChanged);
5494 }
5495 self.snapshot.statuses_by_path.edit(edits, ());
5496 if update.is_last_update {
5497 self.snapshot.scan_id = update.scan_id;
5498 }
5499 self.clear_pending_ops(cx);
5500 Ok(())
5501 }
5502
5503 pub fn compare_checkpoints(
5504 &mut self,
5505 left: GitRepositoryCheckpoint,
5506 right: GitRepositoryCheckpoint,
5507 ) -> oneshot::Receiver<Result<bool>> {
5508 self.send_job(None, move |repo, _cx| async move {
5509 match repo {
5510 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5511 backend.compare_checkpoints(left, right).await
5512 }
5513 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5514 }
5515 })
5516 }
5517
5518 pub fn diff_checkpoints(
5519 &mut self,
5520 base_checkpoint: GitRepositoryCheckpoint,
5521 target_checkpoint: GitRepositoryCheckpoint,
5522 ) -> oneshot::Receiver<Result<String>> {
5523 self.send_job(None, move |repo, _cx| async move {
5524 match repo {
5525 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5526 backend
5527 .diff_checkpoints(base_checkpoint, target_checkpoint)
5528 .await
5529 }
5530 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5531 }
5532 })
5533 }
5534
5535 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5536 let updated = SumTree::from_iter(
5537 self.pending_ops.iter().filter_map(|ops| {
5538 let inner_ops: Vec<PendingOp> =
5539 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5540 if inner_ops.is_empty() {
5541 None
5542 } else {
5543 Some(PendingOps {
5544 repo_path: ops.repo_path.clone(),
5545 ops: inner_ops,
5546 })
5547 }
5548 }),
5549 (),
5550 );
5551
5552 if updated != self.pending_ops {
5553 cx.emit(RepositoryEvent::PendingOpsChanged {
5554 pending_ops: self.pending_ops.clone(),
5555 })
5556 }
5557
5558 self.pending_ops = updated;
5559 }
5560
5561 fn schedule_scan(
5562 &mut self,
5563 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5564 cx: &mut Context<Self>,
5565 ) {
5566 let this = cx.weak_entity();
5567 let _ = self.send_keyed_job(
5568 Some(GitJobKey::ReloadGitState),
5569 None,
5570 |state, mut cx| async move {
5571 log::debug!("run scheduled git status scan");
5572
5573 let Some(this) = this.upgrade() else {
5574 return Ok(());
5575 };
5576 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5577 bail!("not a local repository")
5578 };
5579 let (snapshot, events) = this
5580 .update(&mut cx, |this, _| {
5581 this.paths_needing_status_update.clear();
5582 compute_snapshot(
5583 this.id,
5584 this.work_directory_abs_path.clone(),
5585 this.snapshot.clone(),
5586 backend.clone(),
5587 )
5588 })?
5589 .await?;
5590 this.update(&mut cx, |this, cx| {
5591 this.snapshot = snapshot.clone();
5592 this.clear_pending_ops(cx);
5593 for event in events {
5594 cx.emit(event);
5595 }
5596 })?;
5597 if let Some(updates_tx) = updates_tx {
5598 updates_tx
5599 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5600 .ok();
5601 }
5602 Ok(())
5603 },
5604 );
5605 }
5606
5607 fn spawn_local_git_worker(
5608 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5609 cx: &mut Context<Self>,
5610 ) -> mpsc::UnboundedSender<GitJob> {
5611 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5612
5613 cx.spawn(async move |_, cx| {
5614 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5615 if let Some(git_hosting_provider_registry) =
5616 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5617 {
5618 git_hosting_providers::register_additional_providers(
5619 git_hosting_provider_registry,
5620 state.backend.clone(),
5621 )
5622 .await;
5623 }
5624 let state = RepositoryState::Local(state);
5625 let mut jobs = VecDeque::new();
5626 loop {
5627 while let Ok(Some(next_job)) = job_rx.try_next() {
5628 jobs.push_back(next_job);
5629 }
5630
5631 if let Some(job) = jobs.pop_front() {
5632 if let Some(current_key) = &job.key
5633 && jobs
5634 .iter()
5635 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5636 {
5637 continue;
5638 }
5639 (job.job)(state.clone(), cx).await;
5640 } else if let Some(job) = job_rx.next().await {
5641 jobs.push_back(job);
5642 } else {
5643 break;
5644 }
5645 }
5646 anyhow::Ok(())
5647 })
5648 .detach_and_log_err(cx);
5649
5650 job_tx
5651 }
5652
5653 fn spawn_remote_git_worker(
5654 state: RemoteRepositoryState,
5655 cx: &mut Context<Self>,
5656 ) -> mpsc::UnboundedSender<GitJob> {
5657 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5658
5659 cx.spawn(async move |_, cx| {
5660 let state = RepositoryState::Remote(state);
5661 let mut jobs = VecDeque::new();
5662 loop {
5663 while let Ok(Some(next_job)) = job_rx.try_next() {
5664 jobs.push_back(next_job);
5665 }
5666
5667 if let Some(job) = jobs.pop_front() {
5668 if let Some(current_key) = &job.key
5669 && jobs
5670 .iter()
5671 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5672 {
5673 continue;
5674 }
5675 (job.job)(state.clone(), cx).await;
5676 } else if let Some(job) = job_rx.next().await {
5677 jobs.push_back(job);
5678 } else {
5679 break;
5680 }
5681 }
5682 anyhow::Ok(())
5683 })
5684 .detach_and_log_err(cx);
5685
5686 job_tx
5687 }
5688
5689 fn load_staged_text(
5690 &mut self,
5691 buffer_id: BufferId,
5692 repo_path: RepoPath,
5693 cx: &App,
5694 ) -> Task<Result<Option<String>>> {
5695 let rx = self.send_job(None, move |state, _| async move {
5696 match state {
5697 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5698 anyhow::Ok(backend.load_index_text(repo_path).await)
5699 }
5700 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5701 let response = client
5702 .request(proto::OpenUnstagedDiff {
5703 project_id: project_id.to_proto(),
5704 buffer_id: buffer_id.to_proto(),
5705 })
5706 .await?;
5707 Ok(response.staged_text)
5708 }
5709 }
5710 });
5711 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5712 }
5713
5714 fn load_committed_text(
5715 &mut self,
5716 buffer_id: BufferId,
5717 repo_path: RepoPath,
5718 cx: &App,
5719 ) -> Task<Result<DiffBasesChange>> {
5720 let rx = self.send_job(None, move |state, _| async move {
5721 match state {
5722 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5723 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5724 let staged_text = backend.load_index_text(repo_path).await;
5725 let diff_bases_change = if committed_text == staged_text {
5726 DiffBasesChange::SetBoth(committed_text)
5727 } else {
5728 DiffBasesChange::SetEach {
5729 index: staged_text,
5730 head: committed_text,
5731 }
5732 };
5733 anyhow::Ok(diff_bases_change)
5734 }
5735 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5736 use proto::open_uncommitted_diff_response::Mode;
5737
5738 let response = client
5739 .request(proto::OpenUncommittedDiff {
5740 project_id: project_id.to_proto(),
5741 buffer_id: buffer_id.to_proto(),
5742 })
5743 .await?;
5744 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5745 let bases = match mode {
5746 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5747 Mode::IndexAndHead => DiffBasesChange::SetEach {
5748 head: response.committed_text,
5749 index: response.staged_text,
5750 },
5751 };
5752 Ok(bases)
5753 }
5754 }
5755 });
5756
5757 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5758 }
5759
5760 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5761 let repository_id = self.snapshot.id;
5762 let rx = self.send_job(None, move |state, _| async move {
5763 match state {
5764 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5765 backend.load_blob_content(oid).await
5766 }
5767 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5768 let response = client
5769 .request(proto::GetBlobContent {
5770 project_id: project_id.to_proto(),
5771 repository_id: repository_id.0,
5772 oid: oid.to_string(),
5773 })
5774 .await?;
5775 Ok(response.content)
5776 }
5777 }
5778 });
5779 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5780 }
5781
5782 fn paths_changed(
5783 &mut self,
5784 paths: Vec<RepoPath>,
5785 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5786 cx: &mut Context<Self>,
5787 ) {
5788 self.paths_needing_status_update.extend(paths);
5789
5790 let this = cx.weak_entity();
5791 let _ = self.send_keyed_job(
5792 Some(GitJobKey::RefreshStatuses),
5793 None,
5794 |state, mut cx| async move {
5795 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5796 (
5797 this.snapshot.clone(),
5798 mem::take(&mut this.paths_needing_status_update),
5799 )
5800 })?;
5801 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5802 bail!("not a local repository")
5803 };
5804
5805 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5806 if paths.is_empty() {
5807 return Ok(());
5808 }
5809 let statuses = backend.status(&paths).await?;
5810 let stash_entries = backend.stash_entries().await?;
5811
5812 let changed_path_statuses = cx
5813 .background_spawn(async move {
5814 let mut changed_path_statuses = Vec::new();
5815 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5816 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5817
5818 for (repo_path, status) in &*statuses.entries {
5819 changed_paths.remove(repo_path);
5820 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5821 && cursor.item().is_some_and(|entry| entry.status == *status)
5822 {
5823 continue;
5824 }
5825
5826 changed_path_statuses.push(Edit::Insert(StatusEntry {
5827 repo_path: repo_path.clone(),
5828 status: *status,
5829 }));
5830 }
5831 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5832 for path in changed_paths.into_iter() {
5833 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5834 changed_path_statuses
5835 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5836 }
5837 }
5838 changed_path_statuses
5839 })
5840 .await;
5841
5842 this.update(&mut cx, |this, cx| {
5843 if this.snapshot.stash_entries != stash_entries {
5844 cx.emit(RepositoryEvent::StashEntriesChanged);
5845 this.snapshot.stash_entries = stash_entries;
5846 }
5847
5848 if !changed_path_statuses.is_empty() {
5849 cx.emit(RepositoryEvent::StatusesChanged);
5850 this.snapshot
5851 .statuses_by_path
5852 .edit(changed_path_statuses, ());
5853 this.snapshot.scan_id += 1;
5854 }
5855
5856 if let Some(updates_tx) = updates_tx {
5857 updates_tx
5858 .unbounded_send(DownstreamUpdate::UpdateRepository(
5859 this.snapshot.clone(),
5860 ))
5861 .ok();
5862 }
5863 })
5864 },
5865 );
5866 }
5867
5868 /// currently running git command and when it started
5869 pub fn current_job(&self) -> Option<JobInfo> {
5870 self.active_jobs.values().next().cloned()
5871 }
5872
5873 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5874 self.send_job(None, |_, _| async {})
5875 }
5876
5877 fn spawn_job_with_tracking<AsyncFn>(
5878 &mut self,
5879 paths: Vec<RepoPath>,
5880 git_status: pending_op::GitStatus,
5881 cx: &mut Context<Self>,
5882 f: AsyncFn,
5883 ) -> Task<Result<()>>
5884 where
5885 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5886 {
5887 let ids = self.new_pending_ops_for_paths(paths, git_status);
5888
5889 cx.spawn(async move |this, cx| {
5890 let (job_status, result) = match f(this.clone(), cx).await {
5891 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5892 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5893 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5894 };
5895
5896 this.update(cx, |this, _| {
5897 let mut edits = Vec::with_capacity(ids.len());
5898 for (id, entry) in ids {
5899 if let Some(mut ops) = this
5900 .pending_ops
5901 .get(&PathKey(entry.as_ref().clone()), ())
5902 .cloned()
5903 {
5904 if let Some(op) = ops.op_by_id_mut(id) {
5905 op.job_status = job_status;
5906 }
5907 edits.push(sum_tree::Edit::Insert(ops));
5908 }
5909 }
5910 this.pending_ops.edit(edits, ());
5911 })?;
5912
5913 result
5914 })
5915 }
5916
5917 fn new_pending_ops_for_paths(
5918 &mut self,
5919 paths: Vec<RepoPath>,
5920 git_status: pending_op::GitStatus,
5921 ) -> Vec<(PendingOpId, RepoPath)> {
5922 let mut edits = Vec::with_capacity(paths.len());
5923 let mut ids = Vec::with_capacity(paths.len());
5924 for path in paths {
5925 let mut ops = self
5926 .pending_ops
5927 .get(&PathKey(path.as_ref().clone()), ())
5928 .cloned()
5929 .unwrap_or_else(|| PendingOps::new(&path));
5930 let id = ops.max_id() + 1;
5931 ops.ops.push(PendingOp {
5932 id,
5933 git_status,
5934 job_status: pending_op::JobStatus::Running,
5935 });
5936 edits.push(sum_tree::Edit::Insert(ops));
5937 ids.push((id, path));
5938 }
5939 self.pending_ops.edit(edits, ());
5940 ids
5941 }
5942 pub fn default_remote_url(&self) -> Option<String> {
5943 self.remote_upstream_url
5944 .clone()
5945 .or(self.remote_origin_url.clone())
5946 }
5947}
5948
5949fn get_permalink_in_rust_registry_src(
5950 provider_registry: Arc<GitHostingProviderRegistry>,
5951 path: PathBuf,
5952 selection: Range<u32>,
5953) -> Result<url::Url> {
5954 #[derive(Deserialize)]
5955 struct CargoVcsGit {
5956 sha1: String,
5957 }
5958
5959 #[derive(Deserialize)]
5960 struct CargoVcsInfo {
5961 git: CargoVcsGit,
5962 path_in_vcs: String,
5963 }
5964
5965 #[derive(Deserialize)]
5966 struct CargoPackage {
5967 repository: String,
5968 }
5969
5970 #[derive(Deserialize)]
5971 struct CargoToml {
5972 package: CargoPackage,
5973 }
5974
5975 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5976 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5977 Some((dir, json))
5978 }) else {
5979 bail!("No .cargo_vcs_info.json found in parent directories")
5980 };
5981 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5982 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5983 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5984 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5985 .context("parsing package.repository field of manifest")?;
5986 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5987 let permalink = provider.build_permalink(
5988 remote,
5989 BuildPermalinkParams::new(
5990 &cargo_vcs_info.git.sha1,
5991 &RepoPath::from_rel_path(
5992 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5993 ),
5994 Some(selection),
5995 ),
5996 );
5997 Ok(permalink)
5998}
5999
6000fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6001 let Some(blame) = blame else {
6002 return proto::BlameBufferResponse {
6003 blame_response: None,
6004 };
6005 };
6006
6007 let entries = blame
6008 .entries
6009 .into_iter()
6010 .map(|entry| proto::BlameEntry {
6011 sha: entry.sha.as_bytes().into(),
6012 start_line: entry.range.start,
6013 end_line: entry.range.end,
6014 original_line_number: entry.original_line_number,
6015 author: entry.author,
6016 author_mail: entry.author_mail,
6017 author_time: entry.author_time,
6018 author_tz: entry.author_tz,
6019 committer: entry.committer_name,
6020 committer_mail: entry.committer_email,
6021 committer_time: entry.committer_time,
6022 committer_tz: entry.committer_tz,
6023 summary: entry.summary,
6024 previous: entry.previous,
6025 filename: entry.filename,
6026 })
6027 .collect::<Vec<_>>();
6028
6029 let messages = blame
6030 .messages
6031 .into_iter()
6032 .map(|(oid, message)| proto::CommitMessage {
6033 oid: oid.as_bytes().into(),
6034 message,
6035 })
6036 .collect::<Vec<_>>();
6037
6038 proto::BlameBufferResponse {
6039 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6040 }
6041}
6042
6043fn deserialize_blame_buffer_response(
6044 response: proto::BlameBufferResponse,
6045) -> Option<git::blame::Blame> {
6046 let response = response.blame_response?;
6047 let entries = response
6048 .entries
6049 .into_iter()
6050 .filter_map(|entry| {
6051 Some(git::blame::BlameEntry {
6052 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6053 range: entry.start_line..entry.end_line,
6054 original_line_number: entry.original_line_number,
6055 committer_name: entry.committer,
6056 committer_time: entry.committer_time,
6057 committer_tz: entry.committer_tz,
6058 committer_email: entry.committer_mail,
6059 author: entry.author,
6060 author_mail: entry.author_mail,
6061 author_time: entry.author_time,
6062 author_tz: entry.author_tz,
6063 summary: entry.summary,
6064 previous: entry.previous,
6065 filename: entry.filename,
6066 })
6067 })
6068 .collect::<Vec<_>>();
6069
6070 let messages = response
6071 .messages
6072 .into_iter()
6073 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6074 .collect::<HashMap<_, _>>();
6075
6076 Some(Blame { entries, messages })
6077}
6078
6079fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6080 proto::Branch {
6081 is_head: branch.is_head,
6082 ref_name: branch.ref_name.to_string(),
6083 unix_timestamp: branch
6084 .most_recent_commit
6085 .as_ref()
6086 .map(|commit| commit.commit_timestamp as u64),
6087 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6088 ref_name: upstream.ref_name.to_string(),
6089 tracking: upstream
6090 .tracking
6091 .status()
6092 .map(|upstream| proto::UpstreamTracking {
6093 ahead: upstream.ahead as u64,
6094 behind: upstream.behind as u64,
6095 }),
6096 }),
6097 most_recent_commit: branch
6098 .most_recent_commit
6099 .as_ref()
6100 .map(|commit| proto::CommitSummary {
6101 sha: commit.sha.to_string(),
6102 subject: commit.subject.to_string(),
6103 commit_timestamp: commit.commit_timestamp,
6104 author_name: commit.author_name.to_string(),
6105 }),
6106 }
6107}
6108
6109fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6110 proto::Worktree {
6111 path: worktree.path.to_string_lossy().to_string(),
6112 ref_name: worktree.ref_name.to_string(),
6113 sha: worktree.sha.to_string(),
6114 }
6115}
6116
6117fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6118 git::repository::Worktree {
6119 path: PathBuf::from(proto.path.clone()),
6120 ref_name: proto.ref_name.clone().into(),
6121 sha: proto.sha.clone().into(),
6122 }
6123}
6124
6125fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6126 git::repository::Branch {
6127 is_head: proto.is_head,
6128 ref_name: proto.ref_name.clone().into(),
6129 upstream: proto
6130 .upstream
6131 .as_ref()
6132 .map(|upstream| git::repository::Upstream {
6133 ref_name: upstream.ref_name.to_string().into(),
6134 tracking: upstream
6135 .tracking
6136 .as_ref()
6137 .map(|tracking| {
6138 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6139 ahead: tracking.ahead as u32,
6140 behind: tracking.behind as u32,
6141 })
6142 })
6143 .unwrap_or(git::repository::UpstreamTracking::Gone),
6144 }),
6145 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6146 git::repository::CommitSummary {
6147 sha: commit.sha.to_string().into(),
6148 subject: commit.subject.to_string().into(),
6149 commit_timestamp: commit.commit_timestamp,
6150 author_name: commit.author_name.to_string().into(),
6151 has_parent: true,
6152 }
6153 }),
6154 }
6155}
6156
6157fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6158 proto::GitCommitDetails {
6159 sha: commit.sha.to_string(),
6160 message: commit.message.to_string(),
6161 commit_timestamp: commit.commit_timestamp,
6162 author_email: commit.author_email.to_string(),
6163 author_name: commit.author_name.to_string(),
6164 }
6165}
6166
6167fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6168 CommitDetails {
6169 sha: proto.sha.clone().into(),
6170 message: proto.message.clone().into(),
6171 commit_timestamp: proto.commit_timestamp,
6172 author_email: proto.author_email.clone().into(),
6173 author_name: proto.author_name.clone().into(),
6174 }
6175}
6176
6177async fn compute_snapshot(
6178 id: RepositoryId,
6179 work_directory_abs_path: Arc<Path>,
6180 prev_snapshot: RepositorySnapshot,
6181 backend: Arc<dyn GitRepository>,
6182) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6183 let mut events = Vec::new();
6184 let branches = backend.branches().await?;
6185 let branch = branches.into_iter().find(|branch| branch.is_head);
6186 let statuses = backend
6187 .status(&[RepoPath::from_rel_path(
6188 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6189 )])
6190 .await?;
6191 let stash_entries = backend.stash_entries().await?;
6192 let statuses_by_path = SumTree::from_iter(
6193 statuses
6194 .entries
6195 .iter()
6196 .map(|(repo_path, status)| StatusEntry {
6197 repo_path: repo_path.clone(),
6198 status: *status,
6199 }),
6200 (),
6201 );
6202 let (merge_details, merge_heads_changed) =
6203 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6204 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6205
6206 if merge_heads_changed {
6207 events.push(RepositoryEvent::MergeHeadsChanged);
6208 }
6209
6210 if statuses_by_path != prev_snapshot.statuses_by_path {
6211 events.push(RepositoryEvent::StatusesChanged)
6212 }
6213
6214 // Useful when branch is None in detached head state
6215 let head_commit = match backend.head_sha().await {
6216 Some(head_sha) => backend.show(head_sha).await.log_err(),
6217 None => None,
6218 };
6219
6220 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6221 events.push(RepositoryEvent::BranchChanged);
6222 }
6223
6224 let remote_origin_url = backend.remote_url("origin").await;
6225 let remote_upstream_url = backend.remote_url("upstream").await;
6226
6227 let snapshot = RepositorySnapshot {
6228 id,
6229 statuses_by_path,
6230 work_directory_abs_path,
6231 path_style: prev_snapshot.path_style,
6232 scan_id: prev_snapshot.scan_id + 1,
6233 branch,
6234 head_commit,
6235 merge: merge_details,
6236 remote_origin_url,
6237 remote_upstream_url,
6238 stash_entries,
6239 };
6240
6241 Ok((snapshot, events))
6242}
6243
6244fn status_from_proto(
6245 simple_status: i32,
6246 status: Option<proto::GitFileStatus>,
6247) -> anyhow::Result<FileStatus> {
6248 use proto::git_file_status::Variant;
6249
6250 let Some(variant) = status.and_then(|status| status.variant) else {
6251 let code = proto::GitStatus::from_i32(simple_status)
6252 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6253 let result = match code {
6254 proto::GitStatus::Added => TrackedStatus {
6255 worktree_status: StatusCode::Added,
6256 index_status: StatusCode::Unmodified,
6257 }
6258 .into(),
6259 proto::GitStatus::Modified => TrackedStatus {
6260 worktree_status: StatusCode::Modified,
6261 index_status: StatusCode::Unmodified,
6262 }
6263 .into(),
6264 proto::GitStatus::Conflict => UnmergedStatus {
6265 first_head: UnmergedStatusCode::Updated,
6266 second_head: UnmergedStatusCode::Updated,
6267 }
6268 .into(),
6269 proto::GitStatus::Deleted => TrackedStatus {
6270 worktree_status: StatusCode::Deleted,
6271 index_status: StatusCode::Unmodified,
6272 }
6273 .into(),
6274 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6275 };
6276 return Ok(result);
6277 };
6278
6279 let result = match variant {
6280 Variant::Untracked(_) => FileStatus::Untracked,
6281 Variant::Ignored(_) => FileStatus::Ignored,
6282 Variant::Unmerged(unmerged) => {
6283 let [first_head, second_head] =
6284 [unmerged.first_head, unmerged.second_head].map(|head| {
6285 let code = proto::GitStatus::from_i32(head)
6286 .with_context(|| format!("Invalid git status code: {head}"))?;
6287 let result = match code {
6288 proto::GitStatus::Added => UnmergedStatusCode::Added,
6289 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6290 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6291 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6292 };
6293 Ok(result)
6294 });
6295 let [first_head, second_head] = [first_head?, second_head?];
6296 UnmergedStatus {
6297 first_head,
6298 second_head,
6299 }
6300 .into()
6301 }
6302 Variant::Tracked(tracked) => {
6303 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6304 .map(|status| {
6305 let code = proto::GitStatus::from_i32(status)
6306 .with_context(|| format!("Invalid git status code: {status}"))?;
6307 let result = match code {
6308 proto::GitStatus::Modified => StatusCode::Modified,
6309 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6310 proto::GitStatus::Added => StatusCode::Added,
6311 proto::GitStatus::Deleted => StatusCode::Deleted,
6312 proto::GitStatus::Renamed => StatusCode::Renamed,
6313 proto::GitStatus::Copied => StatusCode::Copied,
6314 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6315 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6316 };
6317 Ok(result)
6318 });
6319 let [index_status, worktree_status] = [index_status?, worktree_status?];
6320 TrackedStatus {
6321 index_status,
6322 worktree_status,
6323 }
6324 .into()
6325 }
6326 };
6327 Ok(result)
6328}
6329
6330fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6331 use proto::git_file_status::{Tracked, Unmerged, Variant};
6332
6333 let variant = match status {
6334 FileStatus::Untracked => Variant::Untracked(Default::default()),
6335 FileStatus::Ignored => Variant::Ignored(Default::default()),
6336 FileStatus::Unmerged(UnmergedStatus {
6337 first_head,
6338 second_head,
6339 }) => Variant::Unmerged(Unmerged {
6340 first_head: unmerged_status_to_proto(first_head),
6341 second_head: unmerged_status_to_proto(second_head),
6342 }),
6343 FileStatus::Tracked(TrackedStatus {
6344 index_status,
6345 worktree_status,
6346 }) => Variant::Tracked(Tracked {
6347 index_status: tracked_status_to_proto(index_status),
6348 worktree_status: tracked_status_to_proto(worktree_status),
6349 }),
6350 };
6351 proto::GitFileStatus {
6352 variant: Some(variant),
6353 }
6354}
6355
6356fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6357 match code {
6358 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6359 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6360 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6361 }
6362}
6363
6364fn tracked_status_to_proto(code: StatusCode) -> i32 {
6365 match code {
6366 StatusCode::Added => proto::GitStatus::Added as _,
6367 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6368 StatusCode::Modified => proto::GitStatus::Modified as _,
6369 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6370 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6371 StatusCode::Copied => proto::GitStatus::Copied as _,
6372 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6373 }
6374}