1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
127 /// text do disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 cx: &mut Context<Self>,
697 ) -> Task<Result<Entity<BufferDiff>>> {
698 cx.spawn(async move |this, cx| {
699 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
700 let language_registry = buffer.update(cx, |buffer, _| buffer.language_registry());
701 let content = match oid {
702 None => None,
703 Some(oid) => Some(
704 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
705 .await?,
706 ),
707 };
708 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));
709
710 buffer_diff
711 .update(cx, |buffer_diff, cx| {
712 buffer_diff.language_changed(
713 buffer_snapshot.language().cloned(),
714 language_registry,
715 cx,
716 );
717 buffer_diff.set_base_text(
718 content.map(|s| s.as_str().into()),
719 buffer_snapshot.language().cloned(),
720 buffer_snapshot.text,
721 cx,
722 )
723 })
724 .await?;
725 let unstaged_diff = this
726 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
727 .await?;
728 buffer_diff.update(cx, |buffer_diff, _| {
729 buffer_diff.set_secondary_diff(unstaged_diff);
730 });
731
732 this.update(cx, |_, cx| {
733 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
734 .detach();
735 })?;
736
737 Ok(buffer_diff)
738 })
739 }
740
741 #[ztracing::instrument(skip_all)]
742 pub fn open_uncommitted_diff(
743 &mut self,
744 buffer: Entity<Buffer>,
745 cx: &mut Context<Self>,
746 ) -> Task<Result<Entity<BufferDiff>>> {
747 let buffer_id = buffer.read(cx).remote_id();
748
749 if let Some(diff_state) = self.diffs.get(&buffer_id)
750 && let Some(uncommitted_diff) = diff_state
751 .read(cx)
752 .uncommitted_diff
753 .as_ref()
754 .and_then(|weak| weak.upgrade())
755 {
756 if let Some(task) =
757 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
758 {
759 return cx.background_executor().spawn(async move {
760 task.await;
761 Ok(uncommitted_diff)
762 });
763 }
764 return Task::ready(Ok(uncommitted_diff));
765 }
766
767 let Some((repo, repo_path)) =
768 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
769 else {
770 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
771 };
772
773 let task = self
774 .loading_diffs
775 .entry((buffer_id, DiffKind::Uncommitted))
776 .or_insert_with(|| {
777 let changes = repo.update(cx, |repo, cx| {
778 repo.load_committed_text(buffer_id, repo_path, cx)
779 });
780
781 // todo(lw): hot foreground spawn
782 cx.spawn(async move |this, cx| {
783 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
784 .await
785 .map_err(Arc::new)
786 })
787 .shared()
788 })
789 .clone();
790
791 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
792 }
793
794 #[ztracing::instrument(skip_all)]
795 async fn open_diff_internal(
796 this: WeakEntity<Self>,
797 kind: DiffKind,
798 texts: Result<DiffBasesChange>,
799 buffer_entity: Entity<Buffer>,
800 cx: &mut AsyncApp,
801 ) -> Result<Entity<BufferDiff>> {
802 let diff_bases_change = match texts {
803 Err(e) => {
804 this.update(cx, |this, cx| {
805 let buffer = buffer_entity.read(cx);
806 let buffer_id = buffer.remote_id();
807 this.loading_diffs.remove(&(buffer_id, kind));
808 })?;
809 return Err(e);
810 }
811 Ok(change) => change,
812 };
813
814 this.update(cx, |this, cx| {
815 let buffer = buffer_entity.read(cx);
816 let buffer_id = buffer.remote_id();
817 let language = buffer.language().cloned();
818 let language_registry = buffer.language_registry();
819 let text_snapshot = buffer.text_snapshot();
820 this.loading_diffs.remove(&(buffer_id, kind));
821
822 let git_store = cx.weak_entity();
823 let diff_state = this
824 .diffs
825 .entry(buffer_id)
826 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
827
828 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
829
830 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
831 diff_state.update(cx, |diff_state, cx| {
832 diff_state.language_changed = true;
833 diff_state.language = language;
834 diff_state.language_registry = language_registry;
835
836 match kind {
837 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
838 DiffKind::Uncommitted => {
839 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
840 diff
841 } else {
842 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
843 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
844 unstaged_diff
845 };
846
847 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
848 diff_state.uncommitted_diff = Some(diff.downgrade())
849 }
850 }
851
852 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
853 let rx = diff_state.wait_for_recalculation();
854
855 anyhow::Ok(async move {
856 if let Some(rx) = rx {
857 rx.await;
858 }
859 Ok(diff)
860 })
861 })
862 })??
863 .await
864 }
865
866 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
867 let diff_state = self.diffs.get(&buffer_id)?;
868 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
869 }
870
871 pub fn get_uncommitted_diff(
872 &self,
873 buffer_id: BufferId,
874 cx: &App,
875 ) -> Option<Entity<BufferDiff>> {
876 let diff_state = self.diffs.get(&buffer_id)?;
877 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
878 }
879
880 pub fn open_conflict_set(
881 &mut self,
882 buffer: Entity<Buffer>,
883 cx: &mut Context<Self>,
884 ) -> Entity<ConflictSet> {
885 log::debug!("open conflict set");
886 let buffer_id = buffer.read(cx).remote_id();
887
888 if let Some(git_state) = self.diffs.get(&buffer_id)
889 && let Some(conflict_set) = git_state
890 .read(cx)
891 .conflict_set
892 .as_ref()
893 .and_then(|weak| weak.upgrade())
894 {
895 let conflict_set = conflict_set;
896 let buffer_snapshot = buffer.read(cx).text_snapshot();
897
898 git_state.update(cx, |state, cx| {
899 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
900 });
901
902 return conflict_set;
903 }
904
905 let is_unmerged = self
906 .repository_and_path_for_buffer_id(buffer_id, cx)
907 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
908 let git_store = cx.weak_entity();
909 let buffer_git_state = self
910 .diffs
911 .entry(buffer_id)
912 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
913 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
914
915 self._subscriptions
916 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
917 cx.emit(GitStoreEvent::ConflictsUpdated);
918 }));
919
920 buffer_git_state.update(cx, |state, cx| {
921 state.conflict_set = Some(conflict_set.downgrade());
922 let buffer_snapshot = buffer.read(cx).text_snapshot();
923 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
924 });
925
926 conflict_set
927 }
928
929 pub fn project_path_git_status(
930 &self,
931 project_path: &ProjectPath,
932 cx: &App,
933 ) -> Option<FileStatus> {
934 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
935 Some(repo.read(cx).status_for_path(&repo_path)?.status)
936 }
937
938 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
939 let mut work_directory_abs_paths = Vec::new();
940 let mut checkpoints = Vec::new();
941 for repository in self.repositories.values() {
942 repository.update(cx, |repository, _| {
943 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
944 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
945 });
946 }
947
948 cx.background_executor().spawn(async move {
949 let checkpoints = future::try_join_all(checkpoints).await?;
950 Ok(GitStoreCheckpoint {
951 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
952 .into_iter()
953 .zip(checkpoints)
954 .collect(),
955 })
956 })
957 }
958
959 pub fn restore_checkpoint(
960 &self,
961 checkpoint: GitStoreCheckpoint,
962 cx: &mut App,
963 ) -> Task<Result<()>> {
964 let repositories_by_work_dir_abs_path = self
965 .repositories
966 .values()
967 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
968 .collect::<HashMap<_, _>>();
969
970 let mut tasks = Vec::new();
971 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
972 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
973 let restore = repository.update(cx, |repository, _| {
974 repository.restore_checkpoint(checkpoint)
975 });
976 tasks.push(async move { restore.await? });
977 }
978 }
979 cx.background_spawn(async move {
980 future::try_join_all(tasks).await?;
981 Ok(())
982 })
983 }
984
985 /// Compares two checkpoints, returning true if they are equal.
986 pub fn compare_checkpoints(
987 &self,
988 left: GitStoreCheckpoint,
989 mut right: GitStoreCheckpoint,
990 cx: &mut App,
991 ) -> Task<Result<bool>> {
992 let repositories_by_work_dir_abs_path = self
993 .repositories
994 .values()
995 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
996 .collect::<HashMap<_, _>>();
997
998 let mut tasks = Vec::new();
999 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
1000 if let Some(right_checkpoint) = right
1001 .checkpoints_by_work_dir_abs_path
1002 .remove(&work_dir_abs_path)
1003 {
1004 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1005 {
1006 let compare = repository.update(cx, |repository, _| {
1007 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1008 });
1009
1010 tasks.push(async move { compare.await? });
1011 }
1012 } else {
1013 return Task::ready(Ok(false));
1014 }
1015 }
1016 cx.background_spawn(async move {
1017 Ok(future::try_join_all(tasks)
1018 .await?
1019 .into_iter()
1020 .all(|result| result))
1021 })
1022 }
1023
1024 /// Blames a buffer.
1025 pub fn blame_buffer(
1026 &self,
1027 buffer: &Entity<Buffer>,
1028 version: Option<clock::Global>,
1029 cx: &mut Context<Self>,
1030 ) -> Task<Result<Option<Blame>>> {
1031 let buffer = buffer.read(cx);
1032 let Some((repo, repo_path)) =
1033 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1034 else {
1035 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1036 };
1037 let content = match &version {
1038 Some(version) => buffer.rope_for_version(version),
1039 None => buffer.as_rope().clone(),
1040 };
1041 let line_ending = buffer.line_ending();
1042 let version = version.unwrap_or(buffer.version());
1043 let buffer_id = buffer.remote_id();
1044
1045 let repo = repo.downgrade();
1046 cx.spawn(async move |_, cx| {
1047 let repository_state = repo
1048 .update(cx, |repo, _| repo.repository_state.clone())?
1049 .await
1050 .map_err(|err| anyhow::anyhow!(err))?;
1051 match repository_state {
1052 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1053 .blame(repo_path.clone(), content, line_ending)
1054 .await
1055 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1056 .map(Some),
1057 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1058 let response = client
1059 .request(proto::BlameBuffer {
1060 project_id: project_id.to_proto(),
1061 buffer_id: buffer_id.into(),
1062 version: serialize_version(&version),
1063 })
1064 .await?;
1065 Ok(deserialize_blame_buffer_response(response))
1066 }
1067 }
1068 })
1069 }
1070
1071 pub fn file_history(
1072 &self,
1073 repo: &Entity<Repository>,
1074 path: RepoPath,
1075 cx: &mut App,
1076 ) -> Task<Result<git::repository::FileHistory>> {
1077 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1078
1079 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1080 }
1081
1082 pub fn file_history_paginated(
1083 &self,
1084 repo: &Entity<Repository>,
1085 path: RepoPath,
1086 skip: usize,
1087 limit: Option<usize>,
1088 cx: &mut App,
1089 ) -> Task<Result<git::repository::FileHistory>> {
1090 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1091
1092 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1093 }
1094
1095 pub fn get_permalink_to_line(
1096 &self,
1097 buffer: &Entity<Buffer>,
1098 selection: Range<u32>,
1099 cx: &mut App,
1100 ) -> Task<Result<url::Url>> {
1101 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1102 return Task::ready(Err(anyhow!("buffer has no file")));
1103 };
1104
1105 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1106 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1107 cx,
1108 ) else {
1109 // If we're not in a Git repo, check whether this is a Rust source
1110 // file in the Cargo registry (presumably opened with go-to-definition
1111 // from a normal Rust file). If so, we can put together a permalink
1112 // using crate metadata.
1113 if buffer
1114 .read(cx)
1115 .language()
1116 .is_none_or(|lang| lang.name() != "Rust".into())
1117 {
1118 return Task::ready(Err(anyhow!("no permalink available")));
1119 }
1120 let file_path = file.worktree.read(cx).absolutize(&file.path);
1121 return cx.spawn(async move |cx| {
1122 let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
1123 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1124 .context("no permalink available")
1125 });
1126 };
1127
1128 let buffer_id = buffer.read(cx).remote_id();
1129 let branch = repo.read(cx).branch.clone();
1130 let remote = branch
1131 .as_ref()
1132 .and_then(|b| b.upstream.as_ref())
1133 .and_then(|b| b.remote_name())
1134 .unwrap_or("origin")
1135 .to_string();
1136
1137 let rx = repo.update(cx, |repo, _| {
1138 repo.send_job(None, move |state, cx| async move {
1139 match state {
1140 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1141 let origin_url = backend
1142 .remote_url(&remote)
1143 .await
1144 .with_context(|| format!("remote \"{remote}\" not found"))?;
1145
1146 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1147
1148 let provider_registry =
1149 cx.update(GitHostingProviderRegistry::default_global);
1150
1151 let (provider, remote) =
1152 parse_git_remote_url(provider_registry, &origin_url)
1153 .context("parsing Git remote URL")?;
1154
1155 Ok(provider.build_permalink(
1156 remote,
1157 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1158 ))
1159 }
1160 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1161 let response = client
1162 .request(proto::GetPermalinkToLine {
1163 project_id: project_id.to_proto(),
1164 buffer_id: buffer_id.into(),
1165 selection: Some(proto::Range {
1166 start: selection.start as u64,
1167 end: selection.end as u64,
1168 }),
1169 })
1170 .await?;
1171
1172 url::Url::parse(&response.permalink).context("failed to parse permalink")
1173 }
1174 }
1175 })
1176 });
1177 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1178 }
1179
1180 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1181 match &self.state {
1182 GitStoreState::Local {
1183 downstream: downstream_client,
1184 ..
1185 } => downstream_client
1186 .as_ref()
1187 .map(|state| (state.client.clone(), state.project_id)),
1188 GitStoreState::Remote {
1189 downstream: downstream_client,
1190 ..
1191 } => downstream_client.clone(),
1192 }
1193 }
1194
1195 fn upstream_client(&self) -> Option<AnyProtoClient> {
1196 match &self.state {
1197 GitStoreState::Local { .. } => None,
1198 GitStoreState::Remote {
1199 upstream_client, ..
1200 } => Some(upstream_client.clone()),
1201 }
1202 }
1203
1204 fn on_worktree_store_event(
1205 &mut self,
1206 worktree_store: Entity<WorktreeStore>,
1207 event: &WorktreeStoreEvent,
1208 cx: &mut Context<Self>,
1209 ) {
1210 let GitStoreState::Local {
1211 project_environment,
1212 downstream,
1213 next_repository_id,
1214 fs,
1215 } = &self.state
1216 else {
1217 return;
1218 };
1219
1220 match event {
1221 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1222 if let Some(worktree) = self
1223 .worktree_store
1224 .read(cx)
1225 .worktree_for_id(*worktree_id, cx)
1226 {
1227 let paths_by_git_repo =
1228 self.process_updated_entries(&worktree, updated_entries, cx);
1229 let downstream = downstream
1230 .as_ref()
1231 .map(|downstream| downstream.updates_tx.clone());
1232 cx.spawn(async move |_, cx| {
1233 let paths_by_git_repo = paths_by_git_repo.await;
1234 for (repo, paths) in paths_by_git_repo {
1235 repo.update(cx, |repo, cx| {
1236 repo.paths_changed(paths, downstream.clone(), cx);
1237 });
1238 }
1239 })
1240 .detach();
1241 }
1242 }
1243 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1244 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1245 else {
1246 return;
1247 };
1248 if !worktree.read(cx).is_visible() {
1249 log::debug!(
1250 "not adding repositories for local worktree {:?} because it's not visible",
1251 worktree.read(cx).abs_path()
1252 );
1253 return;
1254 }
1255 self.update_repositories_from_worktree(
1256 *worktree_id,
1257 project_environment.clone(),
1258 next_repository_id.clone(),
1259 downstream
1260 .as_ref()
1261 .map(|downstream| downstream.updates_tx.clone()),
1262 changed_repos.clone(),
1263 fs.clone(),
1264 cx,
1265 );
1266 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1267 }
1268 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1269 let repos_without_worktree: Vec<RepositoryId> = self
1270 .worktree_ids
1271 .iter_mut()
1272 .filter_map(|(repo_id, worktree_ids)| {
1273 worktree_ids.remove(worktree_id);
1274 if worktree_ids.is_empty() {
1275 Some(*repo_id)
1276 } else {
1277 None
1278 }
1279 })
1280 .collect();
1281 let is_active_repo_removed = repos_without_worktree
1282 .iter()
1283 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1284
1285 for repo_id in repos_without_worktree {
1286 self.repositories.remove(&repo_id);
1287 self.worktree_ids.remove(&repo_id);
1288 if let Some(updates_tx) =
1289 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1290 {
1291 updates_tx
1292 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1293 .ok();
1294 }
1295 }
1296
1297 if is_active_repo_removed {
1298 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1299 self.active_repo_id = Some(repo_id);
1300 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1301 } else {
1302 self.active_repo_id = None;
1303 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1304 }
1305 }
1306 }
1307 _ => {}
1308 }
1309 }
1310 fn on_repository_event(
1311 &mut self,
1312 repo: Entity<Repository>,
1313 event: &RepositoryEvent,
1314 cx: &mut Context<Self>,
1315 ) {
1316 let id = repo.read(cx).id;
1317 let repo_snapshot = repo.read(cx).snapshot.clone();
1318 for (buffer_id, diff) in self.diffs.iter() {
1319 if let Some((buffer_repo, repo_path)) =
1320 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1321 && buffer_repo == repo
1322 {
1323 diff.update(cx, |diff, cx| {
1324 if let Some(conflict_set) = &diff.conflict_set {
1325 let conflict_status_changed =
1326 conflict_set.update(cx, |conflict_set, cx| {
1327 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1328 conflict_set.set_has_conflict(has_conflict, cx)
1329 })?;
1330 if conflict_status_changed {
1331 let buffer_store = self.buffer_store.read(cx);
1332 if let Some(buffer) = buffer_store.get(*buffer_id) {
1333 let _ = diff
1334 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1335 }
1336 }
1337 }
1338 anyhow::Ok(())
1339 })
1340 .ok();
1341 }
1342 }
1343 cx.emit(GitStoreEvent::RepositoryUpdated(
1344 id,
1345 event.clone(),
1346 self.active_repo_id == Some(id),
1347 ))
1348 }
1349
1350 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1351 cx.emit(GitStoreEvent::JobsUpdated)
1352 }
1353
1354 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1355 fn update_repositories_from_worktree(
1356 &mut self,
1357 worktree_id: WorktreeId,
1358 project_environment: Entity<ProjectEnvironment>,
1359 next_repository_id: Arc<AtomicU64>,
1360 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1361 updated_git_repositories: UpdatedGitRepositoriesSet,
1362 fs: Arc<dyn Fs>,
1363 cx: &mut Context<Self>,
1364 ) {
1365 let mut removed_ids = Vec::new();
1366 for update in updated_git_repositories.iter() {
1367 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1368 let existing_work_directory_abs_path =
1369 repo.read(cx).work_directory_abs_path.clone();
1370 Some(&existing_work_directory_abs_path)
1371 == update.old_work_directory_abs_path.as_ref()
1372 || Some(&existing_work_directory_abs_path)
1373 == update.new_work_directory_abs_path.as_ref()
1374 }) {
1375 let repo_id = *id;
1376 if let Some(new_work_directory_abs_path) =
1377 update.new_work_directory_abs_path.clone()
1378 {
1379 self.worktree_ids
1380 .entry(repo_id)
1381 .or_insert_with(HashSet::new)
1382 .insert(worktree_id);
1383 existing.update(cx, |existing, cx| {
1384 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1385 existing.schedule_scan(updates_tx.clone(), cx);
1386 });
1387 } else {
1388 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1389 worktree_ids.remove(&worktree_id);
1390 if worktree_ids.is_empty() {
1391 removed_ids.push(repo_id);
1392 }
1393 }
1394 }
1395 } else if let UpdatedGitRepository {
1396 new_work_directory_abs_path: Some(work_directory_abs_path),
1397 dot_git_abs_path: Some(dot_git_abs_path),
1398 repository_dir_abs_path: Some(_repository_dir_abs_path),
1399 common_dir_abs_path: Some(_common_dir_abs_path),
1400 ..
1401 } = update
1402 {
1403 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1404 let git_store = cx.weak_entity();
1405 let repo = cx.new(|cx| {
1406 let mut repo = Repository::local(
1407 id,
1408 work_directory_abs_path.clone(),
1409 dot_git_abs_path.clone(),
1410 project_environment.downgrade(),
1411 fs.clone(),
1412 git_store,
1413 cx,
1414 );
1415 if let Some(updates_tx) = updates_tx.as_ref() {
1416 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1417 updates_tx
1418 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1419 .ok();
1420 }
1421 repo.schedule_scan(updates_tx.clone(), cx);
1422 repo
1423 });
1424 self._subscriptions
1425 .push(cx.subscribe(&repo, Self::on_repository_event));
1426 self._subscriptions
1427 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1428 self.repositories.insert(id, repo);
1429 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1430 cx.emit(GitStoreEvent::RepositoryAdded);
1431 self.active_repo_id.get_or_insert_with(|| {
1432 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1433 id
1434 });
1435 }
1436 }
1437
1438 for id in removed_ids {
1439 if self.active_repo_id == Some(id) {
1440 self.active_repo_id = None;
1441 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1442 }
1443 self.repositories.remove(&id);
1444 if let Some(updates_tx) = updates_tx.as_ref() {
1445 updates_tx
1446 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1447 .ok();
1448 }
1449 }
1450 }
1451
1452 fn on_buffer_store_event(
1453 &mut self,
1454 _: Entity<BufferStore>,
1455 event: &BufferStoreEvent,
1456 cx: &mut Context<Self>,
1457 ) {
1458 match event {
1459 BufferStoreEvent::BufferAdded(buffer) => {
1460 cx.subscribe(buffer, |this, buffer, event, cx| {
1461 if let BufferEvent::LanguageChanged(_) = event {
1462 let buffer_id = buffer.read(cx).remote_id();
1463 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1464 diff_state.update(cx, |diff_state, cx| {
1465 diff_state.buffer_language_changed(buffer, cx);
1466 });
1467 }
1468 }
1469 })
1470 .detach();
1471 }
1472 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1473 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1474 diffs.remove(buffer_id);
1475 }
1476 }
1477 BufferStoreEvent::BufferDropped(buffer_id) => {
1478 self.diffs.remove(buffer_id);
1479 for diffs in self.shared_diffs.values_mut() {
1480 diffs.remove(buffer_id);
1481 }
1482 }
1483 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1484 // Whenever a buffer's file path changes, it's possible that the
1485 // new path is actually a path that is being tracked by a git
1486 // repository. In that case, we'll want to update the buffer's
1487 // `BufferDiffState`, in case it already has one.
1488 let buffer_id = buffer.read(cx).remote_id();
1489 let diff_state = self.diffs.get(&buffer_id);
1490 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1491
1492 if let Some(diff_state) = diff_state
1493 && let Some((repo, repo_path)) = repo
1494 {
1495 let buffer = buffer.clone();
1496 let diff_state = diff_state.clone();
1497
1498 cx.spawn(async move |_git_store, cx| {
1499 async {
1500 let diff_bases_change = repo
1501 .update(cx, |repo, cx| {
1502 repo.load_committed_text(buffer_id, repo_path, cx)
1503 })
1504 .await?;
1505
1506 diff_state.update(cx, |diff_state, cx| {
1507 let buffer_snapshot = buffer.read(cx).text_snapshot();
1508 diff_state.diff_bases_changed(
1509 buffer_snapshot,
1510 Some(diff_bases_change),
1511 cx,
1512 );
1513 });
1514 anyhow::Ok(())
1515 }
1516 .await
1517 .log_err();
1518 })
1519 .detach();
1520 }
1521 }
1522 _ => {}
1523 }
1524 }
1525
1526 pub fn recalculate_buffer_diffs(
1527 &mut self,
1528 buffers: Vec<Entity<Buffer>>,
1529 cx: &mut Context<Self>,
1530 ) -> impl Future<Output = ()> + use<> {
1531 let mut futures = Vec::new();
1532 for buffer in buffers {
1533 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1534 let buffer = buffer.read(cx).text_snapshot();
1535 diff_state.update(cx, |diff_state, cx| {
1536 diff_state.recalculate_diffs(buffer.clone(), cx);
1537 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1538 });
1539 futures.push(diff_state.update(cx, |diff_state, cx| {
1540 diff_state
1541 .reparse_conflict_markers(buffer, cx)
1542 .map(|_| {})
1543 .boxed()
1544 }));
1545 }
1546 }
1547 async move {
1548 futures::future::join_all(futures).await;
1549 }
1550 }
1551
1552 fn on_buffer_diff_event(
1553 &mut self,
1554 diff: Entity<buffer_diff::BufferDiff>,
1555 event: &BufferDiffEvent,
1556 cx: &mut Context<Self>,
1557 ) {
1558 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1559 let buffer_id = diff.read(cx).buffer_id;
1560 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1561 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1562 diff_state.hunk_staging_operation_count += 1;
1563 diff_state.hunk_staging_operation_count
1564 });
1565 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1566 let recv = repo.update(cx, |repo, cx| {
1567 log::debug!("hunks changed for {}", path.as_unix_str());
1568 repo.spawn_set_index_text_job(
1569 path,
1570 new_index_text.as_ref().map(|rope| rope.to_string()),
1571 Some(hunk_staging_operation_count),
1572 cx,
1573 )
1574 });
1575 let diff = diff.downgrade();
1576 cx.spawn(async move |this, cx| {
1577 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1578 diff.update(cx, |diff, cx| {
1579 diff.clear_pending_hunks(cx);
1580 })
1581 .ok();
1582 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1583 .ok();
1584 }
1585 })
1586 .detach();
1587 }
1588 }
1589 }
1590 }
1591
1592 fn local_worktree_git_repos_changed(
1593 &mut self,
1594 worktree: Entity<Worktree>,
1595 changed_repos: &UpdatedGitRepositoriesSet,
1596 cx: &mut Context<Self>,
1597 ) {
1598 log::debug!("local worktree repos changed");
1599 debug_assert!(worktree.read(cx).is_local());
1600
1601 for repository in self.repositories.values() {
1602 repository.update(cx, |repository, cx| {
1603 let repo_abs_path = &repository.work_directory_abs_path;
1604 if changed_repos.iter().any(|update| {
1605 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1606 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1607 }) {
1608 repository.reload_buffer_diff_bases(cx);
1609 }
1610 });
1611 }
1612 }
1613
1614 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1615 &self.repositories
1616 }
1617
1618 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1619 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1620 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1621 Some(status.status)
1622 }
1623
1624 pub fn repository_and_path_for_buffer_id(
1625 &self,
1626 buffer_id: BufferId,
1627 cx: &App,
1628 ) -> Option<(Entity<Repository>, RepoPath)> {
1629 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1630 let project_path = buffer.read(cx).project_path(cx)?;
1631 self.repository_and_path_for_project_path(&project_path, cx)
1632 }
1633
1634 pub fn repository_and_path_for_project_path(
1635 &self,
1636 path: &ProjectPath,
1637 cx: &App,
1638 ) -> Option<(Entity<Repository>, RepoPath)> {
1639 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1640 self.repositories
1641 .values()
1642 .filter_map(|repo| {
1643 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1644 Some((repo.clone(), repo_path))
1645 })
1646 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1647 }
1648
1649 pub fn git_init(
1650 &self,
1651 path: Arc<Path>,
1652 fallback_branch_name: String,
1653 cx: &App,
1654 ) -> Task<Result<()>> {
1655 match &self.state {
1656 GitStoreState::Local { fs, .. } => {
1657 let fs = fs.clone();
1658 cx.background_executor()
1659 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1660 }
1661 GitStoreState::Remote {
1662 upstream_client,
1663 upstream_project_id: project_id,
1664 ..
1665 } => {
1666 let client = upstream_client.clone();
1667 let project_id = *project_id;
1668 cx.background_executor().spawn(async move {
1669 client
1670 .request(proto::GitInit {
1671 project_id: project_id,
1672 abs_path: path.to_string_lossy().into_owned(),
1673 fallback_branch_name,
1674 })
1675 .await?;
1676 Ok(())
1677 })
1678 }
1679 }
1680 }
1681
1682 pub fn git_clone(
1683 &self,
1684 repo: String,
1685 path: impl Into<Arc<std::path::Path>>,
1686 cx: &App,
1687 ) -> Task<Result<()>> {
1688 let path = path.into();
1689 match &self.state {
1690 GitStoreState::Local { fs, .. } => {
1691 let fs = fs.clone();
1692 cx.background_executor()
1693 .spawn(async move { fs.git_clone(&repo, &path).await })
1694 }
1695 GitStoreState::Remote {
1696 upstream_client,
1697 upstream_project_id,
1698 ..
1699 } => {
1700 if upstream_client.is_via_collab() {
1701 return Task::ready(Err(anyhow!(
1702 "Git Clone isn't supported for project guests"
1703 )));
1704 }
1705 let request = upstream_client.request(proto::GitClone {
1706 project_id: *upstream_project_id,
1707 abs_path: path.to_string_lossy().into_owned(),
1708 remote_repo: repo,
1709 });
1710
1711 cx.background_spawn(async move {
1712 let result = request.await?;
1713
1714 match result.success {
1715 true => Ok(()),
1716 false => Err(anyhow!("Git Clone failed")),
1717 }
1718 })
1719 }
1720 }
1721 }
1722
1723 async fn handle_update_repository(
1724 this: Entity<Self>,
1725 envelope: TypedEnvelope<proto::UpdateRepository>,
1726 mut cx: AsyncApp,
1727 ) -> Result<()> {
1728 this.update(&mut cx, |this, cx| {
1729 let path_style = this.worktree_store.read(cx).path_style();
1730 let mut update = envelope.payload;
1731
1732 let id = RepositoryId::from_proto(update.id);
1733 let client = this.upstream_client().context("no upstream client")?;
1734
1735 let mut repo_subscription = None;
1736 let repo = this.repositories.entry(id).or_insert_with(|| {
1737 let git_store = cx.weak_entity();
1738 let repo = cx.new(|cx| {
1739 Repository::remote(
1740 id,
1741 Path::new(&update.abs_path).into(),
1742 path_style,
1743 ProjectId(update.project_id),
1744 client,
1745 git_store,
1746 cx,
1747 )
1748 });
1749 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1750 cx.emit(GitStoreEvent::RepositoryAdded);
1751 repo
1752 });
1753 this._subscriptions.extend(repo_subscription);
1754
1755 repo.update(cx, {
1756 let update = update.clone();
1757 |repo, cx| repo.apply_remote_update(update, cx)
1758 })?;
1759
1760 this.active_repo_id.get_or_insert_with(|| {
1761 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1762 id
1763 });
1764
1765 if let Some((client, project_id)) = this.downstream_client() {
1766 update.project_id = project_id.to_proto();
1767 client.send(update).log_err();
1768 }
1769 Ok(())
1770 })
1771 }
1772
1773 async fn handle_remove_repository(
1774 this: Entity<Self>,
1775 envelope: TypedEnvelope<proto::RemoveRepository>,
1776 mut cx: AsyncApp,
1777 ) -> Result<()> {
1778 this.update(&mut cx, |this, cx| {
1779 let mut update = envelope.payload;
1780 let id = RepositoryId::from_proto(update.id);
1781 this.repositories.remove(&id);
1782 if let Some((client, project_id)) = this.downstream_client() {
1783 update.project_id = project_id.to_proto();
1784 client.send(update).log_err();
1785 }
1786 if this.active_repo_id == Some(id) {
1787 this.active_repo_id = None;
1788 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1789 }
1790 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1791 });
1792 Ok(())
1793 }
1794
1795 async fn handle_git_init(
1796 this: Entity<Self>,
1797 envelope: TypedEnvelope<proto::GitInit>,
1798 cx: AsyncApp,
1799 ) -> Result<proto::Ack> {
1800 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1801 let name = envelope.payload.fallback_branch_name;
1802 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1803 .await?;
1804
1805 Ok(proto::Ack {})
1806 }
1807
1808 async fn handle_git_clone(
1809 this: Entity<Self>,
1810 envelope: TypedEnvelope<proto::GitClone>,
1811 cx: AsyncApp,
1812 ) -> Result<proto::GitCloneResponse> {
1813 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1814 let repo_name = envelope.payload.remote_repo;
1815 let result = cx
1816 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
1817 .await;
1818
1819 Ok(proto::GitCloneResponse {
1820 success: result.is_ok(),
1821 })
1822 }
1823
1824 async fn handle_fetch(
1825 this: Entity<Self>,
1826 envelope: TypedEnvelope<proto::Fetch>,
1827 mut cx: AsyncApp,
1828 ) -> Result<proto::RemoteMessageResponse> {
1829 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1830 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1831 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1832 let askpass_id = envelope.payload.askpass_id;
1833
1834 let askpass = make_remote_delegate(
1835 this,
1836 envelope.payload.project_id,
1837 repository_id,
1838 askpass_id,
1839 &mut cx,
1840 );
1841
1842 let remote_output = repository_handle
1843 .update(&mut cx, |repository_handle, cx| {
1844 repository_handle.fetch(fetch_options, askpass, cx)
1845 })
1846 .await??;
1847
1848 Ok(proto::RemoteMessageResponse {
1849 stdout: remote_output.stdout,
1850 stderr: remote_output.stderr,
1851 })
1852 }
1853
1854 async fn handle_push(
1855 this: Entity<Self>,
1856 envelope: TypedEnvelope<proto::Push>,
1857 mut cx: AsyncApp,
1858 ) -> Result<proto::RemoteMessageResponse> {
1859 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1860 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1861
1862 let askpass_id = envelope.payload.askpass_id;
1863 let askpass = make_remote_delegate(
1864 this,
1865 envelope.payload.project_id,
1866 repository_id,
1867 askpass_id,
1868 &mut cx,
1869 );
1870
1871 let options = envelope
1872 .payload
1873 .options
1874 .as_ref()
1875 .map(|_| match envelope.payload.options() {
1876 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1877 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1878 });
1879
1880 let branch_name = envelope.payload.branch_name.into();
1881 let remote_branch_name = envelope.payload.remote_branch_name.into();
1882 let remote_name = envelope.payload.remote_name.into();
1883
1884 let remote_output = repository_handle
1885 .update(&mut cx, |repository_handle, cx| {
1886 repository_handle.push(
1887 branch_name,
1888 remote_branch_name,
1889 remote_name,
1890 options,
1891 askpass,
1892 cx,
1893 )
1894 })
1895 .await??;
1896 Ok(proto::RemoteMessageResponse {
1897 stdout: remote_output.stdout,
1898 stderr: remote_output.stderr,
1899 })
1900 }
1901
1902 async fn handle_pull(
1903 this: Entity<Self>,
1904 envelope: TypedEnvelope<proto::Pull>,
1905 mut cx: AsyncApp,
1906 ) -> Result<proto::RemoteMessageResponse> {
1907 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1908 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1909 let askpass_id = envelope.payload.askpass_id;
1910 let askpass = make_remote_delegate(
1911 this,
1912 envelope.payload.project_id,
1913 repository_id,
1914 askpass_id,
1915 &mut cx,
1916 );
1917
1918 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1919 let remote_name = envelope.payload.remote_name.into();
1920 let rebase = envelope.payload.rebase;
1921
1922 let remote_message = repository_handle
1923 .update(&mut cx, |repository_handle, cx| {
1924 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1925 })
1926 .await??;
1927
1928 Ok(proto::RemoteMessageResponse {
1929 stdout: remote_message.stdout,
1930 stderr: remote_message.stderr,
1931 })
1932 }
1933
1934 async fn handle_stage(
1935 this: Entity<Self>,
1936 envelope: TypedEnvelope<proto::Stage>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::Ack> {
1939 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1940 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1941
1942 let entries = envelope
1943 .payload
1944 .paths
1945 .into_iter()
1946 .map(|path| RepoPath::new(&path))
1947 .collect::<Result<Vec<_>>>()?;
1948
1949 repository_handle
1950 .update(&mut cx, |repository_handle, cx| {
1951 repository_handle.stage_entries(entries, cx)
1952 })
1953 .await?;
1954 Ok(proto::Ack {})
1955 }
1956
1957 async fn handle_unstage(
1958 this: Entity<Self>,
1959 envelope: TypedEnvelope<proto::Unstage>,
1960 mut cx: AsyncApp,
1961 ) -> Result<proto::Ack> {
1962 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1963 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1964
1965 let entries = envelope
1966 .payload
1967 .paths
1968 .into_iter()
1969 .map(|path| RepoPath::new(&path))
1970 .collect::<Result<Vec<_>>>()?;
1971
1972 repository_handle
1973 .update(&mut cx, |repository_handle, cx| {
1974 repository_handle.unstage_entries(entries, cx)
1975 })
1976 .await?;
1977
1978 Ok(proto::Ack {})
1979 }
1980
1981 async fn handle_stash(
1982 this: Entity<Self>,
1983 envelope: TypedEnvelope<proto::Stash>,
1984 mut cx: AsyncApp,
1985 ) -> Result<proto::Ack> {
1986 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1987 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1988
1989 let entries = envelope
1990 .payload
1991 .paths
1992 .into_iter()
1993 .map(|path| RepoPath::new(&path))
1994 .collect::<Result<Vec<_>>>()?;
1995
1996 repository_handle
1997 .update(&mut cx, |repository_handle, cx| {
1998 repository_handle.stash_entries(entries, cx)
1999 })
2000 .await?;
2001
2002 Ok(proto::Ack {})
2003 }
2004
2005 async fn handle_stash_pop(
2006 this: Entity<Self>,
2007 envelope: TypedEnvelope<proto::StashPop>,
2008 mut cx: AsyncApp,
2009 ) -> Result<proto::Ack> {
2010 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2011 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2012 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2013
2014 repository_handle
2015 .update(&mut cx, |repository_handle, cx| {
2016 repository_handle.stash_pop(stash_index, cx)
2017 })
2018 .await?;
2019
2020 Ok(proto::Ack {})
2021 }
2022
2023 async fn handle_stash_apply(
2024 this: Entity<Self>,
2025 envelope: TypedEnvelope<proto::StashApply>,
2026 mut cx: AsyncApp,
2027 ) -> Result<proto::Ack> {
2028 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2029 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2030 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2031
2032 repository_handle
2033 .update(&mut cx, |repository_handle, cx| {
2034 repository_handle.stash_apply(stash_index, cx)
2035 })
2036 .await?;
2037
2038 Ok(proto::Ack {})
2039 }
2040
2041 async fn handle_stash_drop(
2042 this: Entity<Self>,
2043 envelope: TypedEnvelope<proto::StashDrop>,
2044 mut cx: AsyncApp,
2045 ) -> Result<proto::Ack> {
2046 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2047 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2048 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2049
2050 repository_handle
2051 .update(&mut cx, |repository_handle, cx| {
2052 repository_handle.stash_drop(stash_index, cx)
2053 })
2054 .await??;
2055
2056 Ok(proto::Ack {})
2057 }
2058
2059 async fn handle_set_index_text(
2060 this: Entity<Self>,
2061 envelope: TypedEnvelope<proto::SetIndexText>,
2062 mut cx: AsyncApp,
2063 ) -> Result<proto::Ack> {
2064 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2065 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2066 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2067
2068 repository_handle
2069 .update(&mut cx, |repository_handle, cx| {
2070 repository_handle.spawn_set_index_text_job(
2071 repo_path,
2072 envelope.payload.text,
2073 None,
2074 cx,
2075 )
2076 })
2077 .await??;
2078 Ok(proto::Ack {})
2079 }
2080
2081 async fn handle_run_hook(
2082 this: Entity<Self>,
2083 envelope: TypedEnvelope<proto::RunGitHook>,
2084 mut cx: AsyncApp,
2085 ) -> Result<proto::Ack> {
2086 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2087 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2088 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2089 repository_handle
2090 .update(&mut cx, |repository_handle, cx| {
2091 repository_handle.run_hook(hook, cx)
2092 })
2093 .await??;
2094 Ok(proto::Ack {})
2095 }
2096
2097 async fn handle_commit(
2098 this: Entity<Self>,
2099 envelope: TypedEnvelope<proto::Commit>,
2100 mut cx: AsyncApp,
2101 ) -> Result<proto::Ack> {
2102 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2103 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2104 let askpass_id = envelope.payload.askpass_id;
2105
2106 let askpass = make_remote_delegate(
2107 this,
2108 envelope.payload.project_id,
2109 repository_id,
2110 askpass_id,
2111 &mut cx,
2112 );
2113
2114 let message = SharedString::from(envelope.payload.message);
2115 let name = envelope.payload.name.map(SharedString::from);
2116 let email = envelope.payload.email.map(SharedString::from);
2117 let options = envelope.payload.options.unwrap_or_default();
2118
2119 repository_handle
2120 .update(&mut cx, |repository_handle, cx| {
2121 repository_handle.commit(
2122 message,
2123 name.zip(email),
2124 CommitOptions {
2125 amend: options.amend,
2126 signoff: options.signoff,
2127 },
2128 askpass,
2129 cx,
2130 )
2131 })
2132 .await??;
2133 Ok(proto::Ack {})
2134 }
2135
2136 async fn handle_get_remotes(
2137 this: Entity<Self>,
2138 envelope: TypedEnvelope<proto::GetRemotes>,
2139 mut cx: AsyncApp,
2140 ) -> Result<proto::GetRemotesResponse> {
2141 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2142 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2143
2144 let branch_name = envelope.payload.branch_name;
2145 let is_push = envelope.payload.is_push;
2146
2147 let remotes = repository_handle
2148 .update(&mut cx, |repository_handle, _| {
2149 repository_handle.get_remotes(branch_name, is_push)
2150 })
2151 .await??;
2152
2153 Ok(proto::GetRemotesResponse {
2154 remotes: remotes
2155 .into_iter()
2156 .map(|remotes| proto::get_remotes_response::Remote {
2157 name: remotes.name.to_string(),
2158 })
2159 .collect::<Vec<_>>(),
2160 })
2161 }
2162
2163 async fn handle_get_worktrees(
2164 this: Entity<Self>,
2165 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2166 mut cx: AsyncApp,
2167 ) -> Result<proto::GitWorktreesResponse> {
2168 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2169 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2170
2171 let worktrees = repository_handle
2172 .update(&mut cx, |repository_handle, _| {
2173 repository_handle.worktrees()
2174 })
2175 .await??;
2176
2177 Ok(proto::GitWorktreesResponse {
2178 worktrees: worktrees
2179 .into_iter()
2180 .map(|worktree| worktree_to_proto(&worktree))
2181 .collect::<Vec<_>>(),
2182 })
2183 }
2184
2185 async fn handle_create_worktree(
2186 this: Entity<Self>,
2187 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2188 mut cx: AsyncApp,
2189 ) -> Result<proto::Ack> {
2190 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2191 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2192 let directory = PathBuf::from(envelope.payload.directory);
2193 let name = envelope.payload.name;
2194 let commit = envelope.payload.commit;
2195
2196 repository_handle
2197 .update(&mut cx, |repository_handle, _| {
2198 repository_handle.create_worktree(name, directory, commit)
2199 })
2200 .await??;
2201
2202 Ok(proto::Ack {})
2203 }
2204
2205 async fn handle_get_branches(
2206 this: Entity<Self>,
2207 envelope: TypedEnvelope<proto::GitGetBranches>,
2208 mut cx: AsyncApp,
2209 ) -> Result<proto::GitBranchesResponse> {
2210 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2211 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2212
2213 let branches = repository_handle
2214 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2215 .await??;
2216
2217 Ok(proto::GitBranchesResponse {
2218 branches: branches
2219 .into_iter()
2220 .map(|branch| branch_to_proto(&branch))
2221 .collect::<Vec<_>>(),
2222 })
2223 }
2224 async fn handle_get_default_branch(
2225 this: Entity<Self>,
2226 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2227 mut cx: AsyncApp,
2228 ) -> Result<proto::GetDefaultBranchResponse> {
2229 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2230 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2231
2232 let branch = repository_handle
2233 .update(&mut cx, |repository_handle, _| {
2234 repository_handle.default_branch(false)
2235 })
2236 .await??
2237 .map(Into::into);
2238
2239 Ok(proto::GetDefaultBranchResponse { branch })
2240 }
2241 async fn handle_create_branch(
2242 this: Entity<Self>,
2243 envelope: TypedEnvelope<proto::GitCreateBranch>,
2244 mut cx: AsyncApp,
2245 ) -> Result<proto::Ack> {
2246 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2247 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2248 let branch_name = envelope.payload.branch_name;
2249
2250 repository_handle
2251 .update(&mut cx, |repository_handle, _| {
2252 repository_handle.create_branch(branch_name, None)
2253 })
2254 .await??;
2255
2256 Ok(proto::Ack {})
2257 }
2258
2259 async fn handle_change_branch(
2260 this: Entity<Self>,
2261 envelope: TypedEnvelope<proto::GitChangeBranch>,
2262 mut cx: AsyncApp,
2263 ) -> Result<proto::Ack> {
2264 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2265 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2266 let branch_name = envelope.payload.branch_name;
2267
2268 repository_handle
2269 .update(&mut cx, |repository_handle, _| {
2270 repository_handle.change_branch(branch_name)
2271 })
2272 .await??;
2273
2274 Ok(proto::Ack {})
2275 }
2276
2277 async fn handle_rename_branch(
2278 this: Entity<Self>,
2279 envelope: TypedEnvelope<proto::GitRenameBranch>,
2280 mut cx: AsyncApp,
2281 ) -> Result<proto::Ack> {
2282 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2283 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2284 let branch = envelope.payload.branch;
2285 let new_name = envelope.payload.new_name;
2286
2287 repository_handle
2288 .update(&mut cx, |repository_handle, _| {
2289 repository_handle.rename_branch(branch, new_name)
2290 })
2291 .await??;
2292
2293 Ok(proto::Ack {})
2294 }
2295
2296 async fn handle_create_remote(
2297 this: Entity<Self>,
2298 envelope: TypedEnvelope<proto::GitCreateRemote>,
2299 mut cx: AsyncApp,
2300 ) -> Result<proto::Ack> {
2301 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2302 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2303 let remote_name = envelope.payload.remote_name;
2304 let remote_url = envelope.payload.remote_url;
2305
2306 repository_handle
2307 .update(&mut cx, |repository_handle, _| {
2308 repository_handle.create_remote(remote_name, remote_url)
2309 })
2310 .await??;
2311
2312 Ok(proto::Ack {})
2313 }
2314
2315 async fn handle_delete_branch(
2316 this: Entity<Self>,
2317 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2318 mut cx: AsyncApp,
2319 ) -> Result<proto::Ack> {
2320 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2321 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2322 let branch_name = envelope.payload.branch_name;
2323
2324 repository_handle
2325 .update(&mut cx, |repository_handle, _| {
2326 repository_handle.delete_branch(branch_name)
2327 })
2328 .await??;
2329
2330 Ok(proto::Ack {})
2331 }
2332
2333 async fn handle_remove_remote(
2334 this: Entity<Self>,
2335 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2336 mut cx: AsyncApp,
2337 ) -> Result<proto::Ack> {
2338 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2339 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2340 let remote_name = envelope.payload.remote_name;
2341
2342 repository_handle
2343 .update(&mut cx, |repository_handle, _| {
2344 repository_handle.remove_remote(remote_name)
2345 })
2346 .await??;
2347
2348 Ok(proto::Ack {})
2349 }
2350
2351 async fn handle_show(
2352 this: Entity<Self>,
2353 envelope: TypedEnvelope<proto::GitShow>,
2354 mut cx: AsyncApp,
2355 ) -> Result<proto::GitCommitDetails> {
2356 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2357 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2358
2359 let commit = repository_handle
2360 .update(&mut cx, |repository_handle, _| {
2361 repository_handle.show(envelope.payload.commit)
2362 })
2363 .await??;
2364 Ok(proto::GitCommitDetails {
2365 sha: commit.sha.into(),
2366 message: commit.message.into(),
2367 commit_timestamp: commit.commit_timestamp,
2368 author_email: commit.author_email.into(),
2369 author_name: commit.author_name.into(),
2370 })
2371 }
2372
2373 async fn handle_load_commit_diff(
2374 this: Entity<Self>,
2375 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2376 mut cx: AsyncApp,
2377 ) -> Result<proto::LoadCommitDiffResponse> {
2378 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2379 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2380
2381 let commit_diff = repository_handle
2382 .update(&mut cx, |repository_handle, _| {
2383 repository_handle.load_commit_diff(envelope.payload.commit)
2384 })
2385 .await??;
2386 Ok(proto::LoadCommitDiffResponse {
2387 files: commit_diff
2388 .files
2389 .into_iter()
2390 .map(|file| proto::CommitFile {
2391 path: file.path.to_proto(),
2392 old_text: file.old_text,
2393 new_text: file.new_text,
2394 is_binary: file.is_binary,
2395 })
2396 .collect(),
2397 })
2398 }
2399
2400 async fn handle_file_history(
2401 this: Entity<Self>,
2402 envelope: TypedEnvelope<proto::GitFileHistory>,
2403 mut cx: AsyncApp,
2404 ) -> Result<proto::GitFileHistoryResponse> {
2405 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2406 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2407 let path = RepoPath::from_proto(&envelope.payload.path)?;
2408 let skip = envelope.payload.skip as usize;
2409 let limit = envelope.payload.limit.map(|l| l as usize);
2410
2411 let file_history = repository_handle
2412 .update(&mut cx, |repository_handle, _| {
2413 repository_handle.file_history_paginated(path, skip, limit)
2414 })
2415 .await??;
2416
2417 Ok(proto::GitFileHistoryResponse {
2418 entries: file_history
2419 .entries
2420 .into_iter()
2421 .map(|entry| proto::FileHistoryEntry {
2422 sha: entry.sha.to_string(),
2423 subject: entry.subject.to_string(),
2424 message: entry.message.to_string(),
2425 commit_timestamp: entry.commit_timestamp,
2426 author_name: entry.author_name.to_string(),
2427 author_email: entry.author_email.to_string(),
2428 })
2429 .collect(),
2430 path: file_history.path.to_proto(),
2431 })
2432 }
2433
2434 async fn handle_reset(
2435 this: Entity<Self>,
2436 envelope: TypedEnvelope<proto::GitReset>,
2437 mut cx: AsyncApp,
2438 ) -> Result<proto::Ack> {
2439 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2440 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2441
2442 let mode = match envelope.payload.mode() {
2443 git_reset::ResetMode::Soft => ResetMode::Soft,
2444 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2445 };
2446
2447 repository_handle
2448 .update(&mut cx, |repository_handle, cx| {
2449 repository_handle.reset(envelope.payload.commit, mode, cx)
2450 })
2451 .await??;
2452 Ok(proto::Ack {})
2453 }
2454
2455 async fn handle_checkout_files(
2456 this: Entity<Self>,
2457 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2458 mut cx: AsyncApp,
2459 ) -> Result<proto::Ack> {
2460 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2461 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2462 let paths = envelope
2463 .payload
2464 .paths
2465 .iter()
2466 .map(|s| RepoPath::from_proto(s))
2467 .collect::<Result<Vec<_>>>()?;
2468
2469 repository_handle
2470 .update(&mut cx, |repository_handle, cx| {
2471 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2472 })
2473 .await?;
2474 Ok(proto::Ack {})
2475 }
2476
2477 async fn handle_open_commit_message_buffer(
2478 this: Entity<Self>,
2479 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2480 mut cx: AsyncApp,
2481 ) -> Result<proto::OpenBufferResponse> {
2482 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2483 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2484 let buffer = repository
2485 .update(&mut cx, |repository, cx| {
2486 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2487 })
2488 .await?;
2489
2490 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
2491 this.update(&mut cx, |this, cx| {
2492 this.buffer_store.update(cx, |buffer_store, cx| {
2493 buffer_store
2494 .create_buffer_for_peer(
2495 &buffer,
2496 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2497 cx,
2498 )
2499 .detach_and_log_err(cx);
2500 })
2501 });
2502
2503 Ok(proto::OpenBufferResponse {
2504 buffer_id: buffer_id.to_proto(),
2505 })
2506 }
2507
2508 async fn handle_askpass(
2509 this: Entity<Self>,
2510 envelope: TypedEnvelope<proto::AskPassRequest>,
2511 mut cx: AsyncApp,
2512 ) -> Result<proto::AskPassResponse> {
2513 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2514 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2515
2516 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
2517 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2518 debug_panic!("no askpass found");
2519 anyhow::bail!("no askpass found");
2520 };
2521
2522 let response = askpass
2523 .ask_password(envelope.payload.prompt)
2524 .await
2525 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2526
2527 delegates
2528 .lock()
2529 .insert(envelope.payload.askpass_id, askpass);
2530
2531 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2532 Ok(proto::AskPassResponse {
2533 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2534 })
2535 }
2536
2537 async fn handle_check_for_pushed_commits(
2538 this: Entity<Self>,
2539 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2540 mut cx: AsyncApp,
2541 ) -> Result<proto::CheckForPushedCommitsResponse> {
2542 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2543 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2544
2545 let branches = repository_handle
2546 .update(&mut cx, |repository_handle, _| {
2547 repository_handle.check_for_pushed_commits()
2548 })
2549 .await??;
2550 Ok(proto::CheckForPushedCommitsResponse {
2551 pushed_to: branches
2552 .into_iter()
2553 .map(|commit| commit.to_string())
2554 .collect(),
2555 })
2556 }
2557
2558 async fn handle_git_diff(
2559 this: Entity<Self>,
2560 envelope: TypedEnvelope<proto::GitDiff>,
2561 mut cx: AsyncApp,
2562 ) -> Result<proto::GitDiffResponse> {
2563 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2564 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2565 let diff_type = match envelope.payload.diff_type() {
2566 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2567 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2568 };
2569
2570 let mut diff = repository_handle
2571 .update(&mut cx, |repository_handle, cx| {
2572 repository_handle.diff(diff_type, cx)
2573 })
2574 .await??;
2575 const ONE_MB: usize = 1_000_000;
2576 if diff.len() > ONE_MB {
2577 diff = diff.chars().take(ONE_MB).collect()
2578 }
2579
2580 Ok(proto::GitDiffResponse { diff })
2581 }
2582
2583 async fn handle_tree_diff(
2584 this: Entity<Self>,
2585 request: TypedEnvelope<proto::GetTreeDiff>,
2586 mut cx: AsyncApp,
2587 ) -> Result<proto::GetTreeDiffResponse> {
2588 let repository_id = RepositoryId(request.payload.repository_id);
2589 let diff_type = if request.payload.is_merge {
2590 DiffTreeType::MergeBase {
2591 base: request.payload.base.into(),
2592 head: request.payload.head.into(),
2593 }
2594 } else {
2595 DiffTreeType::Since {
2596 base: request.payload.base.into(),
2597 head: request.payload.head.into(),
2598 }
2599 };
2600
2601 let diff = this
2602 .update(&mut cx, |this, cx| {
2603 let repository = this.repositories().get(&repository_id)?;
2604 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2605 })
2606 .context("missing repository")?
2607 .await??;
2608
2609 Ok(proto::GetTreeDiffResponse {
2610 entries: diff
2611 .entries
2612 .into_iter()
2613 .map(|(path, status)| proto::TreeDiffStatus {
2614 path: path.as_ref().to_proto(),
2615 status: match status {
2616 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2617 TreeDiffStatus::Modified { .. } => {
2618 proto::tree_diff_status::Status::Modified.into()
2619 }
2620 TreeDiffStatus::Deleted { .. } => {
2621 proto::tree_diff_status::Status::Deleted.into()
2622 }
2623 },
2624 oid: match status {
2625 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2626 Some(old.to_string())
2627 }
2628 TreeDiffStatus::Added => None,
2629 },
2630 })
2631 .collect(),
2632 })
2633 }
2634
2635 async fn handle_get_blob_content(
2636 this: Entity<Self>,
2637 request: TypedEnvelope<proto::GetBlobContent>,
2638 mut cx: AsyncApp,
2639 ) -> Result<proto::GetBlobContentResponse> {
2640 let oid = git::Oid::from_str(&request.payload.oid)?;
2641 let repository_id = RepositoryId(request.payload.repository_id);
2642 let content = this
2643 .update(&mut cx, |this, cx| {
2644 let repository = this.repositories().get(&repository_id)?;
2645 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2646 })
2647 .context("missing repository")?
2648 .await?;
2649 Ok(proto::GetBlobContentResponse { content })
2650 }
2651
2652 async fn handle_open_unstaged_diff(
2653 this: Entity<Self>,
2654 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2655 mut cx: AsyncApp,
2656 ) -> Result<proto::OpenUnstagedDiffResponse> {
2657 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2658 let diff = this
2659 .update(&mut cx, |this, cx| {
2660 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2661 Some(this.open_unstaged_diff(buffer, cx))
2662 })
2663 .context("missing buffer")?
2664 .await?;
2665 this.update(&mut cx, |this, _| {
2666 let shared_diffs = this
2667 .shared_diffs
2668 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2669 .or_default();
2670 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2671 });
2672 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
2673 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2674 }
2675
2676 async fn handle_open_uncommitted_diff(
2677 this: Entity<Self>,
2678 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2679 mut cx: AsyncApp,
2680 ) -> Result<proto::OpenUncommittedDiffResponse> {
2681 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2682 let diff = this
2683 .update(&mut cx, |this, cx| {
2684 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2685 Some(this.open_uncommitted_diff(buffer, cx))
2686 })
2687 .context("missing buffer")?
2688 .await?;
2689 this.update(&mut cx, |this, _| {
2690 let shared_diffs = this
2691 .shared_diffs
2692 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2693 .or_default();
2694 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2695 });
2696 Ok(diff.read_with(&cx, |diff, cx| {
2697 use proto::open_uncommitted_diff_response::Mode;
2698
2699 let unstaged_diff = diff.secondary_diff();
2700 let index_snapshot = unstaged_diff.and_then(|diff| {
2701 let diff = diff.read(cx);
2702 diff.base_text_exists().then(|| diff.base_text(cx))
2703 });
2704
2705 let mode;
2706 let staged_text;
2707 let committed_text;
2708 if diff.base_text_exists() {
2709 let committed_snapshot = diff.base_text(cx);
2710 committed_text = Some(committed_snapshot.text());
2711 if let Some(index_text) = index_snapshot {
2712 if index_text.remote_id() == committed_snapshot.remote_id() {
2713 mode = Mode::IndexMatchesHead;
2714 staged_text = None;
2715 } else {
2716 mode = Mode::IndexAndHead;
2717 staged_text = Some(index_text.text());
2718 }
2719 } else {
2720 mode = Mode::IndexAndHead;
2721 staged_text = None;
2722 }
2723 } else {
2724 mode = Mode::IndexAndHead;
2725 committed_text = None;
2726 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2727 }
2728
2729 proto::OpenUncommittedDiffResponse {
2730 committed_text,
2731 staged_text,
2732 mode: mode.into(),
2733 }
2734 }))
2735 }
2736
2737 async fn handle_update_diff_bases(
2738 this: Entity<Self>,
2739 request: TypedEnvelope<proto::UpdateDiffBases>,
2740 mut cx: AsyncApp,
2741 ) -> Result<()> {
2742 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2743 this.update(&mut cx, |this, cx| {
2744 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2745 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2746 {
2747 let buffer = buffer.read(cx).text_snapshot();
2748 diff_state.update(cx, |diff_state, cx| {
2749 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2750 })
2751 }
2752 });
2753 Ok(())
2754 }
2755
2756 async fn handle_blame_buffer(
2757 this: Entity<Self>,
2758 envelope: TypedEnvelope<proto::BlameBuffer>,
2759 mut cx: AsyncApp,
2760 ) -> Result<proto::BlameBufferResponse> {
2761 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2762 let version = deserialize_version(&envelope.payload.version);
2763 let buffer = this.read_with(&cx, |this, cx| {
2764 this.buffer_store.read(cx).get_existing(buffer_id)
2765 })?;
2766 buffer
2767 .update(&mut cx, |buffer, _| {
2768 buffer.wait_for_version(version.clone())
2769 })
2770 .await?;
2771 let blame = this
2772 .update(&mut cx, |this, cx| {
2773 this.blame_buffer(&buffer, Some(version), cx)
2774 })
2775 .await?;
2776 Ok(serialize_blame_buffer_response(blame))
2777 }
2778
2779 async fn handle_get_permalink_to_line(
2780 this: Entity<Self>,
2781 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2782 mut cx: AsyncApp,
2783 ) -> Result<proto::GetPermalinkToLineResponse> {
2784 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2785 // let version = deserialize_version(&envelope.payload.version);
2786 let selection = {
2787 let proto_selection = envelope
2788 .payload
2789 .selection
2790 .context("no selection to get permalink for defined")?;
2791 proto_selection.start as u32..proto_selection.end as u32
2792 };
2793 let buffer = this.read_with(&cx, |this, cx| {
2794 this.buffer_store.read(cx).get_existing(buffer_id)
2795 })?;
2796 let permalink = this
2797 .update(&mut cx, |this, cx| {
2798 this.get_permalink_to_line(&buffer, selection, cx)
2799 })
2800 .await?;
2801 Ok(proto::GetPermalinkToLineResponse {
2802 permalink: permalink.to_string(),
2803 })
2804 }
2805
2806 fn repository_for_request(
2807 this: &Entity<Self>,
2808 id: RepositoryId,
2809 cx: &mut AsyncApp,
2810 ) -> Result<Entity<Repository>> {
2811 this.read_with(cx, |this, _| {
2812 this.repositories
2813 .get(&id)
2814 .context("missing repository handle")
2815 .cloned()
2816 })
2817 }
2818
2819 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2820 self.repositories
2821 .iter()
2822 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2823 .collect()
2824 }
2825
2826 fn process_updated_entries(
2827 &self,
2828 worktree: &Entity<Worktree>,
2829 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2830 cx: &mut App,
2831 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2832 let path_style = worktree.read(cx).path_style();
2833 let mut repo_paths = self
2834 .repositories
2835 .values()
2836 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2837 .collect::<Vec<_>>();
2838 let mut entries: Vec<_> = updated_entries
2839 .iter()
2840 .map(|(path, _, _)| path.clone())
2841 .collect();
2842 entries.sort();
2843 let worktree = worktree.read(cx);
2844
2845 let entries = entries
2846 .into_iter()
2847 .map(|path| worktree.absolutize(&path))
2848 .collect::<Arc<[_]>>();
2849
2850 let executor = cx.background_executor().clone();
2851 cx.background_executor().spawn(async move {
2852 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2853 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2854 let mut tasks = FuturesOrdered::new();
2855 for (repo_path, repo) in repo_paths.into_iter().rev() {
2856 let entries = entries.clone();
2857 let task = executor.spawn(async move {
2858 // Find all repository paths that belong to this repo
2859 let mut ix = entries.partition_point(|path| path < &*repo_path);
2860 if ix == entries.len() {
2861 return None;
2862 };
2863
2864 let mut paths = Vec::new();
2865 // All paths prefixed by a given repo will constitute a continuous range.
2866 while let Some(path) = entries.get(ix)
2867 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2868 &repo_path, path, path_style,
2869 )
2870 {
2871 paths.push((repo_path, ix));
2872 ix += 1;
2873 }
2874 if paths.is_empty() {
2875 None
2876 } else {
2877 Some((repo, paths))
2878 }
2879 });
2880 tasks.push_back(task);
2881 }
2882
2883 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2884 let mut path_was_used = vec![false; entries.len()];
2885 let tasks = tasks.collect::<Vec<_>>().await;
2886 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2887 // We always want to assign a path to it's innermost repository.
2888 for t in tasks {
2889 let Some((repo, paths)) = t else {
2890 continue;
2891 };
2892 let entry = paths_by_git_repo.entry(repo).or_default();
2893 for (repo_path, ix) in paths {
2894 if path_was_used[ix] {
2895 continue;
2896 }
2897 path_was_used[ix] = true;
2898 entry.push(repo_path);
2899 }
2900 }
2901
2902 paths_by_git_repo
2903 })
2904 }
2905}
2906
2907impl BufferGitState {
2908 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2909 Self {
2910 unstaged_diff: Default::default(),
2911 uncommitted_diff: Default::default(),
2912 recalculate_diff_task: Default::default(),
2913 language: Default::default(),
2914 language_registry: Default::default(),
2915 recalculating_tx: postage::watch::channel_with(false).0,
2916 hunk_staging_operation_count: 0,
2917 hunk_staging_operation_count_as_of_write: 0,
2918 head_text: Default::default(),
2919 index_text: Default::default(),
2920 head_changed: Default::default(),
2921 index_changed: Default::default(),
2922 language_changed: Default::default(),
2923 conflict_updated_futures: Default::default(),
2924 conflict_set: Default::default(),
2925 reparse_conflict_markers_task: Default::default(),
2926 }
2927 }
2928
2929 #[ztracing::instrument(skip_all)]
2930 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2931 self.language = buffer.read(cx).language().cloned();
2932 self.language_changed = true;
2933 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2934 }
2935
2936 fn reparse_conflict_markers(
2937 &mut self,
2938 buffer: text::BufferSnapshot,
2939 cx: &mut Context<Self>,
2940 ) -> oneshot::Receiver<()> {
2941 let (tx, rx) = oneshot::channel();
2942
2943 let Some(conflict_set) = self
2944 .conflict_set
2945 .as_ref()
2946 .and_then(|conflict_set| conflict_set.upgrade())
2947 else {
2948 return rx;
2949 };
2950
2951 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2952 if conflict_set.has_conflict {
2953 Some(conflict_set.snapshot())
2954 } else {
2955 None
2956 }
2957 });
2958
2959 if let Some(old_snapshot) = old_snapshot {
2960 self.conflict_updated_futures.push(tx);
2961 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2962 let (snapshot, changed_range) = cx
2963 .background_spawn(async move {
2964 let new_snapshot = ConflictSet::parse(&buffer);
2965 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2966 (new_snapshot, changed_range)
2967 })
2968 .await;
2969 this.update(cx, |this, cx| {
2970 if let Some(conflict_set) = &this.conflict_set {
2971 conflict_set
2972 .update(cx, |conflict_set, cx| {
2973 conflict_set.set_snapshot(snapshot, changed_range, cx);
2974 })
2975 .ok();
2976 }
2977 let futures = std::mem::take(&mut this.conflict_updated_futures);
2978 for tx in futures {
2979 tx.send(()).ok();
2980 }
2981 })
2982 }))
2983 }
2984
2985 rx
2986 }
2987
2988 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2989 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2990 }
2991
2992 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2993 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2994 }
2995
2996 fn handle_base_texts_updated(
2997 &mut self,
2998 buffer: text::BufferSnapshot,
2999 message: proto::UpdateDiffBases,
3000 cx: &mut Context<Self>,
3001 ) {
3002 use proto::update_diff_bases::Mode;
3003
3004 let Some(mode) = Mode::from_i32(message.mode) else {
3005 return;
3006 };
3007
3008 let diff_bases_change = match mode {
3009 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3010 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3011 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3012 Mode::IndexAndHead => DiffBasesChange::SetEach {
3013 index: message.staged_text,
3014 head: message.committed_text,
3015 },
3016 };
3017
3018 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3019 }
3020
3021 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3022 if *self.recalculating_tx.borrow() {
3023 let mut rx = self.recalculating_tx.subscribe();
3024 Some(async move {
3025 loop {
3026 let is_recalculating = rx.recv().await;
3027 if is_recalculating != Some(true) {
3028 break;
3029 }
3030 }
3031 })
3032 } else {
3033 None
3034 }
3035 }
3036
3037 fn diff_bases_changed(
3038 &mut self,
3039 buffer: text::BufferSnapshot,
3040 diff_bases_change: Option<DiffBasesChange>,
3041 cx: &mut Context<Self>,
3042 ) {
3043 match diff_bases_change {
3044 Some(DiffBasesChange::SetIndex(index)) => {
3045 self.index_text = index.map(|mut index| {
3046 text::LineEnding::normalize(&mut index);
3047 Arc::from(index.as_str())
3048 });
3049 self.index_changed = true;
3050 }
3051 Some(DiffBasesChange::SetHead(head)) => {
3052 self.head_text = head.map(|mut head| {
3053 text::LineEnding::normalize(&mut head);
3054 Arc::from(head.as_str())
3055 });
3056 self.head_changed = true;
3057 }
3058 Some(DiffBasesChange::SetBoth(text)) => {
3059 let text = text.map(|mut text| {
3060 text::LineEnding::normalize(&mut text);
3061 Arc::from(text.as_str())
3062 });
3063 self.head_text = text.clone();
3064 self.index_text = text;
3065 self.head_changed = true;
3066 self.index_changed = true;
3067 }
3068 Some(DiffBasesChange::SetEach { index, head }) => {
3069 self.index_text = index.map(|mut index| {
3070 text::LineEnding::normalize(&mut index);
3071 Arc::from(index.as_str())
3072 });
3073 self.index_changed = true;
3074 self.head_text = head.map(|mut head| {
3075 text::LineEnding::normalize(&mut head);
3076 Arc::from(head.as_str())
3077 });
3078 self.head_changed = true;
3079 }
3080 None => {}
3081 }
3082
3083 self.recalculate_diffs(buffer, cx)
3084 }
3085
3086 #[ztracing::instrument(skip_all)]
3087 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3088 *self.recalculating_tx.borrow_mut() = true;
3089
3090 let language = self.language.clone();
3091 let language_registry = self.language_registry.clone();
3092 let unstaged_diff = self.unstaged_diff();
3093 let uncommitted_diff = self.uncommitted_diff();
3094 let head = self.head_text.clone();
3095 let index = self.index_text.clone();
3096 let index_changed = self.index_changed;
3097 let head_changed = self.head_changed;
3098 let language_changed = self.language_changed;
3099 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3100 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3101 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3102 (None, None) => true,
3103 _ => false,
3104 };
3105 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3106 log::debug!(
3107 "start recalculating diffs for buffer {}",
3108 buffer.remote_id()
3109 );
3110
3111 let mut new_unstaged_diff = None;
3112 if let Some(unstaged_diff) = &unstaged_diff {
3113 new_unstaged_diff = Some(
3114 cx.update(|cx| {
3115 unstaged_diff.read(cx).update_diff(
3116 buffer.clone(),
3117 index,
3118 index_changed,
3119 language.clone(),
3120 cx,
3121 )
3122 })
3123 .await,
3124 );
3125 }
3126
3127 // Dropping BufferDiff can be expensive, so yield back to the event loop
3128 // for a bit
3129 yield_now().await;
3130
3131 let mut new_uncommitted_diff = None;
3132 if let Some(uncommitted_diff) = &uncommitted_diff {
3133 new_uncommitted_diff = if index_matches_head {
3134 new_unstaged_diff.clone()
3135 } else {
3136 Some(
3137 cx.update(|cx| {
3138 uncommitted_diff.read(cx).update_diff(
3139 buffer.clone(),
3140 head,
3141 head_changed,
3142 language.clone(),
3143 cx,
3144 )
3145 })
3146 .await,
3147 )
3148 }
3149 }
3150
3151 // Dropping BufferDiff can be expensive, so yield back to the event loop
3152 // for a bit
3153 yield_now().await;
3154
3155 let cancel = this.update(cx, |this, _| {
3156 // This checks whether all pending stage/unstage operations
3157 // have quiesced (i.e. both the corresponding write and the
3158 // read of that write have completed). If not, then we cancel
3159 // this recalculation attempt to avoid invalidating pending
3160 // state too quickly; another recalculation will come along
3161 // later and clear the pending state once the state of the index has settled.
3162 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3163 *this.recalculating_tx.borrow_mut() = false;
3164 true
3165 } else {
3166 false
3167 }
3168 })?;
3169 if cancel {
3170 log::debug!(
3171 concat!(
3172 "aborting recalculating diffs for buffer {}",
3173 "due to subsequent hunk operations",
3174 ),
3175 buffer.remote_id()
3176 );
3177 return Ok(());
3178 }
3179
3180 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3181 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3182 {
3183 let task = unstaged_diff.update(cx, |diff, cx| {
3184 if language_changed {
3185 diff.language_changed(language.clone(), language_registry.clone(), cx);
3186 }
3187 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3188 });
3189 Some(task.await)
3190 } else {
3191 None
3192 };
3193
3194 yield_now().await;
3195
3196 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3197 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3198 {
3199 uncommitted_diff
3200 .update(cx, |diff, cx| {
3201 if language_changed {
3202 diff.language_changed(language, language_registry, cx);
3203 }
3204 diff.set_snapshot_with_secondary(
3205 new_uncommitted_diff,
3206 &buffer,
3207 unstaged_changed_range.flatten(),
3208 true,
3209 cx,
3210 )
3211 })
3212 .await;
3213 }
3214
3215 log::debug!(
3216 "finished recalculating diffs for buffer {}",
3217 buffer.remote_id()
3218 );
3219
3220 if let Some(this) = this.upgrade() {
3221 this.update(cx, |this, _| {
3222 this.index_changed = false;
3223 this.head_changed = false;
3224 this.language_changed = false;
3225 *this.recalculating_tx.borrow_mut() = false;
3226 });
3227 }
3228
3229 Ok(())
3230 }));
3231 }
3232}
3233
3234fn make_remote_delegate(
3235 this: Entity<GitStore>,
3236 project_id: u64,
3237 repository_id: RepositoryId,
3238 askpass_id: u64,
3239 cx: &mut AsyncApp,
3240) -> AskPassDelegate {
3241 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3242 this.update(cx, |this, cx| {
3243 let Some((client, _)) = this.downstream_client() else {
3244 return;
3245 };
3246 let response = client.request(proto::AskPassRequest {
3247 project_id,
3248 repository_id: repository_id.to_proto(),
3249 askpass_id,
3250 prompt,
3251 });
3252 cx.spawn(async move |_, _| {
3253 let mut response = response.await?.response;
3254 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3255 .ok();
3256 response.zeroize();
3257 anyhow::Ok(())
3258 })
3259 .detach_and_log_err(cx);
3260 });
3261 })
3262}
3263
3264impl RepositoryId {
3265 pub fn to_proto(self) -> u64 {
3266 self.0
3267 }
3268
3269 pub fn from_proto(id: u64) -> Self {
3270 RepositoryId(id)
3271 }
3272}
3273
3274impl RepositorySnapshot {
3275 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3276 Self {
3277 id,
3278 statuses_by_path: Default::default(),
3279 work_directory_abs_path,
3280 branch: None,
3281 head_commit: None,
3282 scan_id: 0,
3283 merge: Default::default(),
3284 remote_origin_url: None,
3285 remote_upstream_url: None,
3286 stash_entries: Default::default(),
3287 path_style,
3288 }
3289 }
3290
3291 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3292 proto::UpdateRepository {
3293 branch_summary: self.branch.as_ref().map(branch_to_proto),
3294 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3295 updated_statuses: self
3296 .statuses_by_path
3297 .iter()
3298 .map(|entry| entry.to_proto())
3299 .collect(),
3300 removed_statuses: Default::default(),
3301 current_merge_conflicts: self
3302 .merge
3303 .conflicted_paths
3304 .iter()
3305 .map(|repo_path| repo_path.to_proto())
3306 .collect(),
3307 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3308 project_id,
3309 id: self.id.to_proto(),
3310 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3311 entry_ids: vec![self.id.to_proto()],
3312 scan_id: self.scan_id,
3313 is_last_update: true,
3314 stash_entries: self
3315 .stash_entries
3316 .entries
3317 .iter()
3318 .map(stash_to_proto)
3319 .collect(),
3320 remote_upstream_url: self.remote_upstream_url.clone(),
3321 remote_origin_url: self.remote_origin_url.clone(),
3322 }
3323 }
3324
3325 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3326 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3327 let mut removed_statuses: Vec<String> = Vec::new();
3328
3329 let mut new_statuses = self.statuses_by_path.iter().peekable();
3330 let mut old_statuses = old.statuses_by_path.iter().peekable();
3331
3332 let mut current_new_entry = new_statuses.next();
3333 let mut current_old_entry = old_statuses.next();
3334 loop {
3335 match (current_new_entry, current_old_entry) {
3336 (Some(new_entry), Some(old_entry)) => {
3337 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3338 Ordering::Less => {
3339 updated_statuses.push(new_entry.to_proto());
3340 current_new_entry = new_statuses.next();
3341 }
3342 Ordering::Equal => {
3343 if new_entry.status != old_entry.status {
3344 updated_statuses.push(new_entry.to_proto());
3345 }
3346 current_old_entry = old_statuses.next();
3347 current_new_entry = new_statuses.next();
3348 }
3349 Ordering::Greater => {
3350 removed_statuses.push(old_entry.repo_path.to_proto());
3351 current_old_entry = old_statuses.next();
3352 }
3353 }
3354 }
3355 (None, Some(old_entry)) => {
3356 removed_statuses.push(old_entry.repo_path.to_proto());
3357 current_old_entry = old_statuses.next();
3358 }
3359 (Some(new_entry), None) => {
3360 updated_statuses.push(new_entry.to_proto());
3361 current_new_entry = new_statuses.next();
3362 }
3363 (None, None) => break,
3364 }
3365 }
3366
3367 proto::UpdateRepository {
3368 branch_summary: self.branch.as_ref().map(branch_to_proto),
3369 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3370 updated_statuses,
3371 removed_statuses,
3372 current_merge_conflicts: self
3373 .merge
3374 .conflicted_paths
3375 .iter()
3376 .map(|path| path.to_proto())
3377 .collect(),
3378 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3379 project_id,
3380 id: self.id.to_proto(),
3381 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3382 entry_ids: vec![],
3383 scan_id: self.scan_id,
3384 is_last_update: true,
3385 stash_entries: self
3386 .stash_entries
3387 .entries
3388 .iter()
3389 .map(stash_to_proto)
3390 .collect(),
3391 remote_upstream_url: self.remote_upstream_url.clone(),
3392 remote_origin_url: self.remote_origin_url.clone(),
3393 }
3394 }
3395
3396 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3397 self.statuses_by_path.iter().cloned()
3398 }
3399
3400 pub fn status_summary(&self) -> GitSummary {
3401 self.statuses_by_path.summary().item_summary
3402 }
3403
3404 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3405 self.statuses_by_path
3406 .get(&PathKey(path.as_ref().clone()), ())
3407 .cloned()
3408 }
3409
3410 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3411 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3412 }
3413
3414 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3415 self.path_style
3416 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3417 .unwrap()
3418 .into()
3419 }
3420
3421 #[inline]
3422 fn abs_path_to_repo_path_inner(
3423 work_directory_abs_path: &Path,
3424 abs_path: &Path,
3425 path_style: PathStyle,
3426 ) -> Option<RepoPath> {
3427 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3428 Some(RepoPath::from_rel_path(&rel_path))
3429 }
3430
3431 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3432 self.merge.conflicted_paths.contains(repo_path)
3433 }
3434
3435 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3436 let had_conflict_on_last_merge_head_change =
3437 self.merge.conflicted_paths.contains(repo_path);
3438 let has_conflict_currently = self
3439 .status_for_path(repo_path)
3440 .is_some_and(|entry| entry.status.is_conflicted());
3441 had_conflict_on_last_merge_head_change || has_conflict_currently
3442 }
3443
3444 /// This is the name that will be displayed in the repository selector for this repository.
3445 pub fn display_name(&self) -> SharedString {
3446 self.work_directory_abs_path
3447 .file_name()
3448 .unwrap_or_default()
3449 .to_string_lossy()
3450 .to_string()
3451 .into()
3452 }
3453}
3454
3455pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3456 proto::StashEntry {
3457 oid: entry.oid.as_bytes().to_vec(),
3458 message: entry.message.clone(),
3459 branch: entry.branch.clone(),
3460 index: entry.index as u64,
3461 timestamp: entry.timestamp,
3462 }
3463}
3464
3465pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3466 Ok(StashEntry {
3467 oid: Oid::from_bytes(&entry.oid)?,
3468 message: entry.message.clone(),
3469 index: entry.index as usize,
3470 branch: entry.branch.clone(),
3471 timestamp: entry.timestamp,
3472 })
3473}
3474
3475impl MergeDetails {
3476 async fn load(
3477 backend: &Arc<dyn GitRepository>,
3478 status: &SumTree<StatusEntry>,
3479 prev_snapshot: &RepositorySnapshot,
3480 ) -> Result<(MergeDetails, bool)> {
3481 log::debug!("load merge details");
3482 let message = backend.merge_message().await;
3483 let heads = backend
3484 .revparse_batch(vec![
3485 "MERGE_HEAD".into(),
3486 "CHERRY_PICK_HEAD".into(),
3487 "REBASE_HEAD".into(),
3488 "REVERT_HEAD".into(),
3489 "APPLY_HEAD".into(),
3490 ])
3491 .await
3492 .log_err()
3493 .unwrap_or_default()
3494 .into_iter()
3495 .map(|opt| opt.map(SharedString::from))
3496 .collect::<Vec<_>>();
3497 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3498 let conflicted_paths = if merge_heads_changed {
3499 let current_conflicted_paths = TreeSet::from_ordered_entries(
3500 status
3501 .iter()
3502 .filter(|entry| entry.status.is_conflicted())
3503 .map(|entry| entry.repo_path.clone()),
3504 );
3505
3506 // It can happen that we run a scan while a lengthy merge is in progress
3507 // that will eventually result in conflicts, but before those conflicts
3508 // are reported by `git status`. Since for the moment we only care about
3509 // the merge heads state for the purposes of tracking conflicts, don't update
3510 // this state until we see some conflicts.
3511 if heads.iter().any(Option::is_some)
3512 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3513 && current_conflicted_paths.is_empty()
3514 {
3515 log::debug!("not updating merge heads because no conflicts found");
3516 return Ok((
3517 MergeDetails {
3518 message: message.map(SharedString::from),
3519 ..prev_snapshot.merge.clone()
3520 },
3521 false,
3522 ));
3523 }
3524
3525 current_conflicted_paths
3526 } else {
3527 prev_snapshot.merge.conflicted_paths.clone()
3528 };
3529 let details = MergeDetails {
3530 conflicted_paths,
3531 message: message.map(SharedString::from),
3532 heads,
3533 };
3534 Ok((details, merge_heads_changed))
3535 }
3536}
3537
3538impl Repository {
3539 pub fn snapshot(&self) -> RepositorySnapshot {
3540 self.snapshot.clone()
3541 }
3542
3543 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3544 self.pending_ops.iter().cloned()
3545 }
3546
3547 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3548 self.pending_ops.summary().clone()
3549 }
3550
3551 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3552 self.pending_ops
3553 .get(&PathKey(path.as_ref().clone()), ())
3554 .cloned()
3555 }
3556
3557 fn local(
3558 id: RepositoryId,
3559 work_directory_abs_path: Arc<Path>,
3560 dot_git_abs_path: Arc<Path>,
3561 project_environment: WeakEntity<ProjectEnvironment>,
3562 fs: Arc<dyn Fs>,
3563 git_store: WeakEntity<GitStore>,
3564 cx: &mut Context<Self>,
3565 ) -> Self {
3566 let snapshot =
3567 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3568 let state = cx
3569 .spawn(async move |_, cx| {
3570 LocalRepositoryState::new(
3571 work_directory_abs_path,
3572 dot_git_abs_path,
3573 project_environment,
3574 fs,
3575 cx,
3576 )
3577 .await
3578 .map_err(|err| err.to_string())
3579 })
3580 .shared();
3581 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3582 let state = cx
3583 .spawn(async move |_, _| {
3584 let state = state.await?;
3585 Ok(RepositoryState::Local(state))
3586 })
3587 .shared();
3588
3589 Repository {
3590 this: cx.weak_entity(),
3591 git_store,
3592 snapshot,
3593 pending_ops: Default::default(),
3594 repository_state: state,
3595 commit_message_buffer: None,
3596 askpass_delegates: Default::default(),
3597 paths_needing_status_update: Default::default(),
3598 latest_askpass_id: 0,
3599 job_sender,
3600 job_id: 0,
3601 active_jobs: Default::default(),
3602 }
3603 }
3604
3605 fn remote(
3606 id: RepositoryId,
3607 work_directory_abs_path: Arc<Path>,
3608 path_style: PathStyle,
3609 project_id: ProjectId,
3610 client: AnyProtoClient,
3611 git_store: WeakEntity<GitStore>,
3612 cx: &mut Context<Self>,
3613 ) -> Self {
3614 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3615 let repository_state = RemoteRepositoryState { project_id, client };
3616 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3617 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3618 Self {
3619 this: cx.weak_entity(),
3620 snapshot,
3621 commit_message_buffer: None,
3622 git_store,
3623 pending_ops: Default::default(),
3624 paths_needing_status_update: Default::default(),
3625 job_sender,
3626 repository_state,
3627 askpass_delegates: Default::default(),
3628 latest_askpass_id: 0,
3629 active_jobs: Default::default(),
3630 job_id: 0,
3631 }
3632 }
3633
3634 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3635 self.git_store.upgrade()
3636 }
3637
3638 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3639 let this = cx.weak_entity();
3640 let git_store = self.git_store.clone();
3641 let _ = self.send_keyed_job(
3642 Some(GitJobKey::ReloadBufferDiffBases),
3643 None,
3644 |state, mut cx| async move {
3645 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3646 log::error!("tried to recompute diffs for a non-local repository");
3647 return Ok(());
3648 };
3649
3650 let Some(this) = this.upgrade() else {
3651 return Ok(());
3652 };
3653
3654 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3655 git_store.update(cx, |git_store, cx| {
3656 git_store
3657 .diffs
3658 .iter()
3659 .filter_map(|(buffer_id, diff_state)| {
3660 let buffer_store = git_store.buffer_store.read(cx);
3661 let buffer = buffer_store.get(*buffer_id)?;
3662 let file = File::from_dyn(buffer.read(cx).file())?;
3663 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3664 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3665 log::debug!(
3666 "start reload diff bases for repo path {}",
3667 repo_path.as_unix_str()
3668 );
3669 diff_state.update(cx, |diff_state, _| {
3670 let has_unstaged_diff = diff_state
3671 .unstaged_diff
3672 .as_ref()
3673 .is_some_and(|diff| diff.is_upgradable());
3674 let has_uncommitted_diff = diff_state
3675 .uncommitted_diff
3676 .as_ref()
3677 .is_some_and(|set| set.is_upgradable());
3678
3679 Some((
3680 buffer,
3681 repo_path,
3682 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3683 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3684 ))
3685 })
3686 })
3687 .collect::<Vec<_>>()
3688 })
3689 })?;
3690
3691 let buffer_diff_base_changes = cx
3692 .background_spawn(async move {
3693 let mut changes = Vec::new();
3694 for (buffer, repo_path, current_index_text, current_head_text) in
3695 &repo_diff_state_updates
3696 {
3697 let index_text = if current_index_text.is_some() {
3698 backend.load_index_text(repo_path.clone()).await
3699 } else {
3700 None
3701 };
3702 let head_text = if current_head_text.is_some() {
3703 backend.load_committed_text(repo_path.clone()).await
3704 } else {
3705 None
3706 };
3707
3708 let change =
3709 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3710 (Some(current_index), Some(current_head)) => {
3711 let index_changed =
3712 index_text.as_deref() != current_index.as_deref();
3713 let head_changed =
3714 head_text.as_deref() != current_head.as_deref();
3715 if index_changed && head_changed {
3716 if index_text == head_text {
3717 Some(DiffBasesChange::SetBoth(head_text))
3718 } else {
3719 Some(DiffBasesChange::SetEach {
3720 index: index_text,
3721 head: head_text,
3722 })
3723 }
3724 } else if index_changed {
3725 Some(DiffBasesChange::SetIndex(index_text))
3726 } else if head_changed {
3727 Some(DiffBasesChange::SetHead(head_text))
3728 } else {
3729 None
3730 }
3731 }
3732 (Some(current_index), None) => {
3733 let index_changed =
3734 index_text.as_deref() != current_index.as_deref();
3735 index_changed
3736 .then_some(DiffBasesChange::SetIndex(index_text))
3737 }
3738 (None, Some(current_head)) => {
3739 let head_changed =
3740 head_text.as_deref() != current_head.as_deref();
3741 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3742 }
3743 (None, None) => None,
3744 };
3745
3746 changes.push((buffer.clone(), change))
3747 }
3748 changes
3749 })
3750 .await;
3751
3752 git_store.update(&mut cx, |git_store, cx| {
3753 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3754 let buffer_snapshot = buffer.read(cx).text_snapshot();
3755 let buffer_id = buffer_snapshot.remote_id();
3756 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3757 continue;
3758 };
3759
3760 let downstream_client = git_store.downstream_client();
3761 diff_state.update(cx, |diff_state, cx| {
3762 use proto::update_diff_bases::Mode;
3763
3764 if let Some((diff_bases_change, (client, project_id))) =
3765 diff_bases_change.clone().zip(downstream_client)
3766 {
3767 let (staged_text, committed_text, mode) = match diff_bases_change {
3768 DiffBasesChange::SetIndex(index) => {
3769 (index, None, Mode::IndexOnly)
3770 }
3771 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3772 DiffBasesChange::SetEach { index, head } => {
3773 (index, head, Mode::IndexAndHead)
3774 }
3775 DiffBasesChange::SetBoth(text) => {
3776 (None, text, Mode::IndexMatchesHead)
3777 }
3778 };
3779 client
3780 .send(proto::UpdateDiffBases {
3781 project_id: project_id.to_proto(),
3782 buffer_id: buffer_id.to_proto(),
3783 staged_text,
3784 committed_text,
3785 mode: mode as i32,
3786 })
3787 .log_err();
3788 }
3789
3790 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3791 });
3792 }
3793 })
3794 },
3795 );
3796 }
3797
3798 pub fn send_job<F, Fut, R>(
3799 &mut self,
3800 status: Option<SharedString>,
3801 job: F,
3802 ) -> oneshot::Receiver<R>
3803 where
3804 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3805 Fut: Future<Output = R> + 'static,
3806 R: Send + 'static,
3807 {
3808 self.send_keyed_job(None, status, job)
3809 }
3810
3811 fn send_keyed_job<F, Fut, R>(
3812 &mut self,
3813 key: Option<GitJobKey>,
3814 status: Option<SharedString>,
3815 job: F,
3816 ) -> oneshot::Receiver<R>
3817 where
3818 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3819 Fut: Future<Output = R> + 'static,
3820 R: Send + 'static,
3821 {
3822 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3823 let job_id = post_inc(&mut self.job_id);
3824 let this = self.this.clone();
3825 self.job_sender
3826 .unbounded_send(GitJob {
3827 key,
3828 job: Box::new(move |state, cx: &mut AsyncApp| {
3829 let job = job(state, cx.clone());
3830 cx.spawn(async move |cx| {
3831 if let Some(s) = status.clone() {
3832 this.update(cx, |this, cx| {
3833 this.active_jobs.insert(
3834 job_id,
3835 JobInfo {
3836 start: Instant::now(),
3837 message: s.clone(),
3838 },
3839 );
3840
3841 cx.notify();
3842 })
3843 .ok();
3844 }
3845 let result = job.await;
3846
3847 this.update(cx, |this, cx| {
3848 this.active_jobs.remove(&job_id);
3849 cx.notify();
3850 })
3851 .ok();
3852
3853 result_tx.send(result).ok();
3854 })
3855 }),
3856 })
3857 .ok();
3858 result_rx
3859 }
3860
3861 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3862 let Some(git_store) = self.git_store.upgrade() else {
3863 return;
3864 };
3865 let entity = cx.entity();
3866 git_store.update(cx, |git_store, cx| {
3867 let Some((&id, _)) = git_store
3868 .repositories
3869 .iter()
3870 .find(|(_, handle)| *handle == &entity)
3871 else {
3872 return;
3873 };
3874 git_store.active_repo_id = Some(id);
3875 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3876 });
3877 }
3878
3879 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3880 self.snapshot.status()
3881 }
3882
3883 pub fn cached_stash(&self) -> GitStash {
3884 self.snapshot.stash_entries.clone()
3885 }
3886
3887 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3888 let git_store = self.git_store.upgrade()?;
3889 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3890 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3891 let abs_path = SanitizedPath::new(&abs_path);
3892 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3893 Some(ProjectPath {
3894 worktree_id: worktree.read(cx).id(),
3895 path: relative_path,
3896 })
3897 }
3898
3899 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3900 let git_store = self.git_store.upgrade()?;
3901 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3902 let abs_path = worktree_store.absolutize(path, cx)?;
3903 self.snapshot.abs_path_to_repo_path(&abs_path)
3904 }
3905
3906 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3907 other
3908 .read(cx)
3909 .snapshot
3910 .work_directory_abs_path
3911 .starts_with(&self.snapshot.work_directory_abs_path)
3912 }
3913
3914 pub fn open_commit_buffer(
3915 &mut self,
3916 languages: Option<Arc<LanguageRegistry>>,
3917 buffer_store: Entity<BufferStore>,
3918 cx: &mut Context<Self>,
3919 ) -> Task<Result<Entity<Buffer>>> {
3920 let id = self.id;
3921 if let Some(buffer) = self.commit_message_buffer.clone() {
3922 return Task::ready(Ok(buffer));
3923 }
3924 let this = cx.weak_entity();
3925
3926 let rx = self.send_job(None, move |state, mut cx| async move {
3927 let Some(this) = this.upgrade() else {
3928 bail!("git store was dropped");
3929 };
3930 match state {
3931 RepositoryState::Local(..) => {
3932 this.update(&mut cx, |_, cx| {
3933 Self::open_local_commit_buffer(languages, buffer_store, cx)
3934 })
3935 .await
3936 }
3937 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3938 let request = client.request(proto::OpenCommitMessageBuffer {
3939 project_id: project_id.0,
3940 repository_id: id.to_proto(),
3941 });
3942 let response = request.await.context("requesting to open commit buffer")?;
3943 let buffer_id = BufferId::new(response.buffer_id)?;
3944 let buffer = buffer_store
3945 .update(&mut cx, |buffer_store, cx| {
3946 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3947 })
3948 .await?;
3949 if let Some(language_registry) = languages {
3950 let git_commit_language =
3951 language_registry.language_for_name("Git Commit").await?;
3952 buffer.update(&mut cx, |buffer, cx| {
3953 buffer.set_language(Some(git_commit_language), cx);
3954 });
3955 }
3956 this.update(&mut cx, |this, _| {
3957 this.commit_message_buffer = Some(buffer.clone());
3958 });
3959 Ok(buffer)
3960 }
3961 }
3962 });
3963
3964 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3965 }
3966
3967 fn open_local_commit_buffer(
3968 language_registry: Option<Arc<LanguageRegistry>>,
3969 buffer_store: Entity<BufferStore>,
3970 cx: &mut Context<Self>,
3971 ) -> Task<Result<Entity<Buffer>>> {
3972 cx.spawn(async move |repository, cx| {
3973 let git_commit_language = match language_registry {
3974 Some(language_registry) => {
3975 Some(language_registry.language_for_name("Git Commit").await?)
3976 }
3977 None => None,
3978 };
3979 let buffer = buffer_store
3980 .update(cx, |buffer_store, cx| {
3981 buffer_store.create_buffer(git_commit_language, false, cx)
3982 })
3983 .await?;
3984
3985 repository.update(cx, |repository, _| {
3986 repository.commit_message_buffer = Some(buffer.clone());
3987 })?;
3988 Ok(buffer)
3989 })
3990 }
3991
3992 pub fn checkout_files(
3993 &mut self,
3994 commit: &str,
3995 paths: Vec<RepoPath>,
3996 cx: &mut Context<Self>,
3997 ) -> Task<Result<()>> {
3998 let commit = commit.to_string();
3999 let id = self.id;
4000
4001 self.spawn_job_with_tracking(
4002 paths.clone(),
4003 pending_op::GitStatus::Reverted,
4004 cx,
4005 async move |this, cx| {
4006 this.update(cx, |this, _cx| {
4007 this.send_job(
4008 Some(format!("git checkout {}", commit).into()),
4009 move |git_repo, _| async move {
4010 match git_repo {
4011 RepositoryState::Local(LocalRepositoryState {
4012 backend,
4013 environment,
4014 ..
4015 }) => {
4016 backend
4017 .checkout_files(commit, paths, environment.clone())
4018 .await
4019 }
4020 RepositoryState::Remote(RemoteRepositoryState {
4021 project_id,
4022 client,
4023 }) => {
4024 client
4025 .request(proto::GitCheckoutFiles {
4026 project_id: project_id.0,
4027 repository_id: id.to_proto(),
4028 commit,
4029 paths: paths
4030 .into_iter()
4031 .map(|p| p.to_proto())
4032 .collect(),
4033 })
4034 .await?;
4035
4036 Ok(())
4037 }
4038 }
4039 },
4040 )
4041 })?
4042 .await?
4043 },
4044 )
4045 }
4046
4047 pub fn reset(
4048 &mut self,
4049 commit: String,
4050 reset_mode: ResetMode,
4051 _cx: &mut App,
4052 ) -> oneshot::Receiver<Result<()>> {
4053 let id = self.id;
4054
4055 self.send_job(None, move |git_repo, _| async move {
4056 match git_repo {
4057 RepositoryState::Local(LocalRepositoryState {
4058 backend,
4059 environment,
4060 ..
4061 }) => backend.reset(commit, reset_mode, environment).await,
4062 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4063 client
4064 .request(proto::GitReset {
4065 project_id: project_id.0,
4066 repository_id: id.to_proto(),
4067 commit,
4068 mode: match reset_mode {
4069 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4070 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4071 },
4072 })
4073 .await?;
4074
4075 Ok(())
4076 }
4077 }
4078 })
4079 }
4080
4081 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4082 let id = self.id;
4083 self.send_job(None, move |git_repo, _cx| async move {
4084 match git_repo {
4085 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4086 backend.show(commit).await
4087 }
4088 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4089 let resp = client
4090 .request(proto::GitShow {
4091 project_id: project_id.0,
4092 repository_id: id.to_proto(),
4093 commit,
4094 })
4095 .await?;
4096
4097 Ok(CommitDetails {
4098 sha: resp.sha.into(),
4099 message: resp.message.into(),
4100 commit_timestamp: resp.commit_timestamp,
4101 author_email: resp.author_email.into(),
4102 author_name: resp.author_name.into(),
4103 })
4104 }
4105 }
4106 })
4107 }
4108
4109 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4110 let id = self.id;
4111 self.send_job(None, move |git_repo, cx| async move {
4112 match git_repo {
4113 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4114 backend.load_commit(commit, cx).await
4115 }
4116 RepositoryState::Remote(RemoteRepositoryState {
4117 client, project_id, ..
4118 }) => {
4119 let response = client
4120 .request(proto::LoadCommitDiff {
4121 project_id: project_id.0,
4122 repository_id: id.to_proto(),
4123 commit,
4124 })
4125 .await?;
4126 Ok(CommitDiff {
4127 files: response
4128 .files
4129 .into_iter()
4130 .map(|file| {
4131 Ok(CommitFile {
4132 path: RepoPath::from_proto(&file.path)?,
4133 old_text: file.old_text,
4134 new_text: file.new_text,
4135 is_binary: file.is_binary,
4136 })
4137 })
4138 .collect::<Result<Vec<_>>>()?,
4139 })
4140 }
4141 }
4142 })
4143 }
4144
4145 pub fn file_history(
4146 &mut self,
4147 path: RepoPath,
4148 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4149 self.file_history_paginated(path, 0, None)
4150 }
4151
4152 pub fn file_history_paginated(
4153 &mut self,
4154 path: RepoPath,
4155 skip: usize,
4156 limit: Option<usize>,
4157 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4158 let id = self.id;
4159 self.send_job(None, move |git_repo, _cx| async move {
4160 match git_repo {
4161 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4162 backend.file_history_paginated(path, skip, limit).await
4163 }
4164 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4165 let response = client
4166 .request(proto::GitFileHistory {
4167 project_id: project_id.0,
4168 repository_id: id.to_proto(),
4169 path: path.to_proto(),
4170 skip: skip as u64,
4171 limit: limit.map(|l| l as u64),
4172 })
4173 .await?;
4174 Ok(git::repository::FileHistory {
4175 entries: response
4176 .entries
4177 .into_iter()
4178 .map(|entry| git::repository::FileHistoryEntry {
4179 sha: entry.sha.into(),
4180 subject: entry.subject.into(),
4181 message: entry.message.into(),
4182 commit_timestamp: entry.commit_timestamp,
4183 author_name: entry.author_name.into(),
4184 author_email: entry.author_email.into(),
4185 })
4186 .collect(),
4187 path: RepoPath::from_proto(&response.path)?,
4188 })
4189 }
4190 }
4191 })
4192 }
4193
4194 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4195 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4196 }
4197
4198 fn save_buffers<'a>(
4199 &self,
4200 entries: impl IntoIterator<Item = &'a RepoPath>,
4201 cx: &mut Context<Self>,
4202 ) -> Vec<Task<anyhow::Result<()>>> {
4203 let mut save_futures = Vec::new();
4204 if let Some(buffer_store) = self.buffer_store(cx) {
4205 buffer_store.update(cx, |buffer_store, cx| {
4206 for path in entries {
4207 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4208 continue;
4209 };
4210 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4211 && buffer
4212 .read(cx)
4213 .file()
4214 .is_some_and(|file| file.disk_state().exists())
4215 && buffer.read(cx).has_unsaved_edits()
4216 {
4217 save_futures.push(buffer_store.save_buffer(buffer, cx));
4218 }
4219 }
4220 })
4221 }
4222 save_futures
4223 }
4224
4225 pub fn stage_entries(
4226 &mut self,
4227 entries: Vec<RepoPath>,
4228 cx: &mut Context<Self>,
4229 ) -> Task<anyhow::Result<()>> {
4230 self.stage_or_unstage_entries(true, entries, cx)
4231 }
4232
4233 pub fn unstage_entries(
4234 &mut self,
4235 entries: Vec<RepoPath>,
4236 cx: &mut Context<Self>,
4237 ) -> Task<anyhow::Result<()>> {
4238 self.stage_or_unstage_entries(false, entries, cx)
4239 }
4240
4241 fn stage_or_unstage_entries(
4242 &mut self,
4243 stage: bool,
4244 entries: Vec<RepoPath>,
4245 cx: &mut Context<Self>,
4246 ) -> Task<anyhow::Result<()>> {
4247 if entries.is_empty() {
4248 return Task::ready(Ok(()));
4249 }
4250 let Some(git_store) = self.git_store.upgrade() else {
4251 return Task::ready(Ok(()));
4252 };
4253 let id = self.id;
4254 let save_tasks = self.save_buffers(&entries, cx);
4255 let paths = entries
4256 .iter()
4257 .map(|p| p.as_unix_str())
4258 .collect::<Vec<_>>()
4259 .join(" ");
4260 let status = if stage {
4261 format!("git add {paths}")
4262 } else {
4263 format!("git reset {paths}")
4264 };
4265 let job_key = GitJobKey::WriteIndex(entries.clone());
4266
4267 self.spawn_job_with_tracking(
4268 entries.clone(),
4269 if stage {
4270 pending_op::GitStatus::Staged
4271 } else {
4272 pending_op::GitStatus::Unstaged
4273 },
4274 cx,
4275 async move |this, cx| {
4276 for save_task in save_tasks {
4277 save_task.await?;
4278 }
4279
4280 this.update(cx, |this, cx| {
4281 let weak_this = cx.weak_entity();
4282 this.send_keyed_job(
4283 Some(job_key),
4284 Some(status.into()),
4285 move |git_repo, mut cx| async move {
4286 let hunk_staging_operation_counts = weak_this
4287 .update(&mut cx, |this, cx| {
4288 let mut hunk_staging_operation_counts = HashMap::default();
4289 for path in &entries {
4290 let Some(project_path) =
4291 this.repo_path_to_project_path(path, cx)
4292 else {
4293 continue;
4294 };
4295 let Some(buffer) = git_store
4296 .read(cx)
4297 .buffer_store
4298 .read(cx)
4299 .get_by_path(&project_path)
4300 else {
4301 continue;
4302 };
4303 let Some(diff_state) = git_store
4304 .read(cx)
4305 .diffs
4306 .get(&buffer.read(cx).remote_id())
4307 .cloned()
4308 else {
4309 continue;
4310 };
4311 let Some(uncommitted_diff) =
4312 diff_state.read(cx).uncommitted_diff.as_ref().and_then(
4313 |uncommitted_diff| uncommitted_diff.upgrade(),
4314 )
4315 else {
4316 continue;
4317 };
4318 let buffer_snapshot = buffer.read(cx).text_snapshot();
4319 let file_exists = buffer
4320 .read(cx)
4321 .file()
4322 .is_some_and(|file| file.disk_state().exists());
4323 let hunk_staging_operation_count =
4324 diff_state.update(cx, |diff_state, cx| {
4325 uncommitted_diff.update(
4326 cx,
4327 |uncommitted_diff, cx| {
4328 uncommitted_diff
4329 .stage_or_unstage_all_hunks(
4330 stage,
4331 &buffer_snapshot,
4332 file_exists,
4333 cx,
4334 );
4335 },
4336 );
4337
4338 diff_state.hunk_staging_operation_count += 1;
4339 diff_state.hunk_staging_operation_count
4340 });
4341 hunk_staging_operation_counts.insert(
4342 diff_state.downgrade(),
4343 hunk_staging_operation_count,
4344 );
4345 }
4346 hunk_staging_operation_counts
4347 })
4348 .unwrap_or_default();
4349
4350 let result = match git_repo {
4351 RepositoryState::Local(LocalRepositoryState {
4352 backend,
4353 environment,
4354 ..
4355 }) => {
4356 if stage {
4357 backend.stage_paths(entries, environment.clone()).await
4358 } else {
4359 backend.unstage_paths(entries, environment.clone()).await
4360 }
4361 }
4362 RepositoryState::Remote(RemoteRepositoryState {
4363 project_id,
4364 client,
4365 }) => {
4366 if stage {
4367 client
4368 .request(proto::Stage {
4369 project_id: project_id.0,
4370 repository_id: id.to_proto(),
4371 paths: entries
4372 .into_iter()
4373 .map(|repo_path| repo_path.to_proto())
4374 .collect(),
4375 })
4376 .await
4377 .context("sending stage request")
4378 .map(|_| ())
4379 } else {
4380 client
4381 .request(proto::Unstage {
4382 project_id: project_id.0,
4383 repository_id: id.to_proto(),
4384 paths: entries
4385 .into_iter()
4386 .map(|repo_path| repo_path.to_proto())
4387 .collect(),
4388 })
4389 .await
4390 .context("sending unstage request")
4391 .map(|_| ())
4392 }
4393 }
4394 };
4395
4396 for (diff_state, hunk_staging_operation_count) in
4397 hunk_staging_operation_counts
4398 {
4399 diff_state
4400 .update(&mut cx, |diff_state, cx| {
4401 if result.is_ok() {
4402 diff_state.hunk_staging_operation_count_as_of_write =
4403 hunk_staging_operation_count;
4404 } else if let Some(uncommitted_diff) =
4405 &diff_state.uncommitted_diff
4406 {
4407 uncommitted_diff
4408 .update(cx, |uncommitted_diff, cx| {
4409 uncommitted_diff.clear_pending_hunks(cx);
4410 })
4411 .ok();
4412 }
4413 })
4414 .ok();
4415 }
4416
4417 result
4418 },
4419 )
4420 })?
4421 .await?
4422 },
4423 )
4424 }
4425
4426 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4427 let to_stage = self
4428 .cached_status()
4429 .filter_map(|entry| {
4430 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4431 if ops.staging() || ops.staged() {
4432 None
4433 } else {
4434 Some(entry.repo_path)
4435 }
4436 } else if entry.status.staging().is_fully_staged() {
4437 None
4438 } else {
4439 Some(entry.repo_path)
4440 }
4441 })
4442 .collect();
4443 self.stage_or_unstage_entries(true, to_stage, cx)
4444 }
4445
4446 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4447 let to_unstage = self
4448 .cached_status()
4449 .filter_map(|entry| {
4450 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4451 if !ops.staging() && !ops.staged() {
4452 None
4453 } else {
4454 Some(entry.repo_path)
4455 }
4456 } else if entry.status.staging().is_fully_unstaged() {
4457 None
4458 } else {
4459 Some(entry.repo_path)
4460 }
4461 })
4462 .collect();
4463 self.stage_or_unstage_entries(false, to_unstage, cx)
4464 }
4465
4466 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4467 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4468
4469 self.stash_entries(to_stash, cx)
4470 }
4471
4472 pub fn stash_entries(
4473 &mut self,
4474 entries: Vec<RepoPath>,
4475 cx: &mut Context<Self>,
4476 ) -> Task<anyhow::Result<()>> {
4477 let id = self.id;
4478
4479 cx.spawn(async move |this, cx| {
4480 this.update(cx, |this, _| {
4481 this.send_job(None, move |git_repo, _cx| async move {
4482 match git_repo {
4483 RepositoryState::Local(LocalRepositoryState {
4484 backend,
4485 environment,
4486 ..
4487 }) => backend.stash_paths(entries, environment).await,
4488 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4489 client
4490 .request(proto::Stash {
4491 project_id: project_id.0,
4492 repository_id: id.to_proto(),
4493 paths: entries
4494 .into_iter()
4495 .map(|repo_path| repo_path.to_proto())
4496 .collect(),
4497 })
4498 .await
4499 .context("sending stash request")?;
4500 Ok(())
4501 }
4502 }
4503 })
4504 })?
4505 .await??;
4506 Ok(())
4507 })
4508 }
4509
4510 pub fn stash_pop(
4511 &mut self,
4512 index: Option<usize>,
4513 cx: &mut Context<Self>,
4514 ) -> Task<anyhow::Result<()>> {
4515 let id = self.id;
4516 cx.spawn(async move |this, cx| {
4517 this.update(cx, |this, _| {
4518 this.send_job(None, move |git_repo, _cx| async move {
4519 match git_repo {
4520 RepositoryState::Local(LocalRepositoryState {
4521 backend,
4522 environment,
4523 ..
4524 }) => backend.stash_pop(index, environment).await,
4525 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4526 client
4527 .request(proto::StashPop {
4528 project_id: project_id.0,
4529 repository_id: id.to_proto(),
4530 stash_index: index.map(|i| i as u64),
4531 })
4532 .await
4533 .context("sending stash pop request")?;
4534 Ok(())
4535 }
4536 }
4537 })
4538 })?
4539 .await??;
4540 Ok(())
4541 })
4542 }
4543
4544 pub fn stash_apply(
4545 &mut self,
4546 index: Option<usize>,
4547 cx: &mut Context<Self>,
4548 ) -> Task<anyhow::Result<()>> {
4549 let id = self.id;
4550 cx.spawn(async move |this, cx| {
4551 this.update(cx, |this, _| {
4552 this.send_job(None, move |git_repo, _cx| async move {
4553 match git_repo {
4554 RepositoryState::Local(LocalRepositoryState {
4555 backend,
4556 environment,
4557 ..
4558 }) => backend.stash_apply(index, environment).await,
4559 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4560 client
4561 .request(proto::StashApply {
4562 project_id: project_id.0,
4563 repository_id: id.to_proto(),
4564 stash_index: index.map(|i| i as u64),
4565 })
4566 .await
4567 .context("sending stash apply request")?;
4568 Ok(())
4569 }
4570 }
4571 })
4572 })?
4573 .await??;
4574 Ok(())
4575 })
4576 }
4577
4578 pub fn stash_drop(
4579 &mut self,
4580 index: Option<usize>,
4581 cx: &mut Context<Self>,
4582 ) -> oneshot::Receiver<anyhow::Result<()>> {
4583 let id = self.id;
4584 let updates_tx = self
4585 .git_store()
4586 .and_then(|git_store| match &git_store.read(cx).state {
4587 GitStoreState::Local { downstream, .. } => downstream
4588 .as_ref()
4589 .map(|downstream| downstream.updates_tx.clone()),
4590 _ => None,
4591 });
4592 let this = cx.weak_entity();
4593 self.send_job(None, move |git_repo, mut cx| async move {
4594 match git_repo {
4595 RepositoryState::Local(LocalRepositoryState {
4596 backend,
4597 environment,
4598 ..
4599 }) => {
4600 // TODO would be nice to not have to do this manually
4601 let result = backend.stash_drop(index, environment).await;
4602 if result.is_ok()
4603 && let Ok(stash_entries) = backend.stash_entries().await
4604 {
4605 let snapshot = this.update(&mut cx, |this, cx| {
4606 this.snapshot.stash_entries = stash_entries;
4607 cx.emit(RepositoryEvent::StashEntriesChanged);
4608 this.snapshot.clone()
4609 })?;
4610 if let Some(updates_tx) = updates_tx {
4611 updates_tx
4612 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4613 .ok();
4614 }
4615 }
4616
4617 result
4618 }
4619 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4620 client
4621 .request(proto::StashDrop {
4622 project_id: project_id.0,
4623 repository_id: id.to_proto(),
4624 stash_index: index.map(|i| i as u64),
4625 })
4626 .await
4627 .context("sending stash pop request")?;
4628 Ok(())
4629 }
4630 }
4631 })
4632 }
4633
4634 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4635 let id = self.id;
4636 self.send_job(
4637 Some(format!("git hook {}", hook.as_str()).into()),
4638 move |git_repo, _cx| async move {
4639 match git_repo {
4640 RepositoryState::Local(LocalRepositoryState {
4641 backend,
4642 environment,
4643 ..
4644 }) => backend.run_hook(hook, environment.clone()).await,
4645 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4646 client
4647 .request(proto::RunGitHook {
4648 project_id: project_id.0,
4649 repository_id: id.to_proto(),
4650 hook: hook.to_proto(),
4651 })
4652 .await?;
4653
4654 Ok(())
4655 }
4656 }
4657 },
4658 )
4659 }
4660
4661 pub fn commit(
4662 &mut self,
4663 message: SharedString,
4664 name_and_email: Option<(SharedString, SharedString)>,
4665 options: CommitOptions,
4666 askpass: AskPassDelegate,
4667 cx: &mut App,
4668 ) -> oneshot::Receiver<Result<()>> {
4669 let id = self.id;
4670 let askpass_delegates = self.askpass_delegates.clone();
4671 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4672
4673 let rx = self.run_hook(RunHook::PreCommit, cx);
4674
4675 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4676 rx.await??;
4677
4678 match git_repo {
4679 RepositoryState::Local(LocalRepositoryState {
4680 backend,
4681 environment,
4682 ..
4683 }) => {
4684 backend
4685 .commit(message, name_and_email, options, askpass, environment)
4686 .await
4687 }
4688 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4689 askpass_delegates.lock().insert(askpass_id, askpass);
4690 let _defer = util::defer(|| {
4691 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4692 debug_assert!(askpass_delegate.is_some());
4693 });
4694 let (name, email) = name_and_email.unzip();
4695 client
4696 .request(proto::Commit {
4697 project_id: project_id.0,
4698 repository_id: id.to_proto(),
4699 message: String::from(message),
4700 name: name.map(String::from),
4701 email: email.map(String::from),
4702 options: Some(proto::commit::CommitOptions {
4703 amend: options.amend,
4704 signoff: options.signoff,
4705 }),
4706 askpass_id,
4707 })
4708 .await
4709 .context("sending commit request")?;
4710
4711 Ok(())
4712 }
4713 }
4714 })
4715 }
4716
4717 pub fn fetch(
4718 &mut self,
4719 fetch_options: FetchOptions,
4720 askpass: AskPassDelegate,
4721 _cx: &mut App,
4722 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4723 let askpass_delegates = self.askpass_delegates.clone();
4724 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4725 let id = self.id;
4726
4727 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4728 match git_repo {
4729 RepositoryState::Local(LocalRepositoryState {
4730 backend,
4731 environment,
4732 ..
4733 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4734 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4735 askpass_delegates.lock().insert(askpass_id, askpass);
4736 let _defer = util::defer(|| {
4737 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4738 debug_assert!(askpass_delegate.is_some());
4739 });
4740
4741 let response = client
4742 .request(proto::Fetch {
4743 project_id: project_id.0,
4744 repository_id: id.to_proto(),
4745 askpass_id,
4746 remote: fetch_options.to_proto(),
4747 })
4748 .await
4749 .context("sending fetch request")?;
4750
4751 Ok(RemoteCommandOutput {
4752 stdout: response.stdout,
4753 stderr: response.stderr,
4754 })
4755 }
4756 }
4757 })
4758 }
4759
4760 pub fn push(
4761 &mut self,
4762 branch: SharedString,
4763 remote_branch: SharedString,
4764 remote: SharedString,
4765 options: Option<PushOptions>,
4766 askpass: AskPassDelegate,
4767 cx: &mut Context<Self>,
4768 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4769 let askpass_delegates = self.askpass_delegates.clone();
4770 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4771 let id = self.id;
4772
4773 let args = options
4774 .map(|option| match option {
4775 PushOptions::SetUpstream => " --set-upstream",
4776 PushOptions::Force => " --force-with-lease",
4777 })
4778 .unwrap_or("");
4779
4780 let updates_tx = self
4781 .git_store()
4782 .and_then(|git_store| match &git_store.read(cx).state {
4783 GitStoreState::Local { downstream, .. } => downstream
4784 .as_ref()
4785 .map(|downstream| downstream.updates_tx.clone()),
4786 _ => None,
4787 });
4788
4789 let this = cx.weak_entity();
4790 self.send_job(
4791 Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
4792 move |git_repo, mut cx| async move {
4793 match git_repo {
4794 RepositoryState::Local(LocalRepositoryState {
4795 backend,
4796 environment,
4797 ..
4798 }) => {
4799 let result = backend
4800 .push(
4801 branch.to_string(),
4802 remote_branch.to_string(),
4803 remote.to_string(),
4804 options,
4805 askpass,
4806 environment.clone(),
4807 cx.clone(),
4808 )
4809 .await;
4810 // TODO would be nice to not have to do this manually
4811 if result.is_ok() {
4812 let branches = backend.branches().await?;
4813 let branch = branches.into_iter().find(|branch| branch.is_head);
4814 log::info!("head branch after scan is {branch:?}");
4815 let snapshot = this.update(&mut cx, |this, cx| {
4816 this.snapshot.branch = branch;
4817 cx.emit(RepositoryEvent::BranchChanged);
4818 this.snapshot.clone()
4819 })?;
4820 if let Some(updates_tx) = updates_tx {
4821 updates_tx
4822 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4823 .ok();
4824 }
4825 }
4826 result
4827 }
4828 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4829 askpass_delegates.lock().insert(askpass_id, askpass);
4830 let _defer = util::defer(|| {
4831 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4832 debug_assert!(askpass_delegate.is_some());
4833 });
4834 let response = client
4835 .request(proto::Push {
4836 project_id: project_id.0,
4837 repository_id: id.to_proto(),
4838 askpass_id,
4839 branch_name: branch.to_string(),
4840 remote_branch_name: remote_branch.to_string(),
4841 remote_name: remote.to_string(),
4842 options: options.map(|options| match options {
4843 PushOptions::Force => proto::push::PushOptions::Force,
4844 PushOptions::SetUpstream => {
4845 proto::push::PushOptions::SetUpstream
4846 }
4847 }
4848 as i32),
4849 })
4850 .await
4851 .context("sending push request")?;
4852
4853 Ok(RemoteCommandOutput {
4854 stdout: response.stdout,
4855 stderr: response.stderr,
4856 })
4857 }
4858 }
4859 },
4860 )
4861 }
4862
4863 pub fn pull(
4864 &mut self,
4865 branch: Option<SharedString>,
4866 remote: SharedString,
4867 rebase: bool,
4868 askpass: AskPassDelegate,
4869 _cx: &mut App,
4870 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4871 let askpass_delegates = self.askpass_delegates.clone();
4872 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4873 let id = self.id;
4874
4875 let mut status = "git pull".to_string();
4876 if rebase {
4877 status.push_str(" --rebase");
4878 }
4879 status.push_str(&format!(" {}", remote));
4880 if let Some(b) = &branch {
4881 status.push_str(&format!(" {}", b));
4882 }
4883
4884 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4885 match git_repo {
4886 RepositoryState::Local(LocalRepositoryState {
4887 backend,
4888 environment,
4889 ..
4890 }) => {
4891 backend
4892 .pull(
4893 branch.as_ref().map(|b| b.to_string()),
4894 remote.to_string(),
4895 rebase,
4896 askpass,
4897 environment.clone(),
4898 cx,
4899 )
4900 .await
4901 }
4902 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4903 askpass_delegates.lock().insert(askpass_id, askpass);
4904 let _defer = util::defer(|| {
4905 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4906 debug_assert!(askpass_delegate.is_some());
4907 });
4908 let response = client
4909 .request(proto::Pull {
4910 project_id: project_id.0,
4911 repository_id: id.to_proto(),
4912 askpass_id,
4913 rebase,
4914 branch_name: branch.as_ref().map(|b| b.to_string()),
4915 remote_name: remote.to_string(),
4916 })
4917 .await
4918 .context("sending pull request")?;
4919
4920 Ok(RemoteCommandOutput {
4921 stdout: response.stdout,
4922 stderr: response.stderr,
4923 })
4924 }
4925 }
4926 })
4927 }
4928
4929 fn spawn_set_index_text_job(
4930 &mut self,
4931 path: RepoPath,
4932 content: Option<String>,
4933 hunk_staging_operation_count: Option<usize>,
4934 cx: &mut Context<Self>,
4935 ) -> oneshot::Receiver<anyhow::Result<()>> {
4936 let id = self.id;
4937 let this = cx.weak_entity();
4938 let git_store = self.git_store.clone();
4939 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4940 self.send_keyed_job(
4941 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4942 None,
4943 move |git_repo, mut cx| async move {
4944 log::debug!(
4945 "start updating index text for buffer {}",
4946 path.as_unix_str()
4947 );
4948
4949 match git_repo {
4950 RepositoryState::Local(LocalRepositoryState {
4951 fs,
4952 backend,
4953 environment,
4954 ..
4955 }) => {
4956 let executable = match fs.metadata(&abs_path).await {
4957 Ok(Some(meta)) => meta.is_executable,
4958 Ok(None) => false,
4959 Err(_err) => false,
4960 };
4961 backend
4962 .set_index_text(path.clone(), content, environment.clone(), executable)
4963 .await?;
4964 }
4965 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4966 client
4967 .request(proto::SetIndexText {
4968 project_id: project_id.0,
4969 repository_id: id.to_proto(),
4970 path: path.to_proto(),
4971 text: content,
4972 })
4973 .await?;
4974 }
4975 }
4976 log::debug!(
4977 "finish updating index text for buffer {}",
4978 path.as_unix_str()
4979 );
4980
4981 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4982 let project_path = this
4983 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4984 .ok()
4985 .flatten();
4986 git_store
4987 .update(&mut cx, |git_store, cx| {
4988 let buffer_id = git_store
4989 .buffer_store
4990 .read(cx)
4991 .get_by_path(&project_path?)?
4992 .read(cx)
4993 .remote_id();
4994 let diff_state = git_store.diffs.get(&buffer_id)?;
4995 diff_state.update(cx, |diff_state, _| {
4996 diff_state.hunk_staging_operation_count_as_of_write =
4997 hunk_staging_operation_count;
4998 });
4999 Some(())
5000 })
5001 .context("Git store dropped")?;
5002 }
5003 Ok(())
5004 },
5005 )
5006 }
5007
5008 pub fn create_remote(
5009 &mut self,
5010 remote_name: String,
5011 remote_url: String,
5012 ) -> oneshot::Receiver<Result<()>> {
5013 let id = self.id;
5014 self.send_job(
5015 Some(format!("git remote add {remote_name} {remote_url}").into()),
5016 move |repo, _cx| async move {
5017 match repo {
5018 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5019 backend.create_remote(remote_name, remote_url).await
5020 }
5021 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5022 client
5023 .request(proto::GitCreateRemote {
5024 project_id: project_id.0,
5025 repository_id: id.to_proto(),
5026 remote_name,
5027 remote_url,
5028 })
5029 .await?;
5030
5031 Ok(())
5032 }
5033 }
5034 },
5035 )
5036 }
5037
5038 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5039 let id = self.id;
5040 self.send_job(
5041 Some(format!("git remove remote {remote_name}").into()),
5042 move |repo, _cx| async move {
5043 match repo {
5044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5045 backend.remove_remote(remote_name).await
5046 }
5047 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5048 client
5049 .request(proto::GitRemoveRemote {
5050 project_id: project_id.0,
5051 repository_id: id.to_proto(),
5052 remote_name,
5053 })
5054 .await?;
5055
5056 Ok(())
5057 }
5058 }
5059 },
5060 )
5061 }
5062
5063 pub fn get_remotes(
5064 &mut self,
5065 branch_name: Option<String>,
5066 is_push: bool,
5067 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5068 let id = self.id;
5069 self.send_job(None, move |repo, _cx| async move {
5070 match repo {
5071 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5072 let remote = if let Some(branch_name) = branch_name {
5073 if is_push {
5074 backend.get_push_remote(branch_name).await?
5075 } else {
5076 backend.get_branch_remote(branch_name).await?
5077 }
5078 } else {
5079 None
5080 };
5081
5082 match remote {
5083 Some(remote) => Ok(vec![remote]),
5084 None => backend.get_all_remotes().await,
5085 }
5086 }
5087 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5088 let response = client
5089 .request(proto::GetRemotes {
5090 project_id: project_id.0,
5091 repository_id: id.to_proto(),
5092 branch_name,
5093 is_push,
5094 })
5095 .await?;
5096
5097 let remotes = response
5098 .remotes
5099 .into_iter()
5100 .map(|remotes| Remote {
5101 name: remotes.name.into(),
5102 })
5103 .collect();
5104
5105 Ok(remotes)
5106 }
5107 }
5108 })
5109 }
5110
5111 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5112 let id = self.id;
5113 self.send_job(None, move |repo, _| async move {
5114 match repo {
5115 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5116 backend.branches().await
5117 }
5118 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5119 let response = client
5120 .request(proto::GitGetBranches {
5121 project_id: project_id.0,
5122 repository_id: id.to_proto(),
5123 })
5124 .await?;
5125
5126 let branches = response
5127 .branches
5128 .into_iter()
5129 .map(|branch| proto_to_branch(&branch))
5130 .collect();
5131
5132 Ok(branches)
5133 }
5134 }
5135 })
5136 }
5137
5138 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5139 let id = self.id;
5140 self.send_job(None, move |repo, _| async move {
5141 match repo {
5142 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5143 backend.worktrees().await
5144 }
5145 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5146 let response = client
5147 .request(proto::GitGetWorktrees {
5148 project_id: project_id.0,
5149 repository_id: id.to_proto(),
5150 })
5151 .await?;
5152
5153 let worktrees = response
5154 .worktrees
5155 .into_iter()
5156 .map(|worktree| proto_to_worktree(&worktree))
5157 .collect();
5158
5159 Ok(worktrees)
5160 }
5161 }
5162 })
5163 }
5164
5165 pub fn create_worktree(
5166 &mut self,
5167 name: String,
5168 path: PathBuf,
5169 commit: Option<String>,
5170 ) -> oneshot::Receiver<Result<()>> {
5171 let id = self.id;
5172 self.send_job(
5173 Some("git worktree add".into()),
5174 move |repo, _cx| async move {
5175 match repo {
5176 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5177 backend.create_worktree(name, path, commit).await
5178 }
5179 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5180 client
5181 .request(proto::GitCreateWorktree {
5182 project_id: project_id.0,
5183 repository_id: id.to_proto(),
5184 name,
5185 directory: path.to_string_lossy().to_string(),
5186 commit,
5187 })
5188 .await?;
5189
5190 Ok(())
5191 }
5192 }
5193 },
5194 )
5195 }
5196
5197 pub fn default_branch(
5198 &mut self,
5199 include_remote_name: bool,
5200 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5201 let id = self.id;
5202 self.send_job(None, move |repo, _| async move {
5203 match repo {
5204 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5205 backend.default_branch(include_remote_name).await
5206 }
5207 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5208 let response = client
5209 .request(proto::GetDefaultBranch {
5210 project_id: project_id.0,
5211 repository_id: id.to_proto(),
5212 })
5213 .await?;
5214
5215 anyhow::Ok(response.branch.map(SharedString::from))
5216 }
5217 }
5218 })
5219 }
5220
5221 pub fn diff_tree(
5222 &mut self,
5223 diff_type: DiffTreeType,
5224 _cx: &App,
5225 ) -> oneshot::Receiver<Result<TreeDiff>> {
5226 let repository_id = self.snapshot.id;
5227 self.send_job(None, move |repo, _cx| async move {
5228 match repo {
5229 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5230 backend.diff_tree(diff_type).await
5231 }
5232 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5233 let response = client
5234 .request(proto::GetTreeDiff {
5235 project_id: project_id.0,
5236 repository_id: repository_id.0,
5237 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5238 base: diff_type.base().to_string(),
5239 head: diff_type.head().to_string(),
5240 })
5241 .await?;
5242
5243 let entries = response
5244 .entries
5245 .into_iter()
5246 .filter_map(|entry| {
5247 let status = match entry.status() {
5248 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5249 proto::tree_diff_status::Status::Modified => {
5250 TreeDiffStatus::Modified {
5251 old: git::Oid::from_str(
5252 &entry.oid.context("missing oid").log_err()?,
5253 )
5254 .log_err()?,
5255 }
5256 }
5257 proto::tree_diff_status::Status::Deleted => {
5258 TreeDiffStatus::Deleted {
5259 old: git::Oid::from_str(
5260 &entry.oid.context("missing oid").log_err()?,
5261 )
5262 .log_err()?,
5263 }
5264 }
5265 };
5266 Some((
5267 RepoPath::from_rel_path(
5268 &RelPath::from_proto(&entry.path).log_err()?,
5269 ),
5270 status,
5271 ))
5272 })
5273 .collect();
5274
5275 Ok(TreeDiff { entries })
5276 }
5277 }
5278 })
5279 }
5280
5281 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5282 let id = self.id;
5283 self.send_job(None, move |repo, _cx| async move {
5284 match repo {
5285 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5286 backend.diff(diff_type).await
5287 }
5288 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5289 let response = client
5290 .request(proto::GitDiff {
5291 project_id: project_id.0,
5292 repository_id: id.to_proto(),
5293 diff_type: match diff_type {
5294 DiffType::HeadToIndex => {
5295 proto::git_diff::DiffType::HeadToIndex.into()
5296 }
5297 DiffType::HeadToWorktree => {
5298 proto::git_diff::DiffType::HeadToWorktree.into()
5299 }
5300 },
5301 })
5302 .await?;
5303
5304 Ok(response.diff)
5305 }
5306 }
5307 })
5308 }
5309
5310 pub fn create_branch(
5311 &mut self,
5312 branch_name: String,
5313 base_branch: Option<String>,
5314 ) -> oneshot::Receiver<Result<()>> {
5315 let id = self.id;
5316 let status_msg = if let Some(ref base) = base_branch {
5317 format!("git switch -c {branch_name} {base}").into()
5318 } else {
5319 format!("git switch -c {branch_name}").into()
5320 };
5321 self.send_job(Some(status_msg), move |repo, _cx| async move {
5322 match repo {
5323 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5324 backend.create_branch(branch_name, base_branch).await
5325 }
5326 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5327 client
5328 .request(proto::GitCreateBranch {
5329 project_id: project_id.0,
5330 repository_id: id.to_proto(),
5331 branch_name,
5332 })
5333 .await?;
5334
5335 Ok(())
5336 }
5337 }
5338 })
5339 }
5340
5341 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5342 let id = self.id;
5343 self.send_job(
5344 Some(format!("git switch {branch_name}").into()),
5345 move |repo, _cx| async move {
5346 match repo {
5347 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5348 backend.change_branch(branch_name).await
5349 }
5350 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5351 client
5352 .request(proto::GitChangeBranch {
5353 project_id: project_id.0,
5354 repository_id: id.to_proto(),
5355 branch_name,
5356 })
5357 .await?;
5358
5359 Ok(())
5360 }
5361 }
5362 },
5363 )
5364 }
5365
5366 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5367 let id = self.id;
5368 self.send_job(
5369 Some(format!("git branch -d {branch_name}").into()),
5370 move |repo, _cx| async move {
5371 match repo {
5372 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5373 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5374 client
5375 .request(proto::GitDeleteBranch {
5376 project_id: project_id.0,
5377 repository_id: id.to_proto(),
5378 branch_name,
5379 })
5380 .await?;
5381
5382 Ok(())
5383 }
5384 }
5385 },
5386 )
5387 }
5388
5389 pub fn rename_branch(
5390 &mut self,
5391 branch: String,
5392 new_name: String,
5393 ) -> oneshot::Receiver<Result<()>> {
5394 let id = self.id;
5395 self.send_job(
5396 Some(format!("git branch -m {branch} {new_name}").into()),
5397 move |repo, _cx| async move {
5398 match repo {
5399 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5400 backend.rename_branch(branch, new_name).await
5401 }
5402 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5403 client
5404 .request(proto::GitRenameBranch {
5405 project_id: project_id.0,
5406 repository_id: id.to_proto(),
5407 branch,
5408 new_name,
5409 })
5410 .await?;
5411
5412 Ok(())
5413 }
5414 }
5415 },
5416 )
5417 }
5418
5419 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5420 let id = self.id;
5421 self.send_job(None, move |repo, _cx| async move {
5422 match repo {
5423 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5424 backend.check_for_pushed_commit().await
5425 }
5426 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5427 let response = client
5428 .request(proto::CheckForPushedCommits {
5429 project_id: project_id.0,
5430 repository_id: id.to_proto(),
5431 })
5432 .await?;
5433
5434 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5435
5436 Ok(branches)
5437 }
5438 }
5439 })
5440 }
5441
5442 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5443 self.send_job(None, |repo, _cx| async move {
5444 match repo {
5445 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5446 backend.checkpoint().await
5447 }
5448 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5449 }
5450 })
5451 }
5452
5453 pub fn restore_checkpoint(
5454 &mut self,
5455 checkpoint: GitRepositoryCheckpoint,
5456 ) -> oneshot::Receiver<Result<()>> {
5457 self.send_job(None, move |repo, _cx| async move {
5458 match repo {
5459 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5460 backend.restore_checkpoint(checkpoint).await
5461 }
5462 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5463 }
5464 })
5465 }
5466
5467 pub(crate) fn apply_remote_update(
5468 &mut self,
5469 update: proto::UpdateRepository,
5470 cx: &mut Context<Self>,
5471 ) -> Result<()> {
5472 let conflicted_paths = TreeSet::from_ordered_entries(
5473 update
5474 .current_merge_conflicts
5475 .into_iter()
5476 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5477 );
5478 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5479 let new_head_commit = update
5480 .head_commit_details
5481 .as_ref()
5482 .map(proto_to_commit_details);
5483 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5484 cx.emit(RepositoryEvent::BranchChanged)
5485 }
5486 self.snapshot.branch = new_branch;
5487 self.snapshot.head_commit = new_head_commit;
5488
5489 self.snapshot.merge.conflicted_paths = conflicted_paths;
5490 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5491 let new_stash_entries = GitStash {
5492 entries: update
5493 .stash_entries
5494 .iter()
5495 .filter_map(|entry| proto_to_stash(entry).ok())
5496 .collect(),
5497 };
5498 if self.snapshot.stash_entries != new_stash_entries {
5499 cx.emit(RepositoryEvent::StashEntriesChanged)
5500 }
5501 self.snapshot.stash_entries = new_stash_entries;
5502 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5503 self.snapshot.remote_origin_url = update.remote_origin_url;
5504
5505 let edits = update
5506 .removed_statuses
5507 .into_iter()
5508 .filter_map(|path| {
5509 Some(sum_tree::Edit::Remove(PathKey(
5510 RelPath::from_proto(&path).log_err()?,
5511 )))
5512 })
5513 .chain(
5514 update
5515 .updated_statuses
5516 .into_iter()
5517 .filter_map(|updated_status| {
5518 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5519 }),
5520 )
5521 .collect::<Vec<_>>();
5522 if !edits.is_empty() {
5523 cx.emit(RepositoryEvent::StatusesChanged);
5524 }
5525 self.snapshot.statuses_by_path.edit(edits, ());
5526 if update.is_last_update {
5527 self.snapshot.scan_id = update.scan_id;
5528 }
5529 self.clear_pending_ops(cx);
5530 Ok(())
5531 }
5532
5533 pub fn compare_checkpoints(
5534 &mut self,
5535 left: GitRepositoryCheckpoint,
5536 right: GitRepositoryCheckpoint,
5537 ) -> oneshot::Receiver<Result<bool>> {
5538 self.send_job(None, move |repo, _cx| async move {
5539 match repo {
5540 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5541 backend.compare_checkpoints(left, right).await
5542 }
5543 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5544 }
5545 })
5546 }
5547
5548 pub fn diff_checkpoints(
5549 &mut self,
5550 base_checkpoint: GitRepositoryCheckpoint,
5551 target_checkpoint: GitRepositoryCheckpoint,
5552 ) -> oneshot::Receiver<Result<String>> {
5553 self.send_job(None, move |repo, _cx| async move {
5554 match repo {
5555 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5556 backend
5557 .diff_checkpoints(base_checkpoint, target_checkpoint)
5558 .await
5559 }
5560 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5561 }
5562 })
5563 }
5564
5565 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5566 let updated = SumTree::from_iter(
5567 self.pending_ops.iter().filter_map(|ops| {
5568 let inner_ops: Vec<PendingOp> =
5569 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5570 if inner_ops.is_empty() {
5571 None
5572 } else {
5573 Some(PendingOps {
5574 repo_path: ops.repo_path.clone(),
5575 ops: inner_ops,
5576 })
5577 }
5578 }),
5579 (),
5580 );
5581
5582 if updated != self.pending_ops {
5583 cx.emit(RepositoryEvent::PendingOpsChanged {
5584 pending_ops: self.pending_ops.clone(),
5585 })
5586 }
5587
5588 self.pending_ops = updated;
5589 }
5590
5591 fn schedule_scan(
5592 &mut self,
5593 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5594 cx: &mut Context<Self>,
5595 ) {
5596 let this = cx.weak_entity();
5597 let _ = self.send_keyed_job(
5598 Some(GitJobKey::ReloadGitState),
5599 None,
5600 |state, mut cx| async move {
5601 log::debug!("run scheduled git status scan");
5602
5603 let Some(this) = this.upgrade() else {
5604 return Ok(());
5605 };
5606 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5607 bail!("not a local repository")
5608 };
5609 let (snapshot, events) = this
5610 .update(&mut cx, |this, _| {
5611 this.paths_needing_status_update.clear();
5612 compute_snapshot(
5613 this.id,
5614 this.work_directory_abs_path.clone(),
5615 this.snapshot.clone(),
5616 backend.clone(),
5617 )
5618 })
5619 .await?;
5620 this.update(&mut cx, |this, cx| {
5621 this.snapshot = snapshot.clone();
5622 this.clear_pending_ops(cx);
5623 for event in events {
5624 cx.emit(event);
5625 }
5626 });
5627 if let Some(updates_tx) = updates_tx {
5628 updates_tx
5629 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5630 .ok();
5631 }
5632 Ok(())
5633 },
5634 );
5635 }
5636
5637 fn spawn_local_git_worker(
5638 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5639 cx: &mut Context<Self>,
5640 ) -> mpsc::UnboundedSender<GitJob> {
5641 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5642
5643 cx.spawn(async move |_, cx| {
5644 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5645 if let Some(git_hosting_provider_registry) =
5646 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
5647 {
5648 git_hosting_providers::register_additional_providers(
5649 git_hosting_provider_registry,
5650 state.backend.clone(),
5651 )
5652 .await;
5653 }
5654 let state = RepositoryState::Local(state);
5655 let mut jobs = VecDeque::new();
5656 loop {
5657 while let Ok(Some(next_job)) = job_rx.try_next() {
5658 jobs.push_back(next_job);
5659 }
5660
5661 if let Some(job) = jobs.pop_front() {
5662 if let Some(current_key) = &job.key
5663 && jobs
5664 .iter()
5665 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5666 {
5667 continue;
5668 }
5669 (job.job)(state.clone(), cx).await;
5670 } else if let Some(job) = job_rx.next().await {
5671 jobs.push_back(job);
5672 } else {
5673 break;
5674 }
5675 }
5676 anyhow::Ok(())
5677 })
5678 .detach_and_log_err(cx);
5679
5680 job_tx
5681 }
5682
5683 fn spawn_remote_git_worker(
5684 state: RemoteRepositoryState,
5685 cx: &mut Context<Self>,
5686 ) -> mpsc::UnboundedSender<GitJob> {
5687 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5688
5689 cx.spawn(async move |_, cx| {
5690 let state = RepositoryState::Remote(state);
5691 let mut jobs = VecDeque::new();
5692 loop {
5693 while let Ok(Some(next_job)) = job_rx.try_next() {
5694 jobs.push_back(next_job);
5695 }
5696
5697 if let Some(job) = jobs.pop_front() {
5698 if let Some(current_key) = &job.key
5699 && jobs
5700 .iter()
5701 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5702 {
5703 continue;
5704 }
5705 (job.job)(state.clone(), cx).await;
5706 } else if let Some(job) = job_rx.next().await {
5707 jobs.push_back(job);
5708 } else {
5709 break;
5710 }
5711 }
5712 anyhow::Ok(())
5713 })
5714 .detach_and_log_err(cx);
5715
5716 job_tx
5717 }
5718
5719 fn load_staged_text(
5720 &mut self,
5721 buffer_id: BufferId,
5722 repo_path: RepoPath,
5723 cx: &App,
5724 ) -> Task<Result<Option<String>>> {
5725 let rx = self.send_job(None, move |state, _| async move {
5726 match state {
5727 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5728 anyhow::Ok(backend.load_index_text(repo_path).await)
5729 }
5730 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5731 let response = client
5732 .request(proto::OpenUnstagedDiff {
5733 project_id: project_id.to_proto(),
5734 buffer_id: buffer_id.to_proto(),
5735 })
5736 .await?;
5737 Ok(response.staged_text)
5738 }
5739 }
5740 });
5741 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5742 }
5743
5744 fn load_committed_text(
5745 &mut self,
5746 buffer_id: BufferId,
5747 repo_path: RepoPath,
5748 cx: &App,
5749 ) -> Task<Result<DiffBasesChange>> {
5750 let rx = self.send_job(None, move |state, _| async move {
5751 match state {
5752 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5753 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5754 let staged_text = backend.load_index_text(repo_path).await;
5755 let diff_bases_change = if committed_text == staged_text {
5756 DiffBasesChange::SetBoth(committed_text)
5757 } else {
5758 DiffBasesChange::SetEach {
5759 index: staged_text,
5760 head: committed_text,
5761 }
5762 };
5763 anyhow::Ok(diff_bases_change)
5764 }
5765 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5766 use proto::open_uncommitted_diff_response::Mode;
5767
5768 let response = client
5769 .request(proto::OpenUncommittedDiff {
5770 project_id: project_id.to_proto(),
5771 buffer_id: buffer_id.to_proto(),
5772 })
5773 .await?;
5774 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5775 let bases = match mode {
5776 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5777 Mode::IndexAndHead => DiffBasesChange::SetEach {
5778 head: response.committed_text,
5779 index: response.staged_text,
5780 },
5781 };
5782 Ok(bases)
5783 }
5784 }
5785 });
5786
5787 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5788 }
5789
5790 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5791 let repository_id = self.snapshot.id;
5792 let rx = self.send_job(None, move |state, _| async move {
5793 match state {
5794 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5795 backend.load_blob_content(oid).await
5796 }
5797 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5798 let response = client
5799 .request(proto::GetBlobContent {
5800 project_id: project_id.to_proto(),
5801 repository_id: repository_id.0,
5802 oid: oid.to_string(),
5803 })
5804 .await?;
5805 Ok(response.content)
5806 }
5807 }
5808 });
5809 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5810 }
5811
5812 fn paths_changed(
5813 &mut self,
5814 paths: Vec<RepoPath>,
5815 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5816 cx: &mut Context<Self>,
5817 ) {
5818 self.paths_needing_status_update.extend(paths);
5819
5820 let this = cx.weak_entity();
5821 let _ = self.send_keyed_job(
5822 Some(GitJobKey::RefreshStatuses),
5823 None,
5824 |state, mut cx| async move {
5825 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5826 (
5827 this.snapshot.clone(),
5828 mem::take(&mut this.paths_needing_status_update),
5829 )
5830 })?;
5831 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5832 bail!("not a local repository")
5833 };
5834
5835 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5836 if paths.is_empty() {
5837 return Ok(());
5838 }
5839 let statuses = backend.status(&paths).await?;
5840 let stash_entries = backend.stash_entries().await?;
5841
5842 let changed_path_statuses = cx
5843 .background_spawn(async move {
5844 let mut changed_path_statuses = Vec::new();
5845 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5846 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5847
5848 for (repo_path, status) in &*statuses.entries {
5849 changed_paths.remove(repo_path);
5850 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5851 && cursor.item().is_some_and(|entry| entry.status == *status)
5852 {
5853 continue;
5854 }
5855
5856 changed_path_statuses.push(Edit::Insert(StatusEntry {
5857 repo_path: repo_path.clone(),
5858 status: *status,
5859 }));
5860 }
5861 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5862 for path in changed_paths.into_iter() {
5863 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5864 changed_path_statuses
5865 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5866 }
5867 }
5868 changed_path_statuses
5869 })
5870 .await;
5871
5872 this.update(&mut cx, |this, cx| {
5873 if this.snapshot.stash_entries != stash_entries {
5874 cx.emit(RepositoryEvent::StashEntriesChanged);
5875 this.snapshot.stash_entries = stash_entries;
5876 }
5877
5878 if !changed_path_statuses.is_empty() {
5879 cx.emit(RepositoryEvent::StatusesChanged);
5880 this.snapshot
5881 .statuses_by_path
5882 .edit(changed_path_statuses, ());
5883 this.snapshot.scan_id += 1;
5884 }
5885
5886 if let Some(updates_tx) = updates_tx {
5887 updates_tx
5888 .unbounded_send(DownstreamUpdate::UpdateRepository(
5889 this.snapshot.clone(),
5890 ))
5891 .ok();
5892 }
5893 })
5894 },
5895 );
5896 }
5897
5898 /// currently running git command and when it started
5899 pub fn current_job(&self) -> Option<JobInfo> {
5900 self.active_jobs.values().next().cloned()
5901 }
5902
5903 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5904 self.send_job(None, |_, _| async {})
5905 }
5906
5907 fn spawn_job_with_tracking<AsyncFn>(
5908 &mut self,
5909 paths: Vec<RepoPath>,
5910 git_status: pending_op::GitStatus,
5911 cx: &mut Context<Self>,
5912 f: AsyncFn,
5913 ) -> Task<Result<()>>
5914 where
5915 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5916 {
5917 let ids = self.new_pending_ops_for_paths(paths, git_status);
5918
5919 cx.spawn(async move |this, cx| {
5920 let (job_status, result) = match f(this.clone(), cx).await {
5921 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5922 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5923 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5924 };
5925
5926 this.update(cx, |this, _| {
5927 let mut edits = Vec::with_capacity(ids.len());
5928 for (id, entry) in ids {
5929 if let Some(mut ops) = this
5930 .pending_ops
5931 .get(&PathKey(entry.as_ref().clone()), ())
5932 .cloned()
5933 {
5934 if let Some(op) = ops.op_by_id_mut(id) {
5935 op.job_status = job_status;
5936 }
5937 edits.push(sum_tree::Edit::Insert(ops));
5938 }
5939 }
5940 this.pending_ops.edit(edits, ());
5941 })?;
5942
5943 result
5944 })
5945 }
5946
5947 fn new_pending_ops_for_paths(
5948 &mut self,
5949 paths: Vec<RepoPath>,
5950 git_status: pending_op::GitStatus,
5951 ) -> Vec<(PendingOpId, RepoPath)> {
5952 let mut edits = Vec::with_capacity(paths.len());
5953 let mut ids = Vec::with_capacity(paths.len());
5954 for path in paths {
5955 let mut ops = self
5956 .pending_ops
5957 .get(&PathKey(path.as_ref().clone()), ())
5958 .cloned()
5959 .unwrap_or_else(|| PendingOps::new(&path));
5960 let id = ops.max_id() + 1;
5961 ops.ops.push(PendingOp {
5962 id,
5963 git_status,
5964 job_status: pending_op::JobStatus::Running,
5965 });
5966 edits.push(sum_tree::Edit::Insert(ops));
5967 ids.push((id, path));
5968 }
5969 self.pending_ops.edit(edits, ());
5970 ids
5971 }
5972 pub fn default_remote_url(&self) -> Option<String> {
5973 self.remote_upstream_url
5974 .clone()
5975 .or(self.remote_origin_url.clone())
5976 }
5977}
5978
5979fn get_permalink_in_rust_registry_src(
5980 provider_registry: Arc<GitHostingProviderRegistry>,
5981 path: PathBuf,
5982 selection: Range<u32>,
5983) -> Result<url::Url> {
5984 #[derive(Deserialize)]
5985 struct CargoVcsGit {
5986 sha1: String,
5987 }
5988
5989 #[derive(Deserialize)]
5990 struct CargoVcsInfo {
5991 git: CargoVcsGit,
5992 path_in_vcs: String,
5993 }
5994
5995 #[derive(Deserialize)]
5996 struct CargoPackage {
5997 repository: String,
5998 }
5999
6000 #[derive(Deserialize)]
6001 struct CargoToml {
6002 package: CargoPackage,
6003 }
6004
6005 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6006 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6007 Some((dir, json))
6008 }) else {
6009 bail!("No .cargo_vcs_info.json found in parent directories")
6010 };
6011 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6012 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6013 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6014 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6015 .context("parsing package.repository field of manifest")?;
6016 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6017 let permalink = provider.build_permalink(
6018 remote,
6019 BuildPermalinkParams::new(
6020 &cargo_vcs_info.git.sha1,
6021 &RepoPath::from_rel_path(
6022 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6023 ),
6024 Some(selection),
6025 ),
6026 );
6027 Ok(permalink)
6028}
6029
6030fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6031 let Some(blame) = blame else {
6032 return proto::BlameBufferResponse {
6033 blame_response: None,
6034 };
6035 };
6036
6037 let entries = blame
6038 .entries
6039 .into_iter()
6040 .map(|entry| proto::BlameEntry {
6041 sha: entry.sha.as_bytes().into(),
6042 start_line: entry.range.start,
6043 end_line: entry.range.end,
6044 original_line_number: entry.original_line_number,
6045 author: entry.author,
6046 author_mail: entry.author_mail,
6047 author_time: entry.author_time,
6048 author_tz: entry.author_tz,
6049 committer: entry.committer_name,
6050 committer_mail: entry.committer_email,
6051 committer_time: entry.committer_time,
6052 committer_tz: entry.committer_tz,
6053 summary: entry.summary,
6054 previous: entry.previous,
6055 filename: entry.filename,
6056 })
6057 .collect::<Vec<_>>();
6058
6059 let messages = blame
6060 .messages
6061 .into_iter()
6062 .map(|(oid, message)| proto::CommitMessage {
6063 oid: oid.as_bytes().into(),
6064 message,
6065 })
6066 .collect::<Vec<_>>();
6067
6068 proto::BlameBufferResponse {
6069 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6070 }
6071}
6072
6073fn deserialize_blame_buffer_response(
6074 response: proto::BlameBufferResponse,
6075) -> Option<git::blame::Blame> {
6076 let response = response.blame_response?;
6077 let entries = response
6078 .entries
6079 .into_iter()
6080 .filter_map(|entry| {
6081 Some(git::blame::BlameEntry {
6082 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6083 range: entry.start_line..entry.end_line,
6084 original_line_number: entry.original_line_number,
6085 committer_name: entry.committer,
6086 committer_time: entry.committer_time,
6087 committer_tz: entry.committer_tz,
6088 committer_email: entry.committer_mail,
6089 author: entry.author,
6090 author_mail: entry.author_mail,
6091 author_time: entry.author_time,
6092 author_tz: entry.author_tz,
6093 summary: entry.summary,
6094 previous: entry.previous,
6095 filename: entry.filename,
6096 })
6097 })
6098 .collect::<Vec<_>>();
6099
6100 let messages = response
6101 .messages
6102 .into_iter()
6103 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6104 .collect::<HashMap<_, _>>();
6105
6106 Some(Blame { entries, messages })
6107}
6108
6109fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6110 proto::Branch {
6111 is_head: branch.is_head,
6112 ref_name: branch.ref_name.to_string(),
6113 unix_timestamp: branch
6114 .most_recent_commit
6115 .as_ref()
6116 .map(|commit| commit.commit_timestamp as u64),
6117 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6118 ref_name: upstream.ref_name.to_string(),
6119 tracking: upstream
6120 .tracking
6121 .status()
6122 .map(|upstream| proto::UpstreamTracking {
6123 ahead: upstream.ahead as u64,
6124 behind: upstream.behind as u64,
6125 }),
6126 }),
6127 most_recent_commit: branch
6128 .most_recent_commit
6129 .as_ref()
6130 .map(|commit| proto::CommitSummary {
6131 sha: commit.sha.to_string(),
6132 subject: commit.subject.to_string(),
6133 commit_timestamp: commit.commit_timestamp,
6134 author_name: commit.author_name.to_string(),
6135 }),
6136 }
6137}
6138
6139fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6140 proto::Worktree {
6141 path: worktree.path.to_string_lossy().to_string(),
6142 ref_name: worktree.ref_name.to_string(),
6143 sha: worktree.sha.to_string(),
6144 }
6145}
6146
6147fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6148 git::repository::Worktree {
6149 path: PathBuf::from(proto.path.clone()),
6150 ref_name: proto.ref_name.clone().into(),
6151 sha: proto.sha.clone().into(),
6152 }
6153}
6154
6155fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6156 git::repository::Branch {
6157 is_head: proto.is_head,
6158 ref_name: proto.ref_name.clone().into(),
6159 upstream: proto
6160 .upstream
6161 .as_ref()
6162 .map(|upstream| git::repository::Upstream {
6163 ref_name: upstream.ref_name.to_string().into(),
6164 tracking: upstream
6165 .tracking
6166 .as_ref()
6167 .map(|tracking| {
6168 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6169 ahead: tracking.ahead as u32,
6170 behind: tracking.behind as u32,
6171 })
6172 })
6173 .unwrap_or(git::repository::UpstreamTracking::Gone),
6174 }),
6175 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6176 git::repository::CommitSummary {
6177 sha: commit.sha.to_string().into(),
6178 subject: commit.subject.to_string().into(),
6179 commit_timestamp: commit.commit_timestamp,
6180 author_name: commit.author_name.to_string().into(),
6181 has_parent: true,
6182 }
6183 }),
6184 }
6185}
6186
6187fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6188 proto::GitCommitDetails {
6189 sha: commit.sha.to_string(),
6190 message: commit.message.to_string(),
6191 commit_timestamp: commit.commit_timestamp,
6192 author_email: commit.author_email.to_string(),
6193 author_name: commit.author_name.to_string(),
6194 }
6195}
6196
6197fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6198 CommitDetails {
6199 sha: proto.sha.clone().into(),
6200 message: proto.message.clone().into(),
6201 commit_timestamp: proto.commit_timestamp,
6202 author_email: proto.author_email.clone().into(),
6203 author_name: proto.author_name.clone().into(),
6204 }
6205}
6206
6207async fn compute_snapshot(
6208 id: RepositoryId,
6209 work_directory_abs_path: Arc<Path>,
6210 prev_snapshot: RepositorySnapshot,
6211 backend: Arc<dyn GitRepository>,
6212) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6213 let mut events = Vec::new();
6214 let branches = backend.branches().await?;
6215 let branch = branches.into_iter().find(|branch| branch.is_head);
6216 let statuses = backend
6217 .status(&[RepoPath::from_rel_path(
6218 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6219 )])
6220 .await?;
6221 let stash_entries = backend.stash_entries().await?;
6222 let statuses_by_path = SumTree::from_iter(
6223 statuses
6224 .entries
6225 .iter()
6226 .map(|(repo_path, status)| StatusEntry {
6227 repo_path: repo_path.clone(),
6228 status: *status,
6229 }),
6230 (),
6231 );
6232 let (merge_details, merge_heads_changed) =
6233 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6234 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6235
6236 if merge_heads_changed {
6237 events.push(RepositoryEvent::MergeHeadsChanged);
6238 }
6239
6240 if statuses_by_path != prev_snapshot.statuses_by_path {
6241 events.push(RepositoryEvent::StatusesChanged)
6242 }
6243
6244 // Useful when branch is None in detached head state
6245 let head_commit = match backend.head_sha().await {
6246 Some(head_sha) => backend.show(head_sha).await.log_err(),
6247 None => None,
6248 };
6249
6250 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6251 events.push(RepositoryEvent::BranchChanged);
6252 }
6253
6254 let remote_origin_url = backend.remote_url("origin").await;
6255 let remote_upstream_url = backend.remote_url("upstream").await;
6256
6257 let snapshot = RepositorySnapshot {
6258 id,
6259 statuses_by_path,
6260 work_directory_abs_path,
6261 path_style: prev_snapshot.path_style,
6262 scan_id: prev_snapshot.scan_id + 1,
6263 branch,
6264 head_commit,
6265 merge: merge_details,
6266 remote_origin_url,
6267 remote_upstream_url,
6268 stash_entries,
6269 };
6270
6271 Ok((snapshot, events))
6272}
6273
6274fn status_from_proto(
6275 simple_status: i32,
6276 status: Option<proto::GitFileStatus>,
6277) -> anyhow::Result<FileStatus> {
6278 use proto::git_file_status::Variant;
6279
6280 let Some(variant) = status.and_then(|status| status.variant) else {
6281 let code = proto::GitStatus::from_i32(simple_status)
6282 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6283 let result = match code {
6284 proto::GitStatus::Added => TrackedStatus {
6285 worktree_status: StatusCode::Added,
6286 index_status: StatusCode::Unmodified,
6287 }
6288 .into(),
6289 proto::GitStatus::Modified => TrackedStatus {
6290 worktree_status: StatusCode::Modified,
6291 index_status: StatusCode::Unmodified,
6292 }
6293 .into(),
6294 proto::GitStatus::Conflict => UnmergedStatus {
6295 first_head: UnmergedStatusCode::Updated,
6296 second_head: UnmergedStatusCode::Updated,
6297 }
6298 .into(),
6299 proto::GitStatus::Deleted => TrackedStatus {
6300 worktree_status: StatusCode::Deleted,
6301 index_status: StatusCode::Unmodified,
6302 }
6303 .into(),
6304 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6305 };
6306 return Ok(result);
6307 };
6308
6309 let result = match variant {
6310 Variant::Untracked(_) => FileStatus::Untracked,
6311 Variant::Ignored(_) => FileStatus::Ignored,
6312 Variant::Unmerged(unmerged) => {
6313 let [first_head, second_head] =
6314 [unmerged.first_head, unmerged.second_head].map(|head| {
6315 let code = proto::GitStatus::from_i32(head)
6316 .with_context(|| format!("Invalid git status code: {head}"))?;
6317 let result = match code {
6318 proto::GitStatus::Added => UnmergedStatusCode::Added,
6319 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6320 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6321 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6322 };
6323 Ok(result)
6324 });
6325 let [first_head, second_head] = [first_head?, second_head?];
6326 UnmergedStatus {
6327 first_head,
6328 second_head,
6329 }
6330 .into()
6331 }
6332 Variant::Tracked(tracked) => {
6333 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6334 .map(|status| {
6335 let code = proto::GitStatus::from_i32(status)
6336 .with_context(|| format!("Invalid git status code: {status}"))?;
6337 let result = match code {
6338 proto::GitStatus::Modified => StatusCode::Modified,
6339 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6340 proto::GitStatus::Added => StatusCode::Added,
6341 proto::GitStatus::Deleted => StatusCode::Deleted,
6342 proto::GitStatus::Renamed => StatusCode::Renamed,
6343 proto::GitStatus::Copied => StatusCode::Copied,
6344 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6345 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6346 };
6347 Ok(result)
6348 });
6349 let [index_status, worktree_status] = [index_status?, worktree_status?];
6350 TrackedStatus {
6351 index_status,
6352 worktree_status,
6353 }
6354 .into()
6355 }
6356 };
6357 Ok(result)
6358}
6359
6360fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6361 use proto::git_file_status::{Tracked, Unmerged, Variant};
6362
6363 let variant = match status {
6364 FileStatus::Untracked => Variant::Untracked(Default::default()),
6365 FileStatus::Ignored => Variant::Ignored(Default::default()),
6366 FileStatus::Unmerged(UnmergedStatus {
6367 first_head,
6368 second_head,
6369 }) => Variant::Unmerged(Unmerged {
6370 first_head: unmerged_status_to_proto(first_head),
6371 second_head: unmerged_status_to_proto(second_head),
6372 }),
6373 FileStatus::Tracked(TrackedStatus {
6374 index_status,
6375 worktree_status,
6376 }) => Variant::Tracked(Tracked {
6377 index_status: tracked_status_to_proto(index_status),
6378 worktree_status: tracked_status_to_proto(worktree_status),
6379 }),
6380 };
6381 proto::GitFileStatus {
6382 variant: Some(variant),
6383 }
6384}
6385
6386fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6387 match code {
6388 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6389 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6390 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6391 }
6392}
6393
6394fn tracked_status_to_proto(code: StatusCode) -> i32 {
6395 match code {
6396 StatusCode::Added => proto::GitStatus::Added as _,
6397 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6398 StatusCode::Modified => proto::GitStatus::Modified as _,
6399 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6400 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6401 StatusCode::Copied => proto::GitStatus::Copied as _,
6402 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6403 }
6404}