1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
127 /// text do disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_create_remote);
476 client.add_entity_request_handler(Self::handle_remove_remote);
477 client.add_entity_request_handler(Self::handle_delete_branch);
478 client.add_entity_request_handler(Self::handle_git_init);
479 client.add_entity_request_handler(Self::handle_push);
480 client.add_entity_request_handler(Self::handle_pull);
481 client.add_entity_request_handler(Self::handle_fetch);
482 client.add_entity_request_handler(Self::handle_stage);
483 client.add_entity_request_handler(Self::handle_unstage);
484 client.add_entity_request_handler(Self::handle_stash);
485 client.add_entity_request_handler(Self::handle_stash_pop);
486 client.add_entity_request_handler(Self::handle_stash_apply);
487 client.add_entity_request_handler(Self::handle_stash_drop);
488 client.add_entity_request_handler(Self::handle_commit);
489 client.add_entity_request_handler(Self::handle_run_hook);
490 client.add_entity_request_handler(Self::handle_reset);
491 client.add_entity_request_handler(Self::handle_show);
492 client.add_entity_request_handler(Self::handle_load_commit_diff);
493 client.add_entity_request_handler(Self::handle_file_history);
494 client.add_entity_request_handler(Self::handle_checkout_files);
495 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
496 client.add_entity_request_handler(Self::handle_set_index_text);
497 client.add_entity_request_handler(Self::handle_askpass);
498 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
499 client.add_entity_request_handler(Self::handle_git_diff);
500 client.add_entity_request_handler(Self::handle_tree_diff);
501 client.add_entity_request_handler(Self::handle_get_blob_content);
502 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
503 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
504 client.add_entity_message_handler(Self::handle_update_diff_bases);
505 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
506 client.add_entity_request_handler(Self::handle_blame_buffer);
507 client.add_entity_message_handler(Self::handle_update_repository);
508 client.add_entity_message_handler(Self::handle_remove_repository);
509 client.add_entity_request_handler(Self::handle_git_clone);
510 client.add_entity_request_handler(Self::handle_get_worktrees);
511 client.add_entity_request_handler(Self::handle_create_worktree);
512 }
513
514 pub fn is_local(&self) -> bool {
515 matches!(self.state, GitStoreState::Local { .. })
516 }
517 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
518 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
519 let id = repo.read(cx).id;
520 if self.active_repo_id != Some(id) {
521 self.active_repo_id = Some(id);
522 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
523 }
524 }
525 }
526
527 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
528 match &mut self.state {
529 GitStoreState::Remote {
530 downstream: downstream_client,
531 ..
532 } => {
533 for repo in self.repositories.values() {
534 let update = repo.read(cx).snapshot.initial_update(project_id);
535 for update in split_repository_update(update) {
536 client.send(update).log_err();
537 }
538 }
539 *downstream_client = Some((client, ProjectId(project_id)));
540 }
541 GitStoreState::Local {
542 downstream: downstream_client,
543 ..
544 } => {
545 let mut snapshots = HashMap::default();
546 let (updates_tx, mut updates_rx) = mpsc::unbounded();
547 for repo in self.repositories.values() {
548 updates_tx
549 .unbounded_send(DownstreamUpdate::UpdateRepository(
550 repo.read(cx).snapshot.clone(),
551 ))
552 .ok();
553 }
554 *downstream_client = Some(LocalDownstreamState {
555 client: client.clone(),
556 project_id: ProjectId(project_id),
557 updates_tx,
558 _task: cx.spawn(async move |this, cx| {
559 cx.background_spawn(async move {
560 while let Some(update) = updates_rx.next().await {
561 match update {
562 DownstreamUpdate::UpdateRepository(snapshot) => {
563 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
564 {
565 let update =
566 snapshot.build_update(old_snapshot, project_id);
567 *old_snapshot = snapshot;
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 } else {
572 let update = snapshot.initial_update(project_id);
573 for update in split_repository_update(update) {
574 client.send(update)?;
575 }
576 snapshots.insert(snapshot.id, snapshot);
577 }
578 }
579 DownstreamUpdate::RemoveRepository(id) => {
580 client.send(proto::RemoveRepository {
581 project_id,
582 id: id.to_proto(),
583 })?;
584 }
585 }
586 }
587 anyhow::Ok(())
588 })
589 .await
590 .ok();
591 this.update(cx, |this, _| {
592 if let GitStoreState::Local {
593 downstream: downstream_client,
594 ..
595 } = &mut this.state
596 {
597 downstream_client.take();
598 } else {
599 unreachable!("unshared called on remote store");
600 }
601 })
602 }),
603 });
604 }
605 }
606 }
607
608 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
609 match &mut self.state {
610 GitStoreState::Local {
611 downstream: downstream_client,
612 ..
613 } => {
614 downstream_client.take();
615 }
616 GitStoreState::Remote {
617 downstream: downstream_client,
618 ..
619 } => {
620 downstream_client.take();
621 }
622 }
623 self.shared_diffs.clear();
624 }
625
626 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
627 self.shared_diffs.remove(peer_id);
628 }
629
630 pub fn active_repository(&self) -> Option<Entity<Repository>> {
631 self.active_repo_id
632 .as_ref()
633 .map(|id| self.repositories[id].clone())
634 }
635
636 pub fn open_unstaged_diff(
637 &mut self,
638 buffer: Entity<Buffer>,
639 cx: &mut Context<Self>,
640 ) -> Task<Result<Entity<BufferDiff>>> {
641 let buffer_id = buffer.read(cx).remote_id();
642 if let Some(diff_state) = self.diffs.get(&buffer_id)
643 && let Some(unstaged_diff) = diff_state
644 .read(cx)
645 .unstaged_diff
646 .as_ref()
647 .and_then(|weak| weak.upgrade())
648 {
649 if let Some(task) =
650 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
651 {
652 return cx.background_executor().spawn(async move {
653 task.await;
654 Ok(unstaged_diff)
655 });
656 }
657 return Task::ready(Ok(unstaged_diff));
658 }
659
660 let Some((repo, repo_path)) =
661 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
662 else {
663 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
664 };
665
666 let task = self
667 .loading_diffs
668 .entry((buffer_id, DiffKind::Unstaged))
669 .or_insert_with(|| {
670 let staged_text = repo.update(cx, |repo, cx| {
671 repo.load_staged_text(buffer_id, repo_path, cx)
672 });
673 cx.spawn(async move |this, cx| {
674 Self::open_diff_internal(
675 this,
676 DiffKind::Unstaged,
677 staged_text.await.map(DiffBasesChange::SetIndex),
678 buffer,
679 cx,
680 )
681 .await
682 .map_err(Arc::new)
683 })
684 .shared()
685 })
686 .clone();
687
688 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
689 }
690
691 pub fn open_diff_since(
692 &mut self,
693 oid: Option<git::Oid>,
694 buffer: Entity<Buffer>,
695 repo: Entity<Repository>,
696 languages: Arc<LanguageRegistry>,
697 cx: &mut Context<Self>,
698 ) -> Task<Result<Entity<BufferDiff>>> {
699 cx.spawn(async move |this, cx| {
700 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
701 let content = match oid {
702 None => None,
703 Some(oid) => Some(
704 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
705 .await?,
706 ),
707 };
708 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
709
710 buffer_diff
711 .update(cx, |buffer_diff, cx| {
712 buffer_diff.set_base_text(
713 content.map(Arc::new),
714 buffer_snapshot.language().cloned(),
715 Some(languages.clone()),
716 buffer_snapshot.text,
717 cx,
718 )
719 })?
720 .await?;
721 let unstaged_diff = this
722 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
723 .await?;
724 buffer_diff.update(cx, |buffer_diff, _| {
725 buffer_diff.set_secondary_diff(unstaged_diff);
726 })?;
727
728 this.update(cx, |_, cx| {
729 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
730 .detach();
731 })?;
732
733 Ok(buffer_diff)
734 })
735 }
736
737 pub fn open_uncommitted_diff(
738 &mut self,
739 buffer: Entity<Buffer>,
740 cx: &mut Context<Self>,
741 ) -> Task<Result<Entity<BufferDiff>>> {
742 let buffer_id = buffer.read(cx).remote_id();
743
744 if let Some(diff_state) = self.diffs.get(&buffer_id)
745 && let Some(uncommitted_diff) = diff_state
746 .read(cx)
747 .uncommitted_diff
748 .as_ref()
749 .and_then(|weak| weak.upgrade())
750 {
751 if let Some(task) =
752 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
753 {
754 return cx.background_executor().spawn(async move {
755 task.await;
756 Ok(uncommitted_diff)
757 });
758 }
759 return Task::ready(Ok(uncommitted_diff));
760 }
761
762 let Some((repo, repo_path)) =
763 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
764 else {
765 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
766 };
767
768 let task = self
769 .loading_diffs
770 .entry((buffer_id, DiffKind::Uncommitted))
771 .or_insert_with(|| {
772 let changes = repo.update(cx, |repo, cx| {
773 repo.load_committed_text(buffer_id, repo_path, cx)
774 });
775
776 // todo(lw): hot foreground spawn
777 cx.spawn(async move |this, cx| {
778 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
779 .await
780 .map_err(Arc::new)
781 })
782 .shared()
783 })
784 .clone();
785
786 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
787 }
788
789 async fn open_diff_internal(
790 this: WeakEntity<Self>,
791 kind: DiffKind,
792 texts: Result<DiffBasesChange>,
793 buffer_entity: Entity<Buffer>,
794 cx: &mut AsyncApp,
795 ) -> Result<Entity<BufferDiff>> {
796 let diff_bases_change = match texts {
797 Err(e) => {
798 this.update(cx, |this, cx| {
799 let buffer = buffer_entity.read(cx);
800 let buffer_id = buffer.remote_id();
801 this.loading_diffs.remove(&(buffer_id, kind));
802 })?;
803 return Err(e);
804 }
805 Ok(change) => change,
806 };
807
808 this.update(cx, |this, cx| {
809 let buffer = buffer_entity.read(cx);
810 let buffer_id = buffer.remote_id();
811 let language = buffer.language().cloned();
812 let language_registry = buffer.language_registry();
813 let text_snapshot = buffer.text_snapshot();
814 this.loading_diffs.remove(&(buffer_id, kind));
815
816 let git_store = cx.weak_entity();
817 let diff_state = this
818 .diffs
819 .entry(buffer_id)
820 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
821
822 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
823
824 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
825 diff_state.update(cx, |diff_state, cx| {
826 diff_state.language = language;
827 diff_state.language_registry = language_registry;
828
829 match kind {
830 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
831 DiffKind::Uncommitted => {
832 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
833 diff
834 } else {
835 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
836 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
837 unstaged_diff
838 };
839
840 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
841 diff_state.uncommitted_diff = Some(diff.downgrade())
842 }
843 }
844
845 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
846 let rx = diff_state.wait_for_recalculation();
847
848 anyhow::Ok(async move {
849 if let Some(rx) = rx {
850 rx.await;
851 }
852 Ok(diff)
853 })
854 })
855 })??
856 .await
857 }
858
859 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
860 let diff_state = self.diffs.get(&buffer_id)?;
861 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
862 }
863
864 pub fn get_uncommitted_diff(
865 &self,
866 buffer_id: BufferId,
867 cx: &App,
868 ) -> Option<Entity<BufferDiff>> {
869 let diff_state = self.diffs.get(&buffer_id)?;
870 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
871 }
872
873 pub fn open_conflict_set(
874 &mut self,
875 buffer: Entity<Buffer>,
876 cx: &mut Context<Self>,
877 ) -> Entity<ConflictSet> {
878 log::debug!("open conflict set");
879 let buffer_id = buffer.read(cx).remote_id();
880
881 if let Some(git_state) = self.diffs.get(&buffer_id)
882 && let Some(conflict_set) = git_state
883 .read(cx)
884 .conflict_set
885 .as_ref()
886 .and_then(|weak| weak.upgrade())
887 {
888 let conflict_set = conflict_set;
889 let buffer_snapshot = buffer.read(cx).text_snapshot();
890
891 git_state.update(cx, |state, cx| {
892 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
893 });
894
895 return conflict_set;
896 }
897
898 let is_unmerged = self
899 .repository_and_path_for_buffer_id(buffer_id, cx)
900 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
901 let git_store = cx.weak_entity();
902 let buffer_git_state = self
903 .diffs
904 .entry(buffer_id)
905 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
906 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
907
908 self._subscriptions
909 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
910 cx.emit(GitStoreEvent::ConflictsUpdated);
911 }));
912
913 buffer_git_state.update(cx, |state, cx| {
914 state.conflict_set = Some(conflict_set.downgrade());
915 let buffer_snapshot = buffer.read(cx).text_snapshot();
916 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
917 });
918
919 conflict_set
920 }
921
922 pub fn project_path_git_status(
923 &self,
924 project_path: &ProjectPath,
925 cx: &App,
926 ) -> Option<FileStatus> {
927 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
928 Some(repo.read(cx).status_for_path(&repo_path)?.status)
929 }
930
931 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
932 let mut work_directory_abs_paths = Vec::new();
933 let mut checkpoints = Vec::new();
934 for repository in self.repositories.values() {
935 repository.update(cx, |repository, _| {
936 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
937 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
938 });
939 }
940
941 cx.background_executor().spawn(async move {
942 let checkpoints = future::try_join_all(checkpoints).await?;
943 Ok(GitStoreCheckpoint {
944 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
945 .into_iter()
946 .zip(checkpoints)
947 .collect(),
948 })
949 })
950 }
951
952 pub fn restore_checkpoint(
953 &self,
954 checkpoint: GitStoreCheckpoint,
955 cx: &mut App,
956 ) -> Task<Result<()>> {
957 let repositories_by_work_dir_abs_path = self
958 .repositories
959 .values()
960 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
961 .collect::<HashMap<_, _>>();
962
963 let mut tasks = Vec::new();
964 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
965 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
966 let restore = repository.update(cx, |repository, _| {
967 repository.restore_checkpoint(checkpoint)
968 });
969 tasks.push(async move { restore.await? });
970 }
971 }
972 cx.background_spawn(async move {
973 future::try_join_all(tasks).await?;
974 Ok(())
975 })
976 }
977
978 /// Compares two checkpoints, returning true if they are equal.
979 pub fn compare_checkpoints(
980 &self,
981 left: GitStoreCheckpoint,
982 mut right: GitStoreCheckpoint,
983 cx: &mut App,
984 ) -> Task<Result<bool>> {
985 let repositories_by_work_dir_abs_path = self
986 .repositories
987 .values()
988 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
989 .collect::<HashMap<_, _>>();
990
991 let mut tasks = Vec::new();
992 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
993 if let Some(right_checkpoint) = right
994 .checkpoints_by_work_dir_abs_path
995 .remove(&work_dir_abs_path)
996 {
997 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
998 {
999 let compare = repository.update(cx, |repository, _| {
1000 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
1001 });
1002
1003 tasks.push(async move { compare.await? });
1004 }
1005 } else {
1006 return Task::ready(Ok(false));
1007 }
1008 }
1009 cx.background_spawn(async move {
1010 Ok(future::try_join_all(tasks)
1011 .await?
1012 .into_iter()
1013 .all(|result| result))
1014 })
1015 }
1016
1017 /// Blames a buffer.
1018 pub fn blame_buffer(
1019 &self,
1020 buffer: &Entity<Buffer>,
1021 version: Option<clock::Global>,
1022 cx: &mut Context<Self>,
1023 ) -> Task<Result<Option<Blame>>> {
1024 let buffer = buffer.read(cx);
1025 let Some((repo, repo_path)) =
1026 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1027 else {
1028 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1029 };
1030 let content = match &version {
1031 Some(version) => buffer.rope_for_version(version),
1032 None => buffer.as_rope().clone(),
1033 };
1034 let line_ending = buffer.line_ending();
1035 let version = version.unwrap_or(buffer.version());
1036 let buffer_id = buffer.remote_id();
1037
1038 let repo = repo.downgrade();
1039 cx.spawn(async move |_, cx| {
1040 let repository_state = repo
1041 .update(cx, |repo, _| repo.repository_state.clone())?
1042 .await
1043 .map_err(|err| anyhow::anyhow!(err))?;
1044 match repository_state {
1045 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1046 .blame(repo_path.clone(), content, line_ending)
1047 .await
1048 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1049 .map(Some),
1050 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1051 let response = client
1052 .request(proto::BlameBuffer {
1053 project_id: project_id.to_proto(),
1054 buffer_id: buffer_id.into(),
1055 version: serialize_version(&version),
1056 })
1057 .await?;
1058 Ok(deserialize_blame_buffer_response(response))
1059 }
1060 }
1061 })
1062 }
1063
1064 pub fn file_history(
1065 &self,
1066 repo: &Entity<Repository>,
1067 path: RepoPath,
1068 cx: &mut App,
1069 ) -> Task<Result<git::repository::FileHistory>> {
1070 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1071
1072 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1073 }
1074
1075 pub fn file_history_paginated(
1076 &self,
1077 repo: &Entity<Repository>,
1078 path: RepoPath,
1079 skip: usize,
1080 limit: Option<usize>,
1081 cx: &mut App,
1082 ) -> Task<Result<git::repository::FileHistory>> {
1083 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1084
1085 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1086 }
1087
1088 pub fn get_permalink_to_line(
1089 &self,
1090 buffer: &Entity<Buffer>,
1091 selection: Range<u32>,
1092 cx: &mut App,
1093 ) -> Task<Result<url::Url>> {
1094 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1095 return Task::ready(Err(anyhow!("buffer has no file")));
1096 };
1097
1098 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1099 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1100 cx,
1101 ) else {
1102 // If we're not in a Git repo, check whether this is a Rust source
1103 // file in the Cargo registry (presumably opened with go-to-definition
1104 // from a normal Rust file). If so, we can put together a permalink
1105 // using crate metadata.
1106 if buffer
1107 .read(cx)
1108 .language()
1109 .is_none_or(|lang| lang.name() != "Rust".into())
1110 {
1111 return Task::ready(Err(anyhow!("no permalink available")));
1112 }
1113 let file_path = file.worktree.read(cx).absolutize(&file.path);
1114 return cx.spawn(async move |cx| {
1115 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1116 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1117 .context("no permalink available")
1118 });
1119 };
1120
1121 let buffer_id = buffer.read(cx).remote_id();
1122 let branch = repo.read(cx).branch.clone();
1123 let remote = branch
1124 .as_ref()
1125 .and_then(|b| b.upstream.as_ref())
1126 .and_then(|b| b.remote_name())
1127 .unwrap_or("origin")
1128 .to_string();
1129
1130 let rx = repo.update(cx, |repo, _| {
1131 repo.send_job(None, move |state, cx| async move {
1132 match state {
1133 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1134 let origin_url = backend
1135 .remote_url(&remote)
1136 .await
1137 .with_context(|| format!("remote \"{remote}\" not found"))?;
1138
1139 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1140
1141 let provider_registry =
1142 cx.update(GitHostingProviderRegistry::default_global)?;
1143
1144 let (provider, remote) =
1145 parse_git_remote_url(provider_registry, &origin_url)
1146 .context("parsing Git remote URL")?;
1147
1148 Ok(provider.build_permalink(
1149 remote,
1150 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1151 ))
1152 }
1153 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1154 let response = client
1155 .request(proto::GetPermalinkToLine {
1156 project_id: project_id.to_proto(),
1157 buffer_id: buffer_id.into(),
1158 selection: Some(proto::Range {
1159 start: selection.start as u64,
1160 end: selection.end as u64,
1161 }),
1162 })
1163 .await?;
1164
1165 url::Url::parse(&response.permalink).context("failed to parse permalink")
1166 }
1167 }
1168 })
1169 });
1170 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1171 }
1172
1173 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1174 match &self.state {
1175 GitStoreState::Local {
1176 downstream: downstream_client,
1177 ..
1178 } => downstream_client
1179 .as_ref()
1180 .map(|state| (state.client.clone(), state.project_id)),
1181 GitStoreState::Remote {
1182 downstream: downstream_client,
1183 ..
1184 } => downstream_client.clone(),
1185 }
1186 }
1187
1188 fn upstream_client(&self) -> Option<AnyProtoClient> {
1189 match &self.state {
1190 GitStoreState::Local { .. } => None,
1191 GitStoreState::Remote {
1192 upstream_client, ..
1193 } => Some(upstream_client.clone()),
1194 }
1195 }
1196
1197 fn on_worktree_store_event(
1198 &mut self,
1199 worktree_store: Entity<WorktreeStore>,
1200 event: &WorktreeStoreEvent,
1201 cx: &mut Context<Self>,
1202 ) {
1203 let GitStoreState::Local {
1204 project_environment,
1205 downstream,
1206 next_repository_id,
1207 fs,
1208 } = &self.state
1209 else {
1210 return;
1211 };
1212
1213 match event {
1214 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1215 if let Some(worktree) = self
1216 .worktree_store
1217 .read(cx)
1218 .worktree_for_id(*worktree_id, cx)
1219 {
1220 let paths_by_git_repo =
1221 self.process_updated_entries(&worktree, updated_entries, cx);
1222 let downstream = downstream
1223 .as_ref()
1224 .map(|downstream| downstream.updates_tx.clone());
1225 cx.spawn(async move |_, cx| {
1226 let paths_by_git_repo = paths_by_git_repo.await;
1227 for (repo, paths) in paths_by_git_repo {
1228 repo.update(cx, |repo, cx| {
1229 repo.paths_changed(paths, downstream.clone(), cx);
1230 })
1231 .ok();
1232 }
1233 })
1234 .detach();
1235 }
1236 }
1237 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1238 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1239 else {
1240 return;
1241 };
1242 if !worktree.read(cx).is_visible() {
1243 log::debug!(
1244 "not adding repositories for local worktree {:?} because it's not visible",
1245 worktree.read(cx).abs_path()
1246 );
1247 return;
1248 }
1249 self.update_repositories_from_worktree(
1250 *worktree_id,
1251 project_environment.clone(),
1252 next_repository_id.clone(),
1253 downstream
1254 .as_ref()
1255 .map(|downstream| downstream.updates_tx.clone()),
1256 changed_repos.clone(),
1257 fs.clone(),
1258 cx,
1259 );
1260 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1261 }
1262 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1263 let repos_without_worktree: Vec<RepositoryId> = self
1264 .worktree_ids
1265 .iter_mut()
1266 .filter_map(|(repo_id, worktree_ids)| {
1267 worktree_ids.remove(worktree_id);
1268 if worktree_ids.is_empty() {
1269 Some(*repo_id)
1270 } else {
1271 None
1272 }
1273 })
1274 .collect();
1275 let is_active_repo_removed = repos_without_worktree
1276 .iter()
1277 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1278
1279 for repo_id in repos_without_worktree {
1280 self.repositories.remove(&repo_id);
1281 self.worktree_ids.remove(&repo_id);
1282 if let Some(updates_tx) =
1283 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1284 {
1285 updates_tx
1286 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1287 .ok();
1288 }
1289 }
1290
1291 if is_active_repo_removed {
1292 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1293 self.active_repo_id = Some(repo_id);
1294 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1295 } else {
1296 self.active_repo_id = None;
1297 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1298 }
1299 }
1300 }
1301 _ => {}
1302 }
1303 }
1304 fn on_repository_event(
1305 &mut self,
1306 repo: Entity<Repository>,
1307 event: &RepositoryEvent,
1308 cx: &mut Context<Self>,
1309 ) {
1310 let id = repo.read(cx).id;
1311 let repo_snapshot = repo.read(cx).snapshot.clone();
1312 for (buffer_id, diff) in self.diffs.iter() {
1313 if let Some((buffer_repo, repo_path)) =
1314 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1315 && buffer_repo == repo
1316 {
1317 diff.update(cx, |diff, cx| {
1318 if let Some(conflict_set) = &diff.conflict_set {
1319 let conflict_status_changed =
1320 conflict_set.update(cx, |conflict_set, cx| {
1321 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1322 conflict_set.set_has_conflict(has_conflict, cx)
1323 })?;
1324 if conflict_status_changed {
1325 let buffer_store = self.buffer_store.read(cx);
1326 if let Some(buffer) = buffer_store.get(*buffer_id) {
1327 let _ = diff
1328 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1329 }
1330 }
1331 }
1332 anyhow::Ok(())
1333 })
1334 .ok();
1335 }
1336 }
1337 cx.emit(GitStoreEvent::RepositoryUpdated(
1338 id,
1339 event.clone(),
1340 self.active_repo_id == Some(id),
1341 ))
1342 }
1343
1344 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1345 cx.emit(GitStoreEvent::JobsUpdated)
1346 }
1347
1348 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1349 fn update_repositories_from_worktree(
1350 &mut self,
1351 worktree_id: WorktreeId,
1352 project_environment: Entity<ProjectEnvironment>,
1353 next_repository_id: Arc<AtomicU64>,
1354 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1355 updated_git_repositories: UpdatedGitRepositoriesSet,
1356 fs: Arc<dyn Fs>,
1357 cx: &mut Context<Self>,
1358 ) {
1359 let mut removed_ids = Vec::new();
1360 for update in updated_git_repositories.iter() {
1361 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1362 let existing_work_directory_abs_path =
1363 repo.read(cx).work_directory_abs_path.clone();
1364 Some(&existing_work_directory_abs_path)
1365 == update.old_work_directory_abs_path.as_ref()
1366 || Some(&existing_work_directory_abs_path)
1367 == update.new_work_directory_abs_path.as_ref()
1368 }) {
1369 let repo_id = *id;
1370 if let Some(new_work_directory_abs_path) =
1371 update.new_work_directory_abs_path.clone()
1372 {
1373 self.worktree_ids
1374 .entry(repo_id)
1375 .or_insert_with(HashSet::new)
1376 .insert(worktree_id);
1377 existing.update(cx, |existing, cx| {
1378 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1379 existing.schedule_scan(updates_tx.clone(), cx);
1380 });
1381 } else {
1382 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1383 worktree_ids.remove(&worktree_id);
1384 if worktree_ids.is_empty() {
1385 removed_ids.push(repo_id);
1386 }
1387 }
1388 }
1389 } else if let UpdatedGitRepository {
1390 new_work_directory_abs_path: Some(work_directory_abs_path),
1391 dot_git_abs_path: Some(dot_git_abs_path),
1392 repository_dir_abs_path: Some(_repository_dir_abs_path),
1393 common_dir_abs_path: Some(_common_dir_abs_path),
1394 ..
1395 } = update
1396 {
1397 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1398 let git_store = cx.weak_entity();
1399 let repo = cx.new(|cx| {
1400 let mut repo = Repository::local(
1401 id,
1402 work_directory_abs_path.clone(),
1403 dot_git_abs_path.clone(),
1404 project_environment.downgrade(),
1405 fs.clone(),
1406 git_store,
1407 cx,
1408 );
1409 if let Some(updates_tx) = updates_tx.as_ref() {
1410 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1411 updates_tx
1412 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1413 .ok();
1414 }
1415 repo.schedule_scan(updates_tx.clone(), cx);
1416 repo
1417 });
1418 self._subscriptions
1419 .push(cx.subscribe(&repo, Self::on_repository_event));
1420 self._subscriptions
1421 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1422 self.repositories.insert(id, repo);
1423 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1424 cx.emit(GitStoreEvent::RepositoryAdded);
1425 self.active_repo_id.get_or_insert_with(|| {
1426 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1427 id
1428 });
1429 }
1430 }
1431
1432 for id in removed_ids {
1433 if self.active_repo_id == Some(id) {
1434 self.active_repo_id = None;
1435 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1436 }
1437 self.repositories.remove(&id);
1438 if let Some(updates_tx) = updates_tx.as_ref() {
1439 updates_tx
1440 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1441 .ok();
1442 }
1443 }
1444 }
1445
1446 fn on_buffer_store_event(
1447 &mut self,
1448 _: Entity<BufferStore>,
1449 event: &BufferStoreEvent,
1450 cx: &mut Context<Self>,
1451 ) {
1452 match event {
1453 BufferStoreEvent::BufferAdded(buffer) => {
1454 cx.subscribe(buffer, |this, buffer, event, cx| {
1455 if let BufferEvent::LanguageChanged(_) = event {
1456 let buffer_id = buffer.read(cx).remote_id();
1457 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1458 diff_state.update(cx, |diff_state, cx| {
1459 diff_state.buffer_language_changed(buffer, cx);
1460 });
1461 }
1462 }
1463 })
1464 .detach();
1465 }
1466 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1467 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1468 diffs.remove(buffer_id);
1469 }
1470 }
1471 BufferStoreEvent::BufferDropped(buffer_id) => {
1472 self.diffs.remove(buffer_id);
1473 for diffs in self.shared_diffs.values_mut() {
1474 diffs.remove(buffer_id);
1475 }
1476 }
1477 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1478 // Whenever a buffer's file path changes, it's possible that the
1479 // new path is actually a path that is being tracked by a git
1480 // repository. In that case, we'll want to update the buffer's
1481 // `BufferDiffState`, in case it already has one.
1482 let buffer_id = buffer.read(cx).remote_id();
1483 let diff_state = self.diffs.get(&buffer_id);
1484 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1485
1486 if let Some(diff_state) = diff_state
1487 && let Some((repo, repo_path)) = repo
1488 {
1489 let buffer = buffer.clone();
1490 let diff_state = diff_state.clone();
1491
1492 cx.spawn(async move |_git_store, cx| {
1493 async {
1494 let diff_bases_change = repo
1495 .update(cx, |repo, cx| {
1496 repo.load_committed_text(buffer_id, repo_path, cx)
1497 })?
1498 .await?;
1499
1500 diff_state.update(cx, |diff_state, cx| {
1501 let buffer_snapshot = buffer.read(cx).text_snapshot();
1502 diff_state.diff_bases_changed(
1503 buffer_snapshot,
1504 Some(diff_bases_change),
1505 cx,
1506 );
1507 })
1508 }
1509 .await
1510 .log_err();
1511 })
1512 .detach();
1513 }
1514 }
1515 _ => {}
1516 }
1517 }
1518
1519 pub fn recalculate_buffer_diffs(
1520 &mut self,
1521 buffers: Vec<Entity<Buffer>>,
1522 cx: &mut Context<Self>,
1523 ) -> impl Future<Output = ()> + use<> {
1524 let mut futures = Vec::new();
1525 for buffer in buffers {
1526 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1527 let buffer = buffer.read(cx).text_snapshot();
1528 diff_state.update(cx, |diff_state, cx| {
1529 diff_state.recalculate_diffs(buffer.clone(), cx);
1530 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1531 });
1532 futures.push(diff_state.update(cx, |diff_state, cx| {
1533 diff_state
1534 .reparse_conflict_markers(buffer, cx)
1535 .map(|_| {})
1536 .boxed()
1537 }));
1538 }
1539 }
1540 async move {
1541 futures::future::join_all(futures).await;
1542 }
1543 }
1544
1545 fn on_buffer_diff_event(
1546 &mut self,
1547 diff: Entity<buffer_diff::BufferDiff>,
1548 event: &BufferDiffEvent,
1549 cx: &mut Context<Self>,
1550 ) {
1551 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1552 let buffer_id = diff.read(cx).buffer_id;
1553 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1554 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1555 diff_state.hunk_staging_operation_count += 1;
1556 diff_state.hunk_staging_operation_count
1557 });
1558 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1559 let recv = repo.update(cx, |repo, cx| {
1560 log::debug!("hunks changed for {}", path.as_unix_str());
1561 repo.spawn_set_index_text_job(
1562 path,
1563 new_index_text.as_ref().map(|rope| rope.to_string()),
1564 Some(hunk_staging_operation_count),
1565 cx,
1566 )
1567 });
1568 let diff = diff.downgrade();
1569 cx.spawn(async move |this, cx| {
1570 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1571 diff.update(cx, |diff, cx| {
1572 diff.clear_pending_hunks(cx);
1573 })
1574 .ok();
1575 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1576 .ok();
1577 }
1578 })
1579 .detach();
1580 }
1581 }
1582 }
1583 }
1584
1585 fn local_worktree_git_repos_changed(
1586 &mut self,
1587 worktree: Entity<Worktree>,
1588 changed_repos: &UpdatedGitRepositoriesSet,
1589 cx: &mut Context<Self>,
1590 ) {
1591 log::debug!("local worktree repos changed");
1592 debug_assert!(worktree.read(cx).is_local());
1593
1594 for repository in self.repositories.values() {
1595 repository.update(cx, |repository, cx| {
1596 let repo_abs_path = &repository.work_directory_abs_path;
1597 if changed_repos.iter().any(|update| {
1598 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1599 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1600 }) {
1601 repository.reload_buffer_diff_bases(cx);
1602 }
1603 });
1604 }
1605 }
1606
1607 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1608 &self.repositories
1609 }
1610
1611 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1612 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1613 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1614 Some(status.status)
1615 }
1616
1617 pub fn repository_and_path_for_buffer_id(
1618 &self,
1619 buffer_id: BufferId,
1620 cx: &App,
1621 ) -> Option<(Entity<Repository>, RepoPath)> {
1622 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1623 let project_path = buffer.read(cx).project_path(cx)?;
1624 self.repository_and_path_for_project_path(&project_path, cx)
1625 }
1626
1627 pub fn repository_and_path_for_project_path(
1628 &self,
1629 path: &ProjectPath,
1630 cx: &App,
1631 ) -> Option<(Entity<Repository>, RepoPath)> {
1632 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1633 self.repositories
1634 .values()
1635 .filter_map(|repo| {
1636 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1637 Some((repo.clone(), repo_path))
1638 })
1639 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1640 }
1641
1642 pub fn git_init(
1643 &self,
1644 path: Arc<Path>,
1645 fallback_branch_name: String,
1646 cx: &App,
1647 ) -> Task<Result<()>> {
1648 match &self.state {
1649 GitStoreState::Local { fs, .. } => {
1650 let fs = fs.clone();
1651 cx.background_executor()
1652 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1653 }
1654 GitStoreState::Remote {
1655 upstream_client,
1656 upstream_project_id: project_id,
1657 ..
1658 } => {
1659 let client = upstream_client.clone();
1660 let project_id = *project_id;
1661 cx.background_executor().spawn(async move {
1662 client
1663 .request(proto::GitInit {
1664 project_id: project_id,
1665 abs_path: path.to_string_lossy().into_owned(),
1666 fallback_branch_name,
1667 })
1668 .await?;
1669 Ok(())
1670 })
1671 }
1672 }
1673 }
1674
1675 pub fn git_clone(
1676 &self,
1677 repo: String,
1678 path: impl Into<Arc<std::path::Path>>,
1679 cx: &App,
1680 ) -> Task<Result<()>> {
1681 let path = path.into();
1682 match &self.state {
1683 GitStoreState::Local { fs, .. } => {
1684 let fs = fs.clone();
1685 cx.background_executor()
1686 .spawn(async move { fs.git_clone(&repo, &path).await })
1687 }
1688 GitStoreState::Remote {
1689 upstream_client,
1690 upstream_project_id,
1691 ..
1692 } => {
1693 if upstream_client.is_via_collab() {
1694 return Task::ready(Err(anyhow!(
1695 "Git Clone isn't supported for project guests"
1696 )));
1697 }
1698 let request = upstream_client.request(proto::GitClone {
1699 project_id: *upstream_project_id,
1700 abs_path: path.to_string_lossy().into_owned(),
1701 remote_repo: repo,
1702 });
1703
1704 cx.background_spawn(async move {
1705 let result = request.await?;
1706
1707 match result.success {
1708 true => Ok(()),
1709 false => Err(anyhow!("Git Clone failed")),
1710 }
1711 })
1712 }
1713 }
1714 }
1715
1716 async fn handle_update_repository(
1717 this: Entity<Self>,
1718 envelope: TypedEnvelope<proto::UpdateRepository>,
1719 mut cx: AsyncApp,
1720 ) -> Result<()> {
1721 this.update(&mut cx, |this, cx| {
1722 let path_style = this.worktree_store.read(cx).path_style();
1723 let mut update = envelope.payload;
1724
1725 let id = RepositoryId::from_proto(update.id);
1726 let client = this.upstream_client().context("no upstream client")?;
1727
1728 let mut repo_subscription = None;
1729 let repo = this.repositories.entry(id).or_insert_with(|| {
1730 let git_store = cx.weak_entity();
1731 let repo = cx.new(|cx| {
1732 Repository::remote(
1733 id,
1734 Path::new(&update.abs_path).into(),
1735 path_style,
1736 ProjectId(update.project_id),
1737 client,
1738 git_store,
1739 cx,
1740 )
1741 });
1742 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1743 cx.emit(GitStoreEvent::RepositoryAdded);
1744 repo
1745 });
1746 this._subscriptions.extend(repo_subscription);
1747
1748 repo.update(cx, {
1749 let update = update.clone();
1750 |repo, cx| repo.apply_remote_update(update, cx)
1751 })?;
1752
1753 this.active_repo_id.get_or_insert_with(|| {
1754 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1755 id
1756 });
1757
1758 if let Some((client, project_id)) = this.downstream_client() {
1759 update.project_id = project_id.to_proto();
1760 client.send(update).log_err();
1761 }
1762 Ok(())
1763 })?
1764 }
1765
1766 async fn handle_remove_repository(
1767 this: Entity<Self>,
1768 envelope: TypedEnvelope<proto::RemoveRepository>,
1769 mut cx: AsyncApp,
1770 ) -> Result<()> {
1771 this.update(&mut cx, |this, cx| {
1772 let mut update = envelope.payload;
1773 let id = RepositoryId::from_proto(update.id);
1774 this.repositories.remove(&id);
1775 if let Some((client, project_id)) = this.downstream_client() {
1776 update.project_id = project_id.to_proto();
1777 client.send(update).log_err();
1778 }
1779 if this.active_repo_id == Some(id) {
1780 this.active_repo_id = None;
1781 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1782 }
1783 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1784 })
1785 }
1786
1787 async fn handle_git_init(
1788 this: Entity<Self>,
1789 envelope: TypedEnvelope<proto::GitInit>,
1790 cx: AsyncApp,
1791 ) -> Result<proto::Ack> {
1792 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1793 let name = envelope.payload.fallback_branch_name;
1794 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1795 .await?;
1796
1797 Ok(proto::Ack {})
1798 }
1799
1800 async fn handle_git_clone(
1801 this: Entity<Self>,
1802 envelope: TypedEnvelope<proto::GitClone>,
1803 cx: AsyncApp,
1804 ) -> Result<proto::GitCloneResponse> {
1805 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1806 let repo_name = envelope.payload.remote_repo;
1807 let result = cx
1808 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1809 .await;
1810
1811 Ok(proto::GitCloneResponse {
1812 success: result.is_ok(),
1813 })
1814 }
1815
1816 async fn handle_fetch(
1817 this: Entity<Self>,
1818 envelope: TypedEnvelope<proto::Fetch>,
1819 mut cx: AsyncApp,
1820 ) -> Result<proto::RemoteMessageResponse> {
1821 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1822 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1823 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1824 let askpass_id = envelope.payload.askpass_id;
1825
1826 let askpass = make_remote_delegate(
1827 this,
1828 envelope.payload.project_id,
1829 repository_id,
1830 askpass_id,
1831 &mut cx,
1832 );
1833
1834 let remote_output = repository_handle
1835 .update(&mut cx, |repository_handle, cx| {
1836 repository_handle.fetch(fetch_options, askpass, cx)
1837 })?
1838 .await??;
1839
1840 Ok(proto::RemoteMessageResponse {
1841 stdout: remote_output.stdout,
1842 stderr: remote_output.stderr,
1843 })
1844 }
1845
1846 async fn handle_push(
1847 this: Entity<Self>,
1848 envelope: TypedEnvelope<proto::Push>,
1849 mut cx: AsyncApp,
1850 ) -> Result<proto::RemoteMessageResponse> {
1851 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1852 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1853
1854 let askpass_id = envelope.payload.askpass_id;
1855 let askpass = make_remote_delegate(
1856 this,
1857 envelope.payload.project_id,
1858 repository_id,
1859 askpass_id,
1860 &mut cx,
1861 );
1862
1863 let options = envelope
1864 .payload
1865 .options
1866 .as_ref()
1867 .map(|_| match envelope.payload.options() {
1868 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1869 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1870 });
1871
1872 let branch_name = envelope.payload.branch_name.into();
1873 let remote_name = envelope.payload.remote_name.into();
1874
1875 let remote_output = repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1878 })?
1879 .await??;
1880 Ok(proto::RemoteMessageResponse {
1881 stdout: remote_output.stdout,
1882 stderr: remote_output.stderr,
1883 })
1884 }
1885
1886 async fn handle_pull(
1887 this: Entity<Self>,
1888 envelope: TypedEnvelope<proto::Pull>,
1889 mut cx: AsyncApp,
1890 ) -> Result<proto::RemoteMessageResponse> {
1891 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1892 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1893 let askpass_id = envelope.payload.askpass_id;
1894 let askpass = make_remote_delegate(
1895 this,
1896 envelope.payload.project_id,
1897 repository_id,
1898 askpass_id,
1899 &mut cx,
1900 );
1901
1902 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1903 let remote_name = envelope.payload.remote_name.into();
1904 let rebase = envelope.payload.rebase;
1905
1906 let remote_message = repository_handle
1907 .update(&mut cx, |repository_handle, cx| {
1908 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1909 })?
1910 .await??;
1911
1912 Ok(proto::RemoteMessageResponse {
1913 stdout: remote_message.stdout,
1914 stderr: remote_message.stderr,
1915 })
1916 }
1917
1918 async fn handle_stage(
1919 this: Entity<Self>,
1920 envelope: TypedEnvelope<proto::Stage>,
1921 mut cx: AsyncApp,
1922 ) -> Result<proto::Ack> {
1923 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1924 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1925
1926 let entries = envelope
1927 .payload
1928 .paths
1929 .into_iter()
1930 .map(|path| RepoPath::new(&path))
1931 .collect::<Result<Vec<_>>>()?;
1932
1933 repository_handle
1934 .update(&mut cx, |repository_handle, cx| {
1935 repository_handle.stage_entries(entries, cx)
1936 })?
1937 .await?;
1938 Ok(proto::Ack {})
1939 }
1940
1941 async fn handle_unstage(
1942 this: Entity<Self>,
1943 envelope: TypedEnvelope<proto::Unstage>,
1944 mut cx: AsyncApp,
1945 ) -> Result<proto::Ack> {
1946 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1947 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1948
1949 let entries = envelope
1950 .payload
1951 .paths
1952 .into_iter()
1953 .map(|path| RepoPath::new(&path))
1954 .collect::<Result<Vec<_>>>()?;
1955
1956 repository_handle
1957 .update(&mut cx, |repository_handle, cx| {
1958 repository_handle.unstage_entries(entries, cx)
1959 })?
1960 .await?;
1961
1962 Ok(proto::Ack {})
1963 }
1964
1965 async fn handle_stash(
1966 this: Entity<Self>,
1967 envelope: TypedEnvelope<proto::Stash>,
1968 mut cx: AsyncApp,
1969 ) -> Result<proto::Ack> {
1970 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1971 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1972
1973 let entries = envelope
1974 .payload
1975 .paths
1976 .into_iter()
1977 .map(|path| RepoPath::new(&path))
1978 .collect::<Result<Vec<_>>>()?;
1979
1980 repository_handle
1981 .update(&mut cx, |repository_handle, cx| {
1982 repository_handle.stash_entries(entries, cx)
1983 })?
1984 .await?;
1985
1986 Ok(proto::Ack {})
1987 }
1988
1989 async fn handle_stash_pop(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::StashPop>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::Ack> {
1994 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1995 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1996 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1997
1998 repository_handle
1999 .update(&mut cx, |repository_handle, cx| {
2000 repository_handle.stash_pop(stash_index, cx)
2001 })?
2002 .await?;
2003
2004 Ok(proto::Ack {})
2005 }
2006
2007 async fn handle_stash_apply(
2008 this: Entity<Self>,
2009 envelope: TypedEnvelope<proto::StashApply>,
2010 mut cx: AsyncApp,
2011 ) -> Result<proto::Ack> {
2012 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2013 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2014 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2015
2016 repository_handle
2017 .update(&mut cx, |repository_handle, cx| {
2018 repository_handle.stash_apply(stash_index, cx)
2019 })?
2020 .await?;
2021
2022 Ok(proto::Ack {})
2023 }
2024
2025 async fn handle_stash_drop(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::StashDrop>,
2028 mut cx: AsyncApp,
2029 ) -> Result<proto::Ack> {
2030 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2031 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2032 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2033
2034 repository_handle
2035 .update(&mut cx, |repository_handle, cx| {
2036 repository_handle.stash_drop(stash_index, cx)
2037 })?
2038 .await??;
2039
2040 Ok(proto::Ack {})
2041 }
2042
2043 async fn handle_set_index_text(
2044 this: Entity<Self>,
2045 envelope: TypedEnvelope<proto::SetIndexText>,
2046 mut cx: AsyncApp,
2047 ) -> Result<proto::Ack> {
2048 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2049 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2050 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2051
2052 repository_handle
2053 .update(&mut cx, |repository_handle, cx| {
2054 repository_handle.spawn_set_index_text_job(
2055 repo_path,
2056 envelope.payload.text,
2057 None,
2058 cx,
2059 )
2060 })?
2061 .await??;
2062 Ok(proto::Ack {})
2063 }
2064
2065 async fn handle_run_hook(
2066 this: Entity<Self>,
2067 envelope: TypedEnvelope<proto::RunGitHook>,
2068 mut cx: AsyncApp,
2069 ) -> Result<proto::Ack> {
2070 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2071 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2072 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2073 repository_handle
2074 .update(&mut cx, |repository_handle, cx| {
2075 repository_handle.run_hook(hook, cx)
2076 })?
2077 .await??;
2078 Ok(proto::Ack {})
2079 }
2080
2081 async fn handle_commit(
2082 this: Entity<Self>,
2083 envelope: TypedEnvelope<proto::Commit>,
2084 mut cx: AsyncApp,
2085 ) -> Result<proto::Ack> {
2086 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2087 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2088 let askpass_id = envelope.payload.askpass_id;
2089
2090 let askpass = make_remote_delegate(
2091 this,
2092 envelope.payload.project_id,
2093 repository_id,
2094 askpass_id,
2095 &mut cx,
2096 );
2097
2098 let message = SharedString::from(envelope.payload.message);
2099 let name = envelope.payload.name.map(SharedString::from);
2100 let email = envelope.payload.email.map(SharedString::from);
2101 let options = envelope.payload.options.unwrap_or_default();
2102
2103 repository_handle
2104 .update(&mut cx, |repository_handle, cx| {
2105 repository_handle.commit(
2106 message,
2107 name.zip(email),
2108 CommitOptions {
2109 amend: options.amend,
2110 signoff: options.signoff,
2111 },
2112 askpass,
2113 cx,
2114 )
2115 })?
2116 .await??;
2117 Ok(proto::Ack {})
2118 }
2119
2120 async fn handle_get_remotes(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::GetRemotes>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::GetRemotesResponse> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127
2128 let branch_name = envelope.payload.branch_name;
2129 let is_push = envelope.payload.is_push;
2130
2131 let remotes = repository_handle
2132 .update(&mut cx, |repository_handle, _| {
2133 repository_handle.get_remotes(branch_name, is_push)
2134 })?
2135 .await??;
2136
2137 Ok(proto::GetRemotesResponse {
2138 remotes: remotes
2139 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2142 })
2143 .collect::<Vec<_>>(),
2144 })
2145 }
2146
2147 async fn handle_get_worktrees(
2148 this: Entity<Self>,
2149 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2150 mut cx: AsyncApp,
2151 ) -> Result<proto::GitWorktreesResponse> {
2152 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2153 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2154
2155 let worktrees = repository_handle
2156 .update(&mut cx, |repository_handle, _| {
2157 repository_handle.worktrees()
2158 })?
2159 .await??;
2160
2161 Ok(proto::GitWorktreesResponse {
2162 worktrees: worktrees
2163 .into_iter()
2164 .map(|worktree| worktree_to_proto(&worktree))
2165 .collect::<Vec<_>>(),
2166 })
2167 }
2168
2169 async fn handle_create_worktree(
2170 this: Entity<Self>,
2171 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2172 mut cx: AsyncApp,
2173 ) -> Result<proto::Ack> {
2174 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2175 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2176 let directory = PathBuf::from(envelope.payload.directory);
2177 let name = envelope.payload.name;
2178 let commit = envelope.payload.commit;
2179
2180 repository_handle
2181 .update(&mut cx, |repository_handle, _| {
2182 repository_handle.create_worktree(name, directory, commit)
2183 })?
2184 .await??;
2185
2186 Ok(proto::Ack {})
2187 }
2188
2189 async fn handle_get_branches(
2190 this: Entity<Self>,
2191 envelope: TypedEnvelope<proto::GitGetBranches>,
2192 mut cx: AsyncApp,
2193 ) -> Result<proto::GitBranchesResponse> {
2194 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2195 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2196
2197 let branches = repository_handle
2198 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2199 .await??;
2200
2201 Ok(proto::GitBranchesResponse {
2202 branches: branches
2203 .into_iter()
2204 .map(|branch| branch_to_proto(&branch))
2205 .collect::<Vec<_>>(),
2206 })
2207 }

    async fn handle_get_default_branch(
2209 this: Entity<Self>,
2210 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2211 mut cx: AsyncApp,
2212 ) -> Result<proto::GetDefaultBranchResponse> {
2213 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2214 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2215
2216 let branch = repository_handle
2217 .update(&mut cx, |repository_handle, _| {
2218 repository_handle.default_branch()
2219 })?
2220 .await??
2221 .map(Into::into);
2222
2223 Ok(proto::GetDefaultBranchResponse { branch })
2224 }

    async fn handle_create_branch(
2226 this: Entity<Self>,
2227 envelope: TypedEnvelope<proto::GitCreateBranch>,
2228 mut cx: AsyncApp,
2229 ) -> Result<proto::Ack> {
2230 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2231 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2232 let branch_name = envelope.payload.branch_name;
2233
2234 repository_handle
2235 .update(&mut cx, |repository_handle, _| {
2236 repository_handle.create_branch(branch_name, None)
2237 })?
2238 .await??;
2239
2240 Ok(proto::Ack {})
2241 }
2242
2243 async fn handle_change_branch(
2244 this: Entity<Self>,
2245 envelope: TypedEnvelope<proto::GitChangeBranch>,
2246 mut cx: AsyncApp,
2247 ) -> Result<proto::Ack> {
2248 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2249 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2250 let branch_name = envelope.payload.branch_name;
2251
2252 repository_handle
2253 .update(&mut cx, |repository_handle, _| {
2254 repository_handle.change_branch(branch_name)
2255 })?
2256 .await??;
2257
2258 Ok(proto::Ack {})
2259 }
2260
2261 async fn handle_rename_branch(
2262 this: Entity<Self>,
2263 envelope: TypedEnvelope<proto::GitRenameBranch>,
2264 mut cx: AsyncApp,
2265 ) -> Result<proto::Ack> {
2266 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2267 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2268 let branch = envelope.payload.branch;
2269 let new_name = envelope.payload.new_name;
2270
2271 repository_handle
2272 .update(&mut cx, |repository_handle, _| {
2273 repository_handle.rename_branch(branch, new_name)
2274 })?
2275 .await??;
2276
2277 Ok(proto::Ack {})
2278 }
2279
2280 async fn handle_create_remote(
2281 this: Entity<Self>,
2282 envelope: TypedEnvelope<proto::GitCreateRemote>,
2283 mut cx: AsyncApp,
2284 ) -> Result<proto::Ack> {
2285 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2286 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2287 let remote_name = envelope.payload.remote_name;
2288 let remote_url = envelope.payload.remote_url;
2289
2290 repository_handle
2291 .update(&mut cx, |repository_handle, _| {
2292 repository_handle.create_remote(remote_name, remote_url)
2293 })?
2294 .await??;
2295
2296 Ok(proto::Ack {})
2297 }
2298
2299 async fn handle_delete_branch(
2300 this: Entity<Self>,
2301 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2302 mut cx: AsyncApp,
2303 ) -> Result<proto::Ack> {
2304 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2305 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2306 let branch_name = envelope.payload.branch_name;
2307
2308 repository_handle
2309 .update(&mut cx, |repository_handle, _| {
2310 repository_handle.delete_branch(branch_name)
2311 })?
2312 .await??;
2313
2314 Ok(proto::Ack {})
2315 }
2316
2317 async fn handle_remove_remote(
2318 this: Entity<Self>,
2319 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2320 mut cx: AsyncApp,
2321 ) -> Result<proto::Ack> {
2322 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2323 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2324 let remote_name = envelope.payload.remote_name;
2325
2326 repository_handle
2327 .update(&mut cx, |repository_handle, _| {
2328 repository_handle.remove_remote(remote_name)
2329 })?
2330 .await??;
2331
2332 Ok(proto::Ack {})
2333 }
2334
2335 async fn handle_show(
2336 this: Entity<Self>,
2337 envelope: TypedEnvelope<proto::GitShow>,
2338 mut cx: AsyncApp,
2339 ) -> Result<proto::GitCommitDetails> {
2340 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2341 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2342
2343 let commit = repository_handle
2344 .update(&mut cx, |repository_handle, _| {
2345 repository_handle.show(envelope.payload.commit)
2346 })?
2347 .await??;
2348 Ok(proto::GitCommitDetails {
2349 sha: commit.sha.into(),
2350 message: commit.message.into(),
2351 commit_timestamp: commit.commit_timestamp,
2352 author_email: commit.author_email.into(),
2353 author_name: commit.author_name.into(),
2354 })
2355 }
2356
2357 async fn handle_load_commit_diff(
2358 this: Entity<Self>,
2359 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2360 mut cx: AsyncApp,
2361 ) -> Result<proto::LoadCommitDiffResponse> {
2362 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2363 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2364
2365 let commit_diff = repository_handle
2366 .update(&mut cx, |repository_handle, _| {
2367 repository_handle.load_commit_diff(envelope.payload.commit)
2368 })?
2369 .await??;
2370 Ok(proto::LoadCommitDiffResponse {
2371 files: commit_diff
2372 .files
2373 .into_iter()
2374 .map(|file| proto::CommitFile {
2375 path: file.path.to_proto(),
2376 old_text: file.old_text,
2377 new_text: file.new_text,
2378 })
2379 .collect(),
2380 })
2381 }
2382
2383 async fn handle_file_history(
2384 this: Entity<Self>,
2385 envelope: TypedEnvelope<proto::GitFileHistory>,
2386 mut cx: AsyncApp,
2387 ) -> Result<proto::GitFileHistoryResponse> {
2388 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2389 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2390 let path = RepoPath::from_proto(&envelope.payload.path)?;
2391 let skip = envelope.payload.skip as usize;
2392 let limit = envelope.payload.limit.map(|l| l as usize);
2393
2394 let file_history = repository_handle
2395 .update(&mut cx, |repository_handle, _| {
2396 repository_handle.file_history_paginated(path, skip, limit)
2397 })?
2398 .await??;
2399
2400 Ok(proto::GitFileHistoryResponse {
2401 entries: file_history
2402 .entries
2403 .into_iter()
2404 .map(|entry| proto::FileHistoryEntry {
2405 sha: entry.sha.to_string(),
2406 subject: entry.subject.to_string(),
2407 message: entry.message.to_string(),
2408 commit_timestamp: entry.commit_timestamp,
2409 author_name: entry.author_name.to_string(),
2410 author_email: entry.author_email.to_string(),
2411 })
2412 .collect(),
2413 path: file_history.path.to_proto(),
2414 })
2415 }
2416
2417 async fn handle_reset(
2418 this: Entity<Self>,
2419 envelope: TypedEnvelope<proto::GitReset>,
2420 mut cx: AsyncApp,
2421 ) -> Result<proto::Ack> {
2422 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2423 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2424
2425 let mode = match envelope.payload.mode() {
2426 git_reset::ResetMode::Soft => ResetMode::Soft,
2427 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2428 };
2429
2430 repository_handle
2431 .update(&mut cx, |repository_handle, cx| {
2432 repository_handle.reset(envelope.payload.commit, mode, cx)
2433 })?
2434 .await??;
2435 Ok(proto::Ack {})
2436 }
2437
2438 async fn handle_checkout_files(
2439 this: Entity<Self>,
2440 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2441 mut cx: AsyncApp,
2442 ) -> Result<proto::Ack> {
2443 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2444 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2445 let paths = envelope
2446 .payload
2447 .paths
2448 .iter()
2449 .map(|s| RepoPath::from_proto(s))
2450 .collect::<Result<Vec<_>>>()?;
2451
2452 repository_handle
2453 .update(&mut cx, |repository_handle, cx| {
2454 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2455 })?
2456 .await?;
2457 Ok(proto::Ack {})
2458 }
2459
2460 async fn handle_open_commit_message_buffer(
2461 this: Entity<Self>,
2462 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2463 mut cx: AsyncApp,
2464 ) -> Result<proto::OpenBufferResponse> {
2465 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2466 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2467 let buffer = repository
2468 .update(&mut cx, |repository, cx| {
2469 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2470 })?
2471 .await?;
2472
2473 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2474 this.update(&mut cx, |this, cx| {
2475 this.buffer_store.update(cx, |buffer_store, cx| {
2476 buffer_store
2477 .create_buffer_for_peer(
2478 &buffer,
2479 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2480 cx,
2481 )
2482 .detach_and_log_err(cx);
2483 })
2484 })?;
2485
2486 Ok(proto::OpenBufferResponse {
2487 buffer_id: buffer_id.to_proto(),
2488 })
2489 }
2490
2491 async fn handle_askpass(
2492 this: Entity<Self>,
2493 envelope: TypedEnvelope<proto::AskPassRequest>,
2494 mut cx: AsyncApp,
2495 ) -> Result<proto::AskPassResponse> {
2496 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2497 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2498
2499 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2500 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2501 debug_panic!("no askpass found");
2502 anyhow::bail!("no askpass found");
2503 };
2504
2505 let response = askpass
2506 .ask_password(envelope.payload.prompt)
2507 .await
2508 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2509
2510 delegates
2511 .lock()
2512 .insert(envelope.payload.askpass_id, askpass);
2513
        // Note: despite the marker type's name, the askpass response is sent back over the wire unencrypted here.
2515 Ok(proto::AskPassResponse {
2516 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2517 })
2518 }
2519
2520 async fn handle_check_for_pushed_commits(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::CheckForPushedCommitsResponse> {
2525 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2526 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2527
2528 let branches = repository_handle
2529 .update(&mut cx, |repository_handle, _| {
2530 repository_handle.check_for_pushed_commits()
2531 })?
2532 .await??;
2533 Ok(proto::CheckForPushedCommitsResponse {
2534 pushed_to: branches
2535 .into_iter()
2536 .map(|commit| commit.to_string())
2537 .collect(),
2538 })
2539 }
2540
2541 async fn handle_git_diff(
2542 this: Entity<Self>,
2543 envelope: TypedEnvelope<proto::GitDiff>,
2544 mut cx: AsyncApp,
2545 ) -> Result<proto::GitDiffResponse> {
2546 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2547 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2548 let diff_type = match envelope.payload.diff_type() {
2549 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2550 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2551 };
2552
2553 let mut diff = repository_handle
2554 .update(&mut cx, |repository_handle, cx| {
2555 repository_handle.diff(diff_type, cx)
2556 })?
2557 .await??;
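        // Cap the diff we send back over RPC: keep only the first million characters (~1 MB)
        // so a huge diff can't blow up the message size.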
2558 const ONE_MB: usize = 1_000_000;
2559 if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
2561 }
2562
2563 Ok(proto::GitDiffResponse { diff })
2564 }
2565
2566 async fn handle_tree_diff(
2567 this: Entity<Self>,
2568 request: TypedEnvelope<proto::GetTreeDiff>,
2569 mut cx: AsyncApp,
2570 ) -> Result<proto::GetTreeDiffResponse> {
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
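        // When `is_merge` is set, the diff is computed against the merge base of `base` and `head`
        // (roughly `git diff base...head`); otherwise the two revisions are compared directly.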
2572 let diff_type = if request.payload.is_merge {
2573 DiffTreeType::MergeBase {
2574 base: request.payload.base.into(),
2575 head: request.payload.head.into(),
2576 }
2577 } else {
2578 DiffTreeType::Since {
2579 base: request.payload.base.into(),
2580 head: request.payload.head.into(),
2581 }
2582 };
2583
2584 let diff = this
2585 .update(&mut cx, |this, cx| {
2586 let repository = this.repositories().get(&repository_id)?;
2587 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2588 })?
2589 .context("missing repository")?
2590 .await??;
2591
2592 Ok(proto::GetTreeDiffResponse {
2593 entries: diff
2594 .entries
2595 .into_iter()
2596 .map(|(path, status)| proto::TreeDiffStatus {
2597 path: path.as_ref().to_proto(),
2598 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2600 TreeDiffStatus::Modified { .. } => {
2601 proto::tree_diff_status::Status::Modified.into()
2602 }
2603 TreeDiffStatus::Deleted { .. } => {
2604 proto::tree_diff_status::Status::Deleted.into()
2605 }
2606 },
2607 oid: match status {
2608 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2609 Some(old.to_string())
2610 }
2611 TreeDiffStatus::Added => None,
2612 },
2613 })
2614 .collect(),
2615 })
2616 }
2617
2618 async fn handle_get_blob_content(
2619 this: Entity<Self>,
2620 request: TypedEnvelope<proto::GetBlobContent>,
2621 mut cx: AsyncApp,
2622 ) -> Result<proto::GetBlobContentResponse> {
2623 let oid = git::Oid::from_str(&request.payload.oid)?;
        let repository_id = RepositoryId::from_proto(request.payload.repository_id);
2625 let content = this
2626 .update(&mut cx, |this, cx| {
2627 let repository = this.repositories().get(&repository_id)?;
2628 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2629 })?
2630 .context("missing repository")?
2631 .await?;
2632 Ok(proto::GetBlobContentResponse { content })
2633 }
2634
2635 async fn handle_open_unstaged_diff(
2636 this: Entity<Self>,
2637 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2638 mut cx: AsyncApp,
2639 ) -> Result<proto::OpenUnstagedDiffResponse> {
2640 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2641 let diff = this
2642 .update(&mut cx, |this, cx| {
2643 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2644 Some(this.open_unstaged_diff(buffer, cx))
2645 })?
2646 .context("missing buffer")?
2647 .await?;
2648 this.update(&mut cx, |this, _| {
2649 let shared_diffs = this
2650 .shared_diffs
2651 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2652 .or_default();
2653 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2654 })?;
2655 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2656 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2657 }
2658
2659 async fn handle_open_uncommitted_diff(
2660 this: Entity<Self>,
2661 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2662 mut cx: AsyncApp,
2663 ) -> Result<proto::OpenUncommittedDiffResponse> {
2664 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2665 let diff = this
2666 .update(&mut cx, |this, cx| {
2667 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2668 Some(this.open_uncommitted_diff(buffer, cx))
2669 })?
2670 .context("missing buffer")?
2671 .await?;
2672 this.update(&mut cx, |this, _| {
2673 let shared_diffs = this
2674 .shared_diffs
2675 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2676 .or_default();
2677 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2678 })?;
2679 diff.read_with(&cx, |diff, cx| {
2680 use proto::open_uncommitted_diff_response::Mode;
2681
2682 let unstaged_diff = diff.secondary_diff();
2683 let index_snapshot = unstaged_diff.and_then(|diff| {
2684 let diff = diff.read(cx);
2685 diff.base_text_exists().then(|| diff.base_text())
2686 });
2687
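            // Decide which base texts to send: when the index text is the same snapshot as HEAD's,
            // report `IndexMatchesHead` and omit the staged text so the client can reuse the
            // committed text instead of receiving it twice.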
2688 let mode;
2689 let staged_text;
2690 let committed_text;
2691 if diff.base_text_exists() {
2692 let committed_snapshot = diff.base_text();
2693 committed_text = Some(committed_snapshot.text());
2694 if let Some(index_text) = index_snapshot {
2695 if index_text.remote_id() == committed_snapshot.remote_id() {
2696 mode = Mode::IndexMatchesHead;
2697 staged_text = None;
2698 } else {
2699 mode = Mode::IndexAndHead;
2700 staged_text = Some(index_text.text());
2701 }
2702 } else {
2703 mode = Mode::IndexAndHead;
2704 staged_text = None;
2705 }
2706 } else {
2707 mode = Mode::IndexAndHead;
2708 committed_text = None;
2709 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2710 }
2711
2712 proto::OpenUncommittedDiffResponse {
2713 committed_text,
2714 staged_text,
2715 mode: mode.into(),
2716 }
2717 })
2718 }
2719
2720 async fn handle_update_diff_bases(
2721 this: Entity<Self>,
2722 request: TypedEnvelope<proto::UpdateDiffBases>,
2723 mut cx: AsyncApp,
2724 ) -> Result<()> {
2725 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2726 this.update(&mut cx, |this, cx| {
2727 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2728 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2729 {
2730 let buffer = buffer.read(cx).text_snapshot();
2731 diff_state.update(cx, |diff_state, cx| {
2732 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2733 })
2734 }
2735 })
2736 }
2737
2738 async fn handle_blame_buffer(
2739 this: Entity<Self>,
2740 envelope: TypedEnvelope<proto::BlameBuffer>,
2741 mut cx: AsyncApp,
2742 ) -> Result<proto::BlameBufferResponse> {
2743 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2744 let version = deserialize_version(&envelope.payload.version);
2745 let buffer = this.read_with(&cx, |this, cx| {
2746 this.buffer_store.read(cx).get_existing(buffer_id)
2747 })??;
2748 buffer
2749 .update(&mut cx, |buffer, _| {
2750 buffer.wait_for_version(version.clone())
2751 })?
2752 .await?;
2753 let blame = this
2754 .update(&mut cx, |this, cx| {
2755 this.blame_buffer(&buffer, Some(version), cx)
2756 })?
2757 .await?;
2758 Ok(serialize_blame_buffer_response(blame))
2759 }
2760
2761 async fn handle_get_permalink_to_line(
2762 this: Entity<Self>,
2763 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2764 mut cx: AsyncApp,
2765 ) -> Result<proto::GetPermalinkToLineResponse> {
2766 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2768 let selection = {
2769 let proto_selection = envelope
2770 .payload
2771 .selection
                .context("no selection provided for the permalink request")?;
2773 proto_selection.start as u32..proto_selection.end as u32
2774 };
2775 let buffer = this.read_with(&cx, |this, cx| {
2776 this.buffer_store.read(cx).get_existing(buffer_id)
2777 })??;
2778 let permalink = this
2779 .update(&mut cx, |this, cx| {
2780 this.get_permalink_to_line(&buffer, selection, cx)
2781 })?
2782 .await?;
2783 Ok(proto::GetPermalinkToLineResponse {
2784 permalink: permalink.to_string(),
2785 })
2786 }
2787
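    /// Looks up the repository entity for an incoming RPC request, failing if the
    /// repository id is no longer tracked by this project.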
2788 fn repository_for_request(
2789 this: &Entity<Self>,
2790 id: RepositoryId,
2791 cx: &mut AsyncApp,
2792 ) -> Result<Entity<Repository>> {
2793 this.read_with(cx, |this, _| {
2794 this.repositories
2795 .get(&id)
2796 .context("missing repository handle")
2797 .cloned()
2798 })?
2799 }
2800
2801 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2802 self.repositories
2803 .iter()
2804 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2805 .collect()
2806 }
2807
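    /// Maps a batch of updated worktree entries to repo-relative paths, grouped by the
    /// repository that should handle each one. The work runs on the background executor,
    /// and nested repositories take precedence over their parents for the paths they contain.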
2808 fn process_updated_entries(
2809 &self,
2810 worktree: &Entity<Worktree>,
2811 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2812 cx: &mut App,
2813 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2814 let path_style = worktree.read(cx).path_style();
2815 let mut repo_paths = self
2816 .repositories
2817 .values()
2818 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2819 .collect::<Vec<_>>();
2820 let mut entries: Vec<_> = updated_entries
2821 .iter()
2822 .map(|(path, _, _)| path.clone())
2823 .collect();
2824 entries.sort();
2825 let worktree = worktree.read(cx);
2826
2827 let entries = entries
2828 .into_iter()
2829 .map(|path| worktree.absolutize(&path))
2830 .collect::<Arc<[_]>>();
2831
2832 let executor = cx.background_executor().clone();
2833 cx.background_executor().spawn(async move {
2834 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2835 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2836 let mut tasks = FuturesOrdered::new();
2837 for (repo_path, repo) in repo_paths.into_iter().rev() {
2838 let entries = entries.clone();
2839 let task = executor.spawn(async move {
2840 // Find all repository paths that belong to this repo
2841 let mut ix = entries.partition_point(|path| path < &*repo_path);
2842 if ix == entries.len() {
2843 return None;
                    }
2845
2846 let mut paths = Vec::new();
                    // All paths prefixed by a given repo form one contiguous range in the sorted list.
2848 while let Some(path) = entries.get(ix)
2849 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2850 &repo_path, path, path_style,
2851 )
2852 {
2853 paths.push((repo_path, ix));
2854 ix += 1;
2855 }
2856 if paths.is_empty() {
2857 None
2858 } else {
2859 Some((repo, paths))
2860 }
2861 });
2862 tasks.push_back(task);
2863 }
2864
2865 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2866 let mut path_was_used = vec![false; entries.len()];
2867 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were queued deepest-first (repositories were iterated in reverse sorted
            // order), so more specific paths are seen before their ancestors. We always want to
            // assign a path to its innermost repository.
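            // For example, with repositories at `proj/` and `proj/sub/`, a change under `proj/sub/`
            // is claimed by the `proj/sub/` task and then skipped when `proj/`'s paths are applied.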
2870 for t in tasks {
2871 let Some((repo, paths)) = t else {
2872 continue;
2873 };
2874 let entry = paths_by_git_repo.entry(repo).or_default();
2875 for (repo_path, ix) in paths {
2876 if path_was_used[ix] {
2877 continue;
2878 }
2879 path_was_used[ix] = true;
2880 entry.push(repo_path);
2881 }
2882 }
2883
2884 paths_by_git_repo
2885 })
2886 }
2887}
2888
2889impl BufferGitState {
2890 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2891 Self {
2892 unstaged_diff: Default::default(),
2893 uncommitted_diff: Default::default(),
2894 recalculate_diff_task: Default::default(),
2895 language: Default::default(),
2896 language_registry: Default::default(),
2897 recalculating_tx: postage::watch::channel_with(false).0,
2898 hunk_staging_operation_count: 0,
2899 hunk_staging_operation_count_as_of_write: 0,
2900 head_text: Default::default(),
2901 index_text: Default::default(),
2902 head_changed: Default::default(),
2903 index_changed: Default::default(),
2904 language_changed: Default::default(),
2905 conflict_updated_futures: Default::default(),
2906 conflict_set: Default::default(),
2907 reparse_conflict_markers_task: Default::default(),
2908 }
2909 }
2910
2911 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2912 self.language = buffer.read(cx).language().cloned();
2913 self.language_changed = true;
2914 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2915 }
2916
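    /// Re-parses conflict markers for the given buffer snapshot on the background executor,
    /// but only if the buffer's conflict set previously contained conflicts. The returned
    /// receiver completes once the conflict set has been updated (or immediately, with a
    /// cancellation, if no re-parse was scheduled).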
2917 fn reparse_conflict_markers(
2918 &mut self,
2919 buffer: text::BufferSnapshot,
2920 cx: &mut Context<Self>,
2921 ) -> oneshot::Receiver<()> {
2922 let (tx, rx) = oneshot::channel();
2923
2924 let Some(conflict_set) = self
2925 .conflict_set
2926 .as_ref()
2927 .and_then(|conflict_set| conflict_set.upgrade())
2928 else {
2929 return rx;
2930 };
2931
2932 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2933 if conflict_set.has_conflict {
2934 Some(conflict_set.snapshot())
2935 } else {
2936 None
2937 }
2938 });
2939
2940 if let Some(old_snapshot) = old_snapshot {
2941 self.conflict_updated_futures.push(tx);
2942 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2943 let (snapshot, changed_range) = cx
2944 .background_spawn(async move {
2945 let new_snapshot = ConflictSet::parse(&buffer);
2946 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2947 (new_snapshot, changed_range)
2948 })
2949 .await;
2950 this.update(cx, |this, cx| {
2951 if let Some(conflict_set) = &this.conflict_set {
2952 conflict_set
2953 .update(cx, |conflict_set, cx| {
2954 conflict_set.set_snapshot(snapshot, changed_range, cx);
2955 })
2956 .ok();
2957 }
2958 let futures = std::mem::take(&mut this.conflict_updated_futures);
2959 for tx in futures {
2960 tx.send(()).ok();
2961 }
2962 })
2963 }))
2964 }
2965
2966 rx
2967 }
2968
2969 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2970 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2971 }
2972
2973 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2974 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2975 }
2976
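    /// Applies an incoming `UpdateDiffBases` message by translating its mode into the
    /// corresponding `DiffBasesChange` and kicking off a diff recalculation.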
2977 fn handle_base_texts_updated(
2978 &mut self,
2979 buffer: text::BufferSnapshot,
2980 message: proto::UpdateDiffBases,
2981 cx: &mut Context<Self>,
2982 ) {
2983 use proto::update_diff_bases::Mode;
2984
2985 let Some(mode) = Mode::from_i32(message.mode) else {
2986 return;
2987 };
2988
2989 let diff_bases_change = match mode {
2990 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2991 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2992 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2993 Mode::IndexAndHead => DiffBasesChange::SetEach {
2994 index: message.staged_text,
2995 head: message.committed_text,
2996 },
2997 };
2998
2999 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3000 }
3001
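    /// Returns a future that resolves once the in-flight diff recalculation (if any) has
    /// finished, or `None` when no recalculation is currently running.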
3002 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3003 if *self.recalculating_tx.borrow() {
3004 let mut rx = self.recalculating_tx.subscribe();
3005 Some(async move {
3006 loop {
3007 let is_recalculating = rx.recv().await;
3008 if is_recalculating != Some(true) {
3009 break;
3010 }
3011 }
3012 })
3013 } else {
3014 None
3015 }
3016 }
3017
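    /// Records new index and/or HEAD base texts (normalizing line endings) and then
    /// recalculates the affected diffs.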
3018 fn diff_bases_changed(
3019 &mut self,
3020 buffer: text::BufferSnapshot,
3021 diff_bases_change: Option<DiffBasesChange>,
3022 cx: &mut Context<Self>,
3023 ) {
3024 match diff_bases_change {
3025 Some(DiffBasesChange::SetIndex(index)) => {
3026 self.index_text = index.map(|mut index| {
3027 text::LineEnding::normalize(&mut index);
3028 Arc::new(index)
3029 });
3030 self.index_changed = true;
3031 }
3032 Some(DiffBasesChange::SetHead(head)) => {
3033 self.head_text = head.map(|mut head| {
3034 text::LineEnding::normalize(&mut head);
3035 Arc::new(head)
3036 });
3037 self.head_changed = true;
3038 }
3039 Some(DiffBasesChange::SetBoth(text)) => {
3040 let text = text.map(|mut text| {
3041 text::LineEnding::normalize(&mut text);
3042 Arc::new(text)
3043 });
3044 self.head_text = text.clone();
3045 self.index_text = text;
3046 self.head_changed = true;
3047 self.index_changed = true;
3048 }
3049 Some(DiffBasesChange::SetEach { index, head }) => {
3050 self.index_text = index.map(|mut index| {
3051 text::LineEnding::normalize(&mut index);
3052 Arc::new(index)
3053 });
3054 self.index_changed = true;
3055 self.head_text = head.map(|mut head| {
3056 text::LineEnding::normalize(&mut head);
3057 Arc::new(head)
3058 });
3059 self.head_changed = true;
3060 }
3061 None => {}
3062 }
3063
3064 self.recalculate_diffs(buffer, cx)
3065 }
3066
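    /// Recomputes the unstaged and uncommitted diffs for this buffer on a background task.
    /// When the index matches HEAD, the unstaged diff is reused for the uncommitted diff, and
    /// the whole attempt is abandoned if further hunk staging operations happen in the meantime.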
3067 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3068 *self.recalculating_tx.borrow_mut() = true;
3069
3070 let language = self.language.clone();
3071 let language_registry = self.language_registry.clone();
3072 let unstaged_diff = self.unstaged_diff();
3073 let uncommitted_diff = self.uncommitted_diff();
3074 let head = self.head_text.clone();
3075 let index = self.index_text.clone();
3076 let index_changed = self.index_changed;
3077 let head_changed = self.head_changed;
3078 let language_changed = self.language_changed;
3079 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3080 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3081 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3082 (None, None) => true,
3083 _ => false,
3084 };
3085 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3086 log::debug!(
3087 "start recalculating diffs for buffer {}",
3088 buffer.remote_id()
3089 );
3090
3091 let mut new_unstaged_diff = None;
3092 if let Some(unstaged_diff) = &unstaged_diff {
3093 new_unstaged_diff = Some(
3094 BufferDiff::update_diff(
3095 unstaged_diff.clone(),
3096 buffer.clone(),
3097 index,
3098 index_changed,
3099 language_changed,
3100 language.clone(),
3101 language_registry.clone(),
3102 cx,
3103 )
3104 .await?,
3105 );
3106 }
3107
3108 // Dropping BufferDiff can be expensive, so yield back to the event loop
3109 // for a bit
3110 yield_now().await;
3111
3112 let mut new_uncommitted_diff = None;
3113 if let Some(uncommitted_diff) = &uncommitted_diff {
3114 new_uncommitted_diff = if index_matches_head {
3115 new_unstaged_diff.clone()
3116 } else {
3117 Some(
3118 BufferDiff::update_diff(
3119 uncommitted_diff.clone(),
3120 buffer.clone(),
3121 head,
3122 head_changed,
3123 language_changed,
3124 language.clone(),
3125 language_registry.clone(),
3126 cx,
3127 )
3128 .await?,
3129 )
3130 }
3131 }
3132
3133 // Dropping BufferDiff can be expensive, so yield back to the event loop
3134 // for a bit
3135 yield_now().await;
3136
3137 let cancel = this.update(cx, |this, _| {
3138 // This checks whether all pending stage/unstage operations
3139 // have quiesced (i.e. both the corresponding write and the
3140 // read of that write have completed). If not, then we cancel
3141 // this recalculation attempt to avoid invalidating pending
3142 // state too quickly; another recalculation will come along
3143 // later and clear the pending state once the state of the index has settled.
3144 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3145 *this.recalculating_tx.borrow_mut() = false;
3146 true
3147 } else {
3148 false
3149 }
3150 })?;
3151 if cancel {
3152 log::debug!(
3153 concat!(
                        "aborting recalculating diffs for buffer {} ",
3155 "due to subsequent hunk operations",
3156 ),
3157 buffer.remote_id()
3158 );
3159 return Ok(());
3160 }
3161
3162 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3163 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3164 {
3165 unstaged_diff.update(cx, |diff, cx| {
3166 if language_changed {
3167 diff.language_changed(cx);
3168 }
3169 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3170 })?
3171 } else {
3172 None
3173 };
3174
3175 yield_now().await;
3176
3177 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3178 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3179 {
3180 uncommitted_diff.update(cx, |diff, cx| {
3181 if language_changed {
3182 diff.language_changed(cx);
3183 }
3184 diff.set_snapshot_with_secondary(
3185 new_uncommitted_diff,
3186 &buffer,
3187 unstaged_changed_range,
3188 true,
3189 cx,
3190 );
3191 })?;
3192 }
3193
3194 log::debug!(
3195 "finished recalculating diffs for buffer {}",
3196 buffer.remote_id()
3197 );
3198
3199 if let Some(this) = this.upgrade() {
3200 this.update(cx, |this, _| {
3201 this.index_changed = false;
3202 this.head_changed = false;
3203 this.language_changed = false;
3204 *this.recalculating_tx.borrow_mut() = false;
3205 })?;
3206 }
3207
3208 Ok(())
3209 }));
3210 }
3211}
3212
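/// Builds an askpass delegate that forwards credential prompts to the downstream client
/// over RPC and relays the response back through the delegate's channel, zeroizing the
/// plaintext afterwards.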
3213fn make_remote_delegate(
3214 this: Entity<GitStore>,
3215 project_id: u64,
3216 repository_id: RepositoryId,
3217 askpass_id: u64,
3218 cx: &mut AsyncApp,
3219) -> AskPassDelegate {
3220 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3221 this.update(cx, |this, cx| {
3222 let Some((client, _)) = this.downstream_client() else {
3223 return;
3224 };
3225 let response = client.request(proto::AskPassRequest {
3226 project_id,
3227 repository_id: repository_id.to_proto(),
3228 askpass_id,
3229 prompt,
3230 });
3231 cx.spawn(async move |_, _| {
3232 let mut response = response.await?.response;
3233 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3234 .ok();
3235 response.zeroize();
3236 anyhow::Ok(())
3237 })
3238 .detach_and_log_err(cx);
3239 })
3240 .log_err();
3241 })
3242}
3243
3244impl RepositoryId {
3245 pub fn to_proto(self) -> u64 {
3246 self.0
3247 }
3248
3249 pub fn from_proto(id: u64) -> Self {
3250 RepositoryId(id)
3251 }
3252}
3253
3254impl RepositorySnapshot {
3255 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3256 Self {
3257 id,
3258 statuses_by_path: Default::default(),
3259 work_directory_abs_path,
3260 branch: None,
3261 head_commit: None,
3262 scan_id: 0,
3263 merge: Default::default(),
3264 remote_origin_url: None,
3265 remote_upstream_url: None,
3266 stash_entries: Default::default(),
3267 path_style,
3268 }
3269 }
3270
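    /// Serializes the entire snapshot into an `UpdateRepository` message, as sent the first
    /// time a repository is shared.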
3271 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3272 proto::UpdateRepository {
3273 branch_summary: self.branch.as_ref().map(branch_to_proto),
3274 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3275 updated_statuses: self
3276 .statuses_by_path
3277 .iter()
3278 .map(|entry| entry.to_proto())
3279 .collect(),
3280 removed_statuses: Default::default(),
3281 current_merge_conflicts: self
3282 .merge
3283 .conflicted_paths
3284 .iter()
3285 .map(|repo_path| repo_path.to_proto())
3286 .collect(),
3287 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3288 project_id,
3289 id: self.id.to_proto(),
3290 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3291 entry_ids: vec![self.id.to_proto()],
3292 scan_id: self.scan_id,
3293 is_last_update: true,
3294 stash_entries: self
3295 .stash_entries
3296 .entries
3297 .iter()
3298 .map(stash_to_proto)
3299 .collect(),
3300 remote_upstream_url: self.remote_upstream_url.clone(),
3301 remote_origin_url: self.remote_origin_url.clone(),
3302 }
3303 }
3304
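    /// Computes a delta between `old` and `self` by merging the two sorted status lists,
    /// emitting updated entries and removed paths rather than resending every status.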
3305 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3306 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3307 let mut removed_statuses: Vec<String> = Vec::new();
3308
3309 let mut new_statuses = self.statuses_by_path.iter().peekable();
3310 let mut old_statuses = old.statuses_by_path.iter().peekable();
3311
3312 let mut current_new_entry = new_statuses.next();
3313 let mut current_old_entry = old_statuses.next();
3314 loop {
3315 match (current_new_entry, current_old_entry) {
3316 (Some(new_entry), Some(old_entry)) => {
3317 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3318 Ordering::Less => {
3319 updated_statuses.push(new_entry.to_proto());
3320 current_new_entry = new_statuses.next();
3321 }
3322 Ordering::Equal => {
3323 if new_entry.status != old_entry.status {
3324 updated_statuses.push(new_entry.to_proto());
3325 }
3326 current_old_entry = old_statuses.next();
3327 current_new_entry = new_statuses.next();
3328 }
3329 Ordering::Greater => {
3330 removed_statuses.push(old_entry.repo_path.to_proto());
3331 current_old_entry = old_statuses.next();
3332 }
3333 }
3334 }
3335 (None, Some(old_entry)) => {
3336 removed_statuses.push(old_entry.repo_path.to_proto());
3337 current_old_entry = old_statuses.next();
3338 }
3339 (Some(new_entry), None) => {
3340 updated_statuses.push(new_entry.to_proto());
3341 current_new_entry = new_statuses.next();
3342 }
3343 (None, None) => break,
3344 }
3345 }
3346
3347 proto::UpdateRepository {
3348 branch_summary: self.branch.as_ref().map(branch_to_proto),
3349 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3350 updated_statuses,
3351 removed_statuses,
3352 current_merge_conflicts: self
3353 .merge
3354 .conflicted_paths
3355 .iter()
3356 .map(|path| path.to_proto())
3357 .collect(),
3358 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3359 project_id,
3360 id: self.id.to_proto(),
3361 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3362 entry_ids: vec![],
3363 scan_id: self.scan_id,
3364 is_last_update: true,
3365 stash_entries: self
3366 .stash_entries
3367 .entries
3368 .iter()
3369 .map(stash_to_proto)
3370 .collect(),
3371 remote_upstream_url: self.remote_upstream_url.clone(),
3372 remote_origin_url: self.remote_origin_url.clone(),
3373 }
3374 }
3375
3376 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3377 self.statuses_by_path.iter().cloned()
3378 }
3379
3380 pub fn status_summary(&self) -> GitSummary {
3381 self.statuses_by_path.summary().item_summary
3382 }
3383
3384 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3385 self.statuses_by_path
3386 .get(&PathKey(path.as_ref().clone()), ())
3387 .cloned()
3388 }
3389
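    /// Converts an absolute path into a path relative to the repository's working directory,
    /// returning `None` if the path lies outside of it (e.g. `/repo/src/main.rs` becomes
    /// `src/main.rs` for a repository rooted at `/repo`).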
3390 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3391 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3392 }
3393
3394 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3395 self.path_style
3396 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3397 .unwrap()
3398 .into()
3399 }
3400
3401 #[inline]
3402 fn abs_path_to_repo_path_inner(
3403 work_directory_abs_path: &Path,
3404 abs_path: &Path,
3405 path_style: PathStyle,
3406 ) -> Option<RepoPath> {
3407 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3408 Some(RepoPath::from_rel_path(&rel_path))
3409 }
3410
3411 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3412 self.merge.conflicted_paths.contains(repo_path)
3413 }
3414
3415 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3416 let had_conflict_on_last_merge_head_change =
3417 self.merge.conflicted_paths.contains(repo_path);
3418 let has_conflict_currently = self
3419 .status_for_path(repo_path)
3420 .is_some_and(|entry| entry.status.is_conflicted());
3421 had_conflict_on_last_merge_head_change || has_conflict_currently
3422 }
3423
3424 /// This is the name that will be displayed in the repository selector for this repository.
3425 pub fn display_name(&self) -> SharedString {
3426 self.work_directory_abs_path
3427 .file_name()
3428 .unwrap_or_default()
3429 .to_string_lossy()
3430 .to_string()
3431 .into()
3432 }
3433}
3434
3435pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3436 proto::StashEntry {
3437 oid: entry.oid.as_bytes().to_vec(),
3438 message: entry.message.clone(),
3439 branch: entry.branch.clone(),
3440 index: entry.index as u64,
3441 timestamp: entry.timestamp,
3442 }
3443}
3444
3445pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3446 Ok(StashEntry {
3447 oid: Oid::from_bytes(&entry.oid)?,
3448 message: entry.message.clone(),
3449 index: entry.index as usize,
3450 branch: entry.branch.clone(),
3451 timestamp: entry.timestamp,
3452 })
3453}
3454
3455impl MergeDetails {
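    /// Reads the merge-related refs (MERGE_HEAD, CHERRY_PICK_HEAD, etc.) and recomputes the
    /// set of conflicted paths whenever those heads change. Returns the new details along
    /// with a flag indicating whether callers should treat the merge heads as changed.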
3456 async fn load(
3457 backend: &Arc<dyn GitRepository>,
3458 status: &SumTree<StatusEntry>,
3459 prev_snapshot: &RepositorySnapshot,
3460 ) -> Result<(MergeDetails, bool)> {
3461 log::debug!("load merge details");
3462 let message = backend.merge_message().await;
3463 let heads = backend
3464 .revparse_batch(vec![
3465 "MERGE_HEAD".into(),
3466 "CHERRY_PICK_HEAD".into(),
3467 "REBASE_HEAD".into(),
3468 "REVERT_HEAD".into(),
3469 "APPLY_HEAD".into(),
3470 ])
3471 .await
3472 .log_err()
3473 .unwrap_or_default()
3474 .into_iter()
3475 .map(|opt| opt.map(SharedString::from))
3476 .collect::<Vec<_>>();
3477 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3478 let conflicted_paths = if merge_heads_changed {
3479 let current_conflicted_paths = TreeSet::from_ordered_entries(
3480 status
3481 .iter()
3482 .filter(|entry| entry.status.is_conflicted())
3483 .map(|entry| entry.repo_path.clone()),
3484 );
3485
            // A scan can run while a lengthy merge is still in progress, after the merge heads
            // exist but before `git status` reports the resulting conflicts. Since, for now, we
            // only track the merge heads state in order to detect conflicts, don't record the
            // new heads until some conflicts actually show up.
3491 if heads.iter().any(Option::is_some)
3492 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3493 && current_conflicted_paths.is_empty()
3494 {
3495 log::debug!("not updating merge heads because no conflicts found");
3496 return Ok((
3497 MergeDetails {
3498 message: message.map(SharedString::from),
3499 ..prev_snapshot.merge.clone()
3500 },
3501 false,
3502 ));
3503 }
3504
3505 current_conflicted_paths
3506 } else {
3507 prev_snapshot.merge.conflicted_paths.clone()
3508 };
3509 let details = MergeDetails {
3510 conflicted_paths,
3511 message: message.map(SharedString::from),
3512 heads,
3513 };
3514 Ok((details, merge_heads_changed))
3515 }
3516}
3517
3518impl Repository {
3519 pub fn snapshot(&self) -> RepositorySnapshot {
3520 self.snapshot.clone()
3521 }
3522
3523 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3524 self.pending_ops.iter().cloned()
3525 }
3526
3527 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3528 self.pending_ops.summary().clone()
3529 }
3530
3531 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3532 self.pending_ops
3533 .get(&PathKey(path.as_ref().clone()), ())
3534 .cloned()
3535 }
3536
3537 fn local(
3538 id: RepositoryId,
3539 work_directory_abs_path: Arc<Path>,
3540 dot_git_abs_path: Arc<Path>,
3541 project_environment: WeakEntity<ProjectEnvironment>,
3542 fs: Arc<dyn Fs>,
3543 git_store: WeakEntity<GitStore>,
3544 cx: &mut Context<Self>,
3545 ) -> Self {
3546 let snapshot =
3547 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3548 let state = cx
3549 .spawn(async move |_, cx| {
3550 LocalRepositoryState::new(
3551 work_directory_abs_path,
3552 dot_git_abs_path,
3553 project_environment,
3554 fs,
3555 cx,
3556 )
3557 .await
3558 .map_err(|err| err.to_string())
3559 })
3560 .shared();
3561 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3562 let state = cx
3563 .spawn(async move |_, _| {
3564 let state = state.await?;
3565 Ok(RepositoryState::Local(state))
3566 })
3567 .shared();
3568
3569 Repository {
3570 this: cx.weak_entity(),
3571 git_store,
3572 snapshot,
3573 pending_ops: Default::default(),
3574 repository_state: state,
3575 commit_message_buffer: None,
3576 askpass_delegates: Default::default(),
3577 paths_needing_status_update: Default::default(),
3578 latest_askpass_id: 0,
3579 job_sender,
3580 job_id: 0,
3581 active_jobs: Default::default(),
3582 }
3583 }
3584
3585 fn remote(
3586 id: RepositoryId,
3587 work_directory_abs_path: Arc<Path>,
3588 path_style: PathStyle,
3589 project_id: ProjectId,
3590 client: AnyProtoClient,
3591 git_store: WeakEntity<GitStore>,
3592 cx: &mut Context<Self>,
3593 ) -> Self {
3594 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3595 let repository_state = RemoteRepositoryState { project_id, client };
3596 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3597 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3598 Self {
3599 this: cx.weak_entity(),
3600 snapshot,
3601 commit_message_buffer: None,
3602 git_store,
3603 pending_ops: Default::default(),
3604 paths_needing_status_update: Default::default(),
3605 job_sender,
3606 repository_state,
3607 askpass_delegates: Default::default(),
3608 latest_askpass_id: 0,
3609 active_jobs: Default::default(),
3610 job_id: 0,
3611 }
3612 }
3613
3614 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3615 self.git_store.upgrade()
3616 }
3617
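    /// Reloads the index and HEAD base texts, as needed, for every buffer with git diff state
    /// that belongs to this repository, then pushes any changes to the per-buffer diff state
    /// and to downstream collaborators.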
3618 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3619 let this = cx.weak_entity();
3620 let git_store = self.git_store.clone();
3621 let _ = self.send_keyed_job(
3622 Some(GitJobKey::ReloadBufferDiffBases),
3623 None,
3624 |state, mut cx| async move {
3625 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3626 log::error!("tried to recompute diffs for a non-local repository");
3627 return Ok(());
3628 };
3629
3630 let Some(this) = this.upgrade() else {
3631 return Ok(());
3632 };
3633
3634 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3635 git_store.update(cx, |git_store, cx| {
3636 git_store
3637 .diffs
3638 .iter()
3639 .filter_map(|(buffer_id, diff_state)| {
3640 let buffer_store = git_store.buffer_store.read(cx);
3641 let buffer = buffer_store.get(*buffer_id)?;
3642 let file = File::from_dyn(buffer.read(cx).file())?;
3643 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3644 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3645 log::debug!(
3646 "start reload diff bases for repo path {}",
3647 repo_path.as_unix_str()
3648 );
3649 diff_state.update(cx, |diff_state, _| {
3650 let has_unstaged_diff = diff_state
3651 .unstaged_diff
3652 .as_ref()
3653 .is_some_and(|diff| diff.is_upgradable());
3654 let has_uncommitted_diff = diff_state
3655 .uncommitted_diff
3656 .as_ref()
3657 .is_some_and(|set| set.is_upgradable());
3658
3659 Some((
3660 buffer,
3661 repo_path,
3662 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3663 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3664 ))
3665 })
3666 })
3667 .collect::<Vec<_>>()
3668 })
3669 })??;
3670
3671 let buffer_diff_base_changes = cx
3672 .background_spawn(async move {
3673 let mut changes = Vec::new();
3674 for (buffer, repo_path, current_index_text, current_head_text) in
3675 &repo_diff_state_updates
3676 {
3677 let index_text = if current_index_text.is_some() {
3678 backend.load_index_text(repo_path.clone()).await
3679 } else {
3680 None
3681 };
3682 let head_text = if current_head_text.is_some() {
3683 backend.load_committed_text(repo_path.clone()).await
3684 } else {
3685 None
3686 };
3687
3688 let change =
3689 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3690 (Some(current_index), Some(current_head)) => {
3691 let index_changed =
3692 index_text.as_ref() != current_index.as_deref();
3693 let head_changed =
3694 head_text.as_ref() != current_head.as_deref();
3695 if index_changed && head_changed {
3696 if index_text == head_text {
3697 Some(DiffBasesChange::SetBoth(head_text))
3698 } else {
3699 Some(DiffBasesChange::SetEach {
3700 index: index_text,
3701 head: head_text,
3702 })
3703 }
3704 } else if index_changed {
3705 Some(DiffBasesChange::SetIndex(index_text))
3706 } else if head_changed {
3707 Some(DiffBasesChange::SetHead(head_text))
3708 } else {
3709 None
3710 }
3711 }
3712 (Some(current_index), None) => {
3713 let index_changed =
3714 index_text.as_ref() != current_index.as_deref();
3715 index_changed
3716 .then_some(DiffBasesChange::SetIndex(index_text))
3717 }
3718 (None, Some(current_head)) => {
3719 let head_changed =
3720 head_text.as_ref() != current_head.as_deref();
3721 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3722 }
3723 (None, None) => None,
3724 };
3725
3726 changes.push((buffer.clone(), change))
3727 }
3728 changes
3729 })
3730 .await;
3731
3732 git_store.update(&mut cx, |git_store, cx| {
3733 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3734 let buffer_snapshot = buffer.read(cx).text_snapshot();
3735 let buffer_id = buffer_snapshot.remote_id();
3736 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3737 continue;
3738 };
3739
3740 let downstream_client = git_store.downstream_client();
3741 diff_state.update(cx, |diff_state, cx| {
3742 use proto::update_diff_bases::Mode;
3743
3744 if let Some((diff_bases_change, (client, project_id))) =
3745 diff_bases_change.clone().zip(downstream_client)
3746 {
3747 let (staged_text, committed_text, mode) = match diff_bases_change {
3748 DiffBasesChange::SetIndex(index) => {
3749 (index, None, Mode::IndexOnly)
3750 }
3751 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3752 DiffBasesChange::SetEach { index, head } => {
3753 (index, head, Mode::IndexAndHead)
3754 }
3755 DiffBasesChange::SetBoth(text) => {
3756 (None, text, Mode::IndexMatchesHead)
3757 }
3758 };
3759 client
3760 .send(proto::UpdateDiffBases {
3761 project_id: project_id.to_proto(),
3762 buffer_id: buffer_id.to_proto(),
3763 staged_text,
3764 committed_text,
3765 mode: mode as i32,
3766 })
3767 .log_err();
3768 }
3769
3770 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3771 });
3772 }
3773 })
3774 },
3775 );
3776 }
3777
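    /// Enqueues a job on this repository's sequential git worker and returns a
    /// receiver for its result. When `status` is provided, the job is recorded
    /// in `active_jobs` while it runs so it can be surfaced via `current_job`.
    ///
    /// A minimal sketch of the calling pattern used throughout this impl (the
    /// status string and job body are illustrative only):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git status".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local(local) => { /* call into local.backend */ }
    ///         RepositoryState::Remote(remote) => { /* issue a proto request */ }
    ///     }
    /// });
    /// ```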
3778 pub fn send_job<F, Fut, R>(
3779 &mut self,
3780 status: Option<SharedString>,
3781 job: F,
3782 ) -> oneshot::Receiver<R>
3783 where
3784 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3785 Fut: Future<Output = R> + 'static,
3786 R: Send + 'static,
3787 {
3788 self.send_keyed_job(None, status, job)
3789 }
3790
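    /// Like `send_job`, but jobs that share a `GitJobKey` are coalesced: a
    /// dequeued job is skipped when a newer job with the same key is already
    /// waiting, so only the most recent one runs.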
3791 fn send_keyed_job<F, Fut, R>(
3792 &mut self,
3793 key: Option<GitJobKey>,
3794 status: Option<SharedString>,
3795 job: F,
3796 ) -> oneshot::Receiver<R>
3797 where
3798 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3799 Fut: Future<Output = R> + 'static,
3800 R: Send + 'static,
3801 {
3802 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3803 let job_id = post_inc(&mut self.job_id);
3804 let this = self.this.clone();
3805 self.job_sender
3806 .unbounded_send(GitJob {
3807 key,
3808 job: Box::new(move |state, cx: &mut AsyncApp| {
3809 let job = job(state, cx.clone());
3810 cx.spawn(async move |cx| {
3811 if let Some(s) = status.clone() {
3812 this.update(cx, |this, cx| {
3813 this.active_jobs.insert(
3814 job_id,
3815 JobInfo {
3816 start: Instant::now(),
3817 message: s.clone(),
3818 },
3819 );
3820
3821 cx.notify();
3822 })
3823 .ok();
3824 }
3825 let result = job.await;
3826
3827 this.update(cx, |this, cx| {
3828 this.active_jobs.remove(&job_id);
3829 cx.notify();
3830 })
3831 .ok();
3832
3833 result_tx.send(result).ok();
3834 })
3835 }),
3836 })
3837 .ok();
3838 result_rx
3839 }
3840
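    /// Makes this repository the `GitStore`'s active repository and emits
    /// `GitStoreEvent::ActiveRepositoryChanged`.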
3841 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3842 let Some(git_store) = self.git_store.upgrade() else {
3843 return;
3844 };
3845 let entity = cx.entity();
3846 git_store.update(cx, |git_store, cx| {
3847 let Some((&id, _)) = git_store
3848 .repositories
3849 .iter()
3850 .find(|(_, handle)| *handle == &entity)
3851 else {
3852 return;
3853 };
3854 git_store.active_repo_id = Some(id);
3855 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3856 });
3857 }
3858
3859 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3860 self.snapshot.status()
3861 }
3862
3863 pub fn cached_stash(&self) -> GitStash {
3864 self.snapshot.stash_entries.clone()
3865 }
3866
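    /// Maps a repository-relative path to a `ProjectPath`, if the corresponding
    /// absolute path lies inside one of the project's worktrees.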
3867 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3868 let git_store = self.git_store.upgrade()?;
3869 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3870 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3871 let abs_path = SanitizedPath::new(&abs_path);
3872 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3873 Some(ProjectPath {
3874 worktree_id: worktree.read(cx).id(),
3875 path: relative_path,
3876 })
3877 }
3878
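    /// Maps a `ProjectPath` to a path relative to this repository's work
    /// directory, if the file lies inside it.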
3879 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3880 let git_store = self.git_store.upgrade()?;
3881 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3882 let abs_path = worktree_store.absolutize(path, cx)?;
3883 self.snapshot.abs_path_to_repo_path(&abs_path)
3884 }
3885
3886 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3887 other
3888 .read(cx)
3889 .snapshot
3890 .work_directory_abs_path
3891 .starts_with(&self.snapshot.work_directory_abs_path)
3892 }
3893
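    /// Returns the buffer used for composing commit messages, creating it on
    /// first use and assigning the "Git Commit" language when it is available.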
3894 pub fn open_commit_buffer(
3895 &mut self,
3896 languages: Option<Arc<LanguageRegistry>>,
3897 buffer_store: Entity<BufferStore>,
3898 cx: &mut Context<Self>,
3899 ) -> Task<Result<Entity<Buffer>>> {
3900 let id = self.id;
3901 if let Some(buffer) = self.commit_message_buffer.clone() {
3902 return Task::ready(Ok(buffer));
3903 }
3904 let this = cx.weak_entity();
3905
3906 let rx = self.send_job(None, move |state, mut cx| async move {
3907 let Some(this) = this.upgrade() else {
3908 bail!("git store was dropped");
3909 };
3910 match state {
3911 RepositoryState::Local(..) => {
3912 this.update(&mut cx, |_, cx| {
3913 Self::open_local_commit_buffer(languages, buffer_store, cx)
3914 })?
3915 .await
3916 }
3917 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3918 let request = client.request(proto::OpenCommitMessageBuffer {
3919 project_id: project_id.0,
3920 repository_id: id.to_proto(),
3921 });
3922 let response = request.await.context("requesting to open commit buffer")?;
3923 let buffer_id = BufferId::new(response.buffer_id)?;
3924 let buffer = buffer_store
3925 .update(&mut cx, |buffer_store, cx| {
3926 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3927 })?
3928 .await?;
3929 if let Some(language_registry) = languages {
3930 let git_commit_language =
3931 language_registry.language_for_name("Git Commit").await?;
3932 buffer.update(&mut cx, |buffer, cx| {
3933 buffer.set_language(Some(git_commit_language), cx);
3934 })?;
3935 }
3936 this.update(&mut cx, |this, _| {
3937 this.commit_message_buffer = Some(buffer.clone());
3938 })?;
3939 Ok(buffer)
3940 }
3941 }
3942 });
3943
3944 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3945 }
3946
3947 fn open_local_commit_buffer(
3948 language_registry: Option<Arc<LanguageRegistry>>,
3949 buffer_store: Entity<BufferStore>,
3950 cx: &mut Context<Self>,
3951 ) -> Task<Result<Entity<Buffer>>> {
3952 cx.spawn(async move |repository, cx| {
3953 let buffer = buffer_store
3954 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3955 .await?;
3956
3957 if let Some(language_registry) = language_registry {
3958 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3959 buffer.update(cx, |buffer, cx| {
3960 buffer.set_language(Some(git_commit_language), cx);
3961 })?;
3962 }
3963
3964 repository.update(cx, |repository, _| {
3965 repository.commit_message_buffer = Some(buffer.clone());
3966 })?;
3967 Ok(buffer)
3968 })
3969 }
3970
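    /// Restores the given paths to their contents at `commit`, tracking a
    /// pending revert operation for each path while the job runs.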
3971 pub fn checkout_files(
3972 &mut self,
3973 commit: &str,
3974 paths: Vec<RepoPath>,
3975 cx: &mut Context<Self>,
3976 ) -> Task<Result<()>> {
3977 let commit = commit.to_string();
3978 let id = self.id;
3979
3980 self.spawn_job_with_tracking(
3981 paths.clone(),
3982 pending_op::GitStatus::Reverted,
3983 cx,
3984 async move |this, cx| {
3985 this.update(cx, |this, _cx| {
3986 this.send_job(
3987 Some(format!("git checkout {}", commit).into()),
3988 move |git_repo, _| async move {
3989 match git_repo {
3990 RepositoryState::Local(LocalRepositoryState {
3991 backend,
3992 environment,
3993 ..
3994 }) => {
3995 backend
3996 .checkout_files(commit, paths, environment.clone())
3997 .await
3998 }
3999 RepositoryState::Remote(RemoteRepositoryState {
4000 project_id,
4001 client,
4002 }) => {
4003 client
4004 .request(proto::GitCheckoutFiles {
4005 project_id: project_id.0,
4006 repository_id: id.to_proto(),
4007 commit,
4008 paths: paths
4009 .into_iter()
4010 .map(|p| p.to_proto())
4011 .collect(),
4012 })
4013 .await?;
4014
4015 Ok(())
4016 }
4017 }
4018 },
4019 )
4020 })?
4021 .await?
4022 },
4023 )
4024 }
4025
4026 pub fn reset(
4027 &mut self,
4028 commit: String,
4029 reset_mode: ResetMode,
4030 _cx: &mut App,
4031 ) -> oneshot::Receiver<Result<()>> {
4032 let id = self.id;
4033
4034 self.send_job(None, move |git_repo, _| async move {
4035 match git_repo {
4036 RepositoryState::Local(LocalRepositoryState {
4037 backend,
4038 environment,
4039 ..
4040 }) => backend.reset(commit, reset_mode, environment).await,
4041 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4042 client
4043 .request(proto::GitReset {
4044 project_id: project_id.0,
4045 repository_id: id.to_proto(),
4046 commit,
4047 mode: match reset_mode {
4048 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4049 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4050 },
4051 })
4052 .await?;
4053
4054 Ok(())
4055 }
4056 }
4057 })
4058 }
4059
4060 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4061 let id = self.id;
4062 self.send_job(None, move |git_repo, _cx| async move {
4063 match git_repo {
4064 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4065 backend.show(commit).await
4066 }
4067 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4068 let resp = client
4069 .request(proto::GitShow {
4070 project_id: project_id.0,
4071 repository_id: id.to_proto(),
4072 commit,
4073 })
4074 .await?;
4075
4076 Ok(CommitDetails {
4077 sha: resp.sha.into(),
4078 message: resp.message.into(),
4079 commit_timestamp: resp.commit_timestamp,
4080 author_email: resp.author_email.into(),
4081 author_name: resp.author_name.into(),
4082 })
4083 }
4084 }
4085 })
4086 }
4087
4088 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4089 let id = self.id;
4090 self.send_job(None, move |git_repo, cx| async move {
4091 match git_repo {
4092 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4093 backend.load_commit(commit, cx).await
4094 }
4095 RepositoryState::Remote(RemoteRepositoryState {
4096 client, project_id, ..
4097 }) => {
4098 let response = client
4099 .request(proto::LoadCommitDiff {
4100 project_id: project_id.0,
4101 repository_id: id.to_proto(),
4102 commit,
4103 })
4104 .await?;
4105 Ok(CommitDiff {
4106 files: response
4107 .files
4108 .into_iter()
4109 .map(|file| {
4110 Ok(CommitFile {
4111 path: RepoPath::from_proto(&file.path)?,
4112 old_text: file.old_text,
4113 new_text: file.new_text,
4114 })
4115 })
4116 .collect::<Result<Vec<_>>>()?,
4117 })
4118 }
4119 }
4120 })
4121 }
4122
4123 pub fn file_history(
4124 &mut self,
4125 path: RepoPath,
4126 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4127 self.file_history_paginated(path, 0, None)
4128 }
4129
4130 pub fn file_history_paginated(
4131 &mut self,
4132 path: RepoPath,
4133 skip: usize,
4134 limit: Option<usize>,
4135 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4136 let id = self.id;
4137 self.send_job(None, move |git_repo, _cx| async move {
4138 match git_repo {
4139 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4140 backend.file_history_paginated(path, skip, limit).await
4141 }
4142 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4143 let response = client
4144 .request(proto::GitFileHistory {
4145 project_id: project_id.0,
4146 repository_id: id.to_proto(),
4147 path: path.to_proto(),
4148 skip: skip as u64,
4149 limit: limit.map(|l| l as u64),
4150 })
4151 .await?;
4152 Ok(git::repository::FileHistory {
4153 entries: response
4154 .entries
4155 .into_iter()
4156 .map(|entry| git::repository::FileHistoryEntry {
4157 sha: entry.sha.into(),
4158 subject: entry.subject.into(),
4159 message: entry.message.into(),
4160 commit_timestamp: entry.commit_timestamp,
4161 author_name: entry.author_name.into(),
4162 author_email: entry.author_email.into(),
4163 })
4164 .collect(),
4165 path: RepoPath::from_proto(&response.path)?,
4166 })
4167 }
4168 }
4169 })
4170 }
4171
4172 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4173 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4174 }
4175
4176 fn save_buffers<'a>(
4177 &self,
4178 entries: impl IntoIterator<Item = &'a RepoPath>,
4179 cx: &mut Context<Self>,
4180 ) -> Vec<Task<anyhow::Result<()>>> {
4181 let mut save_futures = Vec::new();
4182 if let Some(buffer_store) = self.buffer_store(cx) {
4183 buffer_store.update(cx, |buffer_store, cx| {
4184 for path in entries {
4185 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4186 continue;
4187 };
4188 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4189 && buffer
4190 .read(cx)
4191 .file()
4192 .is_some_and(|file| file.disk_state().exists())
4193 && buffer.read(cx).has_unsaved_edits()
4194 {
4195 save_futures.push(buffer_store.save_buffer(buffer, cx));
4196 }
4197 }
4198 })
4199 }
4200 save_futures
4201 }
4202
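    /// Stages the given paths, first saving any open buffers for them that have
    /// unsaved edits. Each path is tracked as a pending staging operation so
    /// callers can observe in-flight state.
    ///
    /// A minimal usage sketch (`repository`, `paths`, and `cx` are assumed to be
    /// in scope):
    ///
    /// ```ignore
    /// repository
    ///     .update(cx, |repo, cx| repo.stage_entries(paths, cx))
    ///     .detach_and_log_err(cx);
    /// ```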
4203 pub fn stage_entries(
4204 &mut self,
4205 entries: Vec<RepoPath>,
4206 cx: &mut Context<Self>,
4207 ) -> Task<anyhow::Result<()>> {
4208 if entries.is_empty() {
4209 return Task::ready(Ok(()));
4210 }
4211 let id = self.id;
4212 let save_tasks = self.save_buffers(&entries, cx);
4213 let paths = entries
4214 .iter()
4215 .map(|p| p.as_unix_str())
4216 .collect::<Vec<_>>()
4217 .join(" ");
4218 let status = format!("git add {paths}");
4219 let job_key = GitJobKey::WriteIndex(entries.clone());
4220
4221 self.spawn_job_with_tracking(
4222 entries.clone(),
4223 pending_op::GitStatus::Staged,
4224 cx,
4225 async move |this, cx| {
4226 for save_task in save_tasks {
4227 save_task.await?;
4228 }
4229
4230 this.update(cx, |this, _| {
4231 this.send_keyed_job(
4232 Some(job_key),
4233 Some(status.into()),
4234 move |git_repo, _cx| async move {
4235 match git_repo {
4236 RepositoryState::Local(LocalRepositoryState {
4237 backend,
4238 environment,
4239 ..
4240 }) => backend.stage_paths(entries, environment.clone()).await,
4241 RepositoryState::Remote(RemoteRepositoryState {
4242 project_id,
4243 client,
4244 }) => {
4245 client
4246 .request(proto::Stage {
4247 project_id: project_id.0,
4248 repository_id: id.to_proto(),
4249 paths: entries
4250 .into_iter()
4251 .map(|repo_path| repo_path.to_proto())
4252 .collect(),
4253 })
4254 .await
4255 .context("sending stage request")?;
4256
4257 Ok(())
4258 }
4259 }
4260 },
4261 )
4262 })?
4263 .await?
4264 },
4265 )
4266 }
4267
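    /// Unstages the given paths, first saving any open buffers for them that
    /// have unsaved edits, and tracks each path as a pending unstage operation.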
4268 pub fn unstage_entries(
4269 &mut self,
4270 entries: Vec<RepoPath>,
4271 cx: &mut Context<Self>,
4272 ) -> Task<anyhow::Result<()>> {
4273 if entries.is_empty() {
4274 return Task::ready(Ok(()));
4275 }
4276 let id = self.id;
4277 let save_tasks = self.save_buffers(&entries, cx);
4278 let paths = entries
4279 .iter()
4280 .map(|p| p.as_unix_str())
4281 .collect::<Vec<_>>()
4282 .join(" ");
4283 let status = format!("git reset {paths}");
4284 let job_key = GitJobKey::WriteIndex(entries.clone());
4285
4286 self.spawn_job_with_tracking(
4287 entries.clone(),
4288 pending_op::GitStatus::Unstaged,
4289 cx,
4290 async move |this, cx| {
4291 for save_task in save_tasks {
4292 save_task.await?;
4293 }
4294
4295 this.update(cx, |this, _| {
4296 this.send_keyed_job(
4297 Some(job_key),
4298 Some(status.into()),
4299 move |git_repo, _cx| async move {
4300 match git_repo {
4301 RepositoryState::Local(LocalRepositoryState {
4302 backend,
4303 environment,
4304 ..
4305 }) => backend.unstage_paths(entries, environment).await,
4306 RepositoryState::Remote(RemoteRepositoryState {
4307 project_id,
4308 client,
4309 }) => {
4310 client
4311 .request(proto::Unstage {
4312 project_id: project_id.0,
4313 repository_id: id.to_proto(),
4314 paths: entries
4315 .into_iter()
4316 .map(|repo_path| repo_path.to_proto())
4317 .collect(),
4318 })
4319 .await
4320 .context("sending unstage request")?;
4321
4322 Ok(())
4323 }
4324 }
4325 },
4326 )
4327 })?
4328 .await?
4329 },
4330 )
4331 }
4332
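    /// Stages every status entry that is not already fully staged and not being
    /// staged by a pending operation.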
4333 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4334 let to_stage = self
4335 .cached_status()
4336 .filter_map(|entry| {
4337 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4338 if ops.staging() || ops.staged() {
4339 None
4340 } else {
4341 Some(entry.repo_path)
4342 }
4343 } else if entry.status.staging().is_fully_staged() {
4344 None
4345 } else {
4346 Some(entry.repo_path)
4347 }
4348 })
4349 .collect();
4350 self.stage_entries(to_stage, cx)
4351 }
4352
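    /// Unstages every status entry that is staged, partially staged, or
    /// currently being staged by a pending operation.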
4353 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4354 let to_unstage = self
4355 .cached_status()
4356 .filter_map(|entry| {
4357 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4358 if !ops.staging() && !ops.staged() {
4359 None
4360 } else {
4361 Some(entry.repo_path)
4362 }
4363 } else if entry.status.staging().is_fully_unstaged() {
4364 None
4365 } else {
4366 Some(entry.repo_path)
4367 }
4368 })
4369 .collect();
4370 self.unstage_entries(to_unstage, cx)
4371 }
4372
4373 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4374 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4375
4376 self.stash_entries(to_stash, cx)
4377 }
4378
4379 pub fn stash_entries(
4380 &mut self,
4381 entries: Vec<RepoPath>,
4382 cx: &mut Context<Self>,
4383 ) -> Task<anyhow::Result<()>> {
4384 let id = self.id;
4385
4386 cx.spawn(async move |this, cx| {
4387 this.update(cx, |this, _| {
4388 this.send_job(None, move |git_repo, _cx| async move {
4389 match git_repo {
4390 RepositoryState::Local(LocalRepositoryState {
4391 backend,
4392 environment,
4393 ..
4394 }) => backend.stash_paths(entries, environment).await,
4395 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4396 client
4397 .request(proto::Stash {
4398 project_id: project_id.0,
4399 repository_id: id.to_proto(),
4400 paths: entries
4401 .into_iter()
4402 .map(|repo_path| repo_path.to_proto())
4403 .collect(),
4404 })
4405 .await
4406 .context("sending stash request")?;
4407 Ok(())
4408 }
4409 }
4410 })
4411 })?
4412 .await??;
4413 Ok(())
4414 })
4415 }
4416
4417 pub fn stash_pop(
4418 &mut self,
4419 index: Option<usize>,
4420 cx: &mut Context<Self>,
4421 ) -> Task<anyhow::Result<()>> {
4422 let id = self.id;
4423 cx.spawn(async move |this, cx| {
4424 this.update(cx, |this, _| {
4425 this.send_job(None, move |git_repo, _cx| async move {
4426 match git_repo {
4427 RepositoryState::Local(LocalRepositoryState {
4428 backend,
4429 environment,
4430 ..
4431 }) => backend.stash_pop(index, environment).await,
4432 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4433 client
4434 .request(proto::StashPop {
4435 project_id: project_id.0,
4436 repository_id: id.to_proto(),
4437 stash_index: index.map(|i| i as u64),
4438 })
4439 .await
4440 .context("sending stash pop request")?;
4441 Ok(())
4442 }
4443 }
4444 })
4445 })?
4446 .await??;
4447 Ok(())
4448 })
4449 }
4450
4451 pub fn stash_apply(
4452 &mut self,
4453 index: Option<usize>,
4454 cx: &mut Context<Self>,
4455 ) -> Task<anyhow::Result<()>> {
4456 let id = self.id;
4457 cx.spawn(async move |this, cx| {
4458 this.update(cx, |this, _| {
4459 this.send_job(None, move |git_repo, _cx| async move {
4460 match git_repo {
4461 RepositoryState::Local(LocalRepositoryState {
4462 backend,
4463 environment,
4464 ..
4465 }) => backend.stash_apply(index, environment).await,
4466 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4467 client
4468 .request(proto::StashApply {
4469 project_id: project_id.0,
4470 repository_id: id.to_proto(),
4471 stash_index: index.map(|i| i as u64),
4472 })
4473 .await
4474 .context("sending stash apply request")?;
4475 Ok(())
4476 }
4477 }
4478 })
4479 })?
4480 .await??;
4481 Ok(())
4482 })
4483 }
4484
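    /// Drops the stash entry at `index` (the backend's default entry when
    /// `None`) and, for local repositories, refreshes the cached stash list and
    /// forwards the updated snapshot downstream.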
4485 pub fn stash_drop(
4486 &mut self,
4487 index: Option<usize>,
4488 cx: &mut Context<Self>,
4489 ) -> oneshot::Receiver<anyhow::Result<()>> {
4490 let id = self.id;
4491 let updates_tx = self
4492 .git_store()
4493 .and_then(|git_store| match &git_store.read(cx).state {
4494 GitStoreState::Local { downstream, .. } => downstream
4495 .as_ref()
4496 .map(|downstream| downstream.updates_tx.clone()),
4497 _ => None,
4498 });
4499 let this = cx.weak_entity();
4500 self.send_job(None, move |git_repo, mut cx| async move {
4501 match git_repo {
4502 RepositoryState::Local(LocalRepositoryState {
4503 backend,
4504 environment,
4505 ..
4506 }) => {
4507 // TODO would be nice to not have to do this manually
4508 let result = backend.stash_drop(index, environment).await;
4509 if result.is_ok()
4510 && let Ok(stash_entries) = backend.stash_entries().await
4511 {
4512 let snapshot = this.update(&mut cx, |this, cx| {
4513 this.snapshot.stash_entries = stash_entries;
4514 cx.emit(RepositoryEvent::StashEntriesChanged);
4515 this.snapshot.clone()
4516 })?;
4517 if let Some(updates_tx) = updates_tx {
4518 updates_tx
4519 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4520 .ok();
4521 }
4522 }
4523
4524 result
4525 }
4526 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4527 client
4528 .request(proto::StashDrop {
4529 project_id: project_id.0,
4530 repository_id: id.to_proto(),
4531 stash_index: index.map(|i| i as u64),
4532 })
4533 .await
                        .context("sending stash drop request")?;
4535 Ok(())
4536 }
4537 }
4538 })
4539 }
4540
4541 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4542 let id = self.id;
4543 self.send_job(
4544 Some(format!("git hook {}", hook.as_str()).into()),
4545 move |git_repo, _cx| async move {
4546 match git_repo {
4547 RepositoryState::Local(LocalRepositoryState {
4548 backend,
4549 environment,
4550 ..
4551 }) => backend.run_hook(hook, environment.clone()).await,
4552 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4553 client
4554 .request(proto::RunGitHook {
4555 project_id: project_id.0,
4556 repository_id: id.to_proto(),
4557 hook: hook.to_proto(),
4558 })
4559 .await?;
4560
4561 Ok(())
4562 }
4563 }
4564 },
4565 )
4566 }
4567
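    /// Runs the pre-commit hook, then creates a commit with the given message,
    /// optional author override, and options, routing any prompts through
    /// `askpass`.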
4568 pub fn commit(
4569 &mut self,
4570 message: SharedString,
4571 name_and_email: Option<(SharedString, SharedString)>,
4572 options: CommitOptions,
4573 askpass: AskPassDelegate,
4574 cx: &mut App,
4575 ) -> oneshot::Receiver<Result<()>> {
4576 let id = self.id;
4577 let askpass_delegates = self.askpass_delegates.clone();
4578 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4579
4580 let rx = self.run_hook(RunHook::PreCommit, cx);
4581
4582 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4583 rx.await??;
4584
4585 match git_repo {
4586 RepositoryState::Local(LocalRepositoryState {
4587 backend,
4588 environment,
4589 ..
4590 }) => {
4591 backend
4592 .commit(message, name_and_email, options, askpass, environment)
4593 .await
4594 }
4595 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4596 askpass_delegates.lock().insert(askpass_id, askpass);
4597 let _defer = util::defer(|| {
4598 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4599 debug_assert!(askpass_delegate.is_some());
4600 });
4601 let (name, email) = name_and_email.unzip();
4602 client
4603 .request(proto::Commit {
4604 project_id: project_id.0,
4605 repository_id: id.to_proto(),
4606 message: String::from(message),
4607 name: name.map(String::from),
4608 email: email.map(String::from),
4609 options: Some(proto::commit::CommitOptions {
4610 amend: options.amend,
4611 signoff: options.signoff,
4612 }),
4613 askpass_id,
4614 })
4615 .await
4616 .context("sending commit request")?;
4617
4618 Ok(())
4619 }
4620 }
4621 })
4622 }
4623
4624 pub fn fetch(
4625 &mut self,
4626 fetch_options: FetchOptions,
4627 askpass: AskPassDelegate,
4628 _cx: &mut App,
4629 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4630 let askpass_delegates = self.askpass_delegates.clone();
4631 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4632 let id = self.id;
4633
4634 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4635 match git_repo {
4636 RepositoryState::Local(LocalRepositoryState {
4637 backend,
4638 environment,
4639 ..
4640 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4641 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4642 askpass_delegates.lock().insert(askpass_id, askpass);
4643 let _defer = util::defer(|| {
4644 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4645 debug_assert!(askpass_delegate.is_some());
4646 });
4647
4648 let response = client
4649 .request(proto::Fetch {
4650 project_id: project_id.0,
4651 repository_id: id.to_proto(),
4652 askpass_id,
4653 remote: fetch_options.to_proto(),
4654 })
4655 .await
4656 .context("sending fetch request")?;
4657
4658 Ok(RemoteCommandOutput {
4659 stdout: response.stdout,
4660 stderr: response.stderr,
4661 })
4662 }
4663 }
4664 })
4665 }
4666
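    /// Pushes `branch` to `remote` with the given options. For local
    /// repositories the branch list is re-read on success so the snapshot's
    /// head branch and its upstream tracking info stay current.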
4667 pub fn push(
4668 &mut self,
4669 branch: SharedString,
4670 remote: SharedString,
4671 options: Option<PushOptions>,
4672 askpass: AskPassDelegate,
4673 cx: &mut Context<Self>,
4674 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4675 let askpass_delegates = self.askpass_delegates.clone();
4676 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4677 let id = self.id;
4678
4679 let args = options
4680 .map(|option| match option {
4681 PushOptions::SetUpstream => " --set-upstream",
4682 PushOptions::Force => " --force-with-lease",
4683 })
4684 .unwrap_or("");
4685
4686 let updates_tx = self
4687 .git_store()
4688 .and_then(|git_store| match &git_store.read(cx).state {
4689 GitStoreState::Local { downstream, .. } => downstream
4690 .as_ref()
4691 .map(|downstream| downstream.updates_tx.clone()),
4692 _ => None,
4693 });
4694
4695 let this = cx.weak_entity();
4696 self.send_job(
            Some(format!("git push{args} {remote} {branch}").into()),
4698 move |git_repo, mut cx| async move {
4699 match git_repo {
4700 RepositoryState::Local(LocalRepositoryState {
4701 backend,
4702 environment,
4703 ..
4704 }) => {
4705 let result = backend
4706 .push(
4707 branch.to_string(),
4708 remote.to_string(),
4709 options,
4710 askpass,
4711 environment.clone(),
4712 cx.clone(),
4713 )
4714 .await;
4715 // TODO would be nice to not have to do this manually
4716 if result.is_ok() {
4717 let branches = backend.branches().await?;
4718 let branch = branches.into_iter().find(|branch| branch.is_head);
4719 log::info!("head branch after scan is {branch:?}");
4720 let snapshot = this.update(&mut cx, |this, cx| {
4721 this.snapshot.branch = branch;
4722 cx.emit(RepositoryEvent::BranchChanged);
4723 this.snapshot.clone()
4724 })?;
4725 if let Some(updates_tx) = updates_tx {
4726 updates_tx
4727 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4728 .ok();
4729 }
4730 }
4731 result
4732 }
4733 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4734 askpass_delegates.lock().insert(askpass_id, askpass);
4735 let _defer = util::defer(|| {
4736 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4737 debug_assert!(askpass_delegate.is_some());
4738 });
4739 let response = client
4740 .request(proto::Push {
4741 project_id: project_id.0,
4742 repository_id: id.to_proto(),
4743 askpass_id,
4744 branch_name: branch.to_string(),
4745 remote_name: remote.to_string(),
4746 options: options.map(|options| match options {
4747 PushOptions::Force => proto::push::PushOptions::Force,
4748 PushOptions::SetUpstream => {
4749 proto::push::PushOptions::SetUpstream
4750 }
4751 }
4752 as i32),
4753 })
4754 .await
4755 .context("sending push request")?;
4756
4757 Ok(RemoteCommandOutput {
4758 stdout: response.stdout,
4759 stderr: response.stderr,
4760 })
4761 }
4762 }
4763 },
4764 )
4765 }
4766
4767 pub fn pull(
4768 &mut self,
4769 branch: Option<SharedString>,
4770 remote: SharedString,
4771 rebase: bool,
4772 askpass: AskPassDelegate,
4773 _cx: &mut App,
4774 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4775 let askpass_delegates = self.askpass_delegates.clone();
4776 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4777 let id = self.id;
4778
4779 let mut status = "git pull".to_string();
4780 if rebase {
4781 status.push_str(" --rebase");
4782 }
4783 status.push_str(&format!(" {}", remote));
4784 if let Some(b) = &branch {
4785 status.push_str(&format!(" {}", b));
4786 }
4787
4788 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4789 match git_repo {
4790 RepositoryState::Local(LocalRepositoryState {
4791 backend,
4792 environment,
4793 ..
4794 }) => {
4795 backend
4796 .pull(
4797 branch.as_ref().map(|b| b.to_string()),
4798 remote.to_string(),
4799 rebase,
4800 askpass,
4801 environment.clone(),
4802 cx,
4803 )
4804 .await
4805 }
4806 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4807 askpass_delegates.lock().insert(askpass_id, askpass);
4808 let _defer = util::defer(|| {
4809 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4810 debug_assert!(askpass_delegate.is_some());
4811 });
4812 let response = client
4813 .request(proto::Pull {
4814 project_id: project_id.0,
4815 repository_id: id.to_proto(),
4816 askpass_id,
4817 rebase,
4818 branch_name: branch.as_ref().map(|b| b.to_string()),
4819 remote_name: remote.to_string(),
4820 })
4821 .await
4822 .context("sending pull request")?;
4823
4824 Ok(RemoteCommandOutput {
4825 stdout: response.stdout,
4826 stderr: response.stderr,
4827 })
4828 }
4829 }
4830 })
4831 }
4832
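    /// Queues a keyed job that writes `content` as the index text for `path`
    /// (preserving the file's executable bit locally) and records the hunk
    /// staging operation count so later diff updates can be reconciled.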
4833 fn spawn_set_index_text_job(
4834 &mut self,
4835 path: RepoPath,
4836 content: Option<String>,
4837 hunk_staging_operation_count: Option<usize>,
4838 cx: &mut Context<Self>,
4839 ) -> oneshot::Receiver<anyhow::Result<()>> {
4840 let id = self.id;
4841 let this = cx.weak_entity();
4842 let git_store = self.git_store.clone();
4843 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4844 self.send_keyed_job(
4845 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4846 None,
4847 move |git_repo, mut cx| async move {
4848 log::debug!(
4849 "start updating index text for buffer {}",
4850 path.as_unix_str()
4851 );
4852
4853 match git_repo {
4854 RepositoryState::Local(LocalRepositoryState {
4855 fs,
4856 backend,
4857 environment,
4858 ..
4859 }) => {
4860 let executable = match fs.metadata(&abs_path).await {
4861 Ok(Some(meta)) => meta.is_executable,
4862 Ok(None) => false,
4863 Err(_err) => false,
4864 };
4865 backend
4866 .set_index_text(path.clone(), content, environment.clone(), executable)
4867 .await?;
4868 }
4869 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4870 client
4871 .request(proto::SetIndexText {
4872 project_id: project_id.0,
4873 repository_id: id.to_proto(),
4874 path: path.to_proto(),
4875 text: content,
4876 })
4877 .await?;
4878 }
4879 }
4880 log::debug!(
4881 "finish updating index text for buffer {}",
4882 path.as_unix_str()
4883 );
4884
4885 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4886 let project_path = this
4887 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4888 .ok()
4889 .flatten();
4890 git_store.update(&mut cx, |git_store, cx| {
4891 let buffer_id = git_store
4892 .buffer_store
4893 .read(cx)
4894 .get_by_path(&project_path?)?
4895 .read(cx)
4896 .remote_id();
4897 let diff_state = git_store.diffs.get(&buffer_id)?;
4898 diff_state.update(cx, |diff_state, _| {
4899 diff_state.hunk_staging_operation_count_as_of_write =
4900 hunk_staging_operation_count;
4901 });
4902 Some(())
4903 })?;
4904 }
4905 Ok(())
4906 },
4907 )
4908 }
4909
4910 pub fn create_remote(
4911 &mut self,
4912 remote_name: String,
4913 remote_url: String,
4914 ) -> oneshot::Receiver<Result<()>> {
4915 let id = self.id;
4916 self.send_job(
4917 Some(format!("git remote add {remote_name} {remote_url}").into()),
4918 move |repo, _cx| async move {
4919 match repo {
4920 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4921 backend.create_remote(remote_name, remote_url).await
4922 }
4923 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4924 client
4925 .request(proto::GitCreateRemote {
4926 project_id: project_id.0,
4927 repository_id: id.to_proto(),
4928 remote_name,
4929 remote_url,
4930 })
4931 .await?;
4932
4933 Ok(())
4934 }
4935 }
4936 },
4937 )
4938 }
4939
4940 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
4941 let id = self.id;
4942 self.send_job(
            Some(format!("git remote remove {remote_name}").into()),
4944 move |repo, _cx| async move {
4945 match repo {
4946 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4947 backend.remove_remote(remote_name).await
4948 }
4949 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4950 client
4951 .request(proto::GitRemoveRemote {
4952 project_id: project_id.0,
4953 repository_id: id.to_proto(),
4954 remote_name,
4955 })
4956 .await?;
4957
4958 Ok(())
4959 }
4960 }
4961 },
4962 )
4963 }
4964
4965 pub fn get_remotes(
4966 &mut self,
4967 branch_name: Option<String>,
4968 is_push: bool,
4969 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4970 let id = self.id;
4971 self.send_job(None, move |repo, _cx| async move {
4972 match repo {
4973 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4974 let remote = if let Some(branch_name) = branch_name {
4975 if is_push {
4976 backend.get_push_remote(branch_name).await?
4977 } else {
4978 backend.get_branch_remote(branch_name).await?
4979 }
4980 } else {
4981 None
4982 };
4983
4984 match remote {
4985 Some(remote) => Ok(vec![remote]),
4986 None => backend.get_all_remotes().await,
4987 }
4988 }
4989 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4990 let response = client
4991 .request(proto::GetRemotes {
4992 project_id: project_id.0,
4993 repository_id: id.to_proto(),
4994 branch_name,
4995 is_push,
4996 })
4997 .await?;
4998
                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remote| Remote {
                            name: remote.name.into(),
                        })
                        .collect();
5006
5007 Ok(remotes)
5008 }
5009 }
5010 })
5011 }
5012
5013 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5014 let id = self.id;
5015 self.send_job(None, move |repo, _| async move {
5016 match repo {
5017 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5018 backend.branches().await
5019 }
5020 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5021 let response = client
5022 .request(proto::GitGetBranches {
5023 project_id: project_id.0,
5024 repository_id: id.to_proto(),
5025 })
5026 .await?;
5027
5028 let branches = response
5029 .branches
5030 .into_iter()
5031 .map(|branch| proto_to_branch(&branch))
5032 .collect();
5033
5034 Ok(branches)
5035 }
5036 }
5037 })
5038 }
5039
5040 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5041 let id = self.id;
5042 self.send_job(None, move |repo, _| async move {
5043 match repo {
5044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5045 backend.worktrees().await
5046 }
5047 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5048 let response = client
5049 .request(proto::GitGetWorktrees {
5050 project_id: project_id.0,
5051 repository_id: id.to_proto(),
5052 })
5053 .await?;
5054
5055 let worktrees = response
5056 .worktrees
5057 .into_iter()
5058 .map(|worktree| proto_to_worktree(&worktree))
5059 .collect();
5060
5061 Ok(worktrees)
5062 }
5063 }
5064 })
5065 }
5066
5067 pub fn create_worktree(
5068 &mut self,
5069 name: String,
5070 path: PathBuf,
5071 commit: Option<String>,
5072 ) -> oneshot::Receiver<Result<()>> {
5073 let id = self.id;
5074 self.send_job(
5075 Some("git worktree add".into()),
5076 move |repo, _cx| async move {
5077 match repo {
5078 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5079 backend.create_worktree(name, path, commit).await
5080 }
5081 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5082 client
5083 .request(proto::GitCreateWorktree {
5084 project_id: project_id.0,
5085 repository_id: id.to_proto(),
5086 name,
5087 directory: path.to_string_lossy().to_string(),
5088 commit,
5089 })
5090 .await?;
5091
5092 Ok(())
5093 }
5094 }
5095 },
5096 )
5097 }
5098
5099 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5100 let id = self.id;
5101 self.send_job(None, move |repo, _| async move {
5102 match repo {
5103 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5104 backend.default_branch().await
5105 }
5106 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5107 let response = client
5108 .request(proto::GetDefaultBranch {
5109 project_id: project_id.0,
5110 repository_id: id.to_proto(),
5111 })
5112 .await?;
5113
5114 anyhow::Ok(response.branch.map(SharedString::from))
5115 }
5116 }
5117 })
5118 }
5119
5120 pub fn diff_tree(
5121 &mut self,
5122 diff_type: DiffTreeType,
5123 _cx: &App,
5124 ) -> oneshot::Receiver<Result<TreeDiff>> {
5125 let repository_id = self.snapshot.id;
5126 self.send_job(None, move |repo, _cx| async move {
5127 match repo {
5128 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5129 backend.diff_tree(diff_type).await
5130 }
5131 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5132 let response = client
5133 .request(proto::GetTreeDiff {
5134 project_id: project_id.0,
5135 repository_id: repository_id.0,
5136 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5137 base: diff_type.base().to_string(),
5138 head: diff_type.head().to_string(),
5139 })
5140 .await?;
5141
5142 let entries = response
5143 .entries
5144 .into_iter()
5145 .filter_map(|entry| {
5146 let status = match entry.status() {
5147 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5148 proto::tree_diff_status::Status::Modified => {
5149 TreeDiffStatus::Modified {
5150 old: git::Oid::from_str(
5151 &entry.oid.context("missing oid").log_err()?,
5152 )
5153 .log_err()?,
5154 }
5155 }
5156 proto::tree_diff_status::Status::Deleted => {
5157 TreeDiffStatus::Deleted {
5158 old: git::Oid::from_str(
5159 &entry.oid.context("missing oid").log_err()?,
5160 )
5161 .log_err()?,
5162 }
5163 }
5164 };
5165 Some((
5166 RepoPath::from_rel_path(
5167 &RelPath::from_proto(&entry.path).log_err()?,
5168 ),
5169 status,
5170 ))
5171 })
5172 .collect();
5173
5174 Ok(TreeDiff { entries })
5175 }
5176 }
5177 })
5178 }
5179
5180 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5181 let id = self.id;
5182 self.send_job(None, move |repo, _cx| async move {
5183 match repo {
5184 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5185 backend.diff(diff_type).await
5186 }
5187 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5188 let response = client
5189 .request(proto::GitDiff {
5190 project_id: project_id.0,
5191 repository_id: id.to_proto(),
5192 diff_type: match diff_type {
5193 DiffType::HeadToIndex => {
5194 proto::git_diff::DiffType::HeadToIndex.into()
5195 }
5196 DiffType::HeadToWorktree => {
5197 proto::git_diff::DiffType::HeadToWorktree.into()
5198 }
5199 },
5200 })
5201 .await?;
5202
5203 Ok(response.diff)
5204 }
5205 }
5206 })
5207 }
5208
5209 pub fn create_branch(
5210 &mut self,
5211 branch_name: String,
5212 base_branch: Option<String>,
5213 ) -> oneshot::Receiver<Result<()>> {
5214 let id = self.id;
5215 let status_msg = if let Some(ref base) = base_branch {
5216 format!("git switch -c {branch_name} {base}").into()
5217 } else {
5218 format!("git switch -c {branch_name}").into()
5219 };
5220 self.send_job(Some(status_msg), move |repo, _cx| async move {
5221 match repo {
5222 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5223 backend.create_branch(branch_name, base_branch).await
5224 }
5225 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5226 client
5227 .request(proto::GitCreateBranch {
5228 project_id: project_id.0,
5229 repository_id: id.to_proto(),
5230 branch_name,
5231 })
5232 .await?;
5233
5234 Ok(())
5235 }
5236 }
5237 })
5238 }
5239
5240 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5241 let id = self.id;
5242 self.send_job(
5243 Some(format!("git switch {branch_name}").into()),
5244 move |repo, _cx| async move {
5245 match repo {
5246 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5247 backend.change_branch(branch_name).await
5248 }
5249 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5250 client
5251 .request(proto::GitChangeBranch {
5252 project_id: project_id.0,
5253 repository_id: id.to_proto(),
5254 branch_name,
5255 })
5256 .await?;
5257
5258 Ok(())
5259 }
5260 }
5261 },
5262 )
5263 }
5264
5265 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5266 let id = self.id;
5267 self.send_job(
5268 Some(format!("git branch -d {branch_name}").into()),
5269 move |repo, _cx| async move {
5270 match repo {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        backend.delete_branch(branch_name).await
                    }
5272 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5273 client
5274 .request(proto::GitDeleteBranch {
5275 project_id: project_id.0,
5276 repository_id: id.to_proto(),
5277 branch_name,
5278 })
5279 .await?;
5280
5281 Ok(())
5282 }
5283 }
5284 },
5285 )
5286 }
5287
5288 pub fn rename_branch(
5289 &mut self,
5290 branch: String,
5291 new_name: String,
5292 ) -> oneshot::Receiver<Result<()>> {
5293 let id = self.id;
5294 self.send_job(
5295 Some(format!("git branch -m {branch} {new_name}").into()),
5296 move |repo, _cx| async move {
5297 match repo {
5298 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5299 backend.rename_branch(branch, new_name).await
5300 }
5301 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5302 client
5303 .request(proto::GitRenameBranch {
5304 project_id: project_id.0,
5305 repository_id: id.to_proto(),
5306 branch,
5307 new_name,
5308 })
5309 .await?;
5310
5311 Ok(())
5312 }
5313 }
5314 },
5315 )
5316 }
5317
5318 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5319 let id = self.id;
5320 self.send_job(None, move |repo, _cx| async move {
5321 match repo {
5322 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5323 backend.check_for_pushed_commit().await
5324 }
5325 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5326 let response = client
5327 .request(proto::CheckForPushedCommits {
5328 project_id: project_id.0,
5329 repository_id: id.to_proto(),
5330 })
5331 .await?;
5332
5333 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5334
5335 Ok(branches)
5336 }
5337 }
5338 })
5339 }
5340
5341 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5342 self.send_job(None, |repo, _cx| async move {
5343 match repo {
5344 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5345 backend.checkpoint().await
5346 }
5347 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5348 }
5349 })
5350 }
5351
5352 pub fn restore_checkpoint(
5353 &mut self,
5354 checkpoint: GitRepositoryCheckpoint,
5355 ) -> oneshot::Receiver<Result<()>> {
5356 self.send_job(None, move |repo, _cx| async move {
5357 match repo {
5358 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5359 backend.restore_checkpoint(checkpoint).await
5360 }
5361 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5362 }
5363 })
5364 }
5365
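    /// Applies an `UpdateRepository` message from the host to this replica's
    /// snapshot: branch and head commit, merge conflicts, stash entries, remote
    /// URLs, and per-path statuses, emitting the corresponding events.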
5366 pub(crate) fn apply_remote_update(
5367 &mut self,
5368 update: proto::UpdateRepository,
5369 cx: &mut Context<Self>,
5370 ) -> Result<()> {
5371 let conflicted_paths = TreeSet::from_ordered_entries(
5372 update
5373 .current_merge_conflicts
5374 .into_iter()
5375 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5376 );
5377 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5378 let new_head_commit = update
5379 .head_commit_details
5380 .as_ref()
5381 .map(proto_to_commit_details);
5382 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5383 cx.emit(RepositoryEvent::BranchChanged)
5384 }
5385 self.snapshot.branch = new_branch;
5386 self.snapshot.head_commit = new_head_commit;
5387
5388 self.snapshot.merge.conflicted_paths = conflicted_paths;
5389 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5390 let new_stash_entries = GitStash {
5391 entries: update
5392 .stash_entries
5393 .iter()
5394 .filter_map(|entry| proto_to_stash(entry).ok())
5395 .collect(),
5396 };
5397 if self.snapshot.stash_entries != new_stash_entries {
5398 cx.emit(RepositoryEvent::StashEntriesChanged)
5399 }
5400 self.snapshot.stash_entries = new_stash_entries;
5401 self.snapshot.remote_upstream_url = update.remote_upstream_url;
5402 self.snapshot.remote_origin_url = update.remote_origin_url;
5403
5404 let edits = update
5405 .removed_statuses
5406 .into_iter()
5407 .filter_map(|path| {
5408 Some(sum_tree::Edit::Remove(PathKey(
5409 RelPath::from_proto(&path).log_err()?,
5410 )))
5411 })
5412 .chain(
5413 update
5414 .updated_statuses
5415 .into_iter()
5416 .filter_map(|updated_status| {
5417 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5418 }),
5419 )
5420 .collect::<Vec<_>>();
5421 if !edits.is_empty() {
5422 cx.emit(RepositoryEvent::StatusesChanged);
5423 }
5424 self.snapshot.statuses_by_path.edit(edits, ());
5425 if update.is_last_update {
5426 self.snapshot.scan_id = update.scan_id;
5427 }
5428 self.clear_pending_ops(cx);
5429 Ok(())
5430 }
5431
5432 pub fn compare_checkpoints(
5433 &mut self,
5434 left: GitRepositoryCheckpoint,
5435 right: GitRepositoryCheckpoint,
5436 ) -> oneshot::Receiver<Result<bool>> {
5437 self.send_job(None, move |repo, _cx| async move {
5438 match repo {
5439 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5440 backend.compare_checkpoints(left, right).await
5441 }
5442 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5443 }
5444 })
5445 }
5446
5447 pub fn diff_checkpoints(
5448 &mut self,
5449 base_checkpoint: GitRepositoryCheckpoint,
5450 target_checkpoint: GitRepositoryCheckpoint,
5451 ) -> oneshot::Receiver<Result<String>> {
5452 self.send_job(None, move |repo, _cx| async move {
5453 match repo {
5454 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5455 backend
5456 .diff_checkpoints(base_checkpoint, target_checkpoint)
5457 .await
5458 }
5459 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5460 }
5461 })
5462 }
5463
5464 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5465 let updated = SumTree::from_iter(
5466 self.pending_ops.iter().filter_map(|ops| {
5467 let inner_ops: Vec<PendingOp> =
5468 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5469 if inner_ops.is_empty() {
5470 None
5471 } else {
5472 Some(PendingOps {
5473 repo_path: ops.repo_path.clone(),
5474 ops: inner_ops,
5475 })
5476 }
5477 }),
5478 (),
5479 );
5480
5481 if updated != self.pending_ops {
5482 cx.emit(RepositoryEvent::PendingOpsChanged {
5483 pending_ops: self.pending_ops.clone(),
5484 })
5485 }
5486
5487 self.pending_ops = updated;
5488 }
5489
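    /// Queues a full status rescan (coalesced under `GitJobKey::ReloadGitState`);
    /// on completion the snapshot is replaced, events are emitted, and the new
    /// snapshot is forwarded downstream when a sender is provided.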
5490 fn schedule_scan(
5491 &mut self,
5492 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5493 cx: &mut Context<Self>,
5494 ) {
5495 let this = cx.weak_entity();
5496 let _ = self.send_keyed_job(
5497 Some(GitJobKey::ReloadGitState),
5498 None,
5499 |state, mut cx| async move {
5500 log::debug!("run scheduled git status scan");
5501
5502 let Some(this) = this.upgrade() else {
5503 return Ok(());
5504 };
5505 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5506 bail!("not a local repository")
5507 };
5508 let (snapshot, events) = this
5509 .update(&mut cx, |this, _| {
5510 this.paths_needing_status_update.clear();
5511 compute_snapshot(
5512 this.id,
5513 this.work_directory_abs_path.clone(),
5514 this.snapshot.clone(),
5515 backend.clone(),
5516 )
5517 })?
5518 .await?;
5519 this.update(&mut cx, |this, cx| {
5520 this.snapshot = snapshot.clone();
5521 this.clear_pending_ops(cx);
5522 for event in events {
5523 cx.emit(event);
5524 }
5525 })?;
5526 if let Some(updates_tx) = updates_tx {
5527 updates_tx
5528 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5529 .ok();
5530 }
5531 Ok(())
5532 },
5533 );
5534 }
5535
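    /// Spawns the worker loop that runs this repository's git jobs one at a
    /// time, dropping a dequeued job when a newer job with the same key is
    /// still waiting in the queue.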
5536 fn spawn_local_git_worker(
5537 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5538 cx: &mut Context<Self>,
5539 ) -> mpsc::UnboundedSender<GitJob> {
5540 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5541
5542 cx.spawn(async move |_, cx| {
5543 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5544 if let Some(git_hosting_provider_registry) =
5545 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5546 {
5547 git_hosting_providers::register_additional_providers(
5548 git_hosting_provider_registry,
5549 state.backend.clone(),
5550 )
5551 .await;
5552 }
5553 let state = RepositoryState::Local(state);
5554 let mut jobs = VecDeque::new();
5555 loop {
5556 while let Ok(Some(next_job)) = job_rx.try_next() {
5557 jobs.push_back(next_job);
5558 }
5559
5560 if let Some(job) = jobs.pop_front() {
5561 if let Some(current_key) = &job.key
5562 && jobs
5563 .iter()
5564 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5565 {
5566 continue;
5567 }
5568 (job.job)(state.clone(), cx).await;
5569 } else if let Some(job) = job_rx.next().await {
5570 jobs.push_back(job);
5571 } else {
5572 break;
5573 }
5574 }
5575 anyhow::Ok(())
5576 })
5577 .detach_and_log_err(cx);
5578
5579 job_tx
5580 }
5581
5582 fn spawn_remote_git_worker(
5583 state: RemoteRepositoryState,
5584 cx: &mut Context<Self>,
5585 ) -> mpsc::UnboundedSender<GitJob> {
5586 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5587
5588 cx.spawn(async move |_, cx| {
5589 let state = RepositoryState::Remote(state);
5590 let mut jobs = VecDeque::new();
5591 loop {
5592 while let Ok(Some(next_job)) = job_rx.try_next() {
5593 jobs.push_back(next_job);
5594 }
5595
5596 if let Some(job) = jobs.pop_front() {
5597 if let Some(current_key) = &job.key
5598 && jobs
5599 .iter()
5600 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5601 {
5602 continue;
5603 }
5604 (job.job)(state.clone(), cx).await;
5605 } else if let Some(job) = job_rx.next().await {
5606 jobs.push_back(job);
5607 } else {
5608 break;
5609 }
5610 }
5611 anyhow::Ok(())
5612 })
5613 .detach_and_log_err(cx);
5614
5615 job_tx
5616 }
5617
5618 fn load_staged_text(
5619 &mut self,
5620 buffer_id: BufferId,
5621 repo_path: RepoPath,
5622 cx: &App,
5623 ) -> Task<Result<Option<String>>> {
5624 let rx = self.send_job(None, move |state, _| async move {
5625 match state {
5626 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5627 anyhow::Ok(backend.load_index_text(repo_path).await)
5628 }
5629 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5630 let response = client
5631 .request(proto::OpenUnstagedDiff {
5632 project_id: project_id.to_proto(),
5633 buffer_id: buffer_id.to_proto(),
5634 })
5635 .await?;
5636 Ok(response.staged_text)
5637 }
5638 }
5639 });
5640 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5641 }
5642
5643 fn load_committed_text(
5644 &mut self,
5645 buffer_id: BufferId,
5646 repo_path: RepoPath,
5647 cx: &App,
5648 ) -> Task<Result<DiffBasesChange>> {
5649 let rx = self.send_job(None, move |state, _| async move {
5650 match state {
5651 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5652 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5653 let staged_text = backend.load_index_text(repo_path).await;
5654 let diff_bases_change = if committed_text == staged_text {
5655 DiffBasesChange::SetBoth(committed_text)
5656 } else {
5657 DiffBasesChange::SetEach {
5658 index: staged_text,
5659 head: committed_text,
5660 }
5661 };
5662 anyhow::Ok(diff_bases_change)
5663 }
5664 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5665 use proto::open_uncommitted_diff_response::Mode;
5666
5667 let response = client
5668 .request(proto::OpenUncommittedDiff {
5669 project_id: project_id.to_proto(),
5670 buffer_id: buffer_id.to_proto(),
5671 })
5672 .await?;
5673 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5674 let bases = match mode {
5675 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5676 Mode::IndexAndHead => DiffBasesChange::SetEach {
5677 head: response.committed_text,
5678 index: response.staged_text,
5679 },
5680 };
5681 Ok(bases)
5682 }
5683 }
5684 });
5685
5686 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5687 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5689 let repository_id = self.snapshot.id;
5690 let rx = self.send_job(None, move |state, _| async move {
5691 match state {
5692 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5693 backend.load_blob_content(oid).await
5694 }
5695 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5696 let response = client
5697 .request(proto::GetBlobContent {
5698 project_id: project_id.to_proto(),
5699 repository_id: repository_id.0,
5700 oid: oid.to_string(),
5701 })
5702 .await?;
5703 Ok(response.content)
5704 }
5705 }
5706 });
5707 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5708 }
5709
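    /// Notes that the given paths may have changed and queues a coalesced job
    /// that re-reads their statuses and the stash list, updating the snapshot
    /// and notifying downstream clients when anything differs.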
5710 fn paths_changed(
5711 &mut self,
5712 paths: Vec<RepoPath>,
5713 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5714 cx: &mut Context<Self>,
5715 ) {
5716 self.paths_needing_status_update.extend(paths);
5717
5718 let this = cx.weak_entity();
5719 let _ = self.send_keyed_job(
5720 Some(GitJobKey::RefreshStatuses),
5721 None,
5722 |state, mut cx| async move {
5723 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5724 (
5725 this.snapshot.clone(),
5726 mem::take(&mut this.paths_needing_status_update),
5727 )
5728 })?;
5729 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5730 bail!("not a local repository")
5731 };
5732
5733 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5734 if paths.is_empty() {
5735 return Ok(());
5736 }
5737 let statuses = backend.status(&paths).await?;
5738 let stash_entries = backend.stash_entries().await?;
5739
5740 let changed_path_statuses = cx
5741 .background_spawn(async move {
5742 let mut changed_path_statuses = Vec::new();
5743 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5744 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5745
5746 for (repo_path, status) in &*statuses.entries {
5747 changed_paths.remove(repo_path);
5748 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5749 && cursor.item().is_some_and(|entry| entry.status == *status)
5750 {
5751 continue;
5752 }
5753
5754 changed_path_statuses.push(Edit::Insert(StatusEntry {
5755 repo_path: repo_path.clone(),
5756 status: *status,
5757 }));
5758 }
5759 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5760 for path in changed_paths.into_iter() {
5761 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5762 changed_path_statuses
5763 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5764 }
5765 }
5766 changed_path_statuses
5767 })
5768 .await;
5769
5770 this.update(&mut cx, |this, cx| {
5771 if this.snapshot.stash_entries != stash_entries {
5772 cx.emit(RepositoryEvent::StashEntriesChanged);
5773 this.snapshot.stash_entries = stash_entries;
5774 }
5775
5776 if !changed_path_statuses.is_empty() {
5777 cx.emit(RepositoryEvent::StatusesChanged);
5778 this.snapshot
5779 .statuses_by_path
5780 .edit(changed_path_statuses, ());
5781 this.snapshot.scan_id += 1;
5782 }
5783
5784 if let Some(updates_tx) = updates_tx {
5785 updates_tx
5786 .unbounded_send(DownstreamUpdate::UpdateRepository(
5787 this.snapshot.clone(),
5788 ))
5789 .ok();
5790 }
5791 })
5792 },
5793 );
5794 }
5795
5796 /// Returns the currently running git command and the time it started, if any.
5797 pub fn current_job(&self) -> Option<JobInfo> {
5798 self.active_jobs.values().next().cloned()
5799 }
5800
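/// Enqueues a no-op job and returns a receiver that resolves once every job
/// queued before it has completed.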
5801 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5802 self.send_job(None, |_, _| async {})
5803 }
5804
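/// Records a pending operation for each of the given paths, runs `f`, and then
/// marks those operations as finished, skipped (on cancellation), or errored
/// based on the result.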
5805 fn spawn_job_with_tracking<AsyncFn>(
5806 &mut self,
5807 paths: Vec<RepoPath>,
5808 git_status: pending_op::GitStatus,
5809 cx: &mut Context<Self>,
5810 f: AsyncFn,
5811 ) -> Task<Result<()>>
5812 where
5813 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5814 {
5815 let ids = self.new_pending_ops_for_paths(paths, git_status);
5816
5817 cx.spawn(async move |this, cx| {
5818 let (job_status, result) = match f(this.clone(), cx).await {
5819 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5820 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5821 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5822 };
5823
5824 this.update(cx, |this, _| {
5825 let mut edits = Vec::with_capacity(ids.len());
5826 for (id, entry) in ids {
5827 if let Some(mut ops) = this
5828 .pending_ops
5829 .get(&PathKey(entry.as_ref().clone()), ())
5830 .cloned()
5831 {
5832 if let Some(op) = ops.op_by_id_mut(id) {
5833 op.job_status = job_status;
5834 }
5835 edits.push(sum_tree::Edit::Insert(ops));
5836 }
5837 }
5838 this.pending_ops.edit(edits, ());
5839 })?;
5840
5841 result
5842 })
5843 }
5844
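/// Registers a new running pending operation for each path and returns the
/// assigned ids, paired with their paths.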
5845 fn new_pending_ops_for_paths(
5846 &mut self,
5847 paths: Vec<RepoPath>,
5848 git_status: pending_op::GitStatus,
5849 ) -> Vec<(PendingOpId, RepoPath)> {
5850 let mut edits = Vec::with_capacity(paths.len());
5851 let mut ids = Vec::with_capacity(paths.len());
5852 for path in paths {
5853 let mut ops = self
5854 .pending_ops
5855 .get(&PathKey(path.as_ref().clone()), ())
5856 .cloned()
5857 .unwrap_or_else(|| PendingOps::new(&path));
5858 let id = ops.max_id() + 1;
5859 ops.ops.push(PendingOp {
5860 id,
5861 git_status,
5862 job_status: pending_op::JobStatus::Running,
5863 });
5864 edits.push(sum_tree::Edit::Insert(ops));
5865 ids.push((id, path));
5866 }
5867 self.pending_ops.edit(edits, ());
5868 ids
5869 }
5870}
5871
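/// Builds a permalink to the upstream source of a crate vendored in the Cargo
/// registry, using the `.cargo_vcs_info.json` and `Cargo.toml` files that
/// accompany published packages to recover the repository URL and commit sha.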
5872fn get_permalink_in_rust_registry_src(
5873 provider_registry: Arc<GitHostingProviderRegistry>,
5874 path: PathBuf,
5875 selection: Range<u32>,
5876) -> Result<url::Url> {
5877 #[derive(Deserialize)]
5878 struct CargoVcsGit {
5879 sha1: String,
5880 }
5881
5882 #[derive(Deserialize)]
5883 struct CargoVcsInfo {
5884 git: CargoVcsGit,
5885 path_in_vcs: String,
5886 }
5887
5888 #[derive(Deserialize)]
5889 struct CargoPackage {
5890 repository: String,
5891 }
5892
5893 #[derive(Deserialize)]
5894 struct CargoToml {
5895 package: CargoPackage,
5896 }
5897
5898 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5899 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5900 Some((dir, json))
5901 }) else {
5902 bail!("No .cargo_vcs_info.json found in parent directories")
5903 };
5904 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5905 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5906 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5907 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5908 .context("parsing package.repository field of manifest")?;
5909 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5910 let permalink = provider.build_permalink(
5911 remote,
5912 BuildPermalinkParams::new(
5913 &cargo_vcs_info.git.sha1,
5914 &RepoPath::from_rel_path(
5915 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5916 ),
5917 Some(selection),
5918 ),
5919 );
5920 Ok(permalink)
5921}
5922
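/// Converts a `git blame` result into its protobuf representation, producing an
/// empty response when no blame is available.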
5923fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5924 let Some(blame) = blame else {
5925 return proto::BlameBufferResponse {
5926 blame_response: None,
5927 };
5928 };
5929
5930 let entries = blame
5931 .entries
5932 .into_iter()
5933 .map(|entry| proto::BlameEntry {
5934 sha: entry.sha.as_bytes().into(),
5935 start_line: entry.range.start,
5936 end_line: entry.range.end,
5937 original_line_number: entry.original_line_number,
5938 author: entry.author,
5939 author_mail: entry.author_mail,
5940 author_time: entry.author_time,
5941 author_tz: entry.author_tz,
5942 committer: entry.committer_name,
5943 committer_mail: entry.committer_email,
5944 committer_time: entry.committer_time,
5945 committer_tz: entry.committer_tz,
5946 summary: entry.summary,
5947 previous: entry.previous,
5948 filename: entry.filename,
5949 })
5950 .collect::<Vec<_>>();
5951
5952 let messages = blame
5953 .messages
5954 .into_iter()
5955 .map(|(oid, message)| proto::CommitMessage {
5956 oid: oid.as_bytes().into(),
5957 message,
5958 })
5959 .collect::<Vec<_>>();
5960
5961 proto::BlameBufferResponse {
5962 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
5963 }
5964}
5965
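/// Reconstructs a `git::blame::Blame` from its protobuf representation,
/// skipping entries whose object ids fail to parse.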
5966fn deserialize_blame_buffer_response(
5967 response: proto::BlameBufferResponse,
5968) -> Option<git::blame::Blame> {
5969 let response = response.blame_response?;
5970 let entries = response
5971 .entries
5972 .into_iter()
5973 .filter_map(|entry| {
5974 Some(git::blame::BlameEntry {
5975 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5976 range: entry.start_line..entry.end_line,
5977 original_line_number: entry.original_line_number,
5978 committer_name: entry.committer,
5979 committer_time: entry.committer_time,
5980 committer_tz: entry.committer_tz,
5981 committer_email: entry.committer_mail,
5982 author: entry.author,
5983 author_mail: entry.author_mail,
5984 author_time: entry.author_time,
5985 author_tz: entry.author_tz,
5986 summary: entry.summary,
5987 previous: entry.previous,
5988 filename: entry.filename,
5989 })
5990 })
5991 .collect::<Vec<_>>();
5992
5993 let messages = response
5994 .messages
5995 .into_iter()
5996 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5997 .collect::<HashMap<_, _>>();
5998
5999 Some(Blame { entries, messages })
6000}
6001
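/// Converts a branch, including its upstream tracking state and most recent
/// commit summary, into its protobuf representation.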
6002fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6003 proto::Branch {
6004 is_head: branch.is_head,
6005 ref_name: branch.ref_name.to_string(),
6006 unix_timestamp: branch
6007 .most_recent_commit
6008 .as_ref()
6009 .map(|commit| commit.commit_timestamp as u64),
6010 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6011 ref_name: upstream.ref_name.to_string(),
6012 tracking: upstream
6013 .tracking
6014 .status()
6015 .map(|upstream| proto::UpstreamTracking {
6016 ahead: upstream.ahead as u64,
6017 behind: upstream.behind as u64,
6018 }),
6019 }),
6020 most_recent_commit: branch
6021 .most_recent_commit
6022 .as_ref()
6023 .map(|commit| proto::CommitSummary {
6024 sha: commit.sha.to_string(),
6025 subject: commit.subject.to_string(),
6026 commit_timestamp: commit.commit_timestamp,
6027 author_name: commit.author_name.to_string(),
6028 }),
6029 }
6030}
6031
6032fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6033 proto::Worktree {
6034 path: worktree.path.to_string_lossy().to_string(),
6035 ref_name: worktree.ref_name.to_string(),
6036 sha: worktree.sha.to_string(),
6037 }
6038}
6039
6040fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6041 git::repository::Worktree {
6042 path: PathBuf::from(proto.path.clone()),
6043 ref_name: proto.ref_name.clone().into(),
6044 sha: proto.sha.clone().into(),
6045 }
6046}
6047
6048fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6049 git::repository::Branch {
6050 is_head: proto.is_head,
6051 ref_name: proto.ref_name.clone().into(),
6052 upstream: proto
6053 .upstream
6054 .as_ref()
6055 .map(|upstream| git::repository::Upstream {
6056 ref_name: upstream.ref_name.to_string().into(),
6057 tracking: upstream
6058 .tracking
6059 .as_ref()
6060 .map(|tracking| {
6061 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6062 ahead: tracking.ahead as u32,
6063 behind: tracking.behind as u32,
6064 })
6065 })
6066 .unwrap_or(git::repository::UpstreamTracking::Gone),
6067 }),
6068 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6069 git::repository::CommitSummary {
6070 sha: commit.sha.to_string().into(),
6071 subject: commit.subject.to_string().into(),
6072 commit_timestamp: commit.commit_timestamp,
6073 author_name: commit.author_name.to_string().into(),
6074 has_parent: true,
6075 }
6076 }),
6077 }
6078}
6079
6080fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6081 proto::GitCommitDetails {
6082 sha: commit.sha.to_string(),
6083 message: commit.message.to_string(),
6084 commit_timestamp: commit.commit_timestamp,
6085 author_email: commit.author_email.to_string(),
6086 author_name: commit.author_name.to_string(),
6087 }
6088}
6089
6090fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6091 CommitDetails {
6092 sha: proto.sha.clone().into(),
6093 message: proto.message.clone().into(),
6094 commit_timestamp: proto.commit_timestamp,
6095 author_email: proto.author_email.clone().into(),
6096 author_name: proto.author_name.clone().into(),
6097 }
6098}
6099
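/// Recomputes a repository snapshot from the git backend, comparing it against
/// the previous snapshot to determine which `RepositoryEvent`s to emit.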
6100async fn compute_snapshot(
6101 id: RepositoryId,
6102 work_directory_abs_path: Arc<Path>,
6103 prev_snapshot: RepositorySnapshot,
6104 backend: Arc<dyn GitRepository>,
6105) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6106 let mut events = Vec::new();
6107 let branches = backend.branches().await?;
6108 let branch = branches.into_iter().find(|branch| branch.is_head);
6109 let statuses = backend
6110 .status(&[RepoPath::from_rel_path(
6111 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6112 )])
6113 .await?;
6114 let stash_entries = backend.stash_entries().await?;
6115 let statuses_by_path = SumTree::from_iter(
6116 statuses
6117 .entries
6118 .iter()
6119 .map(|(repo_path, status)| StatusEntry {
6120 repo_path: repo_path.clone(),
6121 status: *status,
6122 }),
6123 (),
6124 );
6125 let (merge_details, merge_heads_changed) =
6126 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6127 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6128
6129 if merge_heads_changed {
6130 events.push(RepositoryEvent::MergeHeadsChanged);
6131 }
6132
6133 if statuses_by_path != prev_snapshot.statuses_by_path {
6134 events.push(RepositoryEvent::StatusesChanged)
6135 }
6136
6137 // Load the head commit directly; `branch` is `None` when HEAD is detached.
6138 let head_commit = match backend.head_sha().await {
6139 Some(head_sha) => backend.show(head_sha).await.log_err(),
6140 None => None,
6141 };
6142
6143 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6144 events.push(RepositoryEvent::BranchChanged);
6145 }
6146
6147 let remote_origin_url = backend.remote_url("origin").await;
6148 let remote_upstream_url = backend.remote_url("upstream").await;
6149
6150 let snapshot = RepositorySnapshot {
6151 id,
6152 statuses_by_path,
6153 work_directory_abs_path,
6154 path_style: prev_snapshot.path_style,
6155 scan_id: prev_snapshot.scan_id + 1,
6156 branch,
6157 head_commit,
6158 merge: merge_details,
6159 remote_origin_url,
6160 remote_upstream_url,
6161 stash_entries,
6162 };
6163
6164 Ok((snapshot, events))
6165}
6166
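/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// simple status code when no detailed variant is present.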
6167fn status_from_proto(
6168 simple_status: i32,
6169 status: Option<proto::GitFileStatus>,
6170) -> anyhow::Result<FileStatus> {
6171 use proto::git_file_status::Variant;
6172
6173 let Some(variant) = status.and_then(|status| status.variant) else {
6174 let code = proto::GitStatus::from_i32(simple_status)
6175 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6176 let result = match code {
6177 proto::GitStatus::Added => TrackedStatus {
6178 worktree_status: StatusCode::Added,
6179 index_status: StatusCode::Unmodified,
6180 }
6181 .into(),
6182 proto::GitStatus::Modified => TrackedStatus {
6183 worktree_status: StatusCode::Modified,
6184 index_status: StatusCode::Unmodified,
6185 }
6186 .into(),
6187 proto::GitStatus::Conflict => UnmergedStatus {
6188 first_head: UnmergedStatusCode::Updated,
6189 second_head: UnmergedStatusCode::Updated,
6190 }
6191 .into(),
6192 proto::GitStatus::Deleted => TrackedStatus {
6193 worktree_status: StatusCode::Deleted,
6194 index_status: StatusCode::Unmodified,
6195 }
6196 .into(),
6197 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6198 };
6199 return Ok(result);
6200 };
6201
6202 let result = match variant {
6203 Variant::Untracked(_) => FileStatus::Untracked,
6204 Variant::Ignored(_) => FileStatus::Ignored,
6205 Variant::Unmerged(unmerged) => {
6206 let [first_head, second_head] =
6207 [unmerged.first_head, unmerged.second_head].map(|head| {
6208 let code = proto::GitStatus::from_i32(head)
6209 .with_context(|| format!("Invalid git status code: {head}"))?;
6210 let result = match code {
6211 proto::GitStatus::Added => UnmergedStatusCode::Added,
6212 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6213 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6214 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6215 };
6216 Ok(result)
6217 });
6218 let [first_head, second_head] = [first_head?, second_head?];
6219 UnmergedStatus {
6220 first_head,
6221 second_head,
6222 }
6223 .into()
6224 }
6225 Variant::Tracked(tracked) => {
6226 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6227 .map(|status| {
6228 let code = proto::GitStatus::from_i32(status)
6229 .with_context(|| format!("Invalid git status code: {status}"))?;
6230 let result = match code {
6231 proto::GitStatus::Modified => StatusCode::Modified,
6232 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6233 proto::GitStatus::Added => StatusCode::Added,
6234 proto::GitStatus::Deleted => StatusCode::Deleted,
6235 proto::GitStatus::Renamed => StatusCode::Renamed,
6236 proto::GitStatus::Copied => StatusCode::Copied,
6237 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6238 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6239 };
6240 Ok(result)
6241 });
6242 let [index_status, worktree_status] = [index_status?, worktree_status?];
6243 TrackedStatus {
6244 index_status,
6245 worktree_status,
6246 }
6247 .into()
6248 }
6249 };
6250 Ok(result)
6251}
6252
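/// Converts a `FileStatus` into its protobuf representation.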
6253fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6254 use proto::git_file_status::{Tracked, Unmerged, Variant};
6255
6256 let variant = match status {
6257 FileStatus::Untracked => Variant::Untracked(Default::default()),
6258 FileStatus::Ignored => Variant::Ignored(Default::default()),
6259 FileStatus::Unmerged(UnmergedStatus {
6260 first_head,
6261 second_head,
6262 }) => Variant::Unmerged(Unmerged {
6263 first_head: unmerged_status_to_proto(first_head),
6264 second_head: unmerged_status_to_proto(second_head),
6265 }),
6266 FileStatus::Tracked(TrackedStatus {
6267 index_status,
6268 worktree_status,
6269 }) => Variant::Tracked(Tracked {
6270 index_status: tracked_status_to_proto(index_status),
6271 worktree_status: tracked_status_to_proto(worktree_status),
6272 }),
6273 };
6274 proto::GitFileStatus {
6275 variant: Some(variant),
6276 }
6277}
6278
6279fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6280 match code {
6281 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6282 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6283 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6284 }
6285}
6286
6287fn tracked_status_to_proto(code: StatusCode) -> i32 {
6288 match code {
6289 StatusCode::Added => proto::GitStatus::Added as _,
6290 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6291 StatusCode::Modified => proto::GitStatus::Modified as _,
6292 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6293 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6294 StatusCode::Copied => proto::GitStatus::Copied as _,
6295 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6296 }
6297}