1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use rpc::{
42 AnyProtoClient, TypedEnvelope,
43 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
44};
45use serde::Deserialize;
46use std::{
47 cmp::Ordering,
48 collections::{BTreeSet, VecDeque},
49 future::Future,
50 mem,
51 ops::Range,
52 path::{Path, PathBuf},
53 sync::{
54 Arc,
55 atomic::{self, AtomicU64},
56 },
57};
58use sum_tree::{Edit, SumTree, TreeSet};
59use text::{Bias, BufferId};
60use util::{ResultExt, debug_panic};
61use worktree::{
62 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
63};
64
65pub struct GitStore {
66 state: GitStoreState,
67 buffer_store: Entity<BufferStore>,
68 worktree_store: Entity<WorktreeStore>,
69 repositories: HashMap<RepositoryId, Entity<Repository>>,
70 active_repo_id: Option<RepositoryId>,
71 #[allow(clippy::type_complexity)]
72 loading_diffs:
73 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
74 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
75 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
76 _subscriptions: Vec<Subscription>,
77}
78
79#[derive(Default)]
80struct SharedDiffs {
81 unstaged: Option<Entity<BufferDiff>>,
82 uncommitted: Option<Entity<BufferDiff>>,
83}
84
85#[derive(Default)]
86struct BufferDiffState {
87 unstaged_diff: Option<WeakEntity<BufferDiff>>,
88 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
89 recalculate_diff_task: Option<Task<Result<()>>>,
90 language: Option<Arc<Language>>,
91 language_registry: Option<Arc<LanguageRegistry>>,
92 diff_updated_futures: Vec<oneshot::Sender<()>>,
93 hunk_staging_operation_count: usize,
94
95 head_text: Option<Arc<String>>,
96 index_text: Option<Arc<String>>,
97 head_changed: bool,
98 index_changed: bool,
99 language_changed: bool,
100}
101
102#[derive(Clone, Debug)]
103enum DiffBasesChange {
104 SetIndex(Option<String>),
105 SetHead(Option<String>),
106 SetEach {
107 index: Option<String>,
108 head: Option<String>,
109 },
110 SetBoth(Option<String>),
111}
112
113#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
114enum DiffKind {
115 Unstaged,
116 Uncommitted,
117}
118
119enum GitStoreState {
120 Local {
121 next_repository_id: Arc<AtomicU64>,
122 downstream: Option<LocalDownstreamState>,
123 project_environment: Entity<ProjectEnvironment>,
124 fs: Arc<dyn Fs>,
125 },
126 Ssh {
127 upstream_client: AnyProtoClient,
128 upstream_project_id: ProjectId,
129 downstream: Option<(AnyProtoClient, ProjectId)>,
130 },
131 Remote {
132 upstream_client: AnyProtoClient,
133 upstream_project_id: ProjectId,
134 },
135}
136
137enum DownstreamUpdate {
138 UpdateRepository(RepositorySnapshot),
139 RemoveRepository(RepositoryId),
140}
141
142struct LocalDownstreamState {
143 client: AnyProtoClient,
144 project_id: ProjectId,
145 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
146 _task: Task<Result<()>>,
147}
148
149#[derive(Clone)]
150pub struct GitStoreCheckpoint {
151 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
152}
153
154#[derive(Clone, Debug, PartialEq, Eq)]
155pub struct StatusEntry {
156 pub repo_path: RepoPath,
157 pub status: FileStatus,
158}
159
160impl StatusEntry {
161 fn to_proto(&self) -> proto::StatusEntry {
162 let simple_status = match self.status {
163 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
164 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
165 FileStatus::Tracked(TrackedStatus {
166 index_status,
167 worktree_status,
168 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
169 worktree_status
170 } else {
171 index_status
172 }),
173 };
174
175 proto::StatusEntry {
176 repo_path: self.repo_path.as_ref().to_proto(),
177 simple_status,
178 status: Some(status_to_proto(self.status)),
179 }
180 }
181}
182
183impl TryFrom<proto::StatusEntry> for StatusEntry {
184 type Error = anyhow::Error;
185
186 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
187 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
188 let status = status_from_proto(value.simple_status, value.status)?;
189 Ok(Self { repo_path, status })
190 }
191}
192
193impl sum_tree::Item for StatusEntry {
194 type Summary = PathSummary<GitSummary>;
195
196 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
197 PathSummary {
198 max_path: self.repo_path.0.clone(),
199 item_summary: self.status.summary(),
200 }
201 }
202}
203
204impl sum_tree::KeyedItem for StatusEntry {
205 type Key = PathKey;
206
207 fn key(&self) -> Self::Key {
208 PathKey(self.repo_path.0.clone())
209 }
210}
211
212#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
213pub struct RepositoryId(pub u64);
214
215#[derive(Clone, Debug, PartialEq, Eq)]
216pub struct RepositorySnapshot {
217 pub id: RepositoryId,
218 pub merge_message: Option<SharedString>,
219 pub statuses_by_path: SumTree<StatusEntry>,
220 pub work_directory_abs_path: Arc<Path>,
221 pub branch: Option<Branch>,
222 pub merge_conflicts: TreeSet<RepoPath>,
223 pub merge_head_shas: Vec<SharedString>,
224 pub scan_id: u64,
225}
226
227pub struct Repository {
228 snapshot: RepositorySnapshot,
229 commit_message_buffer: Option<Entity<Buffer>>,
230 git_store: WeakEntity<GitStore>,
231 // For a local repository, holds paths that have had worktree events since the last status scan completed,
232 // and that should be examined during the next status scan.
233 paths_needing_status_update: BTreeSet<RepoPath>,
234 job_sender: mpsc::UnboundedSender<GitJob>,
235 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
236 latest_askpass_id: u64,
237}
238
239impl std::ops::Deref for Repository {
240 type Target = RepositorySnapshot;
241
242 fn deref(&self) -> &Self::Target {
243 &self.snapshot
244 }
245}
246
247#[derive(Clone)]
248pub enum RepositoryState {
249 Local {
250 backend: Arc<dyn GitRepository>,
251 environment: Arc<HashMap<String, String>>,
252 },
253 Remote {
254 project_id: ProjectId,
255 client: AnyProtoClient,
256 },
257}
258
259#[derive(Clone, Debug)]
260pub enum RepositoryEvent {
261 Updated,
262 MergeHeadsChanged,
263}
264
265#[derive(Debug)]
266pub enum GitStoreEvent {
267 ActiveRepositoryChanged(Option<RepositoryId>),
268 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
269 RepositoryAdded(RepositoryId),
270 RepositoryRemoved(RepositoryId),
271 IndexWriteError(anyhow::Error),
272}
273
274impl EventEmitter<RepositoryEvent> for Repository {}
275impl EventEmitter<GitStoreEvent> for GitStore {}
276
277struct GitJob {
278 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
279 key: Option<GitJobKey>,
280}
281
282#[derive(PartialEq, Eq)]
283enum GitJobKey {
284 WriteIndex(RepoPath),
285 BatchReadIndex,
286 RefreshStatuses,
287 ReloadGitState,
288}
289
290impl GitStore {
291 pub fn local(
292 worktree_store: &Entity<WorktreeStore>,
293 buffer_store: Entity<BufferStore>,
294 environment: Entity<ProjectEnvironment>,
295 fs: Arc<dyn Fs>,
296 cx: &mut Context<Self>,
297 ) -> Self {
298 Self::new(
299 worktree_store.clone(),
300 buffer_store,
301 GitStoreState::Local {
302 next_repository_id: Arc::new(AtomicU64::new(1)),
303 downstream: None,
304 project_environment: environment,
305 fs,
306 },
307 cx,
308 )
309 }
310
311 pub fn remote(
312 worktree_store: &Entity<WorktreeStore>,
313 buffer_store: Entity<BufferStore>,
314 upstream_client: AnyProtoClient,
315 project_id: ProjectId,
316 cx: &mut Context<Self>,
317 ) -> Self {
318 Self::new(
319 worktree_store.clone(),
320 buffer_store,
321 GitStoreState::Remote {
322 upstream_client,
323 upstream_project_id: project_id,
324 },
325 cx,
326 )
327 }
328
329 pub fn ssh(
330 worktree_store: &Entity<WorktreeStore>,
331 buffer_store: Entity<BufferStore>,
332 upstream_client: AnyProtoClient,
333 cx: &mut Context<Self>,
334 ) -> Self {
335 Self::new(
336 worktree_store.clone(),
337 buffer_store,
338 GitStoreState::Ssh {
339 upstream_client,
340 upstream_project_id: ProjectId(SSH_PROJECT_ID),
341 downstream: None,
342 },
343 cx,
344 )
345 }
346
347 fn new(
348 worktree_store: Entity<WorktreeStore>,
349 buffer_store: Entity<BufferStore>,
350 state: GitStoreState,
351 cx: &mut Context<Self>,
352 ) -> Self {
353 let _subscriptions = vec![
354 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
355 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
356 ];
357
358 GitStore {
359 state,
360 buffer_store,
361 worktree_store,
362 repositories: HashMap::default(),
363 active_repo_id: None,
364 _subscriptions,
365 loading_diffs: HashMap::default(),
366 shared_diffs: HashMap::default(),
367 diffs: HashMap::default(),
368 }
369 }
370
371 pub fn init(client: &AnyProtoClient) {
372 client.add_entity_request_handler(Self::handle_get_remotes);
373 client.add_entity_request_handler(Self::handle_get_branches);
374 client.add_entity_request_handler(Self::handle_change_branch);
375 client.add_entity_request_handler(Self::handle_create_branch);
376 client.add_entity_request_handler(Self::handle_git_init);
377 client.add_entity_request_handler(Self::handle_push);
378 client.add_entity_request_handler(Self::handle_pull);
379 client.add_entity_request_handler(Self::handle_fetch);
380 client.add_entity_request_handler(Self::handle_stage);
381 client.add_entity_request_handler(Self::handle_unstage);
382 client.add_entity_request_handler(Self::handle_commit);
383 client.add_entity_request_handler(Self::handle_reset);
384 client.add_entity_request_handler(Self::handle_show);
385 client.add_entity_request_handler(Self::handle_load_commit_diff);
386 client.add_entity_request_handler(Self::handle_checkout_files);
387 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
388 client.add_entity_request_handler(Self::handle_set_index_text);
389 client.add_entity_request_handler(Self::handle_askpass);
390 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
391 client.add_entity_request_handler(Self::handle_git_diff);
392 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
393 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
394 client.add_entity_message_handler(Self::handle_update_diff_bases);
395 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
396 client.add_entity_request_handler(Self::handle_blame_buffer);
397 client.add_entity_message_handler(Self::handle_update_repository);
398 client.add_entity_message_handler(Self::handle_remove_repository);
399 }
400
401 pub fn is_local(&self) -> bool {
402 matches!(self.state, GitStoreState::Local { .. })
403 }
404
405 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
406 match &mut self.state {
407 GitStoreState::Ssh {
408 downstream: downstream_client,
409 ..
410 } => {
411 for repo in self.repositories.values() {
412 let update = repo.read(cx).snapshot.initial_update(project_id);
413 for update in split_repository_update(update) {
414 client.send(update).log_err();
415 }
416 }
417 *downstream_client = Some((client, ProjectId(project_id)));
418 }
419 GitStoreState::Local {
420 downstream: downstream_client,
421 ..
422 } => {
423 let mut snapshots = HashMap::default();
424 let (updates_tx, mut updates_rx) = mpsc::unbounded();
425 for repo in self.repositories.values() {
426 updates_tx
427 .unbounded_send(DownstreamUpdate::UpdateRepository(
428 repo.read(cx).snapshot.clone(),
429 ))
430 .ok();
431 }
432 *downstream_client = Some(LocalDownstreamState {
433 client: client.clone(),
434 project_id: ProjectId(project_id),
435 updates_tx,
436 _task: cx.spawn(async move |this, cx| {
437 cx.background_spawn(async move {
438 while let Some(update) = updates_rx.next().await {
439 match update {
440 DownstreamUpdate::UpdateRepository(snapshot) => {
441 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
442 {
443 let update =
444 snapshot.build_update(old_snapshot, project_id);
445 *old_snapshot = snapshot;
446 for update in split_repository_update(update) {
447 client.send(update)?;
448 }
449 } else {
450 let update = snapshot.initial_update(project_id);
451 for update in split_repository_update(update) {
452 client.send(update)?;
453 }
454 snapshots.insert(snapshot.id, snapshot);
455 }
456 }
457 DownstreamUpdate::RemoveRepository(id) => {
458 client.send(proto::RemoveRepository {
459 project_id,
460 id: id.to_proto(),
461 })?;
462 }
463 }
464 }
465 anyhow::Ok(())
466 })
467 .await
468 .ok();
469 this.update(cx, |this, _| {
470 if let GitStoreState::Local {
471 downstream: downstream_client,
472 ..
473 } = &mut this.state
474 {
475 downstream_client.take();
476 } else {
477 unreachable!("unshared called on remote store");
478 }
479 })
480 }),
481 });
482 }
483 GitStoreState::Remote { .. } => {
484 debug_panic!("shared called on remote store");
485 }
486 }
487 }
488
489 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
490 match &mut self.state {
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 downstream_client.take();
496 }
497 GitStoreState::Ssh {
498 downstream: downstream_client,
499 ..
500 } => {
501 downstream_client.take();
502 }
503 GitStoreState::Remote { .. } => {
504 debug_panic!("unshared called on remote store");
505 }
506 }
507 self.shared_diffs.clear();
508 }
509
510 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
511 self.shared_diffs.remove(peer_id);
512 }
513
514 pub fn active_repository(&self) -> Option<Entity<Repository>> {
515 self.active_repo_id
516 .as_ref()
517 .map(|id| self.repositories[&id].clone())
518 }
519
520 pub fn open_unstaged_diff(
521 &mut self,
522 buffer: Entity<Buffer>,
523 cx: &mut Context<Self>,
524 ) -> Task<Result<Entity<BufferDiff>>> {
525 let buffer_id = buffer.read(cx).remote_id();
526 if let Some(diff_state) = self.diffs.get(&buffer_id) {
527 if let Some(unstaged_diff) = diff_state
528 .read(cx)
529 .unstaged_diff
530 .as_ref()
531 .and_then(|weak| weak.upgrade())
532 {
533 if let Some(task) =
534 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
535 {
536 return cx.background_executor().spawn(async move {
537 task.await?;
538 Ok(unstaged_diff)
539 });
540 }
541 return Task::ready(Ok(unstaged_diff));
542 }
543 }
544
545 let Some((repo, repo_path)) =
546 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
547 else {
548 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
549 };
550
551 let task = self
552 .loading_diffs
553 .entry((buffer_id, DiffKind::Unstaged))
554 .or_insert_with(|| {
555 let staged_text = repo.read(cx).load_staged_text(buffer_id, repo_path, cx);
556 cx.spawn(async move |this, cx| {
557 Self::open_diff_internal(
558 this,
559 DiffKind::Unstaged,
560 staged_text.await.map(DiffBasesChange::SetIndex),
561 buffer,
562 cx,
563 )
564 .await
565 .map_err(Arc::new)
566 })
567 .shared()
568 })
569 .clone();
570
571 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
572 }
573
574 pub fn open_uncommitted_diff(
575 &mut self,
576 buffer: Entity<Buffer>,
577 cx: &mut Context<Self>,
578 ) -> Task<Result<Entity<BufferDiff>>> {
579 let buffer_id = buffer.read(cx).remote_id();
580
581 if let Some(diff_state) = self.diffs.get(&buffer_id) {
582 if let Some(uncommitted_diff) = diff_state
583 .read(cx)
584 .uncommitted_diff
585 .as_ref()
586 .and_then(|weak| weak.upgrade())
587 {
588 if let Some(task) =
589 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
590 {
591 return cx.background_executor().spawn(async move {
592 task.await?;
593 Ok(uncommitted_diff)
594 });
595 }
596 return Task::ready(Ok(uncommitted_diff));
597 }
598 }
599
600 let Some((repo, repo_path)) =
601 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
602 else {
603 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
604 };
605
606 let task = self
607 .loading_diffs
608 .entry((buffer_id, DiffKind::Uncommitted))
609 .or_insert_with(|| {
610 let changes = repo.read(cx).load_committed_text(buffer_id, repo_path, cx);
611 cx.spawn(async move |this, cx| {
612 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
613 .await
614 .map_err(Arc::new)
615 })
616 .shared()
617 })
618 .clone();
619
620 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
621 }
622
623 async fn open_diff_internal(
624 this: WeakEntity<Self>,
625 kind: DiffKind,
626 texts: Result<DiffBasesChange>,
627 buffer_entity: Entity<Buffer>,
628 cx: &mut AsyncApp,
629 ) -> Result<Entity<BufferDiff>> {
630 let diff_bases_change = match texts {
631 Err(e) => {
632 this.update(cx, |this, cx| {
633 let buffer = buffer_entity.read(cx);
634 let buffer_id = buffer.remote_id();
635 this.loading_diffs.remove(&(buffer_id, kind));
636 })?;
637 return Err(e);
638 }
639 Ok(change) => change,
640 };
641
642 this.update(cx, |this, cx| {
643 let buffer = buffer_entity.read(cx);
644 let buffer_id = buffer.remote_id();
645 let language = buffer.language().cloned();
646 let language_registry = buffer.language_registry();
647 let text_snapshot = buffer.text_snapshot();
648 this.loading_diffs.remove(&(buffer_id, kind));
649
650 let diff_state = this
651 .diffs
652 .entry(buffer_id)
653 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
654
655 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
656
657 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
658 diff_state.update(cx, |diff_state, cx| {
659 diff_state.language = language;
660 diff_state.language_registry = language_registry;
661
662 match kind {
663 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
664 DiffKind::Uncommitted => {
665 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
666 diff
667 } else {
668 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
669 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
670 unstaged_diff
671 };
672
673 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
674 diff_state.uncommitted_diff = Some(diff.downgrade())
675 }
676 }
677
678 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
679
680 anyhow::Ok(async move {
681 rx.await.ok();
682 Ok(diff)
683 })
684 })
685 })??
686 .await
687 }
688
689 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
690 let diff_state = self.diffs.get(&buffer_id)?;
691 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
692 }
693
694 pub fn get_uncommitted_diff(
695 &self,
696 buffer_id: BufferId,
697 cx: &App,
698 ) -> Option<Entity<BufferDiff>> {
699 let diff_state = self.diffs.get(&buffer_id)?;
700 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
701 }
702
703 pub fn project_path_git_status(
704 &self,
705 project_path: &ProjectPath,
706 cx: &App,
707 ) -> Option<FileStatus> {
708 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
709 Some(repo.read(cx).status_for_path(&repo_path)?.status)
710 }
711
712 pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
713 let mut work_directory_abs_paths = Vec::new();
714 let mut checkpoints = Vec::new();
715 for repository in self.repositories.values() {
716 let repository = repository.read(cx);
717 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
718 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
719 }
720
721 cx.background_executor().spawn(async move {
722 let checkpoints = future::try_join_all(checkpoints).await?;
723 Ok(GitStoreCheckpoint {
724 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
725 .into_iter()
726 .zip(checkpoints)
727 .collect(),
728 })
729 })
730 }
731
732 pub fn restore_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
733 let repositories_by_work_dir_abs_path = self
734 .repositories
735 .values()
736 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
737 .collect::<HashMap<_, _>>();
738
739 let mut tasks = Vec::new();
740 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
741 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
742 let restore = repository.read(cx).restore_checkpoint(checkpoint);
743 tasks.push(async move { restore.await? });
744 }
745 }
746 cx.background_spawn(async move {
747 future::try_join_all(tasks).await?;
748 Ok(())
749 })
750 }
751
752 /// Compares two checkpoints, returning true if they are equal.
753 pub fn compare_checkpoints(
754 &self,
755 left: GitStoreCheckpoint,
756 mut right: GitStoreCheckpoint,
757 cx: &App,
758 ) -> Task<Result<bool>> {
759 let repositories_by_work_dir_abs_path = self
760 .repositories
761 .values()
762 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
763 .collect::<HashMap<_, _>>();
764
765 let mut tasks = Vec::new();
766 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
767 if let Some(right_checkpoint) = right
768 .checkpoints_by_work_dir_abs_path
769 .remove(&work_dir_abs_path)
770 {
771 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
772 {
773 let compare = repository
774 .read(cx)
775 .compare_checkpoints(left_checkpoint, right_checkpoint);
776 tasks.push(async move { compare.await? });
777 }
778 } else {
779 return Task::ready(Ok(false));
780 }
781 }
782 cx.background_spawn(async move {
783 Ok(future::try_join_all(tasks)
784 .await?
785 .into_iter()
786 .all(|result| result))
787 })
788 }
789
790 pub fn delete_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
791 let repositories_by_work_directory_abs_path = self
792 .repositories
793 .values()
794 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
795 .collect::<HashMap<_, _>>();
796
797 let mut tasks = Vec::new();
798 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
799 if let Some(repository) =
800 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
801 {
802 let delete = repository.read(cx).delete_checkpoint(checkpoint);
803 tasks.push(async move { delete.await? });
804 }
805 }
806 cx.background_spawn(async move {
807 future::try_join_all(tasks).await?;
808 Ok(())
809 })
810 }
811
812 /// Blames a buffer.
813 pub fn blame_buffer(
814 &self,
815 buffer: &Entity<Buffer>,
816 version: Option<clock::Global>,
817 cx: &App,
818 ) -> Task<Result<Option<Blame>>> {
819 let buffer = buffer.read(cx);
820 let Some((repo, repo_path)) =
821 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
822 else {
823 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
824 };
825 let content = match &version {
826 Some(version) => buffer.rope_for_version(version).clone(),
827 None => buffer.as_rope().clone(),
828 };
829 let version = version.unwrap_or(buffer.version());
830 let buffer_id = buffer.remote_id();
831
832 let rx = repo.read(cx).send_job(move |state, _| async move {
833 match state {
834 RepositoryState::Local { backend, .. } => backend
835 .blame(repo_path.clone(), content)
836 .await
837 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
838 .map(Some),
839 RepositoryState::Remote { project_id, client } => {
840 let response = client
841 .request(proto::BlameBuffer {
842 project_id: project_id.to_proto(),
843 buffer_id: buffer_id.into(),
844 version: serialize_version(&version),
845 })
846 .await?;
847 Ok(deserialize_blame_buffer_response(response))
848 }
849 }
850 });
851
852 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
853 }
854
855 pub fn get_permalink_to_line(
856 &self,
857 buffer: &Entity<Buffer>,
858 selection: Range<u32>,
859 cx: &App,
860 ) -> Task<Result<url::Url>> {
861 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
862 return Task::ready(Err(anyhow!("buffer has no file")));
863 };
864
865 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
866 &(file.worktree.read(cx).id(), file.path.clone()).into(),
867 cx,
868 ) else {
869 // If we're not in a Git repo, check whether this is a Rust source
870 // file in the Cargo registry (presumably opened with go-to-definition
871 // from a normal Rust file). If so, we can put together a permalink
872 // using crate metadata.
873 if buffer
874 .read(cx)
875 .language()
876 .is_none_or(|lang| lang.name() != "Rust".into())
877 {
878 return Task::ready(Err(anyhow!("no permalink available")));
879 }
880 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
881 return Task::ready(Err(anyhow!("no permalink available")));
882 };
883 return cx.spawn(async move |cx| {
884 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
885 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
886 .map_err(|_| anyhow!("no permalink available"))
887 });
888
889 // TODO remote case
890 };
891
892 let buffer_id = buffer.read(cx).remote_id();
893 let branch = repo.read(cx).branch.clone();
894 let remote = branch
895 .as_ref()
896 .and_then(|b| b.upstream.as_ref())
897 .and_then(|b| b.remote_name())
898 .unwrap_or("origin")
899 .to_string();
900 let rx = repo.read(cx).send_job(move |state, cx| async move {
901 match state {
902 RepositoryState::Local { backend, .. } => {
903 let origin_url = backend
904 .remote_url(&remote)
905 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
906
907 let sha = backend
908 .head_sha()
909 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
910
911 let provider_registry =
912 cx.update(GitHostingProviderRegistry::default_global)?;
913
914 let (provider, remote) =
915 parse_git_remote_url(provider_registry, &origin_url)
916 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
917
918 let path = repo_path
919 .to_str()
920 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
921
922 Ok(provider.build_permalink(
923 remote,
924 BuildPermalinkParams {
925 sha: &sha,
926 path,
927 selection: Some(selection),
928 },
929 ))
930 }
931 RepositoryState::Remote { project_id, client } => {
932 let response = client
933 .request(proto::GetPermalinkToLine {
934 project_id: project_id.to_proto(),
935 buffer_id: buffer_id.into(),
936 selection: Some(proto::Range {
937 start: selection.start as u64,
938 end: selection.end as u64,
939 }),
940 })
941 .await?;
942
943 url::Url::parse(&response.permalink).context("failed to parse permalink")
944 }
945 }
946 });
947 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
948 }
949
950 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
951 match &self.state {
952 GitStoreState::Local {
953 downstream: downstream_client,
954 ..
955 } => downstream_client
956 .as_ref()
957 .map(|state| (state.client.clone(), state.project_id)),
958 GitStoreState::Ssh {
959 downstream: downstream_client,
960 ..
961 } => downstream_client.clone(),
962 GitStoreState::Remote { .. } => None,
963 }
964 }
965
966 fn upstream_client(&self) -> Option<AnyProtoClient> {
967 match &self.state {
968 GitStoreState::Local { .. } => None,
969 GitStoreState::Ssh {
970 upstream_client, ..
971 }
972 | GitStoreState::Remote {
973 upstream_client, ..
974 } => Some(upstream_client.clone()),
975 }
976 }
977
978 fn on_worktree_store_event(
979 &mut self,
980 worktree_store: Entity<WorktreeStore>,
981 event: &WorktreeStoreEvent,
982 cx: &mut Context<Self>,
983 ) {
984 let GitStoreState::Local {
985 project_environment,
986 downstream,
987 next_repository_id,
988 fs,
989 } = &self.state
990 else {
991 return;
992 };
993
994 match event {
995 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
996 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
997 for (relative_path, _, _) in updated_entries.iter() {
998 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
999 &(*worktree_id, relative_path.clone()).into(),
1000 cx,
1001 ) else {
1002 continue;
1003 };
1004 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1005 }
1006
1007 for (repo, paths) in paths_by_git_repo {
1008 repo.update(cx, |repo, cx| {
1009 repo.paths_changed(
1010 paths,
1011 downstream
1012 .as_ref()
1013 .map(|downstream| downstream.updates_tx.clone()),
1014 cx,
1015 );
1016 });
1017 }
1018 }
1019 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1020 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1021 else {
1022 return;
1023 };
1024 if !worktree.read(cx).is_visible() {
1025 log::debug!(
1026 "not adding repositories for local worktree {:?} because it's not visible",
1027 worktree.read(cx).abs_path()
1028 );
1029 return;
1030 }
1031 self.update_repositories_from_worktree(
1032 project_environment.clone(),
1033 next_repository_id.clone(),
1034 downstream
1035 .as_ref()
1036 .map(|downstream| downstream.updates_tx.clone()),
1037 changed_repos.clone(),
1038 fs.clone(),
1039 cx,
1040 );
1041 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1042 }
1043 _ => {}
1044 }
1045 }
1046
1047 fn on_repository_event(
1048 &mut self,
1049 repo: Entity<Repository>,
1050 event: &RepositoryEvent,
1051 cx: &mut Context<Self>,
1052 ) {
1053 let id = repo.read(cx).id;
1054 cx.emit(GitStoreEvent::RepositoryUpdated(
1055 id,
1056 event.clone(),
1057 self.active_repo_id == Some(id),
1058 ))
1059 }
1060
1061 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1062 fn update_repositories_from_worktree(
1063 &mut self,
1064 project_environment: Entity<ProjectEnvironment>,
1065 next_repository_id: Arc<AtomicU64>,
1066 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1067 updated_git_repositories: UpdatedGitRepositoriesSet,
1068 fs: Arc<dyn Fs>,
1069 cx: &mut Context<Self>,
1070 ) {
1071 let mut removed_ids = Vec::new();
1072 for update in updated_git_repositories.iter() {
1073 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1074 let existing_work_directory_abs_path =
1075 repo.read(cx).work_directory_abs_path.clone();
1076 Some(&existing_work_directory_abs_path)
1077 == update.old_work_directory_abs_path.as_ref()
1078 || Some(&existing_work_directory_abs_path)
1079 == update.new_work_directory_abs_path.as_ref()
1080 }) {
1081 if let Some(new_work_directory_abs_path) =
1082 update.new_work_directory_abs_path.clone()
1083 {
1084 existing.update(cx, |existing, cx| {
1085 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1086 existing.schedule_scan(updates_tx.clone(), cx);
1087 });
1088 } else {
1089 removed_ids.push(*id);
1090 }
1091 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1092 .new_work_directory_abs_path
1093 .clone()
1094 .zip(update.dot_git_abs_path.clone())
1095 {
1096 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1097 let git_store = cx.weak_entity();
1098 let repo = cx.new(|cx| {
1099 let mut repo = Repository::local(
1100 id,
1101 work_directory_abs_path,
1102 dot_git_abs_path,
1103 project_environment.downgrade(),
1104 fs.clone(),
1105 git_store,
1106 cx,
1107 );
1108 repo.schedule_scan(updates_tx.clone(), cx);
1109 repo
1110 });
1111 self._subscriptions
1112 .push(cx.subscribe(&repo, Self::on_repository_event));
1113 self.repositories.insert(id, repo);
1114 cx.emit(GitStoreEvent::RepositoryAdded(id));
1115 self.active_repo_id.get_or_insert_with(|| {
1116 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1117 id
1118 });
1119 }
1120 }
1121
1122 for id in removed_ids {
1123 if self.active_repo_id == Some(id) {
1124 self.active_repo_id = None;
1125 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1126 }
1127 self.repositories.remove(&id);
1128 if let Some(updates_tx) = updates_tx.as_ref() {
1129 updates_tx
1130 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1131 .ok();
1132 }
1133 }
1134 }
1135
1136 fn on_buffer_store_event(
1137 &mut self,
1138 _: Entity<BufferStore>,
1139 event: &BufferStoreEvent,
1140 cx: &mut Context<Self>,
1141 ) {
1142 match event {
1143 BufferStoreEvent::BufferAdded(buffer) => {
1144 cx.subscribe(&buffer, |this, buffer, event, cx| {
1145 if let BufferEvent::LanguageChanged = event {
1146 let buffer_id = buffer.read(cx).remote_id();
1147 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1148 diff_state.update(cx, |diff_state, cx| {
1149 diff_state.buffer_language_changed(buffer, cx);
1150 });
1151 }
1152 }
1153 })
1154 .detach();
1155 }
1156 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1157 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1158 diffs.remove(buffer_id);
1159 }
1160 }
1161 BufferStoreEvent::BufferDropped(buffer_id) => {
1162 self.diffs.remove(&buffer_id);
1163 for diffs in self.shared_diffs.values_mut() {
1164 diffs.remove(buffer_id);
1165 }
1166 }
1167
1168 _ => {}
1169 }
1170 }
1171
1172 pub fn recalculate_buffer_diffs(
1173 &mut self,
1174 buffers: Vec<Entity<Buffer>>,
1175 cx: &mut Context<Self>,
1176 ) -> impl Future<Output = ()> + use<> {
1177 let mut futures = Vec::new();
1178 for buffer in buffers {
1179 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1180 let buffer = buffer.read(cx).text_snapshot();
1181 futures.push(diff_state.update(cx, |diff_state, cx| {
1182 diff_state.recalculate_diffs(
1183 buffer,
1184 diff_state.hunk_staging_operation_count,
1185 cx,
1186 )
1187 }));
1188 }
1189 }
1190 async move {
1191 futures::future::join_all(futures).await;
1192 }
1193 }
1194
1195 fn on_buffer_diff_event(
1196 &mut self,
1197 diff: Entity<buffer_diff::BufferDiff>,
1198 event: &BufferDiffEvent,
1199 cx: &mut Context<Self>,
1200 ) {
1201 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1202 let buffer_id = diff.read(cx).buffer_id;
1203 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1204 diff_state.update(cx, |diff_state, _| {
1205 diff_state.hunk_staging_operation_count += 1;
1206 });
1207 }
1208 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1209 let recv = repo.update(cx, |repo, cx| {
1210 log::debug!("updating index text for buffer {}", path.display());
1211 repo.spawn_set_index_text_job(
1212 path,
1213 new_index_text.as_ref().map(|rope| rope.to_string()),
1214 cx,
1215 )
1216 });
1217 let diff = diff.downgrade();
1218 cx.spawn(async move |this, cx| {
1219 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1220 diff.update(cx, |diff, cx| {
1221 diff.clear_pending_hunks(cx);
1222 })
1223 .ok();
1224 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1225 .ok();
1226 }
1227 })
1228 .detach();
1229 }
1230 }
1231 }
1232
1233 fn local_worktree_git_repos_changed(
1234 &mut self,
1235 worktree: Entity<Worktree>,
1236 changed_repos: &UpdatedGitRepositoriesSet,
1237 cx: &mut Context<Self>,
1238 ) {
1239 log::debug!("local worktree repos changed");
1240 debug_assert!(worktree.read(cx).is_local());
1241
1242 let mut diff_state_updates = HashMap::<Entity<Repository>, Vec<_>>::default();
1243 for (buffer_id, diff_state) in &self.diffs {
1244 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1245 continue;
1246 };
1247 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1248 continue;
1249 };
1250 if file.worktree != worktree {
1251 continue;
1252 }
1253 let Some((repo, repo_path)) =
1254 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1255 else {
1256 continue;
1257 };
1258 if !changed_repos.iter().any(|update| {
1259 update.old_work_directory_abs_path.as_ref()
1260 == Some(&repo.read(cx).work_directory_abs_path)
1261 || update.new_work_directory_abs_path.as_ref()
1262 == Some(&repo.read(cx).work_directory_abs_path)
1263 }) {
1264 continue;
1265 }
1266
1267 let diff_state = diff_state.read(cx);
1268 let has_unstaged_diff = diff_state
1269 .unstaged_diff
1270 .as_ref()
1271 .is_some_and(|diff| diff.is_upgradable());
1272 let has_uncommitted_diff = diff_state
1273 .uncommitted_diff
1274 .as_ref()
1275 .is_some_and(|set| set.is_upgradable());
1276
1277 let update = (
1278 buffer,
1279 repo_path,
1280 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1281 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1282 diff_state.hunk_staging_operation_count,
1283 );
1284 diff_state_updates.entry(repo).or_default().push(update);
1285 }
1286
1287 if diff_state_updates.is_empty() {
1288 return;
1289 }
1290
1291 for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() {
1292 let git_store = cx.weak_entity();
1293
1294 let _ = repo.read(cx).send_keyed_job(
1295 Some(GitJobKey::BatchReadIndex),
1296 |state, mut cx| async move {
1297 let RepositoryState::Local { backend, .. } = state else {
1298 log::error!("tried to recompute diffs for a non-local repository");
1299 return;
1300 };
1301 let mut diff_bases_changes_by_buffer = Vec::new();
1302 for (
1303 buffer,
1304 repo_path,
1305 current_index_text,
1306 current_head_text,
1307 hunk_staging_operation_count,
1308 ) in &repo_diff_state_updates
1309 {
1310 let index_text = if current_index_text.is_some() {
1311 backend.load_index_text(repo_path.clone()).await
1312 } else {
1313 None
1314 };
1315 let head_text = if current_head_text.is_some() {
1316 backend.load_committed_text(repo_path.clone()).await
1317 } else {
1318 None
1319 };
1320
1321 // Avoid triggering a diff update if the base text has not changed.
1322 if let Some((current_index, current_head)) =
1323 current_index_text.as_ref().zip(current_head_text.as_ref())
1324 {
1325 if current_index.as_deref() == index_text.as_ref()
1326 && current_head.as_deref() == head_text.as_ref()
1327 {
1328 continue;
1329 }
1330 }
1331
1332 let diff_bases_change =
1333 match (current_index_text.is_some(), current_head_text.is_some()) {
1334 (true, true) => Some(if index_text == head_text {
1335 DiffBasesChange::SetBoth(head_text)
1336 } else {
1337 DiffBasesChange::SetEach {
1338 index: index_text,
1339 head: head_text,
1340 }
1341 }),
1342 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1343 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1344 (false, false) => None,
1345 };
1346
1347 diff_bases_changes_by_buffer.push((
1348 buffer,
1349 diff_bases_change,
1350 *hunk_staging_operation_count,
1351 ))
1352 }
1353
1354 git_store
1355 .update(&mut cx, |git_store, cx| {
1356 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1357 diff_bases_changes_by_buffer
1358 {
1359 let Some(diff_state) =
1360 git_store.diffs.get(&buffer.read(cx).remote_id())
1361 else {
1362 continue;
1363 };
1364 let Some(diff_bases_change) = diff_bases_change else {
1365 continue;
1366 };
1367
1368 let downstream_client = git_store.downstream_client();
1369 diff_state.update(cx, |diff_state, cx| {
1370 use proto::update_diff_bases::Mode;
1371
1372 let buffer = buffer.read(cx);
1373 if let Some((client, project_id)) = downstream_client {
1374 let (staged_text, committed_text, mode) =
1375 match diff_bases_change.clone() {
1376 DiffBasesChange::SetIndex(index) => {
1377 (index, None, Mode::IndexOnly)
1378 }
1379 DiffBasesChange::SetHead(head) => {
1380 (None, head, Mode::HeadOnly)
1381 }
1382 DiffBasesChange::SetEach { index, head } => {
1383 (index, head, Mode::IndexAndHead)
1384 }
1385 DiffBasesChange::SetBoth(text) => {
1386 (None, text, Mode::IndexMatchesHead)
1387 }
1388 };
1389 let message = proto::UpdateDiffBases {
1390 project_id: project_id.to_proto(),
1391 buffer_id: buffer.remote_id().to_proto(),
1392 staged_text,
1393 committed_text,
1394 mode: mode as i32,
1395 };
1396
1397 client.send(message).log_err();
1398 }
1399
1400 let _ = diff_state.diff_bases_changed(
1401 buffer.text_snapshot(),
1402 diff_bases_change,
1403 hunk_staging_operation_count,
1404 cx,
1405 );
1406 });
1407 }
1408 })
1409 .ok();
1410 },
1411 );
1412 }
1413 }
1414
1415 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1416 &self.repositories
1417 }
1418
1419 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1420 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1421 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1422 Some(status.status)
1423 }
1424
1425 pub fn repository_and_path_for_buffer_id(
1426 &self,
1427 buffer_id: BufferId,
1428 cx: &App,
1429 ) -> Option<(Entity<Repository>, RepoPath)> {
1430 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1431 let project_path = buffer.read(cx).project_path(cx)?;
1432 self.repository_and_path_for_project_path(&project_path, cx)
1433 }
1434
1435 pub fn repository_and_path_for_project_path(
1436 &self,
1437 path: &ProjectPath,
1438 cx: &App,
1439 ) -> Option<(Entity<Repository>, RepoPath)> {
1440 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1441 self.repositories
1442 .values()
1443 .filter_map(|repo| {
1444 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1445 Some((repo.clone(), repo_path))
1446 })
1447 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1448 }
1449
1450 pub fn git_init(
1451 &self,
1452 path: Arc<Path>,
1453 fallback_branch_name: String,
1454 cx: &App,
1455 ) -> Task<Result<()>> {
1456 match &self.state {
1457 GitStoreState::Local { fs, .. } => {
1458 let fs = fs.clone();
1459 cx.background_executor()
1460 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1461 }
1462 GitStoreState::Ssh {
1463 upstream_client,
1464 upstream_project_id: project_id,
1465 ..
1466 }
1467 | GitStoreState::Remote {
1468 upstream_client,
1469 upstream_project_id: project_id,
1470 ..
1471 } => {
1472 let client = upstream_client.clone();
1473 let project_id = *project_id;
1474 cx.background_executor().spawn(async move {
1475 client
1476 .request(proto::GitInit {
1477 project_id: project_id.0,
1478 abs_path: path.to_string_lossy().to_string(),
1479 fallback_branch_name,
1480 })
1481 .await?;
1482 Ok(())
1483 })
1484 }
1485 }
1486 }
1487
1488 async fn handle_update_repository(
1489 this: Entity<Self>,
1490 envelope: TypedEnvelope<proto::UpdateRepository>,
1491 mut cx: AsyncApp,
1492 ) -> Result<()> {
1493 this.update(&mut cx, |this, cx| {
1494 let mut update = envelope.payload;
1495
1496 let id = RepositoryId::from_proto(update.id);
1497 let client = this
1498 .upstream_client()
1499 .context("no upstream client")?
1500 .clone();
1501
1502 let mut is_new = false;
1503 let repo = this.repositories.entry(id).or_insert_with(|| {
1504 is_new = true;
1505 let git_store = cx.weak_entity();
1506 cx.new(|cx| {
1507 Repository::remote(
1508 id,
1509 Path::new(&update.abs_path).into(),
1510 ProjectId(update.project_id),
1511 client,
1512 git_store,
1513 cx,
1514 )
1515 })
1516 });
1517 if is_new {
1518 this._subscriptions
1519 .push(cx.subscribe(&repo, Self::on_repository_event))
1520 }
1521
1522 repo.update(cx, {
1523 let update = update.clone();
1524 |repo, cx| repo.apply_remote_update(update, cx)
1525 })?;
1526
1527 this.active_repo_id.get_or_insert_with(|| {
1528 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1529 id
1530 });
1531
1532 if let Some((client, project_id)) = this.downstream_client() {
1533 update.project_id = project_id.to_proto();
1534 client.send(update).log_err();
1535 }
1536 Ok(())
1537 })?
1538 }
1539
1540 async fn handle_remove_repository(
1541 this: Entity<Self>,
1542 envelope: TypedEnvelope<proto::RemoveRepository>,
1543 mut cx: AsyncApp,
1544 ) -> Result<()> {
1545 this.update(&mut cx, |this, cx| {
1546 let mut update = envelope.payload;
1547 let id = RepositoryId::from_proto(update.id);
1548 this.repositories.remove(&id);
1549 if let Some((client, project_id)) = this.downstream_client() {
1550 update.project_id = project_id.to_proto();
1551 client.send(update).log_err();
1552 }
1553 if this.active_repo_id == Some(id) {
1554 this.active_repo_id = None;
1555 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1556 }
1557 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1558 })
1559 }
1560
1561 async fn handle_git_init(
1562 this: Entity<Self>,
1563 envelope: TypedEnvelope<proto::GitInit>,
1564 cx: AsyncApp,
1565 ) -> Result<proto::Ack> {
1566 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1567 let name = envelope.payload.fallback_branch_name;
1568 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1569 .await?;
1570
1571 Ok(proto::Ack {})
1572 }
1573
1574 async fn handle_fetch(
1575 this: Entity<Self>,
1576 envelope: TypedEnvelope<proto::Fetch>,
1577 mut cx: AsyncApp,
1578 ) -> Result<proto::RemoteMessageResponse> {
1579 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1580 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1581 let askpass_id = envelope.payload.askpass_id;
1582
1583 let askpass = make_remote_delegate(
1584 this,
1585 envelope.payload.project_id,
1586 repository_id,
1587 askpass_id,
1588 &mut cx,
1589 );
1590
1591 let remote_output = repository_handle
1592 .update(&mut cx, |repository_handle, cx| {
1593 repository_handle.fetch(askpass, cx)
1594 })?
1595 .await??;
1596
1597 Ok(proto::RemoteMessageResponse {
1598 stdout: remote_output.stdout,
1599 stderr: remote_output.stderr,
1600 })
1601 }
1602
1603 async fn handle_push(
1604 this: Entity<Self>,
1605 envelope: TypedEnvelope<proto::Push>,
1606 mut cx: AsyncApp,
1607 ) -> Result<proto::RemoteMessageResponse> {
1608 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1609 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1610
1611 let askpass_id = envelope.payload.askpass_id;
1612 let askpass = make_remote_delegate(
1613 this,
1614 envelope.payload.project_id,
1615 repository_id,
1616 askpass_id,
1617 &mut cx,
1618 );
1619
1620 let options = envelope
1621 .payload
1622 .options
1623 .as_ref()
1624 .map(|_| match envelope.payload.options() {
1625 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1626 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1627 });
1628
1629 let branch_name = envelope.payload.branch_name.into();
1630 let remote_name = envelope.payload.remote_name.into();
1631
1632 let remote_output = repository_handle
1633 .update(&mut cx, |repository_handle, cx| {
1634 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1635 })?
1636 .await??;
1637 Ok(proto::RemoteMessageResponse {
1638 stdout: remote_output.stdout,
1639 stderr: remote_output.stderr,
1640 })
1641 }
1642
1643 async fn handle_pull(
1644 this: Entity<Self>,
1645 envelope: TypedEnvelope<proto::Pull>,
1646 mut cx: AsyncApp,
1647 ) -> Result<proto::RemoteMessageResponse> {
1648 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1649 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1650 let askpass_id = envelope.payload.askpass_id;
1651 let askpass = make_remote_delegate(
1652 this,
1653 envelope.payload.project_id,
1654 repository_id,
1655 askpass_id,
1656 &mut cx,
1657 );
1658
1659 let branch_name = envelope.payload.branch_name.into();
1660 let remote_name = envelope.payload.remote_name.into();
1661
1662 let remote_message = repository_handle
1663 .update(&mut cx, |repository_handle, cx| {
1664 repository_handle.pull(branch_name, remote_name, askpass, cx)
1665 })?
1666 .await??;
1667
1668 Ok(proto::RemoteMessageResponse {
1669 stdout: remote_message.stdout,
1670 stderr: remote_message.stderr,
1671 })
1672 }
1673
1674 async fn handle_stage(
1675 this: Entity<Self>,
1676 envelope: TypedEnvelope<proto::Stage>,
1677 mut cx: AsyncApp,
1678 ) -> Result<proto::Ack> {
1679 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1680 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1681
1682 let entries = envelope
1683 .payload
1684 .paths
1685 .into_iter()
1686 .map(PathBuf::from)
1687 .map(RepoPath::new)
1688 .collect();
1689
1690 repository_handle
1691 .update(&mut cx, |repository_handle, cx| {
1692 repository_handle.stage_entries(entries, cx)
1693 })?
1694 .await?;
1695 Ok(proto::Ack {})
1696 }
1697
1698 async fn handle_unstage(
1699 this: Entity<Self>,
1700 envelope: TypedEnvelope<proto::Unstage>,
1701 mut cx: AsyncApp,
1702 ) -> Result<proto::Ack> {
1703 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1704 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1705
1706 let entries = envelope
1707 .payload
1708 .paths
1709 .into_iter()
1710 .map(PathBuf::from)
1711 .map(RepoPath::new)
1712 .collect();
1713
1714 repository_handle
1715 .update(&mut cx, |repository_handle, cx| {
1716 repository_handle.unstage_entries(entries, cx)
1717 })?
1718 .await?;
1719
1720 Ok(proto::Ack {})
1721 }
1722
1723 async fn handle_set_index_text(
1724 this: Entity<Self>,
1725 envelope: TypedEnvelope<proto::SetIndexText>,
1726 mut cx: AsyncApp,
1727 ) -> Result<proto::Ack> {
1728 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1729 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1730
1731 repository_handle
1732 .update(&mut cx, |repository_handle, cx| {
1733 repository_handle.spawn_set_index_text_job(
1734 RepoPath::from_str(&envelope.payload.path),
1735 envelope.payload.text,
1736 cx,
1737 )
1738 })?
1739 .await??;
1740 Ok(proto::Ack {})
1741 }
1742
1743 async fn handle_commit(
1744 this: Entity<Self>,
1745 envelope: TypedEnvelope<proto::Commit>,
1746 mut cx: AsyncApp,
1747 ) -> Result<proto::Ack> {
1748 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1749 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1750
1751 let message = SharedString::from(envelope.payload.message);
1752 let name = envelope.payload.name.map(SharedString::from);
1753 let email = envelope.payload.email.map(SharedString::from);
1754
1755 repository_handle
1756 .update(&mut cx, |repository_handle, cx| {
1757 repository_handle.commit(message, name.zip(email), cx)
1758 })?
1759 .await??;
1760 Ok(proto::Ack {})
1761 }
1762
1763 async fn handle_get_remotes(
1764 this: Entity<Self>,
1765 envelope: TypedEnvelope<proto::GetRemotes>,
1766 mut cx: AsyncApp,
1767 ) -> Result<proto::GetRemotesResponse> {
1768 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1769 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1770
1771 let branch_name = envelope.payload.branch_name;
1772
1773 let remotes = repository_handle
1774 .update(&mut cx, |repository_handle, _| {
1775 repository_handle.get_remotes(branch_name)
1776 })?
1777 .await??;
1778
1779 Ok(proto::GetRemotesResponse {
1780 remotes: remotes
1781 .into_iter()
1782 .map(|remotes| proto::get_remotes_response::Remote {
1783 name: remotes.name.to_string(),
1784 })
1785 .collect::<Vec<_>>(),
1786 })
1787 }
1788
1789 async fn handle_get_branches(
1790 this: Entity<Self>,
1791 envelope: TypedEnvelope<proto::GitGetBranches>,
1792 mut cx: AsyncApp,
1793 ) -> Result<proto::GitBranchesResponse> {
1794 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1795 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1796
1797 let branches = repository_handle
1798 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1799 .await??;
1800
1801 Ok(proto::GitBranchesResponse {
1802 branches: branches
1803 .into_iter()
1804 .map(|branch| branch_to_proto(&branch))
1805 .collect::<Vec<_>>(),
1806 })
1807 }
1808 async fn handle_create_branch(
1809 this: Entity<Self>,
1810 envelope: TypedEnvelope<proto::GitCreateBranch>,
1811 mut cx: AsyncApp,
1812 ) -> Result<proto::Ack> {
1813 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1814 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1815 let branch_name = envelope.payload.branch_name;
1816
1817 repository_handle
1818 .update(&mut cx, |repository_handle, _| {
1819 repository_handle.create_branch(branch_name)
1820 })?
1821 .await??;
1822
1823 Ok(proto::Ack {})
1824 }
1825
1826 async fn handle_change_branch(
1827 this: Entity<Self>,
1828 envelope: TypedEnvelope<proto::GitChangeBranch>,
1829 mut cx: AsyncApp,
1830 ) -> Result<proto::Ack> {
1831 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1832 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1833 let branch_name = envelope.payload.branch_name;
1834
1835 repository_handle
1836 .update(&mut cx, |repository_handle, _| {
1837 repository_handle.change_branch(branch_name)
1838 })?
1839 .await??;
1840
1841 Ok(proto::Ack {})
1842 }
1843
1844 async fn handle_show(
1845 this: Entity<Self>,
1846 envelope: TypedEnvelope<proto::GitShow>,
1847 mut cx: AsyncApp,
1848 ) -> Result<proto::GitCommitDetails> {
1849 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1850 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1851
1852 let commit = repository_handle
1853 .update(&mut cx, |repository_handle, _| {
1854 repository_handle.show(envelope.payload.commit)
1855 })?
1856 .await??;
1857 Ok(proto::GitCommitDetails {
1858 sha: commit.sha.into(),
1859 message: commit.message.into(),
1860 commit_timestamp: commit.commit_timestamp,
1861 author_email: commit.author_email.into(),
1862 author_name: commit.author_name.into(),
1863 })
1864 }
1865
1866 async fn handle_load_commit_diff(
1867 this: Entity<Self>,
1868 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1869 mut cx: AsyncApp,
1870 ) -> Result<proto::LoadCommitDiffResponse> {
1871 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1872 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1873
1874 let commit_diff = repository_handle
1875 .update(&mut cx, |repository_handle, _| {
1876 repository_handle.load_commit_diff(envelope.payload.commit)
1877 })?
1878 .await??;
1879 Ok(proto::LoadCommitDiffResponse {
1880 files: commit_diff
1881 .files
1882 .into_iter()
1883 .map(|file| proto::CommitFile {
1884 path: file.path.to_string(),
1885 old_text: file.old_text,
1886 new_text: file.new_text,
1887 })
1888 .collect(),
1889 })
1890 }
1891
1892 async fn handle_reset(
1893 this: Entity<Self>,
1894 envelope: TypedEnvelope<proto::GitReset>,
1895 mut cx: AsyncApp,
1896 ) -> Result<proto::Ack> {
1897 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1898 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1899
1900 let mode = match envelope.payload.mode() {
1901 git_reset::ResetMode::Soft => ResetMode::Soft,
1902 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1903 };
1904
1905 repository_handle
1906 .update(&mut cx, |repository_handle, cx| {
1907 repository_handle.reset(envelope.payload.commit, mode, cx)
1908 })?
1909 .await??;
1910 Ok(proto::Ack {})
1911 }
1912
1913 async fn handle_checkout_files(
1914 this: Entity<Self>,
1915 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1916 mut cx: AsyncApp,
1917 ) -> Result<proto::Ack> {
1918 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1919 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1920 let paths = envelope
1921 .payload
1922 .paths
1923 .iter()
1924 .map(|s| RepoPath::from_str(s))
1925 .collect();
1926
1927 repository_handle
1928 .update(&mut cx, |repository_handle, cx| {
1929 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1930 })?
1931 .await??;
1932 Ok(proto::Ack {})
1933 }
1934
1935 async fn handle_open_commit_message_buffer(
1936 this: Entity<Self>,
1937 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1938 mut cx: AsyncApp,
1939 ) -> Result<proto::OpenBufferResponse> {
1940 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1941 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1942 let buffer = repository
1943 .update(&mut cx, |repository, cx| {
1944 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1945 })?
1946 .await?;
1947
1948 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1949 this.update(&mut cx, |this, cx| {
1950 this.buffer_store.update(cx, |buffer_store, cx| {
1951 buffer_store
1952 .create_buffer_for_peer(
1953 &buffer,
1954 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1955 cx,
1956 )
1957 .detach_and_log_err(cx);
1958 })
1959 })?;
1960
1961 Ok(proto::OpenBufferResponse {
1962 buffer_id: buffer_id.to_proto(),
1963 })
1964 }
1965
1966 async fn handle_askpass(
1967 this: Entity<Self>,
1968 envelope: TypedEnvelope<proto::AskPassRequest>,
1969 mut cx: AsyncApp,
1970 ) -> Result<proto::AskPassResponse> {
1971 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1972 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1973
1974 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1975 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1976 debug_panic!("no askpass found");
            return Err(anyhow!("no askpass found"));
1978 };
1979
1980 let response = askpass.ask_password(envelope.payload.prompt).await?;
1981
1982 delegates
1983 .lock()
1984 .insert(envelope.payload.askpass_id, askpass);
1985
1986 Ok(proto::AskPassResponse { response })
1987 }
1988
1989 async fn handle_check_for_pushed_commits(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::CheckForPushedCommitsResponse> {
1994 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1995 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1996
1997 let branches = repository_handle
1998 .update(&mut cx, |repository_handle, _| {
1999 repository_handle.check_for_pushed_commits()
2000 })?
2001 .await??;
2002 Ok(proto::CheckForPushedCommitsResponse {
2003 pushed_to: branches
2004 .into_iter()
                .map(|branch| branch.to_string())
2006 .collect(),
2007 })
2008 }
2009
2010 async fn handle_git_diff(
2011 this: Entity<Self>,
2012 envelope: TypedEnvelope<proto::GitDiff>,
2013 mut cx: AsyncApp,
2014 ) -> Result<proto::GitDiffResponse> {
2015 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2016 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2017 let diff_type = match envelope.payload.diff_type() {
2018 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2019 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2020 };
2021
2022 let mut diff = repository_handle
2023 .update(&mut cx, |repository_handle, cx| {
2024 repository_handle.diff(diff_type, cx)
2025 })?
2026 .await??;
        const ONE_MB: usize = 1_000_000;
        if diff.len() > ONE_MB {
            // Cap the payload at one megabyte of UTF-8, backing up to the nearest
            // char boundary so the truncated string stays valid.
            let mut cutoff = ONE_MB;
            while !diff.is_char_boundary(cutoff) {
                cutoff -= 1;
            }
            diff.truncate(cutoff);
        }
2031
2032 Ok(proto::GitDiffResponse { diff })
2033 }
2034
2035 async fn handle_open_unstaged_diff(
2036 this: Entity<Self>,
2037 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2038 mut cx: AsyncApp,
2039 ) -> Result<proto::OpenUnstagedDiffResponse> {
2040 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2041 let diff = this
2042 .update(&mut cx, |this, cx| {
2043 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2044 Some(this.open_unstaged_diff(buffer, cx))
2045 })?
2046 .ok_or_else(|| anyhow!("no such buffer"))?
2047 .await?;
2048 this.update(&mut cx, |this, _| {
2049 let shared_diffs = this
2050 .shared_diffs
2051 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2052 .or_default();
2053 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2054 })?;
2055 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2056 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2057 }
2058
2059 async fn handle_open_uncommitted_diff(
2060 this: Entity<Self>,
2061 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2062 mut cx: AsyncApp,
2063 ) -> Result<proto::OpenUncommittedDiffResponse> {
2064 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2065 let diff = this
2066 .update(&mut cx, |this, cx| {
2067 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2068 Some(this.open_uncommitted_diff(buffer, cx))
2069 })?
2070 .ok_or_else(|| anyhow!("no such buffer"))?
2071 .await?;
2072 this.update(&mut cx, |this, _| {
2073 let shared_diffs = this
2074 .shared_diffs
2075 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2076 .or_default();
2077 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2078 })?;
2079 diff.read_with(&cx, |diff, cx| {
2080 use proto::open_uncommitted_diff_response::Mode;
2081
2082 let unstaged_diff = diff.secondary_diff();
2083 let index_snapshot = unstaged_diff.and_then(|diff| {
2084 let diff = diff.read(cx);
2085 diff.base_text_exists().then(|| diff.base_text())
2086 });
2087
2088 let mode;
2089 let staged_text;
2090 let committed_text;
2091 if diff.base_text_exists() {
2092 let committed_snapshot = diff.base_text();
2093 committed_text = Some(committed_snapshot.text());
2094 if let Some(index_text) = index_snapshot {
2095 if index_text.remote_id() == committed_snapshot.remote_id() {
2096 mode = Mode::IndexMatchesHead;
2097 staged_text = None;
2098 } else {
2099 mode = Mode::IndexAndHead;
2100 staged_text = Some(index_text.text());
2101 }
2102 } else {
2103 mode = Mode::IndexAndHead;
2104 staged_text = None;
2105 }
2106 } else {
2107 mode = Mode::IndexAndHead;
2108 committed_text = None;
2109 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2110 }
2111
2112 proto::OpenUncommittedDiffResponse {
2113 committed_text,
2114 staged_text,
2115 mode: mode.into(),
2116 }
2117 })
2118 }
2119
2120 async fn handle_update_diff_bases(
2121 this: Entity<Self>,
2122 request: TypedEnvelope<proto::UpdateDiffBases>,
2123 mut cx: AsyncApp,
2124 ) -> Result<()> {
2125 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2126 this.update(&mut cx, |this, cx| {
2127 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2128 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2129 let buffer = buffer.read(cx).text_snapshot();
2130 diff_state.update(cx, |diff_state, cx| {
2131 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2132 })
2133 }
2134 }
2135 })
2136 }
2137
2138 async fn handle_blame_buffer(
2139 this: Entity<Self>,
2140 envelope: TypedEnvelope<proto::BlameBuffer>,
2141 mut cx: AsyncApp,
2142 ) -> Result<proto::BlameBufferResponse> {
2143 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2144 let version = deserialize_version(&envelope.payload.version);
2145 let buffer = this.read_with(&cx, |this, cx| {
2146 this.buffer_store.read(cx).get_existing(buffer_id)
2147 })??;
2148 buffer
2149 .update(&mut cx, |buffer, _| {
2150 buffer.wait_for_version(version.clone())
2151 })?
2152 .await?;
2153 let blame = this
2154 .update(&mut cx, |this, cx| {
2155 this.blame_buffer(&buffer, Some(version), cx)
2156 })?
2157 .await?;
2158 Ok(serialize_blame_buffer_response(blame))
2159 }
2160
2161 async fn handle_get_permalink_to_line(
2162 this: Entity<Self>,
2163 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2164 mut cx: AsyncApp,
2165 ) -> Result<proto::GetPermalinkToLineResponse> {
2166 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2168 let selection = {
2169 let proto_selection = envelope
2170 .payload
2171 .selection
                .context("no selection provided to get permalink for")?;
2173 proto_selection.start as u32..proto_selection.end as u32
2174 };
2175 let buffer = this.read_with(&cx, |this, cx| {
2176 this.buffer_store.read(cx).get_existing(buffer_id)
2177 })??;
2178 let permalink = this
2179 .update(&mut cx, |this, cx| {
2180 this.get_permalink_to_line(&buffer, selection, cx)
2181 })?
2182 .await?;
2183 Ok(proto::GetPermalinkToLineResponse {
2184 permalink: permalink.to_string(),
2185 })
2186 }
2187
2188 fn repository_for_request(
2189 this: &Entity<Self>,
2190 id: RepositoryId,
2191 cx: &mut AsyncApp,
2192 ) -> Result<Entity<Repository>> {
2193 this.update(cx, |this, _| {
2194 this.repositories
2195 .get(&id)
2196 .context("missing repository handle")
2197 .cloned()
2198 })?
2199 }
2200
2201 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2202 self.repositories
2203 .iter()
2204 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2205 .collect()
2206 }
2207}
2208
2209impl BufferDiffState {
2210 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2211 self.language = buffer.read(cx).language().cloned();
2212 self.language_changed = true;
2213 let _ = self.recalculate_diffs(
2214 buffer.read(cx).text_snapshot(),
2215 self.hunk_staging_operation_count,
2216 cx,
2217 );
2218 }
2219
2220 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2221 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2222 }
2223
2224 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2225 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2226 }
2227
2228 fn handle_base_texts_updated(
2229 &mut self,
2230 buffer: text::BufferSnapshot,
2231 message: proto::UpdateDiffBases,
2232 cx: &mut Context<Self>,
2233 ) {
2234 use proto::update_diff_bases::Mode;
2235
2236 let Some(mode) = Mode::from_i32(message.mode) else {
2237 return;
2238 };
2239
2240 let diff_bases_change = match mode {
2241 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2242 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2243 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2244 Mode::IndexAndHead => DiffBasesChange::SetEach {
2245 index: message.staged_text,
2246 head: message.committed_text,
2247 },
2248 };
2249
2250 let _ = self.diff_bases_changed(
2251 buffer,
2252 diff_bases_change,
2253 self.hunk_staging_operation_count,
2254 cx,
2255 );
2256 }
2257
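    /// Returns a receiver that resolves once the pending diff recalculation
    /// finishes, or `None` if no recalculation is currently in flight.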
2258 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2259 if self.diff_updated_futures.is_empty() {
2260 return None;
2261 }
2262 let (tx, rx) = oneshot::channel();
2263 self.diff_updated_futures.push(tx);
2264 Some(rx)
2265 }
2266
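    /// Replaces the cached index and/or HEAD base texts (normalizing line endings),
    /// then kicks off a diff recalculation. The returned receiver resolves when the
    /// recalculation completes.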
2267 fn diff_bases_changed(
2268 &mut self,
2269 buffer: text::BufferSnapshot,
2270 diff_bases_change: DiffBasesChange,
2271 prev_hunk_staging_operation_count: usize,
2272 cx: &mut Context<Self>,
2273 ) -> oneshot::Receiver<()> {
2274 match diff_bases_change {
2275 DiffBasesChange::SetIndex(index) => {
2276 self.index_text = index.map(|mut index| {
2277 text::LineEnding::normalize(&mut index);
2278 Arc::new(index)
2279 });
2280 self.index_changed = true;
2281 }
2282 DiffBasesChange::SetHead(head) => {
2283 self.head_text = head.map(|mut head| {
2284 text::LineEnding::normalize(&mut head);
2285 Arc::new(head)
2286 });
2287 self.head_changed = true;
2288 }
2289 DiffBasesChange::SetBoth(text) => {
2290 let text = text.map(|mut text| {
2291 text::LineEnding::normalize(&mut text);
2292 Arc::new(text)
2293 });
2294 self.head_text = text.clone();
2295 self.index_text = text;
2296 self.head_changed = true;
2297 self.index_changed = true;
2298 }
2299 DiffBasesChange::SetEach { index, head } => {
2300 self.index_text = index.map(|mut index| {
2301 text::LineEnding::normalize(&mut index);
2302 Arc::new(index)
2303 });
2304 self.index_changed = true;
2305 self.head_text = head.map(|mut head| {
2306 text::LineEnding::normalize(&mut head);
2307 Arc::new(head)
2308 });
2309 self.head_changed = true;
2310 }
2311 }
2312
2313 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2314 }
2315
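    /// Recomputes the unstaged and uncommitted diffs on a background task. When the
    /// index matches HEAD, the fresh unstaged snapshot is reused for the uncommitted
    /// diff; the whole update is discarded if a hunk staging operation happened
    /// since it was scheduled.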
2316 fn recalculate_diffs(
2317 &mut self,
2318 buffer: text::BufferSnapshot,
2319 prev_hunk_staging_operation_count: usize,
2320 cx: &mut Context<Self>,
2321 ) -> oneshot::Receiver<()> {
2322 log::debug!("recalculate diffs");
2323 let (tx, rx) = oneshot::channel();
2324 self.diff_updated_futures.push(tx);
2325
2326 let language = self.language.clone();
2327 let language_registry = self.language_registry.clone();
2328 let unstaged_diff = self.unstaged_diff();
2329 let uncommitted_diff = self.uncommitted_diff();
2330 let head = self.head_text.clone();
2331 let index = self.index_text.clone();
2332 let index_changed = self.index_changed;
2333 let head_changed = self.head_changed;
2334 let language_changed = self.language_changed;
2335 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2336 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2337 (None, None) => true,
2338 _ => false,
2339 };
2340 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2341 let mut new_unstaged_diff = None;
2342 if let Some(unstaged_diff) = &unstaged_diff {
2343 new_unstaged_diff = Some(
2344 BufferDiff::update_diff(
2345 unstaged_diff.clone(),
2346 buffer.clone(),
2347 index,
2348 index_changed,
2349 language_changed,
2350 language.clone(),
2351 language_registry.clone(),
2352 cx,
2353 )
2354 .await?,
2355 );
2356 }
2357
2358 let mut new_uncommitted_diff = None;
2359 if let Some(uncommitted_diff) = &uncommitted_diff {
2360 new_uncommitted_diff = if index_matches_head {
2361 new_unstaged_diff.clone()
2362 } else {
2363 Some(
2364 BufferDiff::update_diff(
2365 uncommitted_diff.clone(),
2366 buffer.clone(),
2367 head,
2368 head_changed,
2369 language_changed,
2370 language.clone(),
2371 language_registry.clone(),
2372 cx,
2373 )
2374 .await?,
2375 )
2376 }
2377 }
2378
2379 if this.update(cx, |this, _| {
2380 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2381 })? {
                log::debug!("skipping stale diff recalculation; hunks were staged while it ran");
2383 return Ok(());
2384 }
2385
2386 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2387 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2388 {
2389 unstaged_diff.update(cx, |diff, cx| {
2390 if language_changed {
2391 diff.language_changed(cx);
2392 }
2393 diff.set_snapshot(new_unstaged_diff, &buffer, None, cx)
2394 })?
2395 } else {
2396 None
2397 };
2398
2399 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2400 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2401 {
2402 uncommitted_diff.update(cx, |diff, cx| {
2403 if language_changed {
2404 diff.language_changed(cx);
2405 }
2406 diff.set_snapshot(new_uncommitted_diff, &buffer, unstaged_changed_range, cx);
2407 })?;
2408 }
2409
2410 if let Some(this) = this.upgrade() {
2411 this.update(cx, |this, _| {
2412 this.index_changed = false;
2413 this.head_changed = false;
2414 this.language_changed = false;
2415 for tx in this.diff_updated_futures.drain(..) {
2416 tx.send(()).ok();
2417 }
2418 })?;
2419 }
2420
2421 Ok(())
2422 }));
2423
2424 rx
2425 }
2426}
2427
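/// Builds an `AskPassDelegate` that forwards askpass prompts from a local git
/// operation to the downstream client over RPC and relays the client's response
/// back to the waiting operation.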
2428fn make_remote_delegate(
2429 this: Entity<GitStore>,
2430 project_id: u64,
2431 repository_id: RepositoryId,
2432 askpass_id: u64,
2433 cx: &mut AsyncApp,
2434) -> AskPassDelegate {
2435 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2436 this.update(cx, |this, cx| {
2437 let Some((client, _)) = this.downstream_client() else {
2438 return;
2439 };
2440 let response = client.request(proto::AskPassRequest {
2441 project_id,
2442 repository_id: repository_id.to_proto(),
2443 askpass_id,
2444 prompt,
2445 });
2446 cx.spawn(async move |_, _| {
2447 tx.send(response.await?.response).ok();
2448 anyhow::Ok(())
2449 })
2450 .detach_and_log_err(cx);
2451 })
2452 .log_err();
2453 })
2454}
2455
2456impl RepositoryId {
2457 pub fn to_proto(self) -> u64 {
2458 self.0
2459 }
2460
2461 pub fn from_proto(id: u64) -> Self {
2462 RepositoryId(id)
2463 }
2464}
2465
2466impl RepositorySnapshot {
2467 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2468 Self {
2469 id,
2470 merge_message: None,
2471 statuses_by_path: Default::default(),
2472 work_directory_abs_path,
2473 branch: None,
2474 merge_conflicts: Default::default(),
2475 merge_head_shas: Default::default(),
2476 scan_id: 0,
2477 }
2478 }
2479
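    /// Builds the full `UpdateRepository` message sent when this repository is first
    /// shared downstream: every status entry is included and no removals are listed.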
2480 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2481 proto::UpdateRepository {
2482 branch_summary: self.branch.as_ref().map(branch_to_proto),
2483 updated_statuses: self
2484 .statuses_by_path
2485 .iter()
2486 .map(|entry| entry.to_proto())
2487 .collect(),
2488 removed_statuses: Default::default(),
2489 current_merge_conflicts: self
2490 .merge_conflicts
2491 .iter()
2492 .map(|repo_path| repo_path.to_proto())
2493 .collect(),
2494 project_id,
2495 id: self.id.to_proto(),
2496 abs_path: self.work_directory_abs_path.to_proto(),
2497 entry_ids: vec![self.id.to_proto()],
2498 scan_id: self.scan_id,
2499 is_last_update: true,
2500 }
2501 }
2502
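    /// Diffs this snapshot against `old` by merge-walking the two sorted status
    /// lists, emitting only the entries that were added, changed, or removed.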
2503 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2504 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2505 let mut removed_statuses: Vec<String> = Vec::new();
2506
2507 let mut new_statuses = self.statuses_by_path.iter().peekable();
2508 let mut old_statuses = old.statuses_by_path.iter().peekable();
2509
2510 let mut current_new_entry = new_statuses.next();
2511 let mut current_old_entry = old_statuses.next();
2512 loop {
2513 match (current_new_entry, current_old_entry) {
2514 (Some(new_entry), Some(old_entry)) => {
2515 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2516 Ordering::Less => {
2517 updated_statuses.push(new_entry.to_proto());
2518 current_new_entry = new_statuses.next();
2519 }
2520 Ordering::Equal => {
2521 if new_entry.status != old_entry.status {
2522 updated_statuses.push(new_entry.to_proto());
2523 }
2524 current_old_entry = old_statuses.next();
2525 current_new_entry = new_statuses.next();
2526 }
2527 Ordering::Greater => {
2528 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2529 current_old_entry = old_statuses.next();
2530 }
2531 }
2532 }
2533 (None, Some(old_entry)) => {
2534 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2535 current_old_entry = old_statuses.next();
2536 }
2537 (Some(new_entry), None) => {
2538 updated_statuses.push(new_entry.to_proto());
2539 current_new_entry = new_statuses.next();
2540 }
2541 (None, None) => break,
2542 }
2543 }
2544
2545 proto::UpdateRepository {
2546 branch_summary: self.branch.as_ref().map(branch_to_proto),
2547 updated_statuses,
2548 removed_statuses,
2549 current_merge_conflicts: self
2550 .merge_conflicts
2551 .iter()
2552 .map(|path| path.as_ref().to_proto())
2553 .collect(),
2554 project_id,
2555 id: self.id.to_proto(),
2556 abs_path: self.work_directory_abs_path.to_proto(),
2557 entry_ids: vec![],
2558 scan_id: self.scan_id,
2559 is_last_update: true,
2560 }
2561 }
2562
2563 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2564 self.statuses_by_path.iter().cloned()
2565 }
2566
2567 pub fn status_summary(&self) -> GitSummary {
2568 self.statuses_by_path.summary().item_summary
2569 }
2570
2571 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2572 self.statuses_by_path
2573 .get(&PathKey(path.0.clone()), &())
2574 .cloned()
2575 }
2576
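    /// Converts an absolute path into a repository-relative `RepoPath`, or `None` if
    /// the path lies outside the working directory.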
2577 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2578 abs_path
2579 .strip_prefix(&self.work_directory_abs_path)
2580 .map(RepoPath::from)
2581 .ok()
2582 }
2583
2584 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2585 self.statuses_by_path
2586 .get(&PathKey(repo_path.0.clone()), &())
2587 .map_or(false, |entry| entry.status.is_conflicted())
2588 }
2589
2590 /// This is the name that will be displayed in the repository selector for this repository.
2591 pub fn display_name(&self) -> SharedString {
2592 self.work_directory_abs_path
2593 .file_name()
2594 .unwrap_or_default()
2595 .to_string_lossy()
2596 .to_string()
2597 .into()
2598 }
2599}
2600
2601impl Repository {
2602 fn local(
2603 id: RepositoryId,
2604 work_directory_abs_path: Arc<Path>,
2605 dot_git_abs_path: Arc<Path>,
2606 project_environment: WeakEntity<ProjectEnvironment>,
2607 fs: Arc<dyn Fs>,
2608 git_store: WeakEntity<GitStore>,
2609 cx: &mut Context<Self>,
2610 ) -> Self {
2611 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2612 Repository {
2613 git_store,
2614 snapshot,
2615 commit_message_buffer: None,
2616 askpass_delegates: Default::default(),
2617 paths_needing_status_update: Default::default(),
2618 latest_askpass_id: 0,
2619 job_sender: Repository::spawn_local_git_worker(
2620 work_directory_abs_path,
2621 dot_git_abs_path,
2622 project_environment,
2623 fs,
2624 cx,
2625 ),
2626 }
2627 }
2628
2629 fn remote(
2630 id: RepositoryId,
2631 work_directory_abs_path: Arc<Path>,
2632 project_id: ProjectId,
2633 client: AnyProtoClient,
2634 git_store: WeakEntity<GitStore>,
2635 cx: &mut Context<Self>,
2636 ) -> Self {
2637 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2638 Self {
2639 snapshot,
2640 commit_message_buffer: None,
2641 git_store,
2642 paths_needing_status_update: Default::default(),
2643 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2644 askpass_delegates: Default::default(),
2645 latest_askpass_id: 0,
2646 }
2647 }
2648
2649 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2650 self.git_store.upgrade()
2651 }
2652
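    /// Enqueues an unkeyed job on this repository's serial worker; see
    /// [`Self::send_keyed_job`].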
2653 pub fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
2654 where
2655 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2656 Fut: Future<Output = R> + 'static,
2657 R: Send + 'static,
2658 {
2659 self.send_keyed_job(None, job)
2660 }
2661
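    /// Enqueues a job on this repository's serial worker. If `key` is provided and a
    /// newer job with the same key is already queued, the worker drops this job and
    /// runs only the newest one. The returned receiver yields the job's result once
    /// it runs.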
2662 fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
2663 where
2664 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2665 Fut: Future<Output = R> + 'static,
2666 R: Send + 'static,
2667 {
2668 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2669 self.job_sender
2670 .unbounded_send(GitJob {
2671 key,
2672 job: Box::new(|state, cx: &mut AsyncApp| {
2673 let job = job(state, cx.clone());
2674 cx.spawn(async move |_| {
2675 let result = job.await;
2676 result_tx.send(result).ok();
2677 })
2678 }),
2679 })
2680 .ok();
2681 result_rx
2682 }
2683
2684 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2685 let Some(git_store) = self.git_store.upgrade() else {
2686 return;
2687 };
2688 let entity = cx.entity();
2689 git_store.update(cx, |git_store, cx| {
2690 let Some((&id, _)) = git_store
2691 .repositories
2692 .iter()
2693 .find(|(_, handle)| *handle == &entity)
2694 else {
2695 return;
2696 };
2697 git_store.active_repo_id = Some(id);
2698 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2699 });
2700 }
2701
2702 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2703 self.snapshot.status()
2704 }
2705
2706 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2707 let git_store = self.git_store.upgrade()?;
2708 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2709 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2710 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2711 Some(ProjectPath {
2712 worktree_id: worktree.read(cx).id(),
2713 path: relative_path.into(),
2714 })
2715 }
2716
2717 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2718 let git_store = self.git_store.upgrade()?;
2719 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2720 let abs_path = worktree_store.absolutize(path, cx)?;
2721 self.snapshot.abs_path_to_repo_path(&abs_path)
2722 }
2723
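    /// Returns true if `other`'s working directory is contained within this
    /// repository's working directory.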
2724 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2725 other
2726 .read(cx)
2727 .snapshot
2728 .work_directory_abs_path
2729 .starts_with(&self.snapshot.work_directory_abs_path)
2730 }
2731
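    /// Returns the shared commit message buffer, creating it on first use: locally a
    /// fresh buffer is created in the buffer store, while remotely one is requested
    /// from the host and awaited.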
2732 pub fn open_commit_buffer(
2733 &mut self,
2734 languages: Option<Arc<LanguageRegistry>>,
2735 buffer_store: Entity<BufferStore>,
2736 cx: &mut Context<Self>,
2737 ) -> Task<Result<Entity<Buffer>>> {
2738 let id = self.id;
2739 if let Some(buffer) = self.commit_message_buffer.clone() {
2740 return Task::ready(Ok(buffer));
2741 }
2742 let this = cx.weak_entity();
2743
2744 let rx = self.send_job(move |state, mut cx| async move {
2745 let Some(this) = this.upgrade() else {
                bail!("repository was dropped");
2747 };
2748 match state {
2749 RepositoryState::Local { .. } => {
2750 this.update(&mut cx, |_, cx| {
2751 Self::open_local_commit_buffer(languages, buffer_store, cx)
2752 })?
2753 .await
2754 }
2755 RepositoryState::Remote { project_id, client } => {
2756 let request = client.request(proto::OpenCommitMessageBuffer {
2757 project_id: project_id.0,
2758 repository_id: id.to_proto(),
2759 });
2760 let response = request.await.context("requesting to open commit buffer")?;
2761 let buffer_id = BufferId::new(response.buffer_id)?;
2762 let buffer = buffer_store
2763 .update(&mut cx, |buffer_store, cx| {
2764 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2765 })?
2766 .await?;
2767 if let Some(language_registry) = languages {
2768 let git_commit_language =
2769 language_registry.language_for_name("Git Commit").await?;
2770 buffer.update(&mut cx, |buffer, cx| {
2771 buffer.set_language(Some(git_commit_language), cx);
2772 })?;
2773 }
2774 this.update(&mut cx, |this, _| {
2775 this.commit_message_buffer = Some(buffer.clone());
2776 })?;
2777 Ok(buffer)
2778 }
2779 }
2780 });
2781
2782 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2783 }
2784
2785 fn open_local_commit_buffer(
2786 language_registry: Option<Arc<LanguageRegistry>>,
2787 buffer_store: Entity<BufferStore>,
2788 cx: &mut Context<Self>,
2789 ) -> Task<Result<Entity<Buffer>>> {
2790 cx.spawn(async move |repository, cx| {
2791 let buffer = buffer_store
2792 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2793 .await?;
2794
2795 if let Some(language_registry) = language_registry {
2796 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2797 buffer.update(cx, |buffer, cx| {
2798 buffer.set_language(Some(git_commit_language), cx);
2799 })?;
2800 }
2801
2802 repository.update(cx, |repository, _| {
2803 repository.commit_message_buffer = Some(buffer.clone());
2804 })?;
2805 Ok(buffer)
2806 })
2807 }
2808
2809 pub fn checkout_files(
2810 &self,
2811 commit: &str,
2812 paths: Vec<RepoPath>,
2813 _cx: &mut App,
2814 ) -> oneshot::Receiver<Result<()>> {
2815 let commit = commit.to_string();
2816 let id = self.id;
2817
2818 self.send_job(move |git_repo, _| async move {
2819 match git_repo {
2820 RepositoryState::Local {
2821 backend,
2822 environment,
2823 ..
2824 } => {
2825 backend
2826 .checkout_files(commit, paths, environment.clone())
2827 .await
2828 }
2829 RepositoryState::Remote { project_id, client } => {
2830 client
2831 .request(proto::GitCheckoutFiles {
2832 project_id: project_id.0,
2833 repository_id: id.to_proto(),
2834 commit,
2835 paths: paths
2836 .into_iter()
2837 .map(|p| p.to_string_lossy().to_string())
2838 .collect(),
2839 })
2840 .await?;
2841
2842 Ok(())
2843 }
2844 }
2845 })
2846 }
2847
2848 pub fn reset(
2849 &self,
2850 commit: String,
2851 reset_mode: ResetMode,
2852 _cx: &mut App,
2853 ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
2856
2857 self.send_job(move |git_repo, _| async move {
2858 match git_repo {
2859 RepositoryState::Local {
2860 backend,
2861 environment,
2862 ..
2863 } => backend.reset(commit, reset_mode, environment).await,
2864 RepositoryState::Remote { project_id, client } => {
2865 client
2866 .request(proto::GitReset {
2867 project_id: project_id.0,
2868 repository_id: id.to_proto(),
2869 commit,
2870 mode: match reset_mode {
2871 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
2872 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
2873 },
2874 })
2875 .await?;
2876
2877 Ok(())
2878 }
2879 }
2880 })
2881 }
2882
2883 pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
2884 let id = self.id;
2885 self.send_job(move |git_repo, _cx| async move {
2886 match git_repo {
2887 RepositoryState::Local { backend, .. } => backend.show(commit).await,
2888 RepositoryState::Remote { project_id, client } => {
2889 let resp = client
2890 .request(proto::GitShow {
2891 project_id: project_id.0,
2892 repository_id: id.to_proto(),
2893 commit,
2894 })
2895 .await?;
2896
2897 Ok(CommitDetails {
2898 sha: resp.sha.into(),
2899 message: resp.message.into(),
2900 commit_timestamp: resp.commit_timestamp,
2901 author_email: resp.author_email.into(),
2902 author_name: resp.author_name.into(),
2903 })
2904 }
2905 }
2906 })
2907 }
2908
2909 pub fn load_commit_diff(&self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
2910 let id = self.id;
2911 self.send_job(move |git_repo, cx| async move {
2912 match git_repo {
2913 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
2914 RepositoryState::Remote {
2915 client, project_id, ..
2916 } => {
2917 let response = client
2918 .request(proto::LoadCommitDiff {
2919 project_id: project_id.0,
2920 repository_id: id.to_proto(),
2921 commit,
2922 })
2923 .await?;
2924 Ok(CommitDiff {
2925 files: response
2926 .files
2927 .into_iter()
2928 .map(|file| CommitFile {
2929 path: Path::new(&file.path).into(),
2930 old_text: file.old_text,
2931 new_text: file.new_text,
2932 })
2933 .collect(),
2934 })
2935 }
2936 }
2937 })
2938 }
2939
2940 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
2941 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
2942 }
2943
2944 pub fn stage_entries(
2945 &self,
2946 entries: Vec<RepoPath>,
2947 cx: &mut Context<Self>,
2948 ) -> Task<anyhow::Result<()>> {
2949 if entries.is_empty() {
2950 return Task::ready(Ok(()));
2951 }
2952 let id = self.id;
2953
2954 let mut save_futures = Vec::new();
2955 if let Some(buffer_store) = self.buffer_store(cx) {
2956 buffer_store.update(cx, |buffer_store, cx| {
2957 for path in &entries {
2958 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
2959 continue;
2960 };
2961 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
2962 if buffer
2963 .read(cx)
2964 .file()
2965 .map_or(false, |file| file.disk_state().exists())
2966 {
2967 save_futures.push(buffer_store.save_buffer(buffer, cx));
2968 }
2969 }
2970 }
2971 })
2972 }
2973
2974 cx.spawn(async move |this, cx| {
2975 for save_future in save_futures {
2976 save_future.await?;
2977 }
2978
2979 this.update(cx, |this, _| {
2980 this.send_job(move |git_repo, _cx| async move {
2981 match git_repo {
2982 RepositoryState::Local {
2983 backend,
2984 environment,
2985 ..
2986 } => backend.stage_paths(entries, environment.clone()).await,
2987 RepositoryState::Remote { project_id, client } => {
2988 client
2989 .request(proto::Stage {
2990 project_id: project_id.0,
2991 repository_id: id.to_proto(),
2992 paths: entries
2993 .into_iter()
2994 .map(|repo_path| repo_path.as_ref().to_proto())
2995 .collect(),
2996 })
2997 .await
2998 .context("sending stage request")?;
2999
3000 Ok(())
3001 }
3002 }
3003 })
3004 })?
3005 .await??;
3006
3007 Ok(())
3008 })
3009 }
3010
3011 pub fn unstage_entries(
3012 &self,
3013 entries: Vec<RepoPath>,
3014 cx: &mut Context<Self>,
3015 ) -> Task<anyhow::Result<()>> {
3016 if entries.is_empty() {
3017 return Task::ready(Ok(()));
3018 }
3019 let id = self.id;
3020
3021 let mut save_futures = Vec::new();
3022 if let Some(buffer_store) = self.buffer_store(cx) {
3023 buffer_store.update(cx, |buffer_store, cx| {
3024 for path in &entries {
3025 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3026 continue;
3027 };
3028 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3029 if buffer
3030 .read(cx)
3031 .file()
3032 .map_or(false, |file| file.disk_state().exists())
3033 {
3034 save_futures.push(buffer_store.save_buffer(buffer, cx));
3035 }
3036 }
3037 }
3038 })
3039 }
3040
3041 cx.spawn(async move |this, cx| {
3042 for save_future in save_futures {
3043 save_future.await?;
3044 }
3045
3046 this.update(cx, |this, _| {
3047 this.send_job(move |git_repo, _cx| async move {
3048 match git_repo {
3049 RepositoryState::Local {
3050 backend,
3051 environment,
3052 ..
3053 } => backend.unstage_paths(entries, environment).await,
3054 RepositoryState::Remote { project_id, client } => {
3055 client
3056 .request(proto::Unstage {
3057 project_id: project_id.0,
3058 repository_id: id.to_proto(),
3059 paths: entries
3060 .into_iter()
3061 .map(|repo_path| repo_path.as_ref().to_proto())
3062 .collect(),
3063 })
3064 .await
3065 .context("sending unstage request")?;
3066
3067 Ok(())
3068 }
3069 }
3070 })
3071 })?
3072 .await??;
3073
3074 Ok(())
3075 })
3076 }
3077
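    /// Stages every entry whose status is not already fully staged.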
3078 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3079 let to_stage = self
3080 .cached_status()
3081 .filter(|entry| !entry.status.staging().is_fully_staged())
3082 .map(|entry| entry.repo_path.clone())
3083 .collect();
3084 self.stage_entries(to_stage, cx)
3085 }
3086
3087 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3088 let to_unstage = self
3089 .cached_status()
3090 .filter(|entry| entry.status.staging().has_staged())
3091 .map(|entry| entry.repo_path.clone())
3092 .collect();
3093 self.unstage_entries(to_unstage, cx)
3094 }
3095
3096 pub fn commit(
3097 &self,
3098 message: SharedString,
3099 name_and_email: Option<(SharedString, SharedString)>,
3100 _cx: &mut App,
3101 ) -> oneshot::Receiver<Result<()>> {
3102 let id = self.id;
3103
3104 self.send_job(move |git_repo, _cx| async move {
3105 match git_repo {
3106 RepositoryState::Local {
3107 backend,
3108 environment,
3109 ..
3110 } => backend.commit(message, name_and_email, environment).await,
3111 RepositoryState::Remote { project_id, client } => {
3112 let (name, email) = name_and_email.unzip();
3113 client
3114 .request(proto::Commit {
3115 project_id: project_id.0,
3116 repository_id: id.to_proto(),
3117 message: String::from(message),
3118 name: name.map(String::from),
3119 email: email.map(String::from),
3120 })
3121 .await
3122 .context("sending commit request")?;
3123
3124 Ok(())
3125 }
3126 }
3127 })
3128 }
3129
3130 pub fn fetch(
3131 &mut self,
3132 askpass: AskPassDelegate,
3133 _cx: &mut App,
3134 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3135 let askpass_delegates = self.askpass_delegates.clone();
3136 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3137 let id = self.id;
3138
3139 self.send_job(move |git_repo, cx| async move {
3140 match git_repo {
3141 RepositoryState::Local {
3142 backend,
3143 environment,
3144 ..
3145 } => backend.fetch(askpass, environment, cx).await,
3146 RepositoryState::Remote { project_id, client } => {
3147 askpass_delegates.lock().insert(askpass_id, askpass);
3148 let _defer = util::defer(|| {
3149 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3150 debug_assert!(askpass_delegate.is_some());
3151 });
3152
3153 let response = client
3154 .request(proto::Fetch {
3155 project_id: project_id.0,
3156 repository_id: id.to_proto(),
3157 askpass_id,
3158 })
3159 .await
3160 .context("sending fetch request")?;
3161
3162 Ok(RemoteCommandOutput {
3163 stdout: response.stdout,
3164 stderr: response.stderr,
3165 })
3166 }
3167 }
3168 })
3169 }
3170
3171 pub fn push(
3172 &mut self,
3173 branch: SharedString,
3174 remote: SharedString,
3175 options: Option<PushOptions>,
3176 askpass: AskPassDelegate,
3177 _cx: &mut App,
3178 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3179 let askpass_delegates = self.askpass_delegates.clone();
3180 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3181 let id = self.id;
3182
3183 self.send_job(move |git_repo, cx| async move {
3184 match git_repo {
3185 RepositoryState::Local {
3186 backend,
3187 environment,
3188 ..
3189 } => {
3190 backend
3191 .push(
3192 branch.to_string(),
3193 remote.to_string(),
3194 options,
3195 askpass,
3196 environment.clone(),
3197 cx,
3198 )
3199 .await
3200 }
3201 RepositoryState::Remote { project_id, client } => {
3202 askpass_delegates.lock().insert(askpass_id, askpass);
3203 let _defer = util::defer(|| {
3204 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3205 debug_assert!(askpass_delegate.is_some());
3206 });
3207 let response = client
3208 .request(proto::Push {
3209 project_id: project_id.0,
3210 repository_id: id.to_proto(),
3211 askpass_id,
3212 branch_name: branch.to_string(),
3213 remote_name: remote.to_string(),
3214 options: options.map(|options| match options {
3215 PushOptions::Force => proto::push::PushOptions::Force,
3216 PushOptions::SetUpstream => proto::push::PushOptions::SetUpstream,
3217 } as i32),
3218 })
3219 .await
3220 .context("sending push request")?;
3221
3222 Ok(RemoteCommandOutput {
3223 stdout: response.stdout,
3224 stderr: response.stderr,
3225 })
3226 }
3227 }
3228 })
3229 }
3230
3231 pub fn pull(
3232 &mut self,
3233 branch: SharedString,
3234 remote: SharedString,
3235 askpass: AskPassDelegate,
3236 _cx: &mut App,
3237 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3238 let askpass_delegates = self.askpass_delegates.clone();
3239 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3240 let id = self.id;
3241
3242 self.send_job(move |git_repo, cx| async move {
3243 match git_repo {
3244 RepositoryState::Local {
3245 backend,
3246 environment,
3247 ..
3248 } => {
3249 backend
3250 .pull(
3251 branch.to_string(),
3252 remote.to_string(),
3253 askpass,
3254 environment.clone(),
3255 cx,
3256 )
3257 .await
3258 }
3259 RepositoryState::Remote { project_id, client } => {
3260 askpass_delegates.lock().insert(askpass_id, askpass);
3261 let _defer = util::defer(|| {
3262 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3263 debug_assert!(askpass_delegate.is_some());
3264 });
3265 let response = client
3266 .request(proto::Pull {
3267 project_id: project_id.0,
3268 repository_id: id.to_proto(),
3269 askpass_id,
3270 branch_name: branch.to_string(),
3271 remote_name: remote.to_string(),
3272 })
3273 .await
3274 .context("sending pull request")?;
3275
3276 Ok(RemoteCommandOutput {
3277 stdout: response.stdout,
3278 stderr: response.stderr,
3279 })
3280 }
3281 }
3282 })
3283 }
3284
3285 fn spawn_set_index_text_job(
3286 &self,
3287 path: RepoPath,
3288 content: Option<String>,
3289 _cx: &mut App,
3290 ) -> oneshot::Receiver<anyhow::Result<()>> {
3291 let id = self.id;
3292
3293 self.send_keyed_job(
3294 Some(GitJobKey::WriteIndex(path.clone())),
3295 move |git_repo, _cx| async move {
3296 match git_repo {
3297 RepositoryState::Local {
3298 backend,
3299 environment,
3300 ..
3301 } => {
3302 backend
3303 .set_index_text(path, content, environment.clone())
3304 .await
3305 }
3306 RepositoryState::Remote { project_id, client } => {
3307 client
3308 .request(proto::SetIndexText {
3309 project_id: project_id.0,
3310 repository_id: id.to_proto(),
3311 path: path.as_ref().to_proto(),
3312 text: content,
3313 })
3314 .await?;
3315 Ok(())
3316 }
3317 }
3318 },
3319 )
3320 }
3321
3322 pub fn get_remotes(
3323 &self,
3324 branch_name: Option<String>,
3325 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3326 let id = self.id;
3327 self.send_job(move |repo, _cx| async move {
3328 match repo {
3329 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3330 RepositoryState::Remote { project_id, client } => {
3331 let response = client
3332 .request(proto::GetRemotes {
3333 project_id: project_id.0,
3334 repository_id: id.to_proto(),
3335 branch_name,
3336 })
3337 .await?;
3338
3339 let remotes = response
3340 .remotes
3341 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
3345 .collect();
3346
3347 Ok(remotes)
3348 }
3349 }
3350 })
3351 }
3352
3353 pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3354 let id = self.id;
3355 self.send_job(move |repo, cx| async move {
3356 match repo {
3357 RepositoryState::Local { backend, .. } => {
3358 let backend = backend.clone();
3359 cx.background_spawn(async move { backend.branches().await })
3360 .await
3361 }
3362 RepositoryState::Remote { project_id, client } => {
3363 let response = client
3364 .request(proto::GitGetBranches {
3365 project_id: project_id.0,
3366 repository_id: id.to_proto(),
3367 })
3368 .await?;
3369
3370 let branches = response
3371 .branches
3372 .into_iter()
3373 .map(|branch| proto_to_branch(&branch))
3374 .collect();
3375
3376 Ok(branches)
3377 }
3378 }
3379 })
3380 }
3381
3382 pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3383 let id = self.id;
3384 self.send_job(move |repo, _cx| async move {
3385 match repo {
3386 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3387 RepositoryState::Remote { project_id, client } => {
3388 let response = client
3389 .request(proto::GitDiff {
3390 project_id: project_id.0,
3391 repository_id: id.to_proto(),
3392 diff_type: match diff_type {
3393 DiffType::HeadToIndex => {
3394 proto::git_diff::DiffType::HeadToIndex.into()
3395 }
3396 DiffType::HeadToWorktree => {
3397 proto::git_diff::DiffType::HeadToWorktree.into()
3398 }
3399 },
3400 })
3401 .await?;
3402
3403 Ok(response.diff)
3404 }
3405 }
3406 })
3407 }
3408
3409 pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3410 let id = self.id;
3411 self.send_job(move |repo, _cx| async move {
3412 match repo {
3413 RepositoryState::Local { backend, .. } => backend.create_branch(branch_name).await,
3414 RepositoryState::Remote { project_id, client } => {
3415 client
3416 .request(proto::GitCreateBranch {
3417 project_id: project_id.0,
3418 repository_id: id.to_proto(),
3419 branch_name,
3420 })
3421 .await?;
3422
3423 Ok(())
3424 }
3425 }
3426 })
3427 }
3428
3429 pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3430 let id = self.id;
3431 self.send_job(move |repo, _cx| async move {
3432 match repo {
3433 RepositoryState::Local { backend, .. } => backend.change_branch(branch_name).await,
3434 RepositoryState::Remote { project_id, client } => {
3435 client
3436 .request(proto::GitChangeBranch {
3437 project_id: project_id.0,
3438 repository_id: id.to_proto(),
3439 branch_name,
3440 })
3441 .await?;
3442
3443 Ok(())
3444 }
3445 }
3446 })
3447 }
3448
3449 pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3450 let id = self.id;
3451 self.send_job(move |repo, _cx| async move {
3452 match repo {
3453 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3454 RepositoryState::Remote { project_id, client } => {
3455 let response = client
3456 .request(proto::CheckForPushedCommits {
3457 project_id: project_id.0,
3458 repository_id: id.to_proto(),
3459 })
3460 .await?;
3461
3462 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3463
3464 Ok(branches)
3465 }
3466 }
3467 })
3468 }
3469
3470 pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3471 self.send_job(|repo, _cx| async move {
3472 match repo {
3473 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3474 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3475 }
3476 })
3477 }
3478
3479 pub fn restore_checkpoint(
3480 &self,
3481 checkpoint: GitRepositoryCheckpoint,
3482 ) -> oneshot::Receiver<Result<()>> {
3483 self.send_job(move |repo, _cx| async move {
3484 match repo {
3485 RepositoryState::Local { backend, .. } => {
3486 backend.restore_checkpoint(checkpoint).await
3487 }
3488 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3489 }
3490 })
3491 }
3492
3493 pub(crate) fn apply_remote_update(
3494 &mut self,
3495 update: proto::UpdateRepository,
3496 cx: &mut Context<Self>,
3497 ) -> Result<()> {
3498 let conflicted_paths = TreeSet::from_ordered_entries(
3499 update
3500 .current_merge_conflicts
3501 .into_iter()
3502 .map(|path| RepoPath(Path::new(&path).into())),
3503 );
3504 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3505 self.snapshot.merge_conflicts = conflicted_paths;
3506
3507 let edits = update
3508 .removed_statuses
3509 .into_iter()
3510 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3511 .chain(
3512 update
3513 .updated_statuses
3514 .into_iter()
3515 .filter_map(|updated_status| {
3516 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3517 }),
3518 )
3519 .collect::<Vec<_>>();
3520 self.snapshot.statuses_by_path.edit(edits, &());
3521 if update.is_last_update {
3522 self.snapshot.scan_id = update.scan_id;
3523 }
3524 cx.emit(RepositoryEvent::Updated);
3525 Ok(())
3526 }
3527
3528 pub fn compare_checkpoints(
3529 &self,
3530 left: GitRepositoryCheckpoint,
3531 right: GitRepositoryCheckpoint,
3532 ) -> oneshot::Receiver<Result<bool>> {
3533 self.send_job(move |repo, _cx| async move {
3534 match repo {
3535 RepositoryState::Local { backend, .. } => {
3536 backend.compare_checkpoints(left, right).await
3537 }
3538 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3539 }
3540 })
3541 }
3542
3543 pub fn delete_checkpoint(
3544 &self,
3545 checkpoint: GitRepositoryCheckpoint,
3546 ) -> oneshot::Receiver<Result<()>> {
3547 self.send_job(move |repo, _cx| async move {
3548 match repo {
3549 RepositoryState::Local { backend, .. } => {
3550 backend.delete_checkpoint(checkpoint).await
3551 }
3552 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3553 }
3554 })
3555 }
3556
3557 pub fn diff_checkpoints(
3558 &self,
3559 base_checkpoint: GitRepositoryCheckpoint,
3560 target_checkpoint: GitRepositoryCheckpoint,
3561 ) -> oneshot::Receiver<Result<String>> {
3562 self.send_job(move |repo, _cx| async move {
3563 match repo {
3564 RepositoryState::Local { backend, .. } => {
3565 backend
3566 .diff_checkpoints(base_checkpoint, target_checkpoint)
3567 .await
3568 }
3569 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3570 }
3571 })
3572 }
3573
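    /// Schedules a full rescan: refreshes the work directory status, then recomputes
    /// the repository snapshot on the git worker and forwards it to any downstream
    /// client.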
3574 fn schedule_scan(
3575 &mut self,
3576 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3577 cx: &mut Context<Self>,
3578 ) {
3579 self.paths_changed(
3580 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3581 updates_tx.clone(),
3582 cx,
3583 );
3584
3585 let this = cx.weak_entity();
3586 let _ = self.send_keyed_job(
3587 Some(GitJobKey::ReloadGitState),
3588 |state, mut cx| async move {
3589 let Some(this) = this.upgrade() else {
3590 return Ok(());
3591 };
3592 let RepositoryState::Local { backend, .. } = state else {
3593 bail!("not a local repository")
3594 };
3595 let (snapshot, events) = this
3596 .update(&mut cx, |this, _| {
3597 compute_snapshot(
3598 this.id,
3599 this.work_directory_abs_path.clone(),
3600 this.snapshot.clone(),
3601 backend.clone(),
3602 )
3603 })?
3604 .await?;
3605 this.update(&mut cx, |this, cx| {
3606 this.snapshot = snapshot.clone();
3607 for event in events {
3608 cx.emit(event);
3609 }
3610 })?;
3611 if let Some(updates_tx) = updates_tx {
3612 updates_tx
3613 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3614 .ok();
3615 }
3616 Ok(())
3617 },
3618 );
3619 }
3620
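    /// Spawns the serial worker loop for a local repository: resolves the project
    /// environment, opens the git backend for the `.git` directory, then drains
    /// queued jobs one at a time, skipping keyed jobs that have been superseded by a
    /// newer job with the same key.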
3621 fn spawn_local_git_worker(
3622 work_directory_abs_path: Arc<Path>,
3623 dot_git_abs_path: Arc<Path>,
3624 project_environment: WeakEntity<ProjectEnvironment>,
3625 fs: Arc<dyn Fs>,
3626 cx: &mut Context<Self>,
3627 ) -> mpsc::UnboundedSender<GitJob> {
3628 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3629
3630 cx.spawn(async move |_, cx| {
3631 let environment = project_environment
3632 .upgrade()
3633 .ok_or_else(|| anyhow!("missing project environment"))?
3634 .update(cx, |project_environment, cx| {
3635 project_environment.get_environment(Some(work_directory_abs_path.clone()), cx)
3636 })?
3637 .await
3638 .unwrap_or_else(|| {
3639 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
3640 HashMap::default()
3641 });
3642 let backend = cx
3643 .background_spawn(async move {
3644 fs.open_repo(&dot_git_abs_path)
3645 .ok_or_else(|| anyhow!("failed to build repository"))
3646 })
3647 .await?;
3648
3649 if let Some(git_hosting_provider_registry) =
3650 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3651 {
3652 git_hosting_providers::register_additional_providers(
3653 git_hosting_provider_registry,
3654 backend.clone(),
3655 );
3656 }
3657
3658 let state = RepositoryState::Local {
3659 backend,
3660 environment: Arc::new(environment),
3661 };
3662 let mut jobs = VecDeque::new();
3663 loop {
3664 while let Ok(Some(next_job)) = job_rx.try_next() {
3665 jobs.push_back(next_job);
3666 }
3667
3668 if let Some(job) = jobs.pop_front() {
3669 if let Some(current_key) = &job.key {
3670 if jobs
3671 .iter()
3672 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3673 {
3674 continue;
3675 }
3676 }
3677 (job.job)(state.clone(), cx).await;
3678 } else if let Some(job) = job_rx.next().await {
3679 jobs.push_back(job);
3680 } else {
3681 break;
3682 }
3683 }
3684 anyhow::Ok(())
3685 })
3686 .detach_and_log_err(cx);
3687
3688 job_tx
3689 }
3690
3691 fn spawn_remote_git_worker(
3692 project_id: ProjectId,
3693 client: AnyProtoClient,
3694 cx: &mut Context<Self>,
3695 ) -> mpsc::UnboundedSender<GitJob> {
3696 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3697
3698 cx.spawn(async move |_, cx| {
3699 let state = RepositoryState::Remote { project_id, client };
3700 let mut jobs = VecDeque::new();
3701 loop {
3702 while let Ok(Some(next_job)) = job_rx.try_next() {
3703 jobs.push_back(next_job);
3704 }
3705
3706 if let Some(job) = jobs.pop_front() {
3707 if let Some(current_key) = &job.key {
3708 if jobs
3709 .iter()
3710 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3711 {
3712 continue;
3713 }
3714 }
3715 (job.job)(state.clone(), cx).await;
3716 } else if let Some(job) = job_rx.next().await {
3717 jobs.push_back(job);
3718 } else {
3719 break;
3720 }
3721 }
3722 anyhow::Ok(())
3723 })
3724 .detach_and_log_err(cx);
3725
3726 job_tx
3727 }
3728
3729 fn load_staged_text(
3730 &self,
3731 buffer_id: BufferId,
3732 repo_path: RepoPath,
3733 cx: &App,
3734 ) -> Task<Result<Option<String>>> {
3735 let rx = self.send_job(move |state, _| async move {
3736 match state {
3737 RepositoryState::Local { backend, .. } => {
3738 anyhow::Ok(backend.load_index_text(repo_path).await)
3739 }
3740 RepositoryState::Remote { project_id, client } => {
3741 let response = client
3742 .request(proto::OpenUnstagedDiff {
3743 project_id: project_id.to_proto(),
3744 buffer_id: buffer_id.to_proto(),
3745 })
3746 .await?;
3747 Ok(response.staged_text)
3748 }
3749 }
3750 });
3751 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3752 }
3753
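    /// Loads the HEAD and index texts for a file. Locally both are read from the
    /// backend and collapsed into `SetBoth` when identical; remotely they are
    /// obtained via an `OpenUncommittedDiff` request.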
3754 fn load_committed_text(
3755 &self,
3756 buffer_id: BufferId,
3757 repo_path: RepoPath,
3758 cx: &App,
3759 ) -> Task<Result<DiffBasesChange>> {
3760 let rx = self.send_job(move |state, _| async move {
3761 match state {
3762 RepositoryState::Local { backend, .. } => {
3763 let committed_text = backend.load_committed_text(repo_path.clone()).await;
3764 let staged_text = backend.load_index_text(repo_path).await;
3765 let diff_bases_change = if committed_text == staged_text {
3766 DiffBasesChange::SetBoth(committed_text)
3767 } else {
3768 DiffBasesChange::SetEach {
3769 index: staged_text,
3770 head: committed_text,
3771 }
3772 };
3773 anyhow::Ok(diff_bases_change)
3774 }
3775 RepositoryState::Remote { project_id, client } => {
3776 use proto::open_uncommitted_diff_response::Mode;
3777
3778 let response = client
3779 .request(proto::OpenUncommittedDiff {
3780 project_id: project_id.to_proto(),
3781 buffer_id: buffer_id.to_proto(),
3782 })
3783 .await?;
3784 let mode =
3785 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
3786 let bases = match mode {
3787 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
3788 Mode::IndexAndHead => DiffBasesChange::SetEach {
3789 head: response.committed_text,
3790 index: response.staged_text,
3791 },
3792 };
3793 Ok(bases)
3794 }
3795 }
3796 });
3797
3798 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3799 }
3800
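    /// Records paths whose status may have changed and schedules a keyed refresh job
    /// that re-queries the backend, applies any status differences to the snapshot,
    /// bumps the scan id, and notifies downstream clients.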
3801 fn paths_changed(
3802 &mut self,
3803 paths: Vec<RepoPath>,
3804 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3805 cx: &mut Context<Self>,
3806 ) {
3807 self.paths_needing_status_update.extend(paths);
3808
3809 let this = cx.weak_entity();
3810 let _ = self.send_keyed_job(
3811 Some(GitJobKey::RefreshStatuses),
3812 |state, mut cx| async move {
3813 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
3814 (
3815 this.snapshot.clone(),
3816 mem::take(&mut this.paths_needing_status_update),
3817 )
3818 })?;
3819 let RepositoryState::Local { backend, .. } = state else {
3820 bail!("not a local repository")
3821 };
3822
3823 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
3824 let statuses = backend.status(&paths).await?;
3825
3826 let changed_path_statuses = cx
3827 .background_spawn(async move {
3828 let mut changed_path_statuses = Vec::new();
3829 let prev_statuses = prev_snapshot.statuses_by_path.clone();
3830 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3831
3832 for (repo_path, status) in &*statuses.entries {
3833 changed_paths.remove(repo_path);
3834 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
3835 if &cursor.item().unwrap().status == status {
3836 continue;
3837 }
3838 }
3839
3840 changed_path_statuses.push(Edit::Insert(StatusEntry {
3841 repo_path: repo_path.clone(),
3842 status: *status,
3843 }));
3844 }
3845 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3846 for path in changed_paths.iter() {
3847 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
3848 changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
3849 }
3850 }
3851 changed_path_statuses
3852 })
3853 .await;
3854
3855 this.update(&mut cx, |this, cx| {
3856 if !changed_path_statuses.is_empty() {
3857 this.snapshot
3858 .statuses_by_path
3859 .edit(changed_path_statuses, &());
3860 this.snapshot.scan_id += 1;
3861 if let Some(updates_tx) = updates_tx {
3862 updates_tx
3863 .unbounded_send(DownstreamUpdate::UpdateRepository(
3864 this.snapshot.clone(),
3865 ))
3866 .ok();
3867 }
3868 }
3869 cx.emit(RepositoryEvent::Updated);
3870 })
3871 },
3872 );
3873 }
3874}
3875
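/// Builds a permalink for a file inside the Rust registry source cache by reading
/// the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the upstream
/// repository URL and commit SHA.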
3876fn get_permalink_in_rust_registry_src(
3877 provider_registry: Arc<GitHostingProviderRegistry>,
3878 path: PathBuf,
3879 selection: Range<u32>,
3880) -> Result<url::Url> {
3881 #[derive(Deserialize)]
3882 struct CargoVcsGit {
3883 sha1: String,
3884 }
3885
3886 #[derive(Deserialize)]
3887 struct CargoVcsInfo {
3888 git: CargoVcsGit,
3889 path_in_vcs: String,
3890 }
3891
3892 #[derive(Deserialize)]
3893 struct CargoPackage {
3894 repository: String,
3895 }
3896
3897 #[derive(Deserialize)]
3898 struct CargoToml {
3899 package: CargoPackage,
3900 }
3901
3902 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
3903 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
3904 Some((dir, json))
3905 }) else {
3906 bail!("No .cargo_vcs_info.json found in parent directories")
3907 };
3908 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
3909 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
3910 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
3911 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
3912 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
3913 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
3914 let permalink = provider.build_permalink(
3915 remote,
3916 BuildPermalinkParams {
3917 sha: &cargo_vcs_info.git.sha1,
3918 path: &path.to_string_lossy(),
3919 selection: Some(selection),
3920 },
3921 );
3922 Ok(permalink)
3923}
3924
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

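/// Reconstructs a [`Blame`] from an RPC response, skipping any entries or
/// commit messages whose object ids fail to parse.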
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

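/// Converts a local branch into its proto representation for transmission to
/// remote clients.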
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

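/// Converts a branch received over RPC back into the local representation.
/// Absent tracking information maps to `UpstreamTracking::Gone`.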
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

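/// Queries the git backend for the current branch, statuses, merge message, and
/// merge heads, producing a fresh [`RepositorySnapshot`] along with the
/// [`RepositoryEvent`]s implied by the differences from `prev_snapshot`.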
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated);
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

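/// Decodes a [`FileStatus`] from its proto form, falling back to the flat
/// `simple_status` code when no structured variant is present.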
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

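/// Encodes a [`FileStatus`] as its structured proto variant.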
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

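/// Maps an [`UnmergedStatusCode`] to its wire-format [`proto::GitStatus`] value.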
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

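/// Maps a tracked-file [`StatusCode`] to its wire-format [`proto::GitStatus`] value.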
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}