1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use rpc::{
42 AnyProtoClient, TypedEnvelope,
43 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
44};
45use serde::Deserialize;
46use std::{
47 cmp::Ordering,
48 collections::{BTreeSet, VecDeque},
49 future::Future,
50 mem,
51 ops::Range,
52 path::{Path, PathBuf},
53 sync::{
54 Arc,
55 atomic::{self, AtomicU64},
56 },
57};
58use sum_tree::{Edit, SumTree, TreeSet};
59use text::{Bias, BufferId};
60use util::{ResultExt, debug_panic};
61use worktree::{
62 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
63};
64
65pub struct GitStore {
66 state: GitStoreState,
67 buffer_store: Entity<BufferStore>,
68 worktree_store: Entity<WorktreeStore>,
69 repositories: HashMap<RepositoryId, Entity<Repository>>,
70 active_repo_id: Option<RepositoryId>,
71 #[allow(clippy::type_complexity)]
72 loading_diffs:
73 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
74 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
75 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
76 _subscriptions: Vec<Subscription>,
77}
78
79#[derive(Default)]
80struct SharedDiffs {
81 unstaged: Option<Entity<BufferDiff>>,
82 uncommitted: Option<Entity<BufferDiff>>,
83}
84
85#[derive(Default)]
86struct BufferDiffState {
87 unstaged_diff: Option<WeakEntity<BufferDiff>>,
88 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
89 recalculate_diff_task: Option<Task<Result<()>>>,
90 language: Option<Arc<Language>>,
91 language_registry: Option<Arc<LanguageRegistry>>,
92 diff_updated_futures: Vec<oneshot::Sender<()>>,
93 hunk_staging_operation_count: usize,
94
95 head_text: Option<Arc<String>>,
96 index_text: Option<Arc<String>>,
97 head_changed: bool,
98 index_changed: bool,
99 language_changed: bool,
100}
101
102#[derive(Clone, Debug)]
103enum DiffBasesChange {
104 SetIndex(Option<String>),
105 SetHead(Option<String>),
106 SetEach {
107 index: Option<String>,
108 head: Option<String>,
109 },
110 SetBoth(Option<String>),
111}
112
113#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
114enum DiffKind {
115 Unstaged,
116 Uncommitted,
117}
118
119enum GitStoreState {
120 Local {
121 next_repository_id: Arc<AtomicU64>,
122 downstream: Option<LocalDownstreamState>,
123 project_environment: Entity<ProjectEnvironment>,
124 fs: Arc<dyn Fs>,
125 },
126 Ssh {
127 upstream_client: AnyProtoClient,
128 upstream_project_id: ProjectId,
129 downstream: Option<(AnyProtoClient, ProjectId)>,
130 },
131 Remote {
132 upstream_client: AnyProtoClient,
133 upstream_project_id: ProjectId,
134 },
135}
136
137enum DownstreamUpdate {
138 UpdateRepository(RepositorySnapshot),
139 RemoveRepository(RepositoryId),
140}
141
142struct LocalDownstreamState {
143 client: AnyProtoClient,
144 project_id: ProjectId,
145 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
146 _task: Task<Result<()>>,
147}
148
149#[derive(Clone)]
150pub struct GitStoreCheckpoint {
151 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
152}
153
154#[derive(Clone, Debug, PartialEq, Eq)]
155pub struct StatusEntry {
156 pub repo_path: RepoPath,
157 pub status: FileStatus,
158}
159
160impl StatusEntry {
161 fn to_proto(&self) -> proto::StatusEntry {
162 let simple_status = match self.status {
163 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
164 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
165 FileStatus::Tracked(TrackedStatus {
166 index_status,
167 worktree_status,
168 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
169 worktree_status
170 } else {
171 index_status
172 }),
173 };
174
175 proto::StatusEntry {
176 repo_path: self.repo_path.as_ref().to_proto(),
177 simple_status,
178 status: Some(status_to_proto(self.status)),
179 }
180 }
181}
182
183impl TryFrom<proto::StatusEntry> for StatusEntry {
184 type Error = anyhow::Error;
185
186 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
187 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
188 let status = status_from_proto(value.simple_status, value.status)?;
189 Ok(Self { repo_path, status })
190 }
191}
192
193impl sum_tree::Item for StatusEntry {
194 type Summary = PathSummary<GitSummary>;
195
196 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
197 PathSummary {
198 max_path: self.repo_path.0.clone(),
199 item_summary: self.status.summary(),
200 }
201 }
202}
203
204impl sum_tree::KeyedItem for StatusEntry {
205 type Key = PathKey;
206
207 fn key(&self) -> Self::Key {
208 PathKey(self.repo_path.0.clone())
209 }
210}
211
212#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
213pub struct RepositoryId(pub u64);
214
215#[derive(Clone, Debug, PartialEq, Eq)]
216pub struct RepositorySnapshot {
217 pub id: RepositoryId,
218 pub merge_message: Option<SharedString>,
219 pub statuses_by_path: SumTree<StatusEntry>,
220 pub work_directory_abs_path: Arc<Path>,
221 pub branch: Option<Branch>,
222 pub merge_conflicts: TreeSet<RepoPath>,
223 pub merge_head_shas: Vec<SharedString>,
224 pub scan_id: u64,
225}
226
227pub struct Repository {
228 snapshot: RepositorySnapshot,
229 commit_message_buffer: Option<Entity<Buffer>>,
230 git_store: WeakEntity<GitStore>,
231 // For a local repository, holds paths that have had worktree events since the last status scan completed,
232 // and that should be examined during the next status scan.
233 paths_needing_status_update: BTreeSet<RepoPath>,
234 job_sender: mpsc::UnboundedSender<GitJob>,
235 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
236 latest_askpass_id: u64,
237}
238
239impl std::ops::Deref for Repository {
240 type Target = RepositorySnapshot;
241
242 fn deref(&self) -> &Self::Target {
243 &self.snapshot
244 }
245}
246
247#[derive(Clone)]
248pub enum RepositoryState {
249 Local {
250 backend: Arc<dyn GitRepository>,
251 environment: Arc<HashMap<String, String>>,
252 },
253 Remote {
254 project_id: ProjectId,
255 client: AnyProtoClient,
256 },
257}
258
259#[derive(Clone, Debug)]
260pub enum RepositoryEvent {
261 Updated,
262 MergeHeadsChanged,
263}
264
265#[derive(Debug)]
266pub enum GitStoreEvent {
267 ActiveRepositoryChanged(Option<RepositoryId>),
268 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
269 RepositoryAdded(RepositoryId),
270 RepositoryRemoved(RepositoryId),
271 IndexWriteError(anyhow::Error),
272}
273
274impl EventEmitter<RepositoryEvent> for Repository {}
275impl EventEmitter<GitStoreEvent> for GitStore {}
276
277struct GitJob {
278 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
279 key: Option<GitJobKey>,
280}
281
282#[derive(PartialEq, Eq)]
283enum GitJobKey {
284 WriteIndex(RepoPath),
285 BatchReadIndex,
286 RefreshStatuses,
287 ReloadGitState,
288}
289
290impl GitStore {
291 pub fn local(
292 worktree_store: &Entity<WorktreeStore>,
293 buffer_store: Entity<BufferStore>,
294 environment: Entity<ProjectEnvironment>,
295 fs: Arc<dyn Fs>,
296 cx: &mut Context<Self>,
297 ) -> Self {
298 Self::new(
299 worktree_store.clone(),
300 buffer_store,
301 GitStoreState::Local {
302 next_repository_id: Arc::new(AtomicU64::new(1)),
303 downstream: None,
304 project_environment: environment,
305 fs,
306 },
307 cx,
308 )
309 }
310
311 pub fn remote(
312 worktree_store: &Entity<WorktreeStore>,
313 buffer_store: Entity<BufferStore>,
314 upstream_client: AnyProtoClient,
315 project_id: ProjectId,
316 cx: &mut Context<Self>,
317 ) -> Self {
318 Self::new(
319 worktree_store.clone(),
320 buffer_store,
321 GitStoreState::Remote {
322 upstream_client,
323 upstream_project_id: project_id,
324 },
325 cx,
326 )
327 }
328
329 pub fn ssh(
330 worktree_store: &Entity<WorktreeStore>,
331 buffer_store: Entity<BufferStore>,
332 upstream_client: AnyProtoClient,
333 cx: &mut Context<Self>,
334 ) -> Self {
335 Self::new(
336 worktree_store.clone(),
337 buffer_store,
338 GitStoreState::Ssh {
339 upstream_client,
340 upstream_project_id: ProjectId(SSH_PROJECT_ID),
341 downstream: None,
342 },
343 cx,
344 )
345 }
346
347 fn new(
348 worktree_store: Entity<WorktreeStore>,
349 buffer_store: Entity<BufferStore>,
350 state: GitStoreState,
351 cx: &mut Context<Self>,
352 ) -> Self {
353 let _subscriptions = vec![
354 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
355 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
356 ];
357
358 GitStore {
359 state,
360 buffer_store,
361 worktree_store,
362 repositories: HashMap::default(),
363 active_repo_id: None,
364 _subscriptions,
365 loading_diffs: HashMap::default(),
366 shared_diffs: HashMap::default(),
367 diffs: HashMap::default(),
368 }
369 }
370
371 pub fn init(client: &AnyProtoClient) {
372 client.add_entity_request_handler(Self::handle_get_remotes);
373 client.add_entity_request_handler(Self::handle_get_branches);
374 client.add_entity_request_handler(Self::handle_change_branch);
375 client.add_entity_request_handler(Self::handle_create_branch);
376 client.add_entity_request_handler(Self::handle_git_init);
377 client.add_entity_request_handler(Self::handle_push);
378 client.add_entity_request_handler(Self::handle_pull);
379 client.add_entity_request_handler(Self::handle_fetch);
380 client.add_entity_request_handler(Self::handle_stage);
381 client.add_entity_request_handler(Self::handle_unstage);
382 client.add_entity_request_handler(Self::handle_commit);
383 client.add_entity_request_handler(Self::handle_reset);
384 client.add_entity_request_handler(Self::handle_show);
385 client.add_entity_request_handler(Self::handle_load_commit_diff);
386 client.add_entity_request_handler(Self::handle_checkout_files);
387 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
388 client.add_entity_request_handler(Self::handle_set_index_text);
389 client.add_entity_request_handler(Self::handle_askpass);
390 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
391 client.add_entity_request_handler(Self::handle_git_diff);
392 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
393 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
394 client.add_entity_message_handler(Self::handle_update_diff_bases);
395 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
396 client.add_entity_request_handler(Self::handle_blame_buffer);
397 client.add_entity_message_handler(Self::handle_update_repository);
398 client.add_entity_message_handler(Self::handle_remove_repository);
399 }
400
401 pub fn is_local(&self) -> bool {
402 matches!(self.state, GitStoreState::Local { .. })
403 }
404
405 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
406 match &mut self.state {
407 GitStoreState::Ssh {
408 downstream: downstream_client,
409 ..
410 } => {
411 for repo in self.repositories.values() {
412 let update = repo.read(cx).snapshot.initial_update(project_id);
413 for update in split_repository_update(update) {
414 client.send(update).log_err();
415 }
416 }
417 *downstream_client = Some((client, ProjectId(project_id)));
418 }
419 GitStoreState::Local {
420 downstream: downstream_client,
421 ..
422 } => {
423 let mut snapshots = HashMap::default();
424 let (updates_tx, mut updates_rx) = mpsc::unbounded();
425 for repo in self.repositories.values() {
426 updates_tx
427 .unbounded_send(DownstreamUpdate::UpdateRepository(
428 repo.read(cx).snapshot.clone(),
429 ))
430 .ok();
431 }
432 *downstream_client = Some(LocalDownstreamState {
433 client: client.clone(),
434 project_id: ProjectId(project_id),
435 updates_tx,
436 _task: cx.spawn(async move |this, cx| {
437 cx.background_spawn(async move {
438 while let Some(update) = updates_rx.next().await {
439 match update {
440 DownstreamUpdate::UpdateRepository(snapshot) => {
441 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
442 {
443 let update =
444 snapshot.build_update(old_snapshot, project_id);
445 *old_snapshot = snapshot;
446 for update in split_repository_update(update) {
447 client.send(update)?;
448 }
449 } else {
450 let update = snapshot.initial_update(project_id);
451 for update in split_repository_update(update) {
452 client.send(update)?;
453 }
454 snapshots.insert(snapshot.id, snapshot);
455 }
456 }
457 DownstreamUpdate::RemoveRepository(id) => {
458 client.send(proto::RemoveRepository {
459 project_id,
460 id: id.to_proto(),
461 })?;
462 }
463 }
464 }
465 anyhow::Ok(())
466 })
467 .await
468 .ok();
469 this.update(cx, |this, _| {
470 if let GitStoreState::Local {
471 downstream: downstream_client,
472 ..
473 } = &mut this.state
474 {
475 downstream_client.take();
476 } else {
477 unreachable!("unshared called on remote store");
478 }
479 })
480 }),
481 });
482 }
483 GitStoreState::Remote { .. } => {
484 debug_panic!("shared called on remote store");
485 }
486 }
487 }
488
489 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
490 match &mut self.state {
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 downstream_client.take();
496 }
497 GitStoreState::Ssh {
498 downstream: downstream_client,
499 ..
500 } => {
501 downstream_client.take();
502 }
503 GitStoreState::Remote { .. } => {
504 debug_panic!("unshared called on remote store");
505 }
506 }
507 self.shared_diffs.clear();
508 }
509
510 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
511 self.shared_diffs.remove(peer_id);
512 }
513
514 pub fn active_repository(&self) -> Option<Entity<Repository>> {
515 self.active_repo_id
516 .as_ref()
517 .map(|id| self.repositories[&id].clone())
518 }
519
520 pub fn open_unstaged_diff(
521 &mut self,
522 buffer: Entity<Buffer>,
523 cx: &mut Context<Self>,
524 ) -> Task<Result<Entity<BufferDiff>>> {
525 let buffer_id = buffer.read(cx).remote_id();
526 if let Some(diff_state) = self.diffs.get(&buffer_id) {
527 if let Some(unstaged_diff) = diff_state
528 .read(cx)
529 .unstaged_diff
530 .as_ref()
531 .and_then(|weak| weak.upgrade())
532 {
533 if let Some(task) =
534 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
535 {
536 return cx.background_executor().spawn(async move {
537 task.await?;
538 Ok(unstaged_diff)
539 });
540 }
541 return Task::ready(Ok(unstaged_diff));
542 }
543 }
544
545 let Some((repo, repo_path)) =
546 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
547 else {
548 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
549 };
550
551 let task = self
552 .loading_diffs
553 .entry((buffer_id, DiffKind::Unstaged))
554 .or_insert_with(|| {
555 let staged_text = repo.read(cx).load_staged_text(buffer_id, repo_path, cx);
556 cx.spawn(async move |this, cx| {
557 Self::open_diff_internal(
558 this,
559 DiffKind::Unstaged,
560 staged_text.await.map(DiffBasesChange::SetIndex),
561 buffer,
562 cx,
563 )
564 .await
565 .map_err(Arc::new)
566 })
567 .shared()
568 })
569 .clone();
570
571 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
572 }
573
574 pub fn open_uncommitted_diff(
575 &mut self,
576 buffer: Entity<Buffer>,
577 cx: &mut Context<Self>,
578 ) -> Task<Result<Entity<BufferDiff>>> {
579 let buffer_id = buffer.read(cx).remote_id();
580
581 if let Some(diff_state) = self.diffs.get(&buffer_id) {
582 if let Some(uncommitted_diff) = diff_state
583 .read(cx)
584 .uncommitted_diff
585 .as_ref()
586 .and_then(|weak| weak.upgrade())
587 {
588 if let Some(task) =
589 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
590 {
591 return cx.background_executor().spawn(async move {
592 task.await?;
593 Ok(uncommitted_diff)
594 });
595 }
596 return Task::ready(Ok(uncommitted_diff));
597 }
598 }
599
600 let Some((repo, repo_path)) =
601 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
602 else {
603 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
604 };
605
606 let task = self
607 .loading_diffs
608 .entry((buffer_id, DiffKind::Uncommitted))
609 .or_insert_with(|| {
610 let changes = repo.read(cx).load_committed_text(buffer_id, repo_path, cx);
611 cx.spawn(async move |this, cx| {
612 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
613 .await
614 .map_err(Arc::new)
615 })
616 .shared()
617 })
618 .clone();
619
620 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
621 }
622
623 async fn open_diff_internal(
624 this: WeakEntity<Self>,
625 kind: DiffKind,
626 texts: Result<DiffBasesChange>,
627 buffer_entity: Entity<Buffer>,
628 cx: &mut AsyncApp,
629 ) -> Result<Entity<BufferDiff>> {
630 let diff_bases_change = match texts {
631 Err(e) => {
632 this.update(cx, |this, cx| {
633 let buffer = buffer_entity.read(cx);
634 let buffer_id = buffer.remote_id();
635 this.loading_diffs.remove(&(buffer_id, kind));
636 })?;
637 return Err(e);
638 }
639 Ok(change) => change,
640 };
641
642 this.update(cx, |this, cx| {
643 let buffer = buffer_entity.read(cx);
644 let buffer_id = buffer.remote_id();
645 let language = buffer.language().cloned();
646 let language_registry = buffer.language_registry();
647 let text_snapshot = buffer.text_snapshot();
648 this.loading_diffs.remove(&(buffer_id, kind));
649
650 let diff_state = this
651 .diffs
652 .entry(buffer_id)
653 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
654
655 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
656
657 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
658 diff_state.update(cx, |diff_state, cx| {
659 diff_state.language = language;
660 diff_state.language_registry = language_registry;
661
662 match kind {
663 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
664 DiffKind::Uncommitted => {
665 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
666 diff
667 } else {
668 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
669 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
670 unstaged_diff
671 };
672
673 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
674 diff_state.uncommitted_diff = Some(diff.downgrade())
675 }
676 }
677
678 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
679
680 anyhow::Ok(async move {
681 rx.await.ok();
682 Ok(diff)
683 })
684 })
685 })??
686 .await
687 }
688
689 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
690 let diff_state = self.diffs.get(&buffer_id)?;
691 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
692 }
693
694 pub fn get_uncommitted_diff(
695 &self,
696 buffer_id: BufferId,
697 cx: &App,
698 ) -> Option<Entity<BufferDiff>> {
699 let diff_state = self.diffs.get(&buffer_id)?;
700 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
701 }
702
703 pub fn project_path_git_status(
704 &self,
705 project_path: &ProjectPath,
706 cx: &App,
707 ) -> Option<FileStatus> {
708 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
709 Some(repo.read(cx).status_for_path(&repo_path)?.status)
710 }
711
712 pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
713 let mut work_directory_abs_paths = Vec::new();
714 let mut checkpoints = Vec::new();
715 for repository in self.repositories.values() {
716 let repository = repository.read(cx);
717 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
718 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
719 }
720
721 cx.background_executor().spawn(async move {
722 let checkpoints = future::try_join_all(checkpoints).await?;
723 Ok(GitStoreCheckpoint {
724 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
725 .into_iter()
726 .zip(checkpoints)
727 .collect(),
728 })
729 })
730 }
731
732 pub fn restore_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
733 let repositories_by_work_dir_abs_path = self
734 .repositories
735 .values()
736 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
737 .collect::<HashMap<_, _>>();
738
739 let mut tasks = Vec::new();
740 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
741 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
742 let restore = repository.read(cx).restore_checkpoint(checkpoint);
743 tasks.push(async move { restore.await? });
744 }
745 }
746 cx.background_spawn(async move {
747 future::try_join_all(tasks).await?;
748 Ok(())
749 })
750 }
751
752 /// Compares two checkpoints, returning true if they are equal.
753 pub fn compare_checkpoints(
754 &self,
755 left: GitStoreCheckpoint,
756 mut right: GitStoreCheckpoint,
757 cx: &App,
758 ) -> Task<Result<bool>> {
759 let repositories_by_work_dir_abs_path = self
760 .repositories
761 .values()
762 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
763 .collect::<HashMap<_, _>>();
764
765 let mut tasks = Vec::new();
766 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
767 if let Some(right_checkpoint) = right
768 .checkpoints_by_work_dir_abs_path
769 .remove(&work_dir_abs_path)
770 {
771 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
772 {
773 let compare = repository
774 .read(cx)
775 .compare_checkpoints(left_checkpoint, right_checkpoint);
776 tasks.push(async move { compare.await? });
777 }
778 } else {
779 return Task::ready(Ok(false));
780 }
781 }
782 cx.background_spawn(async move {
783 Ok(future::try_join_all(tasks)
784 .await?
785 .into_iter()
786 .all(|result| result))
787 })
788 }
789
790 pub fn delete_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
791 let repositories_by_work_directory_abs_path = self
792 .repositories
793 .values()
794 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
795 .collect::<HashMap<_, _>>();
796
797 let mut tasks = Vec::new();
798 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
799 if let Some(repository) =
800 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
801 {
802 let delete = repository.read(cx).delete_checkpoint(checkpoint);
803 tasks.push(async move { delete.await? });
804 }
805 }
806 cx.background_spawn(async move {
807 future::try_join_all(tasks).await?;
808 Ok(())
809 })
810 }
811
812 /// Blames a buffer.
813 pub fn blame_buffer(
814 &self,
815 buffer: &Entity<Buffer>,
816 version: Option<clock::Global>,
817 cx: &App,
818 ) -> Task<Result<Option<Blame>>> {
819 let buffer = buffer.read(cx);
820 let Some((repo, repo_path)) =
821 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
822 else {
823 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
824 };
825 let content = match &version {
826 Some(version) => buffer.rope_for_version(version).clone(),
827 None => buffer.as_rope().clone(),
828 };
829 let version = version.unwrap_or(buffer.version());
830 let buffer_id = buffer.remote_id();
831
832 let rx = repo.read(cx).send_job(move |state, _| async move {
833 match state {
834 RepositoryState::Local { backend, .. } => backend
835 .blame(repo_path.clone(), content)
836 .await
837 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
838 .map(Some),
839 RepositoryState::Remote { project_id, client } => {
840 let response = client
841 .request(proto::BlameBuffer {
842 project_id: project_id.to_proto(),
843 buffer_id: buffer_id.into(),
844 version: serialize_version(&version),
845 })
846 .await?;
847 Ok(deserialize_blame_buffer_response(response))
848 }
849 }
850 });
851
852 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
853 }
854
855 pub fn get_permalink_to_line(
856 &self,
857 buffer: &Entity<Buffer>,
858 selection: Range<u32>,
859 cx: &App,
860 ) -> Task<Result<url::Url>> {
861 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
862 return Task::ready(Err(anyhow!("buffer has no file")));
863 };
864
865 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
866 &(file.worktree.read(cx).id(), file.path.clone()).into(),
867 cx,
868 ) else {
869 // If we're not in a Git repo, check whether this is a Rust source
870 // file in the Cargo registry (presumably opened with go-to-definition
871 // from a normal Rust file). If so, we can put together a permalink
872 // using crate metadata.
873 if buffer
874 .read(cx)
875 .language()
876 .is_none_or(|lang| lang.name() != "Rust".into())
877 {
878 return Task::ready(Err(anyhow!("no permalink available")));
879 }
880 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
881 return Task::ready(Err(anyhow!("no permalink available")));
882 };
883 return cx.spawn(async move |cx| {
884 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
885 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
886 .map_err(|_| anyhow!("no permalink available"))
887 });
888
889 // TODO remote case
890 };
891
892 let buffer_id = buffer.read(cx).remote_id();
893 let branch = repo.read(cx).branch.clone();
894 let remote = branch
895 .as_ref()
896 .and_then(|b| b.upstream.as_ref())
897 .and_then(|b| b.remote_name())
898 .unwrap_or("origin")
899 .to_string();
900 let rx = repo.read(cx).send_job(move |state, cx| async move {
901 match state {
902 RepositoryState::Local { backend, .. } => {
903 let origin_url = backend
904 .remote_url(&remote)
905 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
906
907 let sha = backend
908 .head_sha()
909 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
910
911 let provider_registry =
912 cx.update(GitHostingProviderRegistry::default_global)?;
913
914 let (provider, remote) =
915 parse_git_remote_url(provider_registry, &origin_url)
916 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
917
918 let path = repo_path
919 .to_str()
920 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
921
922 Ok(provider.build_permalink(
923 remote,
924 BuildPermalinkParams {
925 sha: &sha,
926 path,
927 selection: Some(selection),
928 },
929 ))
930 }
931 RepositoryState::Remote { project_id, client } => {
932 let response = client
933 .request(proto::GetPermalinkToLine {
934 project_id: project_id.to_proto(),
935 buffer_id: buffer_id.into(),
936 selection: Some(proto::Range {
937 start: selection.start as u64,
938 end: selection.end as u64,
939 }),
940 })
941 .await?;
942
943 url::Url::parse(&response.permalink).context("failed to parse permalink")
944 }
945 }
946 });
947 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
948 }
949
950 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
951 match &self.state {
952 GitStoreState::Local {
953 downstream: downstream_client,
954 ..
955 } => downstream_client
956 .as_ref()
957 .map(|state| (state.client.clone(), state.project_id)),
958 GitStoreState::Ssh {
959 downstream: downstream_client,
960 ..
961 } => downstream_client.clone(),
962 GitStoreState::Remote { .. } => None,
963 }
964 }
965
966 fn upstream_client(&self) -> Option<AnyProtoClient> {
967 match &self.state {
968 GitStoreState::Local { .. } => None,
969 GitStoreState::Ssh {
970 upstream_client, ..
971 }
972 | GitStoreState::Remote {
973 upstream_client, ..
974 } => Some(upstream_client.clone()),
975 }
976 }
977
978 fn on_worktree_store_event(
979 &mut self,
980 worktree_store: Entity<WorktreeStore>,
981 event: &WorktreeStoreEvent,
982 cx: &mut Context<Self>,
983 ) {
984 let GitStoreState::Local {
985 project_environment,
986 downstream,
987 next_repository_id,
988 fs,
989 } = &self.state
990 else {
991 return;
992 };
993
994 match event {
995 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
996 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
997 for (relative_path, _, _) in updated_entries.iter() {
998 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
999 &(*worktree_id, relative_path.clone()).into(),
1000 cx,
1001 ) else {
1002 continue;
1003 };
1004 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1005 }
1006
1007 for (repo, paths) in paths_by_git_repo {
1008 repo.update(cx, |repo, cx| {
1009 repo.paths_changed(
1010 paths,
1011 downstream
1012 .as_ref()
1013 .map(|downstream| downstream.updates_tx.clone()),
1014 cx,
1015 );
1016 });
1017 }
1018 }
1019 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1020 self.update_repositories_from_worktrees(
1021 project_environment.clone(),
1022 next_repository_id.clone(),
1023 downstream
1024 .as_ref()
1025 .map(|downstream| downstream.updates_tx.clone()),
1026 changed_repos.clone(),
1027 fs.clone(),
1028 cx,
1029 );
1030 if let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx) {
1031 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1032 }
1033 }
1034 _ => {}
1035 }
1036 }
1037
1038 fn on_repository_event(
1039 &mut self,
1040 repo: Entity<Repository>,
1041 event: &RepositoryEvent,
1042 cx: &mut Context<Self>,
1043 ) {
1044 let id = repo.read(cx).id;
1045 cx.emit(GitStoreEvent::RepositoryUpdated(
1046 id,
1047 event.clone(),
1048 self.active_repo_id == Some(id),
1049 ))
1050 }
1051
1052 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1053 fn update_repositories_from_worktrees(
1054 &mut self,
1055 project_environment: Entity<ProjectEnvironment>,
1056 next_repository_id: Arc<AtomicU64>,
1057 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1058 updated_git_repositories: UpdatedGitRepositoriesSet,
1059 fs: Arc<dyn Fs>,
1060 cx: &mut Context<Self>,
1061 ) {
1062 let mut removed_ids = Vec::new();
1063 for update in updated_git_repositories.iter() {
1064 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1065 Some(&repo.read(cx).work_directory_abs_path)
1066 == update.old_work_directory_abs_path.as_ref()
1067 }) {
1068 if let Some(new_work_directory_abs_path) =
1069 update.new_work_directory_abs_path.clone()
1070 {
1071 existing.update(cx, |existing, cx| {
1072 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1073 existing.schedule_scan(updates_tx.clone(), cx);
1074 });
1075 } else {
1076 removed_ids.push(*id);
1077 }
1078 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1079 .new_work_directory_abs_path
1080 .clone()
1081 .zip(update.dot_git_abs_path.clone())
1082 {
1083 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1084 let git_store = cx.weak_entity();
1085 let repo = cx.new(|cx| {
1086 let mut repo = Repository::local(
1087 id,
1088 work_directory_abs_path,
1089 dot_git_abs_path,
1090 project_environment.downgrade(),
1091 fs.clone(),
1092 git_store,
1093 cx,
1094 );
1095 repo.schedule_scan(updates_tx.clone(), cx);
1096 repo
1097 });
1098 self._subscriptions
1099 .push(cx.subscribe(&repo, Self::on_repository_event));
1100 self.repositories.insert(id, repo);
1101 cx.emit(GitStoreEvent::RepositoryAdded(id));
1102 self.active_repo_id.get_or_insert_with(|| {
1103 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1104 id
1105 });
1106 }
1107 }
1108
1109 for id in removed_ids {
1110 if self.active_repo_id == Some(id) {
1111 self.active_repo_id = None;
1112 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1113 }
1114 self.repositories.remove(&id);
1115 if let Some(updates_tx) = updates_tx.as_ref() {
1116 updates_tx
1117 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1118 .ok();
1119 }
1120 }
1121 }
1122
1123 fn on_buffer_store_event(
1124 &mut self,
1125 _: Entity<BufferStore>,
1126 event: &BufferStoreEvent,
1127 cx: &mut Context<Self>,
1128 ) {
1129 match event {
1130 BufferStoreEvent::BufferAdded(buffer) => {
1131 cx.subscribe(&buffer, |this, buffer, event, cx| {
1132 if let BufferEvent::LanguageChanged = event {
1133 let buffer_id = buffer.read(cx).remote_id();
1134 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1135 diff_state.update(cx, |diff_state, cx| {
1136 diff_state.buffer_language_changed(buffer, cx);
1137 });
1138 }
1139 }
1140 })
1141 .detach();
1142 }
1143 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1144 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1145 diffs.remove(buffer_id);
1146 }
1147 }
1148 BufferStoreEvent::BufferDropped(buffer_id) => {
1149 self.diffs.remove(&buffer_id);
1150 for diffs in self.shared_diffs.values_mut() {
1151 diffs.remove(buffer_id);
1152 }
1153 }
1154
1155 _ => {}
1156 }
1157 }
1158
1159 pub fn recalculate_buffer_diffs(
1160 &mut self,
1161 buffers: Vec<Entity<Buffer>>,
1162 cx: &mut Context<Self>,
1163 ) -> impl Future<Output = ()> + use<> {
1164 let mut futures = Vec::new();
1165 for buffer in buffers {
1166 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1167 let buffer = buffer.read(cx).text_snapshot();
1168 futures.push(diff_state.update(cx, |diff_state, cx| {
1169 diff_state.recalculate_diffs(
1170 buffer,
1171 diff_state.hunk_staging_operation_count,
1172 cx,
1173 )
1174 }));
1175 }
1176 }
1177 async move {
1178 futures::future::join_all(futures).await;
1179 }
1180 }
1181
1182 fn on_buffer_diff_event(
1183 &mut self,
1184 diff: Entity<buffer_diff::BufferDiff>,
1185 event: &BufferDiffEvent,
1186 cx: &mut Context<Self>,
1187 ) {
1188 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1189 let buffer_id = diff.read(cx).buffer_id;
1190 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1191 diff_state.update(cx, |diff_state, _| {
1192 diff_state.hunk_staging_operation_count += 1;
1193 });
1194 }
1195 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1196 let recv = repo.update(cx, |repo, cx| {
1197 log::debug!("updating index text for buffer {}", path.display());
1198 repo.spawn_set_index_text_job(
1199 path,
1200 new_index_text.as_ref().map(|rope| rope.to_string()),
1201 cx,
1202 )
1203 });
1204 let diff = diff.downgrade();
1205 cx.spawn(async move |this, cx| {
1206 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1207 diff.update(cx, |diff, cx| {
1208 diff.clear_pending_hunks(cx);
1209 })
1210 .ok();
1211 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1212 .ok();
1213 }
1214 })
1215 .detach();
1216 }
1217 }
1218 }
1219
1220 fn local_worktree_git_repos_changed(
1221 &mut self,
1222 worktree: Entity<Worktree>,
1223 changed_repos: &UpdatedGitRepositoriesSet,
1224 cx: &mut Context<Self>,
1225 ) {
1226 log::debug!("local worktree repos changed");
1227 debug_assert!(worktree.read(cx).is_local());
1228
1229 let mut diff_state_updates = HashMap::<Entity<Repository>, Vec<_>>::default();
1230 for (buffer_id, diff_state) in &self.diffs {
1231 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1232 continue;
1233 };
1234 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1235 continue;
1236 };
1237 if file.worktree != worktree {
1238 continue;
1239 }
1240 let Some((repo, repo_path)) =
1241 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1242 else {
1243 continue;
1244 };
1245 if !changed_repos.iter().any(|update| {
1246 update.old_work_directory_abs_path.as_ref()
1247 == Some(&repo.read(cx).work_directory_abs_path)
1248 || update.new_work_directory_abs_path.as_ref()
1249 == Some(&repo.read(cx).work_directory_abs_path)
1250 }) {
1251 continue;
1252 }
1253
1254 let diff_state = diff_state.read(cx);
1255 let has_unstaged_diff = diff_state
1256 .unstaged_diff
1257 .as_ref()
1258 .is_some_and(|diff| diff.is_upgradable());
1259 let has_uncommitted_diff = diff_state
1260 .uncommitted_diff
1261 .as_ref()
1262 .is_some_and(|set| set.is_upgradable());
1263
1264 let update = (
1265 buffer,
1266 repo_path,
1267 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1268 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1269 diff_state.hunk_staging_operation_count,
1270 );
1271 diff_state_updates.entry(repo).or_default().push(update);
1272 }
1273
1274 if diff_state_updates.is_empty() {
1275 return;
1276 }
1277
1278 for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() {
1279 let git_store = cx.weak_entity();
1280
1281 let _ = repo.read(cx).send_keyed_job(
1282 Some(GitJobKey::BatchReadIndex),
1283 |state, mut cx| async move {
1284 let RepositoryState::Local { backend, .. } = state else {
1285 log::error!("tried to recompute diffs for a non-local repository");
1286 return;
1287 };
1288 let mut diff_bases_changes_by_buffer = Vec::new();
1289 for (
1290 buffer,
1291 repo_path,
1292 current_index_text,
1293 current_head_text,
1294 hunk_staging_operation_count,
1295 ) in &repo_diff_state_updates
1296 {
1297 let index_text = if current_index_text.is_some() {
1298 backend.load_index_text(repo_path.clone()).await
1299 } else {
1300 None
1301 };
1302 let head_text = if current_head_text.is_some() {
1303 backend.load_committed_text(repo_path.clone()).await
1304 } else {
1305 None
1306 };
1307
1308 // Avoid triggering a diff update if the base text has not changed.
1309 if let Some((current_index, current_head)) =
1310 current_index_text.as_ref().zip(current_head_text.as_ref())
1311 {
1312 if current_index.as_deref() == index_text.as_ref()
1313 && current_head.as_deref() == head_text.as_ref()
1314 {
1315 continue;
1316 }
1317 }
1318
1319 let diff_bases_change =
1320 match (current_index_text.is_some(), current_head_text.is_some()) {
1321 (true, true) => Some(if index_text == head_text {
1322 DiffBasesChange::SetBoth(head_text)
1323 } else {
1324 DiffBasesChange::SetEach {
1325 index: index_text,
1326 head: head_text,
1327 }
1328 }),
1329 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1330 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1331 (false, false) => None,
1332 };
1333
1334 diff_bases_changes_by_buffer.push((
1335 buffer,
1336 diff_bases_change,
1337 *hunk_staging_operation_count,
1338 ))
1339 }
1340
1341 git_store
1342 .update(&mut cx, |git_store, cx| {
1343 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1344 diff_bases_changes_by_buffer
1345 {
1346 let Some(diff_state) =
1347 git_store.diffs.get(&buffer.read(cx).remote_id())
1348 else {
1349 continue;
1350 };
1351 let Some(diff_bases_change) = diff_bases_change else {
1352 continue;
1353 };
1354
1355 let downstream_client = git_store.downstream_client();
1356 diff_state.update(cx, |diff_state, cx| {
1357 use proto::update_diff_bases::Mode;
1358
1359 let buffer = buffer.read(cx);
1360 if let Some((client, project_id)) = downstream_client {
1361 let (staged_text, committed_text, mode) =
1362 match diff_bases_change.clone() {
1363 DiffBasesChange::SetIndex(index) => {
1364 (index, None, Mode::IndexOnly)
1365 }
1366 DiffBasesChange::SetHead(head) => {
1367 (None, head, Mode::HeadOnly)
1368 }
1369 DiffBasesChange::SetEach { index, head } => {
1370 (index, head, Mode::IndexAndHead)
1371 }
1372 DiffBasesChange::SetBoth(text) => {
1373 (None, text, Mode::IndexMatchesHead)
1374 }
1375 };
1376 let message = proto::UpdateDiffBases {
1377 project_id: project_id.to_proto(),
1378 buffer_id: buffer.remote_id().to_proto(),
1379 staged_text,
1380 committed_text,
1381 mode: mode as i32,
1382 };
1383
1384 client.send(message).log_err();
1385 }
1386
1387 let _ = diff_state.diff_bases_changed(
1388 buffer.text_snapshot(),
1389 diff_bases_change,
1390 hunk_staging_operation_count,
1391 cx,
1392 );
1393 });
1394 }
1395 })
1396 .ok();
1397 },
1398 );
1399 }
1400 }
1401
1402 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1403 &self.repositories
1404 }
1405
1406 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1407 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1408 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1409 Some(status.status)
1410 }
1411
1412 pub fn repository_and_path_for_buffer_id(
1413 &self,
1414 buffer_id: BufferId,
1415 cx: &App,
1416 ) -> Option<(Entity<Repository>, RepoPath)> {
1417 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1418 let project_path = buffer.read(cx).project_path(cx)?;
1419 self.repository_and_path_for_project_path(&project_path, cx)
1420 }
1421
1422 pub fn repository_and_path_for_project_path(
1423 &self,
1424 path: &ProjectPath,
1425 cx: &App,
1426 ) -> Option<(Entity<Repository>, RepoPath)> {
1427 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1428 self.repositories
1429 .values()
1430 .filter_map(|repo| {
1431 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1432 Some((repo.clone(), repo_path))
1433 })
1434 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1435 }
1436
1437 pub fn git_init(
1438 &self,
1439 path: Arc<Path>,
1440 fallback_branch_name: String,
1441 cx: &App,
1442 ) -> Task<Result<()>> {
1443 match &self.state {
1444 GitStoreState::Local { fs, .. } => {
1445 let fs = fs.clone();
1446 cx.background_executor()
1447 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1448 }
1449 GitStoreState::Ssh {
1450 upstream_client,
1451 upstream_project_id: project_id,
1452 ..
1453 }
1454 | GitStoreState::Remote {
1455 upstream_client,
1456 upstream_project_id: project_id,
1457 ..
1458 } => {
1459 let client = upstream_client.clone();
1460 let project_id = *project_id;
1461 cx.background_executor().spawn(async move {
1462 client
1463 .request(proto::GitInit {
1464 project_id: project_id.0,
1465 abs_path: path.to_string_lossy().to_string(),
1466 fallback_branch_name,
1467 })
1468 .await?;
1469 Ok(())
1470 })
1471 }
1472 }
1473 }
1474
1475 async fn handle_update_repository(
1476 this: Entity<Self>,
1477 envelope: TypedEnvelope<proto::UpdateRepository>,
1478 mut cx: AsyncApp,
1479 ) -> Result<()> {
1480 this.update(&mut cx, |this, cx| {
1481 let mut update = envelope.payload;
1482
1483 let id = RepositoryId::from_proto(update.id);
1484 let client = this
1485 .upstream_client()
1486 .context("no upstream client")?
1487 .clone();
1488
1489 let mut is_new = false;
1490 let repo = this.repositories.entry(id).or_insert_with(|| {
1491 is_new = true;
1492 let git_store = cx.weak_entity();
1493 cx.new(|cx| {
1494 Repository::remote(
1495 id,
1496 Path::new(&update.abs_path).into(),
1497 ProjectId(update.project_id),
1498 client,
1499 git_store,
1500 cx,
1501 )
1502 })
1503 });
1504 if is_new {
1505 this._subscriptions
1506 .push(cx.subscribe(&repo, Self::on_repository_event))
1507 }
1508
1509 repo.update(cx, {
1510 let update = update.clone();
1511 |repo, cx| repo.apply_remote_update(update, cx)
1512 })?;
1513
1514 this.active_repo_id.get_or_insert_with(|| {
1515 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1516 id
1517 });
1518
1519 if let Some((client, project_id)) = this.downstream_client() {
1520 update.project_id = project_id.to_proto();
1521 client.send(update).log_err();
1522 }
1523 Ok(())
1524 })?
1525 }
1526
1527 async fn handle_remove_repository(
1528 this: Entity<Self>,
1529 envelope: TypedEnvelope<proto::RemoveRepository>,
1530 mut cx: AsyncApp,
1531 ) -> Result<()> {
1532 this.update(&mut cx, |this, cx| {
1533 let mut update = envelope.payload;
1534 let id = RepositoryId::from_proto(update.id);
1535 this.repositories.remove(&id);
1536 if let Some((client, project_id)) = this.downstream_client() {
1537 update.project_id = project_id.to_proto();
1538 client.send(update).log_err();
1539 }
1540 if this.active_repo_id == Some(id) {
1541 this.active_repo_id = None;
1542 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1543 }
1544 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1545 })
1546 }
1547
1548 async fn handle_git_init(
1549 this: Entity<Self>,
1550 envelope: TypedEnvelope<proto::GitInit>,
1551 cx: AsyncApp,
1552 ) -> Result<proto::Ack> {
1553 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1554 let name = envelope.payload.fallback_branch_name;
1555 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1556 .await?;
1557
1558 Ok(proto::Ack {})
1559 }
1560
1561 async fn handle_fetch(
1562 this: Entity<Self>,
1563 envelope: TypedEnvelope<proto::Fetch>,
1564 mut cx: AsyncApp,
1565 ) -> Result<proto::RemoteMessageResponse> {
1566 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1567 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1568 let askpass_id = envelope.payload.askpass_id;
1569
1570 let askpass = make_remote_delegate(
1571 this,
1572 envelope.payload.project_id,
1573 repository_id,
1574 askpass_id,
1575 &mut cx,
1576 );
1577
1578 let remote_output = repository_handle
1579 .update(&mut cx, |repository_handle, cx| {
1580 repository_handle.fetch(askpass, cx)
1581 })?
1582 .await??;
1583
1584 Ok(proto::RemoteMessageResponse {
1585 stdout: remote_output.stdout,
1586 stderr: remote_output.stderr,
1587 })
1588 }
1589
1590 async fn handle_push(
1591 this: Entity<Self>,
1592 envelope: TypedEnvelope<proto::Push>,
1593 mut cx: AsyncApp,
1594 ) -> Result<proto::RemoteMessageResponse> {
1595 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1596 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1597
1598 let askpass_id = envelope.payload.askpass_id;
1599 let askpass = make_remote_delegate(
1600 this,
1601 envelope.payload.project_id,
1602 repository_id,
1603 askpass_id,
1604 &mut cx,
1605 );
1606
1607 let options = envelope
1608 .payload
1609 .options
1610 .as_ref()
1611 .map(|_| match envelope.payload.options() {
1612 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1613 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1614 });
1615
1616 let branch_name = envelope.payload.branch_name.into();
1617 let remote_name = envelope.payload.remote_name.into();
1618
1619 let remote_output = repository_handle
1620 .update(&mut cx, |repository_handle, cx| {
1621 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1622 })?
1623 .await??;
1624 Ok(proto::RemoteMessageResponse {
1625 stdout: remote_output.stdout,
1626 stderr: remote_output.stderr,
1627 })
1628 }
1629
1630 async fn handle_pull(
1631 this: Entity<Self>,
1632 envelope: TypedEnvelope<proto::Pull>,
1633 mut cx: AsyncApp,
1634 ) -> Result<proto::RemoteMessageResponse> {
1635 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1636 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1637 let askpass_id = envelope.payload.askpass_id;
1638 let askpass = make_remote_delegate(
1639 this,
1640 envelope.payload.project_id,
1641 repository_id,
1642 askpass_id,
1643 &mut cx,
1644 );
1645
1646 let branch_name = envelope.payload.branch_name.into();
1647 let remote_name = envelope.payload.remote_name.into();
1648
1649 let remote_message = repository_handle
1650 .update(&mut cx, |repository_handle, cx| {
1651 repository_handle.pull(branch_name, remote_name, askpass, cx)
1652 })?
1653 .await??;
1654
1655 Ok(proto::RemoteMessageResponse {
1656 stdout: remote_message.stdout,
1657 stderr: remote_message.stderr,
1658 })
1659 }
1660
1661 async fn handle_stage(
1662 this: Entity<Self>,
1663 envelope: TypedEnvelope<proto::Stage>,
1664 mut cx: AsyncApp,
1665 ) -> Result<proto::Ack> {
1666 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1667 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1668
1669 let entries = envelope
1670 .payload
1671 .paths
1672 .into_iter()
1673 .map(PathBuf::from)
1674 .map(RepoPath::new)
1675 .collect();
1676
1677 repository_handle
1678 .update(&mut cx, |repository_handle, cx| {
1679 repository_handle.stage_entries(entries, cx)
1680 })?
1681 .await?;
1682 Ok(proto::Ack {})
1683 }
1684
1685 async fn handle_unstage(
1686 this: Entity<Self>,
1687 envelope: TypedEnvelope<proto::Unstage>,
1688 mut cx: AsyncApp,
1689 ) -> Result<proto::Ack> {
1690 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1691 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1692
1693 let entries = envelope
1694 .payload
1695 .paths
1696 .into_iter()
1697 .map(PathBuf::from)
1698 .map(RepoPath::new)
1699 .collect();
1700
1701 repository_handle
1702 .update(&mut cx, |repository_handle, cx| {
1703 repository_handle.unstage_entries(entries, cx)
1704 })?
1705 .await?;
1706
1707 Ok(proto::Ack {})
1708 }
1709
1710 async fn handle_set_index_text(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::SetIndexText>,
1713 mut cx: AsyncApp,
1714 ) -> Result<proto::Ack> {
1715 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1716 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1717
1718 repository_handle
1719 .update(&mut cx, |repository_handle, cx| {
1720 repository_handle.spawn_set_index_text_job(
1721 RepoPath::from_str(&envelope.payload.path),
1722 envelope.payload.text,
1723 cx,
1724 )
1725 })?
1726 .await??;
1727 Ok(proto::Ack {})
1728 }
1729
1730 async fn handle_commit(
1731 this: Entity<Self>,
1732 envelope: TypedEnvelope<proto::Commit>,
1733 mut cx: AsyncApp,
1734 ) -> Result<proto::Ack> {
1735 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1736 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1737
1738 let message = SharedString::from(envelope.payload.message);
1739 let name = envelope.payload.name.map(SharedString::from);
1740 let email = envelope.payload.email.map(SharedString::from);
1741
1742 repository_handle
1743 .update(&mut cx, |repository_handle, cx| {
1744 repository_handle.commit(message, name.zip(email), cx)
1745 })?
1746 .await??;
1747 Ok(proto::Ack {})
1748 }
1749
1750 async fn handle_get_remotes(
1751 this: Entity<Self>,
1752 envelope: TypedEnvelope<proto::GetRemotes>,
1753 mut cx: AsyncApp,
1754 ) -> Result<proto::GetRemotesResponse> {
1755 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1756 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1757
1758 let branch_name = envelope.payload.branch_name;
1759
1760 let remotes = repository_handle
1761 .update(&mut cx, |repository_handle, _| {
1762 repository_handle.get_remotes(branch_name)
1763 })?
1764 .await??;
1765
1766 Ok(proto::GetRemotesResponse {
1767 remotes: remotes
1768 .into_iter()
1769 .map(|remotes| proto::get_remotes_response::Remote {
1770 name: remotes.name.to_string(),
1771 })
1772 .collect::<Vec<_>>(),
1773 })
1774 }
1775
1776 async fn handle_get_branches(
1777 this: Entity<Self>,
1778 envelope: TypedEnvelope<proto::GitGetBranches>,
1779 mut cx: AsyncApp,
1780 ) -> Result<proto::GitBranchesResponse> {
1781 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1782 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1783
1784 let branches = repository_handle
1785 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1786 .await??;
1787
1788 Ok(proto::GitBranchesResponse {
1789 branches: branches
1790 .into_iter()
1791 .map(|branch| branch_to_proto(&branch))
1792 .collect::<Vec<_>>(),
1793 })
1794 }
1795 async fn handle_create_branch(
1796 this: Entity<Self>,
1797 envelope: TypedEnvelope<proto::GitCreateBranch>,
1798 mut cx: AsyncApp,
1799 ) -> Result<proto::Ack> {
1800 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1801 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1802 let branch_name = envelope.payload.branch_name;
1803
1804 repository_handle
1805 .update(&mut cx, |repository_handle, _| {
1806 repository_handle.create_branch(branch_name)
1807 })?
1808 .await??;
1809
1810 Ok(proto::Ack {})
1811 }
1812
1813 async fn handle_change_branch(
1814 this: Entity<Self>,
1815 envelope: TypedEnvelope<proto::GitChangeBranch>,
1816 mut cx: AsyncApp,
1817 ) -> Result<proto::Ack> {
1818 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1819 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1820 let branch_name = envelope.payload.branch_name;
1821
1822 repository_handle
1823 .update(&mut cx, |repository_handle, _| {
1824 repository_handle.change_branch(branch_name)
1825 })?
1826 .await??;
1827
1828 Ok(proto::Ack {})
1829 }
1830
1831 async fn handle_show(
1832 this: Entity<Self>,
1833 envelope: TypedEnvelope<proto::GitShow>,
1834 mut cx: AsyncApp,
1835 ) -> Result<proto::GitCommitDetails> {
1836 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1837 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1838
1839 let commit = repository_handle
1840 .update(&mut cx, |repository_handle, _| {
1841 repository_handle.show(envelope.payload.commit)
1842 })?
1843 .await??;
1844 Ok(proto::GitCommitDetails {
1845 sha: commit.sha.into(),
1846 message: commit.message.into(),
1847 commit_timestamp: commit.commit_timestamp,
1848 author_email: commit.author_email.into(),
1849 author_name: commit.author_name.into(),
1850 })
1851 }
1852
1853 async fn handle_load_commit_diff(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1856 mut cx: AsyncApp,
1857 ) -> Result<proto::LoadCommitDiffResponse> {
1858 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1859 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1860
1861 let commit_diff = repository_handle
1862 .update(&mut cx, |repository_handle, _| {
1863 repository_handle.load_commit_diff(envelope.payload.commit)
1864 })?
1865 .await??;
1866 Ok(proto::LoadCommitDiffResponse {
1867 files: commit_diff
1868 .files
1869 .into_iter()
1870 .map(|file| proto::CommitFile {
1871 path: file.path.to_string(),
1872 old_text: file.old_text,
1873 new_text: file.new_text,
1874 })
1875 .collect(),
1876 })
1877 }
1878
1879 async fn handle_reset(
1880 this: Entity<Self>,
1881 envelope: TypedEnvelope<proto::GitReset>,
1882 mut cx: AsyncApp,
1883 ) -> Result<proto::Ack> {
1884 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1885 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1886
1887 let mode = match envelope.payload.mode() {
1888 git_reset::ResetMode::Soft => ResetMode::Soft,
1889 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1890 };
1891
1892 repository_handle
1893 .update(&mut cx, |repository_handle, cx| {
1894 repository_handle.reset(envelope.payload.commit, mode, cx)
1895 })?
1896 .await??;
1897 Ok(proto::Ack {})
1898 }
1899
1900 async fn handle_checkout_files(
1901 this: Entity<Self>,
1902 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1903 mut cx: AsyncApp,
1904 ) -> Result<proto::Ack> {
1905 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1906 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1907 let paths = envelope
1908 .payload
1909 .paths
1910 .iter()
1911 .map(|s| RepoPath::from_str(s))
1912 .collect();
1913
1914 repository_handle
1915 .update(&mut cx, |repository_handle, cx| {
1916 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1917 })?
1918 .await??;
1919 Ok(proto::Ack {})
1920 }
1921
1922 async fn handle_open_commit_message_buffer(
1923 this: Entity<Self>,
1924 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1925 mut cx: AsyncApp,
1926 ) -> Result<proto::OpenBufferResponse> {
1927 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1928 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1929 let buffer = repository
1930 .update(&mut cx, |repository, cx| {
1931 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1932 })?
1933 .await?;
1934
1935 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1936 this.update(&mut cx, |this, cx| {
1937 this.buffer_store.update(cx, |buffer_store, cx| {
1938 buffer_store
1939 .create_buffer_for_peer(
1940 &buffer,
1941 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1942 cx,
1943 )
1944 .detach_and_log_err(cx);
1945 })
1946 })?;
1947
1948 Ok(proto::OpenBufferResponse {
1949 buffer_id: buffer_id.to_proto(),
1950 })
1951 }
1952
1953 async fn handle_askpass(
1954 this: Entity<Self>,
1955 envelope: TypedEnvelope<proto::AskPassRequest>,
1956 mut cx: AsyncApp,
1957 ) -> Result<proto::AskPassResponse> {
1958 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1959 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1960
1961 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1962 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1963 debug_panic!("no askpass found");
1964 return Err(anyhow!("no askpass found"));
1965 };
1966
1967 let response = askpass.ask_password(envelope.payload.prompt).await?;
1968
1969 delegates
1970 .lock()
1971 .insert(envelope.payload.askpass_id, askpass);
1972
1973 Ok(proto::AskPassResponse { response })
1974 }
1975
1976 async fn handle_check_for_pushed_commits(
1977 this: Entity<Self>,
1978 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1979 mut cx: AsyncApp,
1980 ) -> Result<proto::CheckForPushedCommitsResponse> {
1981 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1982 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1983
1984 let branches = repository_handle
1985 .update(&mut cx, |repository_handle, _| {
1986 repository_handle.check_for_pushed_commits()
1987 })?
1988 .await??;
1989 Ok(proto::CheckForPushedCommitsResponse {
1990 pushed_to: branches
1991 .into_iter()
1992 .map(|commit| commit.to_string())
1993 .collect(),
1994 })
1995 }
1996
1997 async fn handle_git_diff(
1998 this: Entity<Self>,
1999 envelope: TypedEnvelope<proto::GitDiff>,
2000 mut cx: AsyncApp,
2001 ) -> Result<proto::GitDiffResponse> {
2002 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2003 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2004 let diff_type = match envelope.payload.diff_type() {
2005 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2006 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2007 };
2008
2009 let mut diff = repository_handle
2010 .update(&mut cx, |repository_handle, cx| {
2011 repository_handle.diff(diff_type, cx)
2012 })?
2013 .await??;
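// Truncate oversized diffs before responding so the RPC payload stays bounded;
// the cutoff below is approximate, since it is applied per character rather than per byte.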
2014 const ONE_MB: usize = 1_000_000;
2015 if diff.len() > ONE_MB {
2016 diff = diff.chars().take(ONE_MB).collect();
2017 }
2018
2019 Ok(proto::GitDiffResponse { diff })
2020 }
2021
2022 async fn handle_open_unstaged_diff(
2023 this: Entity<Self>,
2024 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::OpenUnstagedDiffResponse> {
2027 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2028 let diff = this
2029 .update(&mut cx, |this, cx| {
2030 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2031 Some(this.open_unstaged_diff(buffer, cx))
2032 })?
2033 .ok_or_else(|| anyhow!("no such buffer"))?
2034 .await?;
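// Record the shared diff against the requesting peer, keeping the entity alive
// while that peer holds it open.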
2035 this.update(&mut cx, |this, _| {
2036 let shared_diffs = this
2037 .shared_diffs
2038 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2039 .or_default();
2040 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2041 })?;
2042 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2043 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2044 }
2045
2046 async fn handle_open_uncommitted_diff(
2047 this: Entity<Self>,
2048 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2049 mut cx: AsyncApp,
2050 ) -> Result<proto::OpenUncommittedDiffResponse> {
2051 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2052 let diff = this
2053 .update(&mut cx, |this, cx| {
2054 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2055 Some(this.open_uncommitted_diff(buffer, cx))
2056 })?
2057 .ok_or_else(|| anyhow!("no such buffer"))?
2058 .await?;
2059 this.update(&mut cx, |this, _| {
2060 let shared_diffs = this
2061 .shared_diffs
2062 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2063 .or_default();
2064 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2065 })?;
2066 diff.read_with(&cx, |diff, cx| {
2067 use proto::open_uncommitted_diff_response::Mode;
2068
2069 let unstaged_diff = diff.secondary_diff();
2070 let index_snapshot = unstaged_diff.and_then(|diff| {
2071 let diff = diff.read(cx);
2072 diff.base_text_exists().then(|| diff.base_text())
2073 });
2074
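// Decide how much base text to send back: when the index snapshot is the very
// same text object as HEAD, only the committed text is included and the mode
// records that they match; otherwise whichever bases exist are sent separately.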
2075 let mode;
2076 let staged_text;
2077 let committed_text;
2078 if diff.base_text_exists() {
2079 let committed_snapshot = diff.base_text();
2080 committed_text = Some(committed_snapshot.text());
2081 if let Some(index_text) = index_snapshot {
2082 if index_text.remote_id() == committed_snapshot.remote_id() {
2083 mode = Mode::IndexMatchesHead;
2084 staged_text = None;
2085 } else {
2086 mode = Mode::IndexAndHead;
2087 staged_text = Some(index_text.text());
2088 }
2089 } else {
2090 mode = Mode::IndexAndHead;
2091 staged_text = None;
2092 }
2093 } else {
2094 mode = Mode::IndexAndHead;
2095 committed_text = None;
2096 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2097 }
2098
2099 proto::OpenUncommittedDiffResponse {
2100 committed_text,
2101 staged_text,
2102 mode: mode.into(),
2103 }
2104 })
2105 }
2106
2107 async fn handle_update_diff_bases(
2108 this: Entity<Self>,
2109 request: TypedEnvelope<proto::UpdateDiffBases>,
2110 mut cx: AsyncApp,
2111 ) -> Result<()> {
2112 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2113 this.update(&mut cx, |this, cx| {
2114 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2115 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2116 let buffer = buffer.read(cx).text_snapshot();
2117 diff_state.update(cx, |diff_state, cx| {
2118 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2119 })
2120 }
2121 }
2122 })
2123 }
2124
2125 async fn handle_blame_buffer(
2126 this: Entity<Self>,
2127 envelope: TypedEnvelope<proto::BlameBuffer>,
2128 mut cx: AsyncApp,
2129 ) -> Result<proto::BlameBufferResponse> {
2130 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2131 let version = deserialize_version(&envelope.payload.version);
2132 let buffer = this.read_with(&cx, |this, cx| {
2133 this.buffer_store.read(cx).get_existing(buffer_id)
2134 })??;
2135 buffer
2136 .update(&mut cx, |buffer, _| {
2137 buffer.wait_for_version(version.clone())
2138 })?
2139 .await?;
2140 let blame = this
2141 .update(&mut cx, |this, cx| {
2142 this.blame_buffer(&buffer, Some(version), cx)
2143 })?
2144 .await?;
2145 Ok(serialize_blame_buffer_response(blame))
2146 }
2147
2148 async fn handle_get_permalink_to_line(
2149 this: Entity<Self>,
2150 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::GetPermalinkToLineResponse> {
2153 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2155 let selection = {
2156 let proto_selection = envelope
2157 .payload
2158 .selection
2159 .context("no selection provided to get permalink for")?;
2160 proto_selection.start as u32..proto_selection.end as u32
2161 };
2162 let buffer = this.read_with(&cx, |this, cx| {
2163 this.buffer_store.read(cx).get_existing(buffer_id)
2164 })??;
2165 let permalink = this
2166 .update(&mut cx, |this, cx| {
2167 this.get_permalink_to_line(&buffer, selection, cx)
2168 })?
2169 .await?;
2170 Ok(proto::GetPermalinkToLineResponse {
2171 permalink: permalink.to_string(),
2172 })
2173 }
2174
2175 fn repository_for_request(
2176 this: &Entity<Self>,
2177 id: RepositoryId,
2178 cx: &mut AsyncApp,
2179 ) -> Result<Entity<Repository>> {
2180 this.update(cx, |this, _| {
2181 this.repositories
2182 .get(&id)
2183 .context("missing repository handle")
2184 .cloned()
2185 })?
2186 }
2187
2188 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2189 self.repositories
2190 .iter()
2191 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2192 .collect()
2193 }
2194}
2195
2196impl BufferDiffState {
2197 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2198 self.language = buffer.read(cx).language().cloned();
2199 self.language_changed = true;
2200 let _ = self.recalculate_diffs(
2201 buffer.read(cx).text_snapshot(),
2202 self.hunk_staging_operation_count,
2203 cx,
2204 );
2205 }
2206
2207 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2208 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2209 }
2210
2211 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2212 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2213 }
2214
2215 fn handle_base_texts_updated(
2216 &mut self,
2217 buffer: text::BufferSnapshot,
2218 message: proto::UpdateDiffBases,
2219 cx: &mut Context<Self>,
2220 ) {
2221 use proto::update_diff_bases::Mode;
2222
2223 let Some(mode) = Mode::from_i32(message.mode) else {
2224 return;
2225 };
2226
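// Translate the wire-level mode into the internal DiffBasesChange so that only
// the base texts named by the message are actually replaced.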
2227 let diff_bases_change = match mode {
2228 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2229 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2230 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2231 Mode::IndexAndHead => DiffBasesChange::SetEach {
2232 index: message.staged_text,
2233 head: message.committed_text,
2234 },
2235 };
2236
2237 let _ = self.diff_bases_changed(
2238 buffer,
2239 diff_bases_change,
2240 self.hunk_staging_operation_count,
2241 cx,
2242 );
2243 }
2244
2245 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2246 if self.diff_updated_futures.is_empty() {
2247 return None;
2248 }
2249 let (tx, rx) = oneshot::channel();
2250 self.diff_updated_futures.push(tx);
2251 Some(rx)
2252 }
2253
2254 fn diff_bases_changed(
2255 &mut self,
2256 buffer: text::BufferSnapshot,
2257 diff_bases_change: DiffBasesChange,
2258 prev_hunk_staging_operation_count: usize,
2259 cx: &mut Context<Self>,
2260 ) -> oneshot::Receiver<()> {
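// Normalize line endings on each incoming base text and note which bases
// changed, then kick off a diff recalculation against the current buffer.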
2261 match diff_bases_change {
2262 DiffBasesChange::SetIndex(index) => {
2263 self.index_text = index.map(|mut index| {
2264 text::LineEnding::normalize(&mut index);
2265 Arc::new(index)
2266 });
2267 self.index_changed = true;
2268 }
2269 DiffBasesChange::SetHead(head) => {
2270 self.head_text = head.map(|mut head| {
2271 text::LineEnding::normalize(&mut head);
2272 Arc::new(head)
2273 });
2274 self.head_changed = true;
2275 }
2276 DiffBasesChange::SetBoth(text) => {
2277 let text = text.map(|mut text| {
2278 text::LineEnding::normalize(&mut text);
2279 Arc::new(text)
2280 });
2281 self.head_text = text.clone();
2282 self.index_text = text;
2283 self.head_changed = true;
2284 self.index_changed = true;
2285 }
2286 DiffBasesChange::SetEach { index, head } => {
2287 self.index_text = index.map(|mut index| {
2288 text::LineEnding::normalize(&mut index);
2289 Arc::new(index)
2290 });
2291 self.index_changed = true;
2292 self.head_text = head.map(|mut head| {
2293 text::LineEnding::normalize(&mut head);
2294 Arc::new(head)
2295 });
2296 self.head_changed = true;
2297 }
2298 }
2299
2300 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2301 }
2302
2303 fn recalculate_diffs(
2304 &mut self,
2305 buffer: text::BufferSnapshot,
2306 prev_hunk_staging_operation_count: usize,
2307 cx: &mut Context<Self>,
2308 ) -> oneshot::Receiver<()> {
2309 log::debug!("recalculate diffs");
2310 let (tx, rx) = oneshot::channel();
2311 self.diff_updated_futures.push(tx);
2312
2313 let language = self.language.clone();
2314 let language_registry = self.language_registry.clone();
2315 let unstaged_diff = self.unstaged_diff();
2316 let uncommitted_diff = self.uncommitted_diff();
2317 let head = self.head_text.clone();
2318 let index = self.index_text.clone();
2319 let index_changed = self.index_changed;
2320 let head_changed = self.head_changed;
2321 let language_changed = self.language_changed;
2322 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2323 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2324 (None, None) => true,
2325 _ => false,
2326 };
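// `Arc::ptr_eq` doubles as a cheap "index matches HEAD" check: when both bases
// share one allocation, the uncommitted diff below simply reuses the freshly
// computed unstaged diff instead of diffing the buffer twice.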
2327 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2328 let mut new_unstaged_diff = None;
2329 if let Some(unstaged_diff) = &unstaged_diff {
2330 new_unstaged_diff = Some(
2331 BufferDiff::update_diff(
2332 unstaged_diff.clone(),
2333 buffer.clone(),
2334 index,
2335 index_changed,
2336 language_changed,
2337 language.clone(),
2338 language_registry.clone(),
2339 cx,
2340 )
2341 .await?,
2342 );
2343 }
2344
2345 let mut new_uncommitted_diff = None;
2346 if let Some(uncommitted_diff) = &uncommitted_diff {
2347 new_uncommitted_diff = if index_matches_head {
2348 new_unstaged_diff.clone()
2349 } else {
2350 Some(
2351 BufferDiff::update_diff(
2352 uncommitted_diff.clone(),
2353 buffer.clone(),
2354 head,
2355 head_changed,
2356 language_changed,
2357 language.clone(),
2358 language_registry.clone(),
2359 cx,
2360 )
2361 .await?,
2362 )
2363 }
2364 }
2365
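// If hunks were staged or unstaged while this recalculation was running,
// abandon it so a stale snapshot does not overwrite the newer state.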
2366 if this.update(cx, |this, _| {
2367 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2368 })? {
2369 log::debug!("aborting diff recalculation; hunks were staged or unstaged while recalculating");
2370 return Ok(());
2371 }
2372
2373 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2374 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2375 {
2376 unstaged_diff.update(cx, |diff, cx| {
2377 if language_changed {
2378 diff.language_changed(cx);
2379 }
2380 diff.set_snapshot(new_unstaged_diff, &buffer, None, cx)
2381 })?
2382 } else {
2383 None
2384 };
2385
2386 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2387 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2388 {
2389 uncommitted_diff.update(cx, |diff, cx| {
2390 if language_changed {
2391 diff.language_changed(cx);
2392 }
2393 diff.set_snapshot(new_uncommitted_diff, &buffer, unstaged_changed_range, cx);
2394 })?;
2395 }
2396
2397 if let Some(this) = this.upgrade() {
2398 this.update(cx, |this, _| {
2399 this.index_changed = false;
2400 this.head_changed = false;
2401 this.language_changed = false;
2402 for tx in this.diff_updated_futures.drain(..) {
2403 tx.send(()).ok();
2404 }
2405 })?;
2406 }
2407
2408 Ok(())
2409 }));
2410
2411 rx
2412 }
2413}
2414
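/// Builds an askpass delegate that forwards credential prompts to the downstream
/// client over RPC and relays the response back to the local askpass session.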
2415fn make_remote_delegate(
2416 this: Entity<GitStore>,
2417 project_id: u64,
2418 repository_id: RepositoryId,
2419 askpass_id: u64,
2420 cx: &mut AsyncApp,
2421) -> AskPassDelegate {
2422 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2423 this.update(cx, |this, cx| {
2424 let Some((client, _)) = this.downstream_client() else {
2425 return;
2426 };
2427 let response = client.request(proto::AskPassRequest {
2428 project_id,
2429 repository_id: repository_id.to_proto(),
2430 askpass_id,
2431 prompt,
2432 });
2433 cx.spawn(async move |_, _| {
2434 tx.send(response.await?.response).ok();
2435 anyhow::Ok(())
2436 })
2437 .detach_and_log_err(cx);
2438 })
2439 .log_err();
2440 })
2441}
2442
2443impl RepositoryId {
2444 pub fn to_proto(self) -> u64 {
2445 self.0
2446 }
2447
2448 pub fn from_proto(id: u64) -> Self {
2449 RepositoryId(id)
2450 }
2451}
2452
2453impl RepositorySnapshot {
2454 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2455 Self {
2456 id,
2457 merge_message: None,
2458 statuses_by_path: Default::default(),
2459 work_directory_abs_path,
2460 branch: None,
2461 merge_conflicts: Default::default(),
2462 merge_head_shas: Default::default(),
2463 scan_id: 0,
2464 }
2465 }
2466
2467 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2468 proto::UpdateRepository {
2469 branch_summary: self.branch.as_ref().map(branch_to_proto),
2470 updated_statuses: self
2471 .statuses_by_path
2472 .iter()
2473 .map(|entry| entry.to_proto())
2474 .collect(),
2475 removed_statuses: Default::default(),
2476 current_merge_conflicts: self
2477 .merge_conflicts
2478 .iter()
2479 .map(|repo_path| repo_path.to_proto())
2480 .collect(),
2481 project_id,
2482 id: self.id.to_proto(),
2483 abs_path: self.work_directory_abs_path.to_proto(),
2484 entry_ids: vec![self.id.to_proto()],
2485 scan_id: self.scan_id,
2486 is_last_update: true,
2487 }
2488 }
2489
2490 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2491 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2492 let mut removed_statuses: Vec<String> = Vec::new();
2493
2494 let mut new_statuses = self.statuses_by_path.iter().peekable();
2495 let mut old_statuses = old.statuses_by_path.iter().peekable();
2496
2497 let mut current_new_entry = new_statuses.next();
2498 let mut current_old_entry = old_statuses.next();
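// Walk both sorted status lists in lock step, emitting updates for paths that are
// new or changed and removals for paths that no longer have a status entry.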
2499 loop {
2500 match (current_new_entry, current_old_entry) {
2501 (Some(new_entry), Some(old_entry)) => {
2502 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2503 Ordering::Less => {
2504 updated_statuses.push(new_entry.to_proto());
2505 current_new_entry = new_statuses.next();
2506 }
2507 Ordering::Equal => {
2508 if new_entry.status != old_entry.status {
2509 updated_statuses.push(new_entry.to_proto());
2510 }
2511 current_old_entry = old_statuses.next();
2512 current_new_entry = new_statuses.next();
2513 }
2514 Ordering::Greater => {
2515 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2516 current_old_entry = old_statuses.next();
2517 }
2518 }
2519 }
2520 (None, Some(old_entry)) => {
2521 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2522 current_old_entry = old_statuses.next();
2523 }
2524 (Some(new_entry), None) => {
2525 updated_statuses.push(new_entry.to_proto());
2526 current_new_entry = new_statuses.next();
2527 }
2528 (None, None) => break,
2529 }
2530 }
2531
2532 proto::UpdateRepository {
2533 branch_summary: self.branch.as_ref().map(branch_to_proto),
2534 updated_statuses,
2535 removed_statuses,
2536 current_merge_conflicts: self
2537 .merge_conflicts
2538 .iter()
2539 .map(|path| path.as_ref().to_proto())
2540 .collect(),
2541 project_id,
2542 id: self.id.to_proto(),
2543 abs_path: self.work_directory_abs_path.to_proto(),
2544 entry_ids: vec![],
2545 scan_id: self.scan_id,
2546 is_last_update: true,
2547 }
2548 }
2549
2550 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2551 self.statuses_by_path.iter().cloned()
2552 }
2553
2554 pub fn status_summary(&self) -> GitSummary {
2555 self.statuses_by_path.summary().item_summary
2556 }
2557
2558 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2559 self.statuses_by_path
2560 .get(&PathKey(path.0.clone()), &())
2561 .cloned()
2562 }
2563
2564 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2565 abs_path
2566 .strip_prefix(&self.work_directory_abs_path)
2567 .map(RepoPath::from)
2568 .ok()
2569 }
2570
2571 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2572 self.statuses_by_path
2573 .get(&PathKey(repo_path.0.clone()), &())
2574 .map_or(false, |entry| entry.status.is_conflicted())
2575 }
2576
2577 /// The name displayed for this repository in the repository selector.
2578 pub fn display_name(&self) -> SharedString {
2579 self.work_directory_abs_path
2580 .file_name()
2581 .unwrap_or_default()
2582 .to_string_lossy()
2583 .to_string()
2584 .into()
2585 }
2586}
2587
2588impl Repository {
2589 fn local(
2590 id: RepositoryId,
2591 work_directory_abs_path: Arc<Path>,
2592 dot_git_abs_path: Arc<Path>,
2593 project_environment: WeakEntity<ProjectEnvironment>,
2594 fs: Arc<dyn Fs>,
2595 git_store: WeakEntity<GitStore>,
2596 cx: &mut Context<Self>,
2597 ) -> Self {
2598 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2599 Repository {
2600 git_store,
2601 snapshot,
2602 commit_message_buffer: None,
2603 askpass_delegates: Default::default(),
2604 paths_needing_status_update: Default::default(),
2605 latest_askpass_id: 0,
2606 job_sender: Repository::spawn_local_git_worker(
2607 work_directory_abs_path,
2608 dot_git_abs_path,
2609 project_environment,
2610 fs,
2611 cx,
2612 ),
2613 }
2614 }
2615
2616 fn remote(
2617 id: RepositoryId,
2618 work_directory_abs_path: Arc<Path>,
2619 project_id: ProjectId,
2620 client: AnyProtoClient,
2621 git_store: WeakEntity<GitStore>,
2622 cx: &mut Context<Self>,
2623 ) -> Self {
2624 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2625 Self {
2626 snapshot,
2627 commit_message_buffer: None,
2628 git_store,
2629 paths_needing_status_update: Default::default(),
2630 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2631 askpass_delegates: Default::default(),
2632 latest_askpass_id: 0,
2633 }
2634 }
2635
2636 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2637 self.git_store.upgrade()
2638 }
2639
2640 pub fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
2641 where
2642 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2643 Fut: Future<Output = R> + 'static,
2644 R: Send + 'static,
2645 {
2646 self.send_keyed_job(None, job)
2647 }
2648
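/// Like `send_job`, but jobs that share a key are coalesced: the worker loop skips
/// a keyed job whenever a newer job with the same key is already waiting in the queue.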
2649 fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
2650 where
2651 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2652 Fut: Future<Output = R> + 'static,
2653 R: Send + 'static,
2654 {
2655 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2656 self.job_sender
2657 .unbounded_send(GitJob {
2658 key,
2659 job: Box::new(|state, cx: &mut AsyncApp| {
2660 let job = job(state, cx.clone());
2661 cx.spawn(async move |_| {
2662 let result = job.await;
2663 result_tx.send(result).ok();
2664 })
2665 }),
2666 })
2667 .ok();
2668 result_rx
2669 }
2670
2671 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2672 let Some(git_store) = self.git_store.upgrade() else {
2673 return;
2674 };
2675 let entity = cx.entity();
2676 git_store.update(cx, |git_store, cx| {
2677 let Some((&id, _)) = git_store
2678 .repositories
2679 .iter()
2680 .find(|(_, handle)| *handle == &entity)
2681 else {
2682 return;
2683 };
2684 git_store.active_repo_id = Some(id);
2685 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2686 });
2687 }
2688
2689 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2690 self.snapshot.status()
2691 }
2692
2693 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2694 let git_store = self.git_store.upgrade()?;
2695 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2696 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2697 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2698 Some(ProjectPath {
2699 worktree_id: worktree.read(cx).id(),
2700 path: relative_path.into(),
2701 })
2702 }
2703
2704 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2705 let git_store = self.git_store.upgrade()?;
2706 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2707 let abs_path = worktree_store.absolutize(path, cx)?;
2708 self.snapshot.abs_path_to_repo_path(&abs_path)
2709 }
2710
2711 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2712 other
2713 .read(cx)
2714 .snapshot
2715 .work_directory_abs_path
2716 .starts_with(&self.snapshot.work_directory_abs_path)
2717 }
2718
2719 pub fn open_commit_buffer(
2720 &mut self,
2721 languages: Option<Arc<LanguageRegistry>>,
2722 buffer_store: Entity<BufferStore>,
2723 cx: &mut Context<Self>,
2724 ) -> Task<Result<Entity<Buffer>>> {
2725 let id = self.id;
2726 if let Some(buffer) = self.commit_message_buffer.clone() {
2727 return Task::ready(Ok(buffer));
2728 }
2729 let this = cx.weak_entity();
2730
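// Local repositories create the commit message buffer directly; remote ones ask
// the host to open it and then wait for the buffer to be replicated to this peer.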
2731 let rx = self.send_job(move |state, mut cx| async move {
2732 let Some(this) = this.upgrade() else {
2733 bail!("git store was dropped");
2734 };
2735 match state {
2736 RepositoryState::Local { .. } => {
2737 this.update(&mut cx, |_, cx| {
2738 Self::open_local_commit_buffer(languages, buffer_store, cx)
2739 })?
2740 .await
2741 }
2742 RepositoryState::Remote { project_id, client } => {
2743 let request = client.request(proto::OpenCommitMessageBuffer {
2744 project_id: project_id.0,
2745 repository_id: id.to_proto(),
2746 });
2747 let response = request.await.context("requesting to open commit buffer")?;
2748 let buffer_id = BufferId::new(response.buffer_id)?;
2749 let buffer = buffer_store
2750 .update(&mut cx, |buffer_store, cx| {
2751 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2752 })?
2753 .await?;
2754 if let Some(language_registry) = languages {
2755 let git_commit_language =
2756 language_registry.language_for_name("Git Commit").await?;
2757 buffer.update(&mut cx, |buffer, cx| {
2758 buffer.set_language(Some(git_commit_language), cx);
2759 })?;
2760 }
2761 this.update(&mut cx, |this, _| {
2762 this.commit_message_buffer = Some(buffer.clone());
2763 })?;
2764 Ok(buffer)
2765 }
2766 }
2767 });
2768
2769 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2770 }
2771
2772 fn open_local_commit_buffer(
2773 language_registry: Option<Arc<LanguageRegistry>>,
2774 buffer_store: Entity<BufferStore>,
2775 cx: &mut Context<Self>,
2776 ) -> Task<Result<Entity<Buffer>>> {
2777 cx.spawn(async move |repository, cx| {
2778 let buffer = buffer_store
2779 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2780 .await?;
2781
2782 if let Some(language_registry) = language_registry {
2783 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2784 buffer.update(cx, |buffer, cx| {
2785 buffer.set_language(Some(git_commit_language), cx);
2786 })?;
2787 }
2788
2789 repository.update(cx, |repository, _| {
2790 repository.commit_message_buffer = Some(buffer.clone());
2791 })?;
2792 Ok(buffer)
2793 })
2794 }
2795
2796 pub fn checkout_files(
2797 &self,
2798 commit: &str,
2799 paths: Vec<RepoPath>,
2800 _cx: &mut App,
2801 ) -> oneshot::Receiver<Result<()>> {
2802 let commit = commit.to_string();
2803 let id = self.id;
2804
2805 self.send_job(move |git_repo, _| async move {
2806 match git_repo {
2807 RepositoryState::Local {
2808 backend,
2809 environment,
2810 ..
2811 } => {
2812 backend
2813 .checkout_files(commit, paths, environment.clone())
2814 .await
2815 }
2816 RepositoryState::Remote { project_id, client } => {
2817 client
2818 .request(proto::GitCheckoutFiles {
2819 project_id: project_id.0,
2820 repository_id: id.to_proto(),
2821 commit,
2822 paths: paths
2823 .into_iter()
2824 .map(|p| p.to_string_lossy().to_string())
2825 .collect(),
2826 })
2827 .await?;
2828
2829 Ok(())
2830 }
2831 }
2832 })
2833 }
2834
2835 pub fn reset(
2836 &self,
2837 commit: String,
2838 reset_mode: ResetMode,
2839 _cx: &mut App,
2840 ) -> oneshot::Receiver<Result<()>> {
2842 let id = self.id;
2843
2844 self.send_job(move |git_repo, _| async move {
2845 match git_repo {
2846 RepositoryState::Local {
2847 backend,
2848 environment,
2849 ..
2850 } => backend.reset(commit, reset_mode, environment).await,
2851 RepositoryState::Remote { project_id, client } => {
2852 client
2853 .request(proto::GitReset {
2854 project_id: project_id.0,
2855 repository_id: id.to_proto(),
2856 commit,
2857 mode: match reset_mode {
2858 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
2859 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
2860 },
2861 })
2862 .await?;
2863
2864 Ok(())
2865 }
2866 }
2867 })
2868 }
2869
2870 pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
2871 let id = self.id;
2872 self.send_job(move |git_repo, _cx| async move {
2873 match git_repo {
2874 RepositoryState::Local { backend, .. } => backend.show(commit).await,
2875 RepositoryState::Remote { project_id, client } => {
2876 let resp = client
2877 .request(proto::GitShow {
2878 project_id: project_id.0,
2879 repository_id: id.to_proto(),
2880 commit,
2881 })
2882 .await?;
2883
2884 Ok(CommitDetails {
2885 sha: resp.sha.into(),
2886 message: resp.message.into(),
2887 commit_timestamp: resp.commit_timestamp,
2888 author_email: resp.author_email.into(),
2889 author_name: resp.author_name.into(),
2890 })
2891 }
2892 }
2893 })
2894 }
2895
2896 pub fn load_commit_diff(&self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
2897 let id = self.id;
2898 self.send_job(move |git_repo, cx| async move {
2899 match git_repo {
2900 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
2901 RepositoryState::Remote {
2902 client, project_id, ..
2903 } => {
2904 let response = client
2905 .request(proto::LoadCommitDiff {
2906 project_id: project_id.0,
2907 repository_id: id.to_proto(),
2908 commit,
2909 })
2910 .await?;
2911 Ok(CommitDiff {
2912 files: response
2913 .files
2914 .into_iter()
2915 .map(|file| CommitFile {
2916 path: Path::new(&file.path).into(),
2917 old_text: file.old_text,
2918 new_text: file.new_text,
2919 })
2920 .collect(),
2921 })
2922 }
2923 }
2924 })
2925 }
2926
2927 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
2928 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
2929 }
2930
2931 pub fn stage_entries(
2932 &self,
2933 entries: Vec<RepoPath>,
2934 cx: &mut Context<Self>,
2935 ) -> Task<anyhow::Result<()>> {
2936 if entries.is_empty() {
2937 return Task::ready(Ok(()));
2938 }
2939 let id = self.id;
2940
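// Save any open buffers for these paths (when their files still exist on disk)
// before staging, so unsaved edits are included in what gets staged.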
2941 let mut save_futures = Vec::new();
2942 if let Some(buffer_store) = self.buffer_store(cx) {
2943 buffer_store.update(cx, |buffer_store, cx| {
2944 for path in &entries {
2945 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
2946 continue;
2947 };
2948 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
2949 if buffer
2950 .read(cx)
2951 .file()
2952 .map_or(false, |file| file.disk_state().exists())
2953 {
2954 save_futures.push(buffer_store.save_buffer(buffer, cx));
2955 }
2956 }
2957 }
2958 })
2959 }
2960
2961 cx.spawn(async move |this, cx| {
2962 for save_future in save_futures {
2963 save_future.await?;
2964 }
2965
2966 this.update(cx, |this, _| {
2967 this.send_job(move |git_repo, _cx| async move {
2968 match git_repo {
2969 RepositoryState::Local {
2970 backend,
2971 environment,
2972 ..
2973 } => backend.stage_paths(entries, environment.clone()).await,
2974 RepositoryState::Remote { project_id, client } => {
2975 client
2976 .request(proto::Stage {
2977 project_id: project_id.0,
2978 repository_id: id.to_proto(),
2979 paths: entries
2980 .into_iter()
2981 .map(|repo_path| repo_path.as_ref().to_proto())
2982 .collect(),
2983 })
2984 .await
2985 .context("sending stage request")?;
2986
2987 Ok(())
2988 }
2989 }
2990 })
2991 })?
2992 .await??;
2993
2994 Ok(())
2995 })
2996 }
2997
2998 pub fn unstage_entries(
2999 &self,
3000 entries: Vec<RepoPath>,
3001 cx: &mut Context<Self>,
3002 ) -> Task<anyhow::Result<()>> {
3003 if entries.is_empty() {
3004 return Task::ready(Ok(()));
3005 }
3006 let id = self.id;
3007
3008 let mut save_futures = Vec::new();
3009 if let Some(buffer_store) = self.buffer_store(cx) {
3010 buffer_store.update(cx, |buffer_store, cx| {
3011 for path in &entries {
3012 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3013 continue;
3014 };
3015 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3016 if buffer
3017 .read(cx)
3018 .file()
3019 .map_or(false, |file| file.disk_state().exists())
3020 {
3021 save_futures.push(buffer_store.save_buffer(buffer, cx));
3022 }
3023 }
3024 }
3025 })
3026 }
3027
3028 cx.spawn(async move |this, cx| {
3029 for save_future in save_futures {
3030 save_future.await?;
3031 }
3032
3033 this.update(cx, |this, _| {
3034 this.send_job(move |git_repo, _cx| async move {
3035 match git_repo {
3036 RepositoryState::Local {
3037 backend,
3038 environment,
3039 ..
3040 } => backend.unstage_paths(entries, environment).await,
3041 RepositoryState::Remote { project_id, client } => {
3042 client
3043 .request(proto::Unstage {
3044 project_id: project_id.0,
3045 repository_id: id.to_proto(),
3046 paths: entries
3047 .into_iter()
3048 .map(|repo_path| repo_path.as_ref().to_proto())
3049 .collect(),
3050 })
3051 .await
3052 .context("sending unstage request")?;
3053
3054 Ok(())
3055 }
3056 }
3057 })
3058 })?
3059 .await??;
3060
3061 Ok(())
3062 })
3063 }
3064
3065 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3066 let to_stage = self
3067 .cached_status()
3068 .filter(|entry| !entry.status.staging().is_fully_staged())
3069 .map(|entry| entry.repo_path.clone())
3070 .collect();
3071 self.stage_entries(to_stage, cx)
3072 }
3073
3074 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3075 let to_unstage = self
3076 .cached_status()
3077 .filter(|entry| entry.status.staging().has_staged())
3078 .map(|entry| entry.repo_path.clone())
3079 .collect();
3080 self.unstage_entries(to_unstage, cx)
3081 }
3082
3083 pub fn commit(
3084 &self,
3085 message: SharedString,
3086 name_and_email: Option<(SharedString, SharedString)>,
3087 _cx: &mut App,
3088 ) -> oneshot::Receiver<Result<()>> {
3089 let id = self.id;
3090
3091 self.send_job(move |git_repo, _cx| async move {
3092 match git_repo {
3093 RepositoryState::Local {
3094 backend,
3095 environment,
3096 ..
3097 } => backend.commit(message, name_and_email, environment).await,
3098 RepositoryState::Remote { project_id, client } => {
3099 let (name, email) = name_and_email.unzip();
3100 client
3101 .request(proto::Commit {
3102 project_id: project_id.0,
3103 repository_id: id.to_proto(),
3104 message: String::from(message),
3105 name: name.map(String::from),
3106 email: email.map(String::from),
3107 })
3108 .await
3109 .context("sending commit request")?;
3110
3111 Ok(())
3112 }
3113 }
3114 })
3115 }
3116
3117 pub fn fetch(
3118 &mut self,
3119 askpass: AskPassDelegate,
3120 _cx: &mut App,
3121 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3122 let askpass_delegates = self.askpass_delegates.clone();
3123 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3124 let id = self.id;
3125
3126 self.send_job(move |git_repo, cx| async move {
3127 match git_repo {
3128 RepositoryState::Local {
3129 backend,
3130 environment,
3131 ..
3132 } => backend.fetch(askpass, environment, cx).await,
3133 RepositoryState::Remote { project_id, client } => {
3134 askpass_delegates.lock().insert(askpass_id, askpass);
3135 let _defer = util::defer(|| {
3136 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3137 debug_assert!(askpass_delegate.is_some());
3138 });
3139
3140 let response = client
3141 .request(proto::Fetch {
3142 project_id: project_id.0,
3143 repository_id: id.to_proto(),
3144 askpass_id,
3145 })
3146 .await
3147 .context("sending fetch request")?;
3148
3149 Ok(RemoteCommandOutput {
3150 stdout: response.stdout,
3151 stderr: response.stderr,
3152 })
3153 }
3154 }
3155 })
3156 }
3157
3158 pub fn push(
3159 &mut self,
3160 branch: SharedString,
3161 remote: SharedString,
3162 options: Option<PushOptions>,
3163 askpass: AskPassDelegate,
3164 _cx: &mut App,
3165 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3166 let askpass_delegates = self.askpass_delegates.clone();
3167 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3168 let id = self.id;
3169
3170 self.send_job(move |git_repo, cx| async move {
3171 match git_repo {
3172 RepositoryState::Local {
3173 backend,
3174 environment,
3175 ..
3176 } => {
3177 backend
3178 .push(
3179 branch.to_string(),
3180 remote.to_string(),
3181 options,
3182 askpass,
3183 environment.clone(),
3184 cx,
3185 )
3186 .await
3187 }
3188 RepositoryState::Remote { project_id, client } => {
3189 askpass_delegates.lock().insert(askpass_id, askpass);
3190 let _defer = util::defer(|| {
3191 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3192 debug_assert!(askpass_delegate.is_some());
3193 });
3194 let response = client
3195 .request(proto::Push {
3196 project_id: project_id.0,
3197 repository_id: id.to_proto(),
3198 askpass_id,
3199 branch_name: branch.to_string(),
3200 remote_name: remote.to_string(),
3201 options: options.map(|options| match options {
3202 PushOptions::Force => proto::push::PushOptions::Force,
3203 PushOptions::SetUpstream => proto::push::PushOptions::SetUpstream,
3204 } as i32),
3205 })
3206 .await
3207 .context("sending push request")?;
3208
3209 Ok(RemoteCommandOutput {
3210 stdout: response.stdout,
3211 stderr: response.stderr,
3212 })
3213 }
3214 }
3215 })
3216 }
3217
3218 pub fn pull(
3219 &mut self,
3220 branch: SharedString,
3221 remote: SharedString,
3222 askpass: AskPassDelegate,
3223 _cx: &mut App,
3224 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3225 let askpass_delegates = self.askpass_delegates.clone();
3226 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3227 let id = self.id;
3228
3229 self.send_job(move |git_repo, cx| async move {
3230 match git_repo {
3231 RepositoryState::Local {
3232 backend,
3233 environment,
3234 ..
3235 } => {
3236 backend
3237 .pull(
3238 branch.to_string(),
3239 remote.to_string(),
3240 askpass,
3241 environment.clone(),
3242 cx,
3243 )
3244 .await
3245 }
3246 RepositoryState::Remote { project_id, client } => {
3247 askpass_delegates.lock().insert(askpass_id, askpass);
3248 let _defer = util::defer(|| {
3249 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3250 debug_assert!(askpass_delegate.is_some());
3251 });
3252 let response = client
3253 .request(proto::Pull {
3254 project_id: project_id.0,
3255 repository_id: id.to_proto(),
3256 askpass_id,
3257 branch_name: branch.to_string(),
3258 remote_name: remote.to_string(),
3259 })
3260 .await
3261 .context("sending pull request")?;
3262
3263 Ok(RemoteCommandOutput {
3264 stdout: response.stdout,
3265 stderr: response.stderr,
3266 })
3267 }
3268 }
3269 })
3270 }
3271
3272 fn spawn_set_index_text_job(
3273 &self,
3274 path: RepoPath,
3275 content: Option<String>,
3276 _cx: &mut App,
3277 ) -> oneshot::Receiver<anyhow::Result<()>> {
3278 let id = self.id;
3279
3280 self.send_keyed_job(
3281 Some(GitJobKey::WriteIndex(path.clone())),
3282 move |git_repo, _cx| async move {
3283 match git_repo {
3284 RepositoryState::Local {
3285 backend,
3286 environment,
3287 ..
3288 } => {
3289 backend
3290 .set_index_text(path, content, environment.clone())
3291 .await
3292 }
3293 RepositoryState::Remote { project_id, client } => {
3294 client
3295 .request(proto::SetIndexText {
3296 project_id: project_id.0,
3297 repository_id: id.to_proto(),
3298 path: path.as_ref().to_proto(),
3299 text: content,
3300 })
3301 .await?;
3302 Ok(())
3303 }
3304 }
3305 },
3306 )
3307 }
3308
3309 pub fn get_remotes(
3310 &self,
3311 branch_name: Option<String>,
3312 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3313 let id = self.id;
3314 self.send_job(move |repo, _cx| async move {
3315 match repo {
3316 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3317 RepositoryState::Remote { project_id, client } => {
3318 let response = client
3319 .request(proto::GetRemotes {
3320 project_id: project_id.0,
3321 repository_id: id.to_proto(),
3322 branch_name,
3323 })
3324 .await?;
3325
3326 let remotes = response
3327 .remotes
3328 .into_iter()
3329 .map(|remote| git::repository::Remote {
3330 name: remote.name.into(),
3331 })
3332 .collect();
3333
3334 Ok(remotes)
3335 }
3336 }
3337 })
3338 }
3339
3340 pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3341 let id = self.id;
3342 self.send_job(move |repo, cx| async move {
3343 match repo {
3344 RepositoryState::Local { backend, .. } => {
3345 let backend = backend.clone();
3346 cx.background_spawn(async move { backend.branches().await })
3347 .await
3348 }
3349 RepositoryState::Remote { project_id, client } => {
3350 let response = client
3351 .request(proto::GitGetBranches {
3352 project_id: project_id.0,
3353 repository_id: id.to_proto(),
3354 })
3355 .await?;
3356
3357 let branches = response
3358 .branches
3359 .into_iter()
3360 .map(|branch| proto_to_branch(&branch))
3361 .collect();
3362
3363 Ok(branches)
3364 }
3365 }
3366 })
3367 }
3368
3369 pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3370 let id = self.id;
3371 self.send_job(move |repo, _cx| async move {
3372 match repo {
3373 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3374 RepositoryState::Remote { project_id, client } => {
3375 let response = client
3376 .request(proto::GitDiff {
3377 project_id: project_id.0,
3378 repository_id: id.to_proto(),
3379 diff_type: match diff_type {
3380 DiffType::HeadToIndex => {
3381 proto::git_diff::DiffType::HeadToIndex.into()
3382 }
3383 DiffType::HeadToWorktree => {
3384 proto::git_diff::DiffType::HeadToWorktree.into()
3385 }
3386 },
3387 })
3388 .await?;
3389
3390 Ok(response.diff)
3391 }
3392 }
3393 })
3394 }
3395
3396 pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3397 let id = self.id;
3398 self.send_job(move |repo, _cx| async move {
3399 match repo {
3400 RepositoryState::Local { backend, .. } => backend.create_branch(branch_name).await,
3401 RepositoryState::Remote { project_id, client } => {
3402 client
3403 .request(proto::GitCreateBranch {
3404 project_id: project_id.0,
3405 repository_id: id.to_proto(),
3406 branch_name,
3407 })
3408 .await?;
3409
3410 Ok(())
3411 }
3412 }
3413 })
3414 }
3415
3416 pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3417 let id = self.id;
3418 self.send_job(move |repo, _cx| async move {
3419 match repo {
3420 RepositoryState::Local { backend, .. } => backend.change_branch(branch_name).await,
3421 RepositoryState::Remote { project_id, client } => {
3422 client
3423 .request(proto::GitChangeBranch {
3424 project_id: project_id.0,
3425 repository_id: id.to_proto(),
3426 branch_name,
3427 })
3428 .await?;
3429
3430 Ok(())
3431 }
3432 }
3433 })
3434 }
3435
3436 pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3437 let id = self.id;
3438 self.send_job(move |repo, _cx| async move {
3439 match repo {
3440 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3441 RepositoryState::Remote { project_id, client } => {
3442 let response = client
3443 .request(proto::CheckForPushedCommits {
3444 project_id: project_id.0,
3445 repository_id: id.to_proto(),
3446 })
3447 .await?;
3448
3449 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3450
3451 Ok(branches)
3452 }
3453 }
3454 })
3455 }
3456
3457 pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3458 self.send_job(|repo, _cx| async move {
3459 match repo {
3460 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3461 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3462 }
3463 })
3464 }
3465
3466 pub fn restore_checkpoint(
3467 &self,
3468 checkpoint: GitRepositoryCheckpoint,
3469 ) -> oneshot::Receiver<Result<()>> {
3470 self.send_job(move |repo, _cx| async move {
3471 match repo {
3472 RepositoryState::Local { backend, .. } => {
3473 backend.restore_checkpoint(checkpoint).await
3474 }
3475 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3476 }
3477 })
3478 }
3479
3480 pub(crate) fn apply_remote_update(
3481 &mut self,
3482 update: proto::UpdateRepository,
3483 cx: &mut Context<Self>,
3484 ) -> Result<()> {
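// Replace branch and merge-conflict state wholesale, then splice the status
// removals and insertions from this update into the path-keyed status tree.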
3485 let conflicted_paths = TreeSet::from_ordered_entries(
3486 update
3487 .current_merge_conflicts
3488 .into_iter()
3489 .map(|path| RepoPath(Path::new(&path).into())),
3490 );
3491 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3492 self.snapshot.merge_conflicts = conflicted_paths;
3493
3494 let edits = update
3495 .removed_statuses
3496 .into_iter()
3497 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3498 .chain(
3499 update
3500 .updated_statuses
3501 .into_iter()
3502 .filter_map(|updated_status| {
3503 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3504 }),
3505 )
3506 .collect::<Vec<_>>();
3507 self.snapshot.statuses_by_path.edit(edits, &());
3508 if update.is_last_update {
3509 self.snapshot.scan_id = update.scan_id;
3510 }
3511 cx.emit(RepositoryEvent::Updated);
3512 Ok(())
3513 }
3514
3515 pub fn compare_checkpoints(
3516 &self,
3517 left: GitRepositoryCheckpoint,
3518 right: GitRepositoryCheckpoint,
3519 ) -> oneshot::Receiver<Result<bool>> {
3520 self.send_job(move |repo, _cx| async move {
3521 match repo {
3522 RepositoryState::Local { backend, .. } => {
3523 backend.compare_checkpoints(left, right).await
3524 }
3525 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3526 }
3527 })
3528 }
3529
3530 pub fn delete_checkpoint(
3531 &self,
3532 checkpoint: GitRepositoryCheckpoint,
3533 ) -> oneshot::Receiver<Result<()>> {
3534 self.send_job(move |repo, _cx| async move {
3535 match repo {
3536 RepositoryState::Local { backend, .. } => {
3537 backend.delete_checkpoint(checkpoint).await
3538 }
3539 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3540 }
3541 })
3542 }
3543
3544 pub fn diff_checkpoints(
3545 &self,
3546 base_checkpoint: GitRepositoryCheckpoint,
3547 target_checkpoint: GitRepositoryCheckpoint,
3548 ) -> oneshot::Receiver<Result<String>> {
3549 self.send_job(move |repo, _cx| async move {
3550 match repo {
3551 RepositoryState::Local { backend, .. } => {
3552 backend
3553 .diff_checkpoints(base_checkpoint, target_checkpoint)
3554 .await
3555 }
3556 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3557 }
3558 })
3559 }
3560
3561 fn schedule_scan(
3562 &mut self,
3563 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3564 cx: &mut Context<Self>,
3565 ) {
3566 self.paths_changed(
3567 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3568 updates_tx.clone(),
3569 cx,
3570 );
3571
3572 let this = cx.weak_entity();
3573 let _ = self.send_keyed_job(
3574 Some(GitJobKey::ReloadGitState),
3575 |state, mut cx| async move {
3576 let Some(this) = this.upgrade() else {
3577 return Ok(());
3578 };
3579 let RepositoryState::Local { backend, .. } = state else {
3580 bail!("not a local repository")
3581 };
3582 let (snapshot, events) = this
3583 .update(&mut cx, |this, _| {
3584 compute_snapshot(
3585 this.id,
3586 this.work_directory_abs_path.clone(),
3587 this.snapshot.clone(),
3588 backend.clone(),
3589 )
3590 })?
3591 .await?;
3592 this.update(&mut cx, |this, cx| {
3593 this.snapshot = snapshot.clone();
3594 for event in events {
3595 cx.emit(event);
3596 }
3597 })?;
3598 if let Some(updates_tx) = updates_tx {
3599 updates_tx
3600 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3601 .ok();
3602 }
3603 Ok(())
3604 },
3605 );
3606 }
3607
3608 fn spawn_local_git_worker(
3609 work_directory_abs_path: Arc<Path>,
3610 dot_git_abs_path: Arc<Path>,
3611 project_environment: WeakEntity<ProjectEnvironment>,
3612 fs: Arc<dyn Fs>,
3613 cx: &mut Context<Self>,
3614 ) -> mpsc::UnboundedSender<GitJob> {
3615 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3616
3617 cx.spawn(async move |_, cx| {
3618 let environment = project_environment
3619 .upgrade()
3620 .ok_or_else(|| anyhow!("missing project environment"))?
3621 .update(cx, |project_environment, cx| {
3622 project_environment.get_environment(Some(work_directory_abs_path), cx)
3623 })?
3624 .await
3625 .ok_or_else(|| {
3626 anyhow!("failed to get environment for repository working directory")
3627 })?;
3628 let backend = cx
3629 .background_spawn(async move {
3630 fs.open_repo(&dot_git_abs_path)
3631 .ok_or_else(|| anyhow!("failed to build repository"))
3632 })
3633 .await?;
3634
3635 if let Some(git_hosting_provider_registry) =
3636 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3637 {
3638 git_hosting_providers::register_additional_providers(
3639 git_hosting_provider_registry,
3640 backend.clone(),
3641 );
3642 }
3643
3644 let state = RepositoryState::Local {
3645 backend,
3646 environment: Arc::new(environment),
3647 };
3648 let mut jobs = VecDeque::new();
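// Process jobs one at a time, skipping any keyed job that has a newer job with
// the same key already queued so bursts of identical requests coalesce.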
3649 loop {
3650 while let Ok(Some(next_job)) = job_rx.try_next() {
3651 jobs.push_back(next_job);
3652 }
3653
3654 if let Some(job) = jobs.pop_front() {
3655 if let Some(current_key) = &job.key {
3656 if jobs
3657 .iter()
3658 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3659 {
3660 continue;
3661 }
3662 }
3663 (job.job)(state.clone(), cx).await;
3664 } else if let Some(job) = job_rx.next().await {
3665 jobs.push_back(job);
3666 } else {
3667 break;
3668 }
3669 }
3670 anyhow::Ok(())
3671 })
3672 .detach_and_log_err(cx);
3673
3674 job_tx
3675 }
3676
3677 fn spawn_remote_git_worker(
3678 project_id: ProjectId,
3679 client: AnyProtoClient,
3680 cx: &mut Context<Self>,
3681 ) -> mpsc::UnboundedSender<GitJob> {
3682 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3683
3684 cx.spawn(async move |_, cx| {
3685 let state = RepositoryState::Remote { project_id, client };
3686 let mut jobs = VecDeque::new();
3687 loop {
3688 while let Ok(Some(next_job)) = job_rx.try_next() {
3689 jobs.push_back(next_job);
3690 }
3691
3692 if let Some(job) = jobs.pop_front() {
3693 if let Some(current_key) = &job.key {
3694 if jobs
3695 .iter()
3696 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3697 {
3698 continue;
3699 }
3700 }
3701 (job.job)(state.clone(), cx).await;
3702 } else if let Some(job) = job_rx.next().await {
3703 jobs.push_back(job);
3704 } else {
3705 break;
3706 }
3707 }
3708 anyhow::Ok(())
3709 })
3710 .detach_and_log_err(cx);
3711
3712 job_tx
3713 }
3714
3715 fn load_staged_text(
3716 &self,
3717 buffer_id: BufferId,
3718 repo_path: RepoPath,
3719 cx: &App,
3720 ) -> Task<Result<Option<String>>> {
3721 let rx = self.send_job(move |state, _| async move {
3722 match state {
3723 RepositoryState::Local { backend, .. } => {
3724 anyhow::Ok(backend.load_index_text(repo_path).await)
3725 }
3726 RepositoryState::Remote { project_id, client } => {
3727 let response = client
3728 .request(proto::OpenUnstagedDiff {
3729 project_id: project_id.to_proto(),
3730 buffer_id: buffer_id.to_proto(),
3731 })
3732 .await?;
3733 Ok(response.staged_text)
3734 }
3735 }
3736 });
3737 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3738 }
3739
3740 fn load_committed_text(
3741 &self,
3742 buffer_id: BufferId,
3743 repo_path: RepoPath,
3744 cx: &App,
3745 ) -> Task<Result<DiffBasesChange>> {
3746 let rx = self.send_job(move |state, _| async move {
3747 match state {
3748 RepositoryState::Local { backend, .. } => {
3749 let committed_text = backend.load_committed_text(repo_path.clone()).await;
3750 let staged_text = backend.load_index_text(repo_path).await;
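// When HEAD and the index contain identical text, collapse them into a single
// SetBoth update so both diffs can share one base text downstream.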
3751 let diff_bases_change = if committed_text == staged_text {
3752 DiffBasesChange::SetBoth(committed_text)
3753 } else {
3754 DiffBasesChange::SetEach {
3755 index: staged_text,
3756 head: committed_text,
3757 }
3758 };
3759 anyhow::Ok(diff_bases_change)
3760 }
3761 RepositoryState::Remote { project_id, client } => {
3762 use proto::open_uncommitted_diff_response::Mode;
3763
3764 let response = client
3765 .request(proto::OpenUncommittedDiff {
3766 project_id: project_id.to_proto(),
3767 buffer_id: buffer_id.to_proto(),
3768 })
3769 .await?;
3770 let mode =
3771 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
3772 let bases = match mode {
3773 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
3774 Mode::IndexAndHead => DiffBasesChange::SetEach {
3775 head: response.committed_text,
3776 index: response.staged_text,
3777 },
3778 };
3779 Ok(bases)
3780 }
3781 }
3782 });
3783
3784 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3785 }
3786
3787 fn paths_changed(
3788 &mut self,
3789 paths: Vec<RepoPath>,
3790 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3791 cx: &mut Context<Self>,
3792 ) {
3793 self.paths_needing_status_update.extend(paths);
3794
3795 let this = cx.weak_entity();
3796 let _ = self.send_keyed_job(
3797 Some(GitJobKey::RefreshStatuses),
3798 |state, mut cx| async move {
3799 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
3800 (
3801 this.snapshot.clone(),
3802 mem::take(&mut this.paths_needing_status_update),
3803 )
3804 })?;
3805 let RepositoryState::Local { backend, .. } = state else {
3806 bail!("not a local repository")
3807 };
3808
3809 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
3810 let statuses = backend.status(&paths).await?;
3811
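// Diff the fresh statuses against the previous snapshot on a background thread,
// producing tree edits only for paths whose status changed or disappeared.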
3812 let changed_path_statuses = cx
3813 .background_spawn(async move {
3814 let mut changed_path_statuses = Vec::new();
3815 let prev_statuses = prev_snapshot.statuses_by_path.clone();
3816 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3817
3818 for (repo_path, status) in &*statuses.entries {
3819 changed_paths.remove(repo_path);
3820 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
3821 if &cursor.item().unwrap().status == status {
3822 continue;
3823 }
3824 }
3825
3826 changed_path_statuses.push(Edit::Insert(StatusEntry {
3827 repo_path: repo_path.clone(),
3828 status: *status,
3829 }));
3830 }
3831 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3832 for path in changed_paths.iter() {
3833 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
3834 changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
3835 }
3836 }
3837 changed_path_statuses
3838 })
3839 .await;
3840
3841 this.update(&mut cx, |this, cx| {
3842 if !changed_path_statuses.is_empty() {
3843 this.snapshot
3844 .statuses_by_path
3845 .edit(changed_path_statuses, &());
3846 this.snapshot.scan_id += 1;
3847 if let Some(updates_tx) = updates_tx {
3848 updates_tx
3849 .unbounded_send(DownstreamUpdate::UpdateRepository(
3850 this.snapshot.clone(),
3851 ))
3852 .ok();
3853 }
3854 }
3855 cx.emit(RepositoryEvent::Updated);
3856 })
3857 },
3858 );
3859 }
3860}
3861
3862fn get_permalink_in_rust_registry_src(
3863 provider_registry: Arc<GitHostingProviderRegistry>,
3864 path: PathBuf,
3865 selection: Range<u32>,
3866) -> Result<url::Url> {
3867 #[derive(Deserialize)]
3868 struct CargoVcsGit {
3869 sha1: String,
3870 }
3871
3872 #[derive(Deserialize)]
3873 struct CargoVcsInfo {
3874 git: CargoVcsGit,
3875 path_in_vcs: String,
3876 }
3877
3878 #[derive(Deserialize)]
3879 struct CargoPackage {
3880 repository: String,
3881 }
3882
3883 #[derive(Deserialize)]
3884 struct CargoToml {
3885 package: CargoPackage,
3886 }
3887
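// Crates unpacked into the registry src directory carry a `.cargo_vcs_info.json`
// recording the commit they were published from and their path within that repo;
// combined with the manifest's `package.repository`, that is enough to rebuild a
// permalink into the upstream repository.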
3888 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
3889 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
3890 Some((dir, json))
3891 }) else {
3892 bail!("No .cargo_vcs_info.json found in parent directories")
3893 };
3894 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
3895 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
3896 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
3897 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
3898 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
3899 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
3900 let permalink = provider.build_permalink(
3901 remote,
3902 BuildPermalinkParams {
3903 sha: &cargo_vcs_info.git.sha1,
3904 path: &path.to_string_lossy(),
3905 selection: Some(selection),
3906 },
3907 );
3908 Ok(permalink)
3909}
3910
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

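/// Inverse of `serialize_blame_buffer_response`: rebuilds a `git::blame::Blame` from the
/// protobuf message, skipping entries whose commit SHAs fail to parse.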
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

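/// Converts a `git::repository::Branch` into its protobuf representation.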
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

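/// Inverse of `branch_to_proto`. A missing `tracking` field is interpreted as a gone
/// upstream, and deserialized commit summaries are assumed to have a parent.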
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

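/// Rebuilds a `RepositorySnapshot` from the current state of the git backend, returning
/// the new snapshot together with the `RepositoryEvent`s implied by its differences from
/// `prev_snapshot`.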
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().next()?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated);
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

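/// Decodes a `FileStatus` from its protobuf representation, falling back to the flat
/// `simple_status` code when no structured variant is present.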
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

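/// Encodes a `FileStatus` as its structured protobuf variant.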
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

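/// Maps an `UnmergedStatusCode` to the corresponding `proto::GitStatus` discriminant.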
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

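/// Maps a tracked-file `StatusCode` to the corresponding `proto::GitStatus` discriminant.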
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}