1pub mod git_traversal;
2
3use crate::{
4 ProjectEnvironment, ProjectItem, ProjectPath,
5 buffer_store::{BufferStore, BufferStoreEvent},
6 worktree_store::{WorktreeStore, WorktreeStoreEvent},
7};
8use anyhow::{Context as _, Result, anyhow, bail};
9use askpass::AskPassDelegate;
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 FutureExt as _, StreamExt as _,
16 channel::{mpsc, oneshot},
17 future::{self, Shared},
18};
19use git::{
20 BuildPermalinkParams, GitHostingProviderRegistry, WORK_DIRECTORY_REPO_PATH,
21 blame::Blame,
22 parse_git_remote_url,
23 repository::{
24 Branch, CommitDetails, CommitDiff, CommitFile, DiffType, GitRepository,
25 GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
26 UpstreamTrackingStatus,
27 },
28 status::{
29 FileStatus, GitSummary, StatusCode, TrackedStatus, UnmergedStatus, UnmergedStatusCode,
30 },
31};
32use gpui::{
33 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
34 WeakEntity,
35};
36use language::{
37 Buffer, BufferEvent, Language, LanguageRegistry,
38 proto::{deserialize_version, serialize_version},
39};
40use parking_lot::Mutex;
41use rpc::{
42 AnyProtoClient, TypedEnvelope,
43 proto::{self, FromProto, SSH_PROJECT_ID, ToProto, git_reset, split_repository_update},
44};
45use serde::Deserialize;
46use std::{
47 cmp::Ordering,
48 collections::{BTreeSet, VecDeque},
49 future::Future,
50 mem,
51 ops::Range,
52 path::{Path, PathBuf},
53 sync::{
54 Arc,
55 atomic::{self, AtomicU64},
56 },
57};
58use sum_tree::{Edit, SumTree, TreeSet};
59use text::{Bias, BufferId};
60use util::{ResultExt, debug_panic};
61use worktree::{
62 File, PathKey, PathProgress, PathSummary, PathTarget, UpdatedGitRepositoriesSet, Worktree,
63};
64
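/// Tracks the git state of a project: the repositories discovered in its
/// worktrees, the per-buffer diffs against the index and HEAD, and the
/// plumbing for mirroring that state to and from collaborators.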
65pub struct GitStore {
66 state: GitStoreState,
67 buffer_store: Entity<BufferStore>,
68 worktree_store: Entity<WorktreeStore>,
69 repositories: HashMap<RepositoryId, Entity<Repository>>,
70 active_repo_id: Option<RepositoryId>,
71 #[allow(clippy::type_complexity)]
72 loading_diffs:
73 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
74 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
75 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
76 _subscriptions: Vec<Subscription>,
77}
78
79#[derive(Default)]
80struct SharedDiffs {
81 unstaged: Option<Entity<BufferDiff>>,
82 uncommitted: Option<Entity<BufferDiff>>,
83}
84
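/// Per-buffer diff bookkeeping: weak handles to the buffer's unstaged and
/// uncommitted diffs, the cached index and HEAD texts they were computed from,
/// and any in-flight recalculation.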
85#[derive(Default)]
86struct BufferDiffState {
87 unstaged_diff: Option<WeakEntity<BufferDiff>>,
88 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
89 recalculate_diff_task: Option<Task<Result<()>>>,
90 language: Option<Arc<Language>>,
91 language_registry: Option<Arc<LanguageRegistry>>,
92 diff_updated_futures: Vec<oneshot::Sender<()>>,
93 hunk_staging_operation_count: usize,
94
95 head_text: Option<Arc<String>>,
96 index_text: Option<Arc<String>>,
97 head_changed: bool,
98 index_changed: bool,
99 language_changed: bool,
100}
101
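/// Describes how the base texts for a buffer's diffs have changed. `SetBoth`
/// covers the case where the index and HEAD contents are identical, so a
/// single text serves as the base for both the unstaged and uncommitted diffs.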
102#[derive(Clone, Debug)]
103enum DiffBasesChange {
104 SetIndex(Option<String>),
105 SetHead(Option<String>),
106 SetEach {
107 index: Option<String>,
108 head: Option<String>,
109 },
110 SetBoth(Option<String>),
111}
112
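/// Which diff is being loaded for a buffer: `Unstaged` compares the buffer
/// against the index, while `Uncommitted` compares it against HEAD and keeps
/// the unstaged diff attached as its secondary diff.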
113#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
114enum DiffKind {
115 Unstaged,
116 Uncommitted,
117}
118
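/// Where the authoritative git state lives. `Local` owns repositories
/// directly, `Ssh` forwards operations to an upstream client (and may itself
/// be shared downstream), and `Remote` only consumes state from an upstream
/// project and is never itself shared.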
119enum GitStoreState {
120 Local {
121 next_repository_id: Arc<AtomicU64>,
122 downstream: Option<LocalDownstreamState>,
123 project_environment: Entity<ProjectEnvironment>,
124 fs: Arc<dyn Fs>,
125 },
126 Ssh {
127 upstream_client: AnyProtoClient,
128 upstream_project_id: ProjectId,
129 downstream: Option<(AnyProtoClient, ProjectId)>,
130 },
131 Remote {
132 upstream_client: AnyProtoClient,
133 upstream_project_id: ProjectId,
134 },
135}
136
137enum DownstreamUpdate {
138 UpdateRepository(RepositorySnapshot),
139 RemoveRepository(RepositoryId),
140}
141
142struct LocalDownstreamState {
143 client: AnyProtoClient,
144 project_id: ProjectId,
145 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
146 _task: Task<Result<()>>,
147}
148
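/// A point-in-time capture of every repository's state, keyed by work
/// directory path. Produced by `GitStore::checkpoint` and consumed by
/// `restore_checkpoint`, `compare_checkpoints`, and `delete_checkpoint`.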
149#[derive(Clone)]
150pub struct GitStoreCheckpoint {
151 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
152}
153
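/// The git status of a single file, addressed by its repository-relative path.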
154#[derive(Clone, Debug, PartialEq, Eq)]
155pub struct StatusEntry {
156 pub repo_path: RepoPath,
157 pub status: FileStatus,
158}
159
160impl StatusEntry {
161 fn to_proto(&self) -> proto::StatusEntry {
162 let simple_status = match self.status {
163 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
164 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
165 FileStatus::Tracked(TrackedStatus {
166 index_status,
167 worktree_status,
168 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
169 worktree_status
170 } else {
171 index_status
172 }),
173 };
174
175 proto::StatusEntry {
176 repo_path: self.repo_path.as_ref().to_proto(),
177 simple_status,
178 status: Some(status_to_proto(self.status)),
179 }
180 }
181}
182
183impl TryFrom<proto::StatusEntry> for StatusEntry {
184 type Error = anyhow::Error;
185
186 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
187 let repo_path = RepoPath(Arc::<Path>::from_proto(value.repo_path));
188 let status = status_from_proto(value.simple_status, value.status)?;
189 Ok(Self { repo_path, status })
190 }
191}
192
193impl sum_tree::Item for StatusEntry {
194 type Summary = PathSummary<GitSummary>;
195
196 fn summary(&self, _: &<Self::Summary as sum_tree::Summary>::Context) -> Self::Summary {
197 PathSummary {
198 max_path: self.repo_path.0.clone(),
199 item_summary: self.status.summary(),
200 }
201 }
202}
203
204impl sum_tree::KeyedItem for StatusEntry {
205 type Key = PathKey;
206
207 fn key(&self) -> Self::Key {
208 PathKey(self.repo_path.0.clone())
209 }
210}
211
212#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
213pub struct RepositoryId(pub u64);
214
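/// An immutable view of a repository's state: scanned file statuses, the
/// current branch, merge information, and the work directory location.
/// Successive snapshots are diffed to produce incremental updates for
/// downstream peers.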
215#[derive(Clone, Debug, PartialEq, Eq)]
216pub struct RepositorySnapshot {
217 pub id: RepositoryId,
218 pub merge_message: Option<SharedString>,
219 pub statuses_by_path: SumTree<StatusEntry>,
220 pub work_directory_abs_path: Arc<Path>,
221 pub branch: Option<Branch>,
222 pub merge_conflicts: TreeSet<RepoPath>,
223 pub merge_head_shas: Vec<SharedString>,
224 pub scan_id: u64,
225}
226
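/// A live handle to one git repository in the project. Dereferences to its
/// current `RepositorySnapshot` and adds the machinery for queueing git jobs,
/// askpass prompting, and scheduling status rescans.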
227pub struct Repository {
228 snapshot: RepositorySnapshot,
229 commit_message_buffer: Option<Entity<Buffer>>,
230 git_store: WeakEntity<GitStore>,
231 // For a local repository, holds paths that have had worktree events since the last status scan completed,
232 // and that should be examined during the next status scan.
233 paths_needing_status_update: BTreeSet<RepoPath>,
234 job_sender: mpsc::UnboundedSender<GitJob>,
235 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
236 latest_askpass_id: u64,
237}
238
239impl std::ops::Deref for Repository {
240 type Target = RepositorySnapshot;
241
242 fn deref(&self) -> &Self::Target {
243 &self.snapshot
244 }
245}
246
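/// How git operations for a repository are executed: directly against a local
/// backend with its captured environment, or proxied over RPC to the project
/// that owns the repository.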
247#[derive(Clone)]
248pub enum RepositoryState {
249 Local {
250 backend: Arc<dyn GitRepository>,
251 environment: Arc<HashMap<String, String>>,
252 },
253 Remote {
254 project_id: ProjectId,
255 client: AnyProtoClient,
256 },
257}
258
259#[derive(Clone, Debug)]
260pub enum RepositoryEvent {
261 Updated,
262 MergeHeadsChanged,
263}
264
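/// Events emitted by the `GitStore`. The boolean carried by
/// `RepositoryUpdated` indicates whether the updated repository is currently
/// the active one.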
265#[derive(Debug)]
266pub enum GitStoreEvent {
267 ActiveRepositoryChanged(Option<RepositoryId>),
268 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
269 RepositoryAdded(RepositoryId),
270 RepositoryRemoved(RepositoryId),
271 IndexWriteError(anyhow::Error),
272}
273
274impl EventEmitter<RepositoryEvent> for Repository {}
275impl EventEmitter<GitStoreEvent> for GitStore {}
276
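/// A unit of work to run against a repository's `RepositoryState`. A job may
/// carry a `GitJobKey` so that equivalent pending work of the same kind can be
/// coalesced rather than queued again.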
277struct GitJob {
278 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
279 key: Option<GitJobKey>,
280}
281
282#[derive(PartialEq, Eq)]
283enum GitJobKey {
284 WriteIndex(RepoPath),
285 BatchReadIndex,
286 RefreshStatuses,
287 ReloadGitState,
288}
289
290impl GitStore {
291 pub fn local(
292 worktree_store: &Entity<WorktreeStore>,
293 buffer_store: Entity<BufferStore>,
294 environment: Entity<ProjectEnvironment>,
295 fs: Arc<dyn Fs>,
296 cx: &mut Context<Self>,
297 ) -> Self {
298 Self::new(
299 worktree_store.clone(),
300 buffer_store,
301 GitStoreState::Local {
302 next_repository_id: Arc::new(AtomicU64::new(1)),
303 downstream: None,
304 project_environment: environment,
305 fs,
306 },
307 cx,
308 )
309 }
310
311 pub fn remote(
312 worktree_store: &Entity<WorktreeStore>,
313 buffer_store: Entity<BufferStore>,
314 upstream_client: AnyProtoClient,
315 project_id: ProjectId,
316 cx: &mut Context<Self>,
317 ) -> Self {
318 Self::new(
319 worktree_store.clone(),
320 buffer_store,
321 GitStoreState::Remote {
322 upstream_client,
323 upstream_project_id: project_id,
324 },
325 cx,
326 )
327 }
328
329 pub fn ssh(
330 worktree_store: &Entity<WorktreeStore>,
331 buffer_store: Entity<BufferStore>,
332 upstream_client: AnyProtoClient,
333 cx: &mut Context<Self>,
334 ) -> Self {
335 Self::new(
336 worktree_store.clone(),
337 buffer_store,
338 GitStoreState::Ssh {
339 upstream_client,
340 upstream_project_id: ProjectId(SSH_PROJECT_ID),
341 downstream: None,
342 },
343 cx,
344 )
345 }
346
347 fn new(
348 worktree_store: Entity<WorktreeStore>,
349 buffer_store: Entity<BufferStore>,
350 state: GitStoreState,
351 cx: &mut Context<Self>,
352 ) -> Self {
353 let _subscriptions = vec![
354 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
355 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
356 ];
357
358 GitStore {
359 state,
360 buffer_store,
361 worktree_store,
362 repositories: HashMap::default(),
363 active_repo_id: None,
364 _subscriptions,
365 loading_diffs: HashMap::default(),
366 shared_diffs: HashMap::default(),
367 diffs: HashMap::default(),
368 }
369 }
370
371 pub fn init(client: &AnyProtoClient) {
372 client.add_entity_request_handler(Self::handle_get_remotes);
373 client.add_entity_request_handler(Self::handle_get_branches);
374 client.add_entity_request_handler(Self::handle_change_branch);
375 client.add_entity_request_handler(Self::handle_create_branch);
376 client.add_entity_request_handler(Self::handle_git_init);
377 client.add_entity_request_handler(Self::handle_push);
378 client.add_entity_request_handler(Self::handle_pull);
379 client.add_entity_request_handler(Self::handle_fetch);
380 client.add_entity_request_handler(Self::handle_stage);
381 client.add_entity_request_handler(Self::handle_unstage);
382 client.add_entity_request_handler(Self::handle_commit);
383 client.add_entity_request_handler(Self::handle_reset);
384 client.add_entity_request_handler(Self::handle_show);
385 client.add_entity_request_handler(Self::handle_load_commit_diff);
386 client.add_entity_request_handler(Self::handle_checkout_files);
387 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
388 client.add_entity_request_handler(Self::handle_set_index_text);
389 client.add_entity_request_handler(Self::handle_askpass);
390 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
391 client.add_entity_request_handler(Self::handle_git_diff);
392 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
393 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
394 client.add_entity_message_handler(Self::handle_update_diff_bases);
395 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
396 client.add_entity_request_handler(Self::handle_blame_buffer);
397 client.add_entity_message_handler(Self::handle_update_repository);
398 client.add_entity_message_handler(Self::handle_remove_repository);
399 }
400
401 pub fn is_local(&self) -> bool {
402 matches!(self.state, GitStoreState::Local { .. })
403 }
404
405 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
406 match &mut self.state {
407 GitStoreState::Ssh {
408 downstream: downstream_client,
409 ..
410 } => {
411 for repo in self.repositories.values() {
412 let update = repo.read(cx).snapshot.initial_update(project_id);
413 for update in split_repository_update(update) {
414 client.send(update).log_err();
415 }
416 }
417 *downstream_client = Some((client, ProjectId(project_id)));
418 }
419 GitStoreState::Local {
420 downstream: downstream_client,
421 ..
422 } => {
423 let mut snapshots = HashMap::default();
424 let (updates_tx, mut updates_rx) = mpsc::unbounded();
425 for repo in self.repositories.values() {
426 updates_tx
427 .unbounded_send(DownstreamUpdate::UpdateRepository(
428 repo.read(cx).snapshot.clone(),
429 ))
430 .ok();
431 }
432 *downstream_client = Some(LocalDownstreamState {
433 client: client.clone(),
434 project_id: ProjectId(project_id),
435 updates_tx,
436 _task: cx.spawn(async move |this, cx| {
437 cx.background_spawn(async move {
438 while let Some(update) = updates_rx.next().await {
439 match update {
440 DownstreamUpdate::UpdateRepository(snapshot) => {
441 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
442 {
443 let update =
444 snapshot.build_update(old_snapshot, project_id);
445 *old_snapshot = snapshot;
446 for update in split_repository_update(update) {
447 client.send(update)?;
448 }
449 } else {
450 let update = snapshot.initial_update(project_id);
451 for update in split_repository_update(update) {
452 client.send(update)?;
453 }
454 snapshots.insert(snapshot.id, snapshot);
455 }
456 }
457 DownstreamUpdate::RemoveRepository(id) => {
458 client.send(proto::RemoveRepository {
459 project_id,
460 id: id.to_proto(),
461 })?;
462 }
463 }
464 }
465 anyhow::Ok(())
466 })
467 .await
468 .ok();
469 this.update(cx, |this, _| {
470 if let GitStoreState::Local {
471 downstream: downstream_client,
472 ..
473 } = &mut this.state
474 {
475 downstream_client.take();
476 } else {
477 unreachable!("unshared called on remote store");
478 }
479 })
480 }),
481 });
482 }
483 GitStoreState::Remote { .. } => {
484 debug_panic!("shared called on remote store");
485 }
486 }
487 }
488
489 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
490 match &mut self.state {
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 downstream_client.take();
496 }
497 GitStoreState::Ssh {
498 downstream: downstream_client,
499 ..
500 } => {
501 downstream_client.take();
502 }
503 GitStoreState::Remote { .. } => {
504 debug_panic!("unshared called on remote store");
505 }
506 }
507 self.shared_diffs.clear();
508 }
509
510 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
511 self.shared_diffs.remove(peer_id);
512 }
513
514 pub fn active_repository(&self) -> Option<Entity<Repository>> {
515 self.active_repo_id
516 .as_ref()
517 .map(|id| self.repositories[&id].clone())
518 }
519
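    /// Returns the diff of a buffer against its index (staged) text, loading
    /// the staged text and creating the diff if it doesn't exist yet.
    /// Concurrent callers share a single loading task per buffer and diff kind.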
520 pub fn open_unstaged_diff(
521 &mut self,
522 buffer: Entity<Buffer>,
523 cx: &mut Context<Self>,
524 ) -> Task<Result<Entity<BufferDiff>>> {
525 let buffer_id = buffer.read(cx).remote_id();
526 if let Some(diff_state) = self.diffs.get(&buffer_id) {
527 if let Some(unstaged_diff) = diff_state
528 .read(cx)
529 .unstaged_diff
530 .as_ref()
531 .and_then(|weak| weak.upgrade())
532 {
533 if let Some(task) =
534 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
535 {
536 return cx.background_executor().spawn(async move {
537 task.await?;
538 Ok(unstaged_diff)
539 });
540 }
541 return Task::ready(Ok(unstaged_diff));
542 }
543 }
544
545 let Some((repo, repo_path)) =
546 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
547 else {
548 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
549 };
550
551 let task = self
552 .loading_diffs
553 .entry((buffer_id, DiffKind::Unstaged))
554 .or_insert_with(|| {
555 let staged_text = repo.read(cx).load_staged_text(buffer_id, repo_path, cx);
556 cx.spawn(async move |this, cx| {
557 Self::open_diff_internal(
558 this,
559 DiffKind::Unstaged,
560 staged_text.await.map(DiffBasesChange::SetIndex),
561 buffer,
562 cx,
563 )
564 .await
565 .map_err(Arc::new)
566 })
567 .shared()
568 })
569 .clone();
570
571 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
572 }
573
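    /// Returns the diff of a buffer against its committed (HEAD) text,
    /// creating it if needed. The resulting diff keeps the unstaged diff as
    /// its secondary diff so staged and unstaged hunks can be distinguished.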
574 pub fn open_uncommitted_diff(
575 &mut self,
576 buffer: Entity<Buffer>,
577 cx: &mut Context<Self>,
578 ) -> Task<Result<Entity<BufferDiff>>> {
579 let buffer_id = buffer.read(cx).remote_id();
580
581 if let Some(diff_state) = self.diffs.get(&buffer_id) {
582 if let Some(uncommitted_diff) = diff_state
583 .read(cx)
584 .uncommitted_diff
585 .as_ref()
586 .and_then(|weak| weak.upgrade())
587 {
588 if let Some(task) =
589 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
590 {
591 return cx.background_executor().spawn(async move {
592 task.await?;
593 Ok(uncommitted_diff)
594 });
595 }
596 return Task::ready(Ok(uncommitted_diff));
597 }
598 }
599
600 let Some((repo, repo_path)) =
601 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
602 else {
603 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
604 };
605
606 let task = self
607 .loading_diffs
608 .entry((buffer_id, DiffKind::Uncommitted))
609 .or_insert_with(|| {
610 let changes = repo.read(cx).load_committed_text(buffer_id, repo_path, cx);
611 cx.spawn(async move |this, cx| {
612 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
613 .await
614 .map_err(Arc::new)
615 })
616 .shared()
617 })
618 .clone();
619
620 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
621 }
622
623 async fn open_diff_internal(
624 this: WeakEntity<Self>,
625 kind: DiffKind,
626 texts: Result<DiffBasesChange>,
627 buffer_entity: Entity<Buffer>,
628 cx: &mut AsyncApp,
629 ) -> Result<Entity<BufferDiff>> {
630 let diff_bases_change = match texts {
631 Err(e) => {
632 this.update(cx, |this, cx| {
633 let buffer = buffer_entity.read(cx);
634 let buffer_id = buffer.remote_id();
635 this.loading_diffs.remove(&(buffer_id, kind));
636 })?;
637 return Err(e);
638 }
639 Ok(change) => change,
640 };
641
642 this.update(cx, |this, cx| {
643 let buffer = buffer_entity.read(cx);
644 let buffer_id = buffer.remote_id();
645 let language = buffer.language().cloned();
646 let language_registry = buffer.language_registry();
647 let text_snapshot = buffer.text_snapshot();
648 this.loading_diffs.remove(&(buffer_id, kind));
649
650 let diff_state = this
651 .diffs
652 .entry(buffer_id)
653 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
654
655 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
656
657 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
658 diff_state.update(cx, |diff_state, cx| {
659 diff_state.language = language;
660 diff_state.language_registry = language_registry;
661
662 match kind {
663 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
664 DiffKind::Uncommitted => {
665 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
666 diff
667 } else {
668 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
669 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
670 unstaged_diff
671 };
672
673 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
674 diff_state.uncommitted_diff = Some(diff.downgrade())
675 }
676 }
677
678 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
679
680 anyhow::Ok(async move {
681 rx.await.ok();
682 Ok(diff)
683 })
684 })
685 })??
686 .await
687 }
688
689 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
690 let diff_state = self.diffs.get(&buffer_id)?;
691 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
692 }
693
694 pub fn get_uncommitted_diff(
695 &self,
696 buffer_id: BufferId,
697 cx: &App,
698 ) -> Option<Entity<BufferDiff>> {
699 let diff_state = self.diffs.get(&buffer_id)?;
700 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
701 }
702
703 pub fn project_path_git_status(
704 &self,
705 project_path: &ProjectPath,
706 cx: &App,
707 ) -> Option<FileStatus> {
708 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
709 Some(repo.read(cx).status_for_path(&repo_path)?.status)
710 }
711
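    /// Captures a checkpoint of every repository in the project, keyed by its
    /// work directory. A minimal sketch of the intended round trip, assuming a
    /// `git_store: Entity<GitStore>` and an `&mut App` context:
    ///
    /// ```ignore
    /// let checkpoint_task = git_store.read(cx).checkpoint(cx); // Task<Result<GitStoreCheckpoint>>
    /// // ...later, with the resolved checkpoint in hand...
    /// let restore_task = git_store.read(cx).restore_checkpoint(checkpoint, cx);
    /// ```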
712 pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
713 let mut work_directory_abs_paths = Vec::new();
714 let mut checkpoints = Vec::new();
715 for repository in self.repositories.values() {
716 let repository = repository.read(cx);
717 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
718 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
719 }
720
721 cx.background_executor().spawn(async move {
722 let checkpoints = future::try_join_all(checkpoints).await?;
723 Ok(GitStoreCheckpoint {
724 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
725 .into_iter()
726 .zip(checkpoints)
727 .collect(),
728 })
729 })
730 }
731
732 pub fn restore_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
733 let repositories_by_work_dir_abs_path = self
734 .repositories
735 .values()
736 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
737 .collect::<HashMap<_, _>>();
738
739 let mut tasks = Vec::new();
740 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
741 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
742 let restore = repository.read(cx).restore_checkpoint(checkpoint);
743 tasks.push(async move { restore.await? });
744 }
745 }
746 cx.background_spawn(async move {
747 future::try_join_all(tasks).await?;
748 Ok(())
749 })
750 }
751
752 /// Compares two checkpoints, returning true if they are equal.
753 pub fn compare_checkpoints(
754 &self,
755 left: GitStoreCheckpoint,
756 mut right: GitStoreCheckpoint,
757 cx: &App,
758 ) -> Task<Result<bool>> {
759 let repositories_by_work_dir_abs_path = self
760 .repositories
761 .values()
762 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
763 .collect::<HashMap<_, _>>();
764
765 let mut tasks = Vec::new();
766 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
767 if let Some(right_checkpoint) = right
768 .checkpoints_by_work_dir_abs_path
769 .remove(&work_dir_abs_path)
770 {
771 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
772 {
773 let compare = repository
774 .read(cx)
775 .compare_checkpoints(left_checkpoint, right_checkpoint);
776 tasks.push(async move { compare.await? });
777 }
778 } else {
779 return Task::ready(Ok(false));
780 }
781 }
782 cx.background_spawn(async move {
783 Ok(future::try_join_all(tasks)
784 .await?
785 .into_iter()
786 .all(|result| result))
787 })
788 }
789
790 pub fn delete_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
791 let repositories_by_work_directory_abs_path = self
792 .repositories
793 .values()
794 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
795 .collect::<HashMap<_, _>>();
796
797 let mut tasks = Vec::new();
798 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
799 if let Some(repository) =
800 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
801 {
802 let delete = repository.read(cx).delete_checkpoint(checkpoint);
803 tasks.push(async move { delete.await? });
804 }
805 }
806 cx.background_spawn(async move {
807 future::try_join_all(tasks).await?;
808 Ok(())
809 })
810 }
811
    /// Computes git blame information for a buffer, optionally at a specific version of its contents.
813 pub fn blame_buffer(
814 &self,
815 buffer: &Entity<Buffer>,
816 version: Option<clock::Global>,
817 cx: &App,
818 ) -> Task<Result<Option<Blame>>> {
819 let buffer = buffer.read(cx);
820 let Some((repo, repo_path)) =
821 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
822 else {
823 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
824 };
825 let content = match &version {
826 Some(version) => buffer.rope_for_version(version).clone(),
827 None => buffer.as_rope().clone(),
828 };
829 let version = version.unwrap_or(buffer.version());
830 let buffer_id = buffer.remote_id();
831
832 let rx = repo.read(cx).send_job(move |state, _| async move {
833 match state {
834 RepositoryState::Local { backend, .. } => backend
835 .blame(repo_path.clone(), content)
836 .await
837 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
838 .map(Some),
839 RepositoryState::Remote { project_id, client } => {
840 let response = client
841 .request(proto::BlameBuffer {
842 project_id: project_id.to_proto(),
843 buffer_id: buffer_id.into(),
844 version: serialize_version(&version),
845 })
846 .await?;
847 Ok(deserialize_blame_buffer_response(response))
848 }
849 }
850 });
851
852 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
853 }
854
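    /// Builds a permalink URL to the selected line range of a buffer on its
    /// git hosting provider. Buffers outside any repository fall back to crate
    /// metadata when they are Rust sources in the Cargo registry.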
855 pub fn get_permalink_to_line(
856 &self,
857 buffer: &Entity<Buffer>,
858 selection: Range<u32>,
859 cx: &App,
860 ) -> Task<Result<url::Url>> {
861 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
862 return Task::ready(Err(anyhow!("buffer has no file")));
863 };
864
865 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
866 &(file.worktree.read(cx).id(), file.path.clone()).into(),
867 cx,
868 ) else {
869 // If we're not in a Git repo, check whether this is a Rust source
870 // file in the Cargo registry (presumably opened with go-to-definition
871 // from a normal Rust file). If so, we can put together a permalink
872 // using crate metadata.
873 if buffer
874 .read(cx)
875 .language()
876 .is_none_or(|lang| lang.name() != "Rust".into())
877 {
878 return Task::ready(Err(anyhow!("no permalink available")));
879 }
880 let Some(file_path) = file.worktree.read(cx).absolutize(&file.path).ok() else {
881 return Task::ready(Err(anyhow!("no permalink available")));
882 };
883 return cx.spawn(async move |cx| {
884 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
885 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
886 .map_err(|_| anyhow!("no permalink available"))
887 });
888
889 // TODO remote case
890 };
891
892 let buffer_id = buffer.read(cx).remote_id();
893 let branch = repo.read(cx).branch.clone();
894 let remote = branch
895 .as_ref()
896 .and_then(|b| b.upstream.as_ref())
897 .and_then(|b| b.remote_name())
898 .unwrap_or("origin")
899 .to_string();
900 let rx = repo.read(cx).send_job(move |state, cx| async move {
901 match state {
902 RepositoryState::Local { backend, .. } => {
903 let origin_url = backend
904 .remote_url(&remote)
905 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
906
907 let sha = backend
908 .head_sha()
909 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
910
911 let provider_registry =
912 cx.update(GitHostingProviderRegistry::default_global)?;
913
914 let (provider, remote) =
915 parse_git_remote_url(provider_registry, &origin_url)
916 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
917
918 let path = repo_path
919 .to_str()
920 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
921
922 Ok(provider.build_permalink(
923 remote,
924 BuildPermalinkParams {
925 sha: &sha,
926 path,
927 selection: Some(selection),
928 },
929 ))
930 }
931 RepositoryState::Remote { project_id, client } => {
932 let response = client
933 .request(proto::GetPermalinkToLine {
934 project_id: project_id.to_proto(),
935 buffer_id: buffer_id.into(),
936 selection: Some(proto::Range {
937 start: selection.start as u64,
938 end: selection.end as u64,
939 }),
940 })
941 .await?;
942
943 url::Url::parse(&response.permalink).context("failed to parse permalink")
944 }
945 }
946 });
947 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
948 }
949
950 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
951 match &self.state {
952 GitStoreState::Local {
953 downstream: downstream_client,
954 ..
955 } => downstream_client
956 .as_ref()
957 .map(|state| (state.client.clone(), state.project_id)),
958 GitStoreState::Ssh {
959 downstream: downstream_client,
960 ..
961 } => downstream_client.clone(),
962 GitStoreState::Remote { .. } => None,
963 }
964 }
965
966 fn upstream_client(&self) -> Option<AnyProtoClient> {
967 match &self.state {
968 GitStoreState::Local { .. } => None,
969 GitStoreState::Ssh {
970 upstream_client, ..
971 }
972 | GitStoreState::Remote {
973 upstream_client, ..
974 } => Some(upstream_client.clone()),
975 }
976 }
977
978 fn on_worktree_store_event(
979 &mut self,
980 worktree_store: Entity<WorktreeStore>,
981 event: &WorktreeStoreEvent,
982 cx: &mut Context<Self>,
983 ) {
984 let GitStoreState::Local {
985 project_environment,
986 downstream,
987 next_repository_id,
988 fs,
989 } = &self.state
990 else {
991 return;
992 };
993
994 match event {
995 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
996 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
997 for (relative_path, _, _) in updated_entries.iter() {
998 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
999 &(*worktree_id, relative_path.clone()).into(),
1000 cx,
1001 ) else {
1002 continue;
1003 };
1004 paths_by_git_repo.entry(repo).or_default().push(repo_path)
1005 }
1006
1007 for (repo, paths) in paths_by_git_repo {
1008 repo.update(cx, |repo, cx| {
1009 repo.paths_changed(
1010 paths,
1011 downstream
1012 .as_ref()
1013 .map(|downstream| downstream.updates_tx.clone()),
1014 cx,
1015 );
1016 });
1017 }
1018 }
1019 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1020 self.update_repositories_from_worktrees(
1021 project_environment.clone(),
1022 next_repository_id.clone(),
1023 downstream
1024 .as_ref()
1025 .map(|downstream| downstream.updates_tx.clone()),
1026 changed_repos.clone(),
1027 fs.clone(),
1028 cx,
1029 );
1030 if let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx) {
1031 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1032 }
1033 }
1034 _ => {}
1035 }
1036 }
1037
1038 fn on_repository_event(
1039 &mut self,
1040 repo: Entity<Repository>,
1041 event: &RepositoryEvent,
1042 cx: &mut Context<Self>,
1043 ) {
1044 let id = repo.read(cx).id;
1045 cx.emit(GitStoreEvent::RepositoryUpdated(
1046 id,
1047 event.clone(),
1048 self.active_repo_id == Some(id),
1049 ))
1050 }
1051
    /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1053 fn update_repositories_from_worktrees(
1054 &mut self,
1055 project_environment: Entity<ProjectEnvironment>,
1056 next_repository_id: Arc<AtomicU64>,
1057 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1058 updated_git_repositories: UpdatedGitRepositoriesSet,
1059 fs: Arc<dyn Fs>,
1060 cx: &mut Context<Self>,
1061 ) {
1062 let mut removed_ids = Vec::new();
1063 for update in updated_git_repositories.iter() {
1064 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1065 let existing_work_directory_abs_path =
1066 repo.read(cx).work_directory_abs_path.clone();
1067 Some(&existing_work_directory_abs_path)
1068 == update.old_work_directory_abs_path.as_ref()
1069 || Some(&existing_work_directory_abs_path)
1070 == update.new_work_directory_abs_path.as_ref()
1071 }) {
1072 if let Some(new_work_directory_abs_path) =
1073 update.new_work_directory_abs_path.clone()
1074 {
1075 existing.update(cx, |existing, cx| {
1076 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1077 existing.schedule_scan(updates_tx.clone(), cx);
1078 });
1079 } else {
1080 removed_ids.push(*id);
1081 }
1082 } else if let Some((work_directory_abs_path, dot_git_abs_path)) = update
1083 .new_work_directory_abs_path
1084 .clone()
1085 .zip(update.dot_git_abs_path.clone())
1086 {
1087 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1088 let git_store = cx.weak_entity();
1089 let repo = cx.new(|cx| {
1090 let mut repo = Repository::local(
1091 id,
1092 work_directory_abs_path,
1093 dot_git_abs_path,
1094 project_environment.downgrade(),
1095 fs.clone(),
1096 git_store,
1097 cx,
1098 );
1099 repo.schedule_scan(updates_tx.clone(), cx);
1100 repo
1101 });
1102 self._subscriptions
1103 .push(cx.subscribe(&repo, Self::on_repository_event));
1104 self.repositories.insert(id, repo);
1105 cx.emit(GitStoreEvent::RepositoryAdded(id));
1106 self.active_repo_id.get_or_insert_with(|| {
1107 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1108 id
1109 });
1110 }
1111 }
1112
1113 for id in removed_ids {
1114 if self.active_repo_id == Some(id) {
1115 self.active_repo_id = None;
1116 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1117 }
1118 self.repositories.remove(&id);
1119 if let Some(updates_tx) = updates_tx.as_ref() {
1120 updates_tx
1121 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1122 .ok();
1123 }
1124 }
1125 }
1126
1127 fn on_buffer_store_event(
1128 &mut self,
1129 _: Entity<BufferStore>,
1130 event: &BufferStoreEvent,
1131 cx: &mut Context<Self>,
1132 ) {
1133 match event {
1134 BufferStoreEvent::BufferAdded(buffer) => {
1135 cx.subscribe(&buffer, |this, buffer, event, cx| {
1136 if let BufferEvent::LanguageChanged = event {
1137 let buffer_id = buffer.read(cx).remote_id();
1138 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1139 diff_state.update(cx, |diff_state, cx| {
1140 diff_state.buffer_language_changed(buffer, cx);
1141 });
1142 }
1143 }
1144 })
1145 .detach();
1146 }
1147 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1148 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1149 diffs.remove(buffer_id);
1150 }
1151 }
1152 BufferStoreEvent::BufferDropped(buffer_id) => {
1153 self.diffs.remove(&buffer_id);
1154 for diffs in self.shared_diffs.values_mut() {
1155 diffs.remove(buffer_id);
1156 }
1157 }
1158
1159 _ => {}
1160 }
1161 }
1162
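    /// Recalculates the diffs of the given buffers against their current base
    /// texts, returning a future that resolves once every recalculation has
    /// finished.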
1163 pub fn recalculate_buffer_diffs(
1164 &mut self,
1165 buffers: Vec<Entity<Buffer>>,
1166 cx: &mut Context<Self>,
1167 ) -> impl Future<Output = ()> + use<> {
1168 let mut futures = Vec::new();
1169 for buffer in buffers {
1170 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1171 let buffer = buffer.read(cx).text_snapshot();
1172 futures.push(diff_state.update(cx, |diff_state, cx| {
1173 diff_state.recalculate_diffs(
1174 buffer,
1175 diff_state.hunk_staging_operation_count,
1176 cx,
1177 )
1178 }));
1179 }
1180 }
1181 async move {
1182 futures::future::join_all(futures).await;
1183 }
1184 }
1185
1186 fn on_buffer_diff_event(
1187 &mut self,
1188 diff: Entity<buffer_diff::BufferDiff>,
1189 event: &BufferDiffEvent,
1190 cx: &mut Context<Self>,
1191 ) {
1192 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1193 let buffer_id = diff.read(cx).buffer_id;
1194 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1195 diff_state.update(cx, |diff_state, _| {
1196 diff_state.hunk_staging_operation_count += 1;
1197 });
1198 }
1199 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1200 let recv = repo.update(cx, |repo, cx| {
1201 log::debug!("updating index text for buffer {}", path.display());
1202 repo.spawn_set_index_text_job(
1203 path,
1204 new_index_text.as_ref().map(|rope| rope.to_string()),
1205 cx,
1206 )
1207 });
1208 let diff = diff.downgrade();
1209 cx.spawn(async move |this, cx| {
1210 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1211 diff.update(cx, |diff, cx| {
1212 diff.clear_pending_hunks(cx);
1213 })
1214 .ok();
1215 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1216 .ok();
1217 }
1218 })
1219 .detach();
1220 }
1221 }
1222 }
1223
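    /// When repositories in a local worktree change, reload the index and HEAD
    /// base texts for any open buffers that belong to those repositories,
    /// batching the reads into one keyed job per repository and forwarding the
    /// new bases to downstream peers.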
1224 fn local_worktree_git_repos_changed(
1225 &mut self,
1226 worktree: Entity<Worktree>,
1227 changed_repos: &UpdatedGitRepositoriesSet,
1228 cx: &mut Context<Self>,
1229 ) {
1230 log::debug!("local worktree repos changed");
1231 debug_assert!(worktree.read(cx).is_local());
1232
1233 let mut diff_state_updates = HashMap::<Entity<Repository>, Vec<_>>::default();
1234 for (buffer_id, diff_state) in &self.diffs {
1235 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1236 continue;
1237 };
1238 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1239 continue;
1240 };
1241 if file.worktree != worktree {
1242 continue;
1243 }
1244 let Some((repo, repo_path)) =
1245 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1246 else {
1247 continue;
1248 };
1249 if !changed_repos.iter().any(|update| {
1250 update.old_work_directory_abs_path.as_ref()
1251 == Some(&repo.read(cx).work_directory_abs_path)
1252 || update.new_work_directory_abs_path.as_ref()
1253 == Some(&repo.read(cx).work_directory_abs_path)
1254 }) {
1255 continue;
1256 }
1257
1258 let diff_state = diff_state.read(cx);
1259 let has_unstaged_diff = diff_state
1260 .unstaged_diff
1261 .as_ref()
1262 .is_some_and(|diff| diff.is_upgradable());
1263 let has_uncommitted_diff = diff_state
1264 .uncommitted_diff
1265 .as_ref()
1266 .is_some_and(|set| set.is_upgradable());
1267
1268 let update = (
1269 buffer,
1270 repo_path,
1271 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1272 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1273 diff_state.hunk_staging_operation_count,
1274 );
1275 diff_state_updates.entry(repo).or_default().push(update);
1276 }
1277
1278 if diff_state_updates.is_empty() {
1279 return;
1280 }
1281
1282 for (repo, repo_diff_state_updates) in diff_state_updates.into_iter() {
1283 let git_store = cx.weak_entity();
1284
1285 let _ = repo.read(cx).send_keyed_job(
1286 Some(GitJobKey::BatchReadIndex),
1287 |state, mut cx| async move {
1288 let RepositoryState::Local { backend, .. } = state else {
1289 log::error!("tried to recompute diffs for a non-local repository");
1290 return;
1291 };
1292 let mut diff_bases_changes_by_buffer = Vec::new();
1293 for (
1294 buffer,
1295 repo_path,
1296 current_index_text,
1297 current_head_text,
1298 hunk_staging_operation_count,
1299 ) in &repo_diff_state_updates
1300 {
1301 let index_text = if current_index_text.is_some() {
1302 backend.load_index_text(repo_path.clone()).await
1303 } else {
1304 None
1305 };
1306 let head_text = if current_head_text.is_some() {
1307 backend.load_committed_text(repo_path.clone()).await
1308 } else {
1309 None
1310 };
1311
1312 // Avoid triggering a diff update if the base text has not changed.
1313 if let Some((current_index, current_head)) =
1314 current_index_text.as_ref().zip(current_head_text.as_ref())
1315 {
1316 if current_index.as_deref() == index_text.as_ref()
1317 && current_head.as_deref() == head_text.as_ref()
1318 {
1319 continue;
1320 }
1321 }
1322
1323 let diff_bases_change =
1324 match (current_index_text.is_some(), current_head_text.is_some()) {
1325 (true, true) => Some(if index_text == head_text {
1326 DiffBasesChange::SetBoth(head_text)
1327 } else {
1328 DiffBasesChange::SetEach {
1329 index: index_text,
1330 head: head_text,
1331 }
1332 }),
1333 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1334 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1335 (false, false) => None,
1336 };
1337
1338 diff_bases_changes_by_buffer.push((
1339 buffer,
1340 diff_bases_change,
1341 *hunk_staging_operation_count,
1342 ))
1343 }
1344
1345 git_store
1346 .update(&mut cx, |git_store, cx| {
1347 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1348 diff_bases_changes_by_buffer
1349 {
1350 let Some(diff_state) =
1351 git_store.diffs.get(&buffer.read(cx).remote_id())
1352 else {
1353 continue;
1354 };
1355 let Some(diff_bases_change) = diff_bases_change else {
1356 continue;
1357 };
1358
1359 let downstream_client = git_store.downstream_client();
1360 diff_state.update(cx, |diff_state, cx| {
1361 use proto::update_diff_bases::Mode;
1362
1363 let buffer = buffer.read(cx);
1364 if let Some((client, project_id)) = downstream_client {
1365 let (staged_text, committed_text, mode) =
1366 match diff_bases_change.clone() {
1367 DiffBasesChange::SetIndex(index) => {
1368 (index, None, Mode::IndexOnly)
1369 }
1370 DiffBasesChange::SetHead(head) => {
1371 (None, head, Mode::HeadOnly)
1372 }
1373 DiffBasesChange::SetEach { index, head } => {
1374 (index, head, Mode::IndexAndHead)
1375 }
1376 DiffBasesChange::SetBoth(text) => {
1377 (None, text, Mode::IndexMatchesHead)
1378 }
1379 };
1380 let message = proto::UpdateDiffBases {
1381 project_id: project_id.to_proto(),
1382 buffer_id: buffer.remote_id().to_proto(),
1383 staged_text,
1384 committed_text,
1385 mode: mode as i32,
1386 };
1387
1388 client.send(message).log_err();
1389 }
1390
1391 let _ = diff_state.diff_bases_changed(
1392 buffer.text_snapshot(),
1393 diff_bases_change,
1394 hunk_staging_operation_count,
1395 cx,
1396 );
1397 });
1398 }
1399 })
1400 .ok();
1401 },
1402 );
1403 }
1404 }
1405
1406 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1407 &self.repositories
1408 }
1409
1410 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1411 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1412 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1413 Some(status.status)
1414 }
1415
1416 pub fn repository_and_path_for_buffer_id(
1417 &self,
1418 buffer_id: BufferId,
1419 cx: &App,
1420 ) -> Option<(Entity<Repository>, RepoPath)> {
1421 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1422 let project_path = buffer.read(cx).project_path(cx)?;
1423 self.repository_and_path_for_project_path(&project_path, cx)
1424 }
1425
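    /// Resolves a project path to the repository containing it and the path
    /// relative to that repository's work directory. When several repositories
    /// contain the path, the one with the deepest work directory wins.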
1426 pub fn repository_and_path_for_project_path(
1427 &self,
1428 path: &ProjectPath,
1429 cx: &App,
1430 ) -> Option<(Entity<Repository>, RepoPath)> {
1431 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1432 self.repositories
1433 .values()
1434 .filter_map(|repo| {
1435 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1436 Some((repo.clone(), repo_path))
1437 })
1438 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1439 }
1440
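    /// Initializes a new git repository at the given path, either directly via
    /// the local filesystem or by forwarding the request to the upstream
    /// project.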
1441 pub fn git_init(
1442 &self,
1443 path: Arc<Path>,
1444 fallback_branch_name: String,
1445 cx: &App,
1446 ) -> Task<Result<()>> {
1447 match &self.state {
1448 GitStoreState::Local { fs, .. } => {
1449 let fs = fs.clone();
1450 cx.background_executor()
1451 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1452 }
1453 GitStoreState::Ssh {
1454 upstream_client,
1455 upstream_project_id: project_id,
1456 ..
1457 }
1458 | GitStoreState::Remote {
1459 upstream_client,
1460 upstream_project_id: project_id,
1461 ..
1462 } => {
1463 let client = upstream_client.clone();
1464 let project_id = *project_id;
1465 cx.background_executor().spawn(async move {
1466 client
1467 .request(proto::GitInit {
1468 project_id: project_id.0,
1469 abs_path: path.to_string_lossy().to_string(),
1470 fallback_branch_name,
1471 })
1472 .await?;
1473 Ok(())
1474 })
1475 }
1476 }
1477 }
1478
1479 async fn handle_update_repository(
1480 this: Entity<Self>,
1481 envelope: TypedEnvelope<proto::UpdateRepository>,
1482 mut cx: AsyncApp,
1483 ) -> Result<()> {
1484 this.update(&mut cx, |this, cx| {
1485 let mut update = envelope.payload;
1486
1487 let id = RepositoryId::from_proto(update.id);
1488 let client = this
1489 .upstream_client()
1490 .context("no upstream client")?
1491 .clone();
1492
1493 let mut is_new = false;
1494 let repo = this.repositories.entry(id).or_insert_with(|| {
1495 is_new = true;
1496 let git_store = cx.weak_entity();
1497 cx.new(|cx| {
1498 Repository::remote(
1499 id,
1500 Path::new(&update.abs_path).into(),
1501 ProjectId(update.project_id),
1502 client,
1503 git_store,
1504 cx,
1505 )
1506 })
1507 });
1508 if is_new {
1509 this._subscriptions
1510 .push(cx.subscribe(&repo, Self::on_repository_event))
1511 }
1512
1513 repo.update(cx, {
1514 let update = update.clone();
1515 |repo, cx| repo.apply_remote_update(update, cx)
1516 })?;
1517
1518 this.active_repo_id.get_or_insert_with(|| {
1519 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1520 id
1521 });
1522
1523 if let Some((client, project_id)) = this.downstream_client() {
1524 update.project_id = project_id.to_proto();
1525 client.send(update).log_err();
1526 }
1527 Ok(())
1528 })?
1529 }
1530
1531 async fn handle_remove_repository(
1532 this: Entity<Self>,
1533 envelope: TypedEnvelope<proto::RemoveRepository>,
1534 mut cx: AsyncApp,
1535 ) -> Result<()> {
1536 this.update(&mut cx, |this, cx| {
1537 let mut update = envelope.payload;
1538 let id = RepositoryId::from_proto(update.id);
1539 this.repositories.remove(&id);
1540 if let Some((client, project_id)) = this.downstream_client() {
1541 update.project_id = project_id.to_proto();
1542 client.send(update).log_err();
1543 }
1544 if this.active_repo_id == Some(id) {
1545 this.active_repo_id = None;
1546 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1547 }
1548 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1549 })
1550 }
1551
1552 async fn handle_git_init(
1553 this: Entity<Self>,
1554 envelope: TypedEnvelope<proto::GitInit>,
1555 cx: AsyncApp,
1556 ) -> Result<proto::Ack> {
1557 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1558 let name = envelope.payload.fallback_branch_name;
1559 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1560 .await?;
1561
1562 Ok(proto::Ack {})
1563 }
1564
1565 async fn handle_fetch(
1566 this: Entity<Self>,
1567 envelope: TypedEnvelope<proto::Fetch>,
1568 mut cx: AsyncApp,
1569 ) -> Result<proto::RemoteMessageResponse> {
1570 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1571 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1572 let askpass_id = envelope.payload.askpass_id;
1573
1574 let askpass = make_remote_delegate(
1575 this,
1576 envelope.payload.project_id,
1577 repository_id,
1578 askpass_id,
1579 &mut cx,
1580 );
1581
1582 let remote_output = repository_handle
1583 .update(&mut cx, |repository_handle, cx| {
1584 repository_handle.fetch(askpass, cx)
1585 })?
1586 .await??;
1587
1588 Ok(proto::RemoteMessageResponse {
1589 stdout: remote_output.stdout,
1590 stderr: remote_output.stderr,
1591 })
1592 }
1593
1594 async fn handle_push(
1595 this: Entity<Self>,
1596 envelope: TypedEnvelope<proto::Push>,
1597 mut cx: AsyncApp,
1598 ) -> Result<proto::RemoteMessageResponse> {
1599 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1600 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1601
1602 let askpass_id = envelope.payload.askpass_id;
1603 let askpass = make_remote_delegate(
1604 this,
1605 envelope.payload.project_id,
1606 repository_id,
1607 askpass_id,
1608 &mut cx,
1609 );
1610
1611 let options = envelope
1612 .payload
1613 .options
1614 .as_ref()
1615 .map(|_| match envelope.payload.options() {
1616 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1617 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1618 });
1619
1620 let branch_name = envelope.payload.branch_name.into();
1621 let remote_name = envelope.payload.remote_name.into();
1622
1623 let remote_output = repository_handle
1624 .update(&mut cx, |repository_handle, cx| {
1625 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1626 })?
1627 .await??;
1628 Ok(proto::RemoteMessageResponse {
1629 stdout: remote_output.stdout,
1630 stderr: remote_output.stderr,
1631 })
1632 }
1633
1634 async fn handle_pull(
1635 this: Entity<Self>,
1636 envelope: TypedEnvelope<proto::Pull>,
1637 mut cx: AsyncApp,
1638 ) -> Result<proto::RemoteMessageResponse> {
1639 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1640 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1641 let askpass_id = envelope.payload.askpass_id;
1642 let askpass = make_remote_delegate(
1643 this,
1644 envelope.payload.project_id,
1645 repository_id,
1646 askpass_id,
1647 &mut cx,
1648 );
1649
1650 let branch_name = envelope.payload.branch_name.into();
1651 let remote_name = envelope.payload.remote_name.into();
1652
1653 let remote_message = repository_handle
1654 .update(&mut cx, |repository_handle, cx| {
1655 repository_handle.pull(branch_name, remote_name, askpass, cx)
1656 })?
1657 .await??;
1658
1659 Ok(proto::RemoteMessageResponse {
1660 stdout: remote_message.stdout,
1661 stderr: remote_message.stderr,
1662 })
1663 }
1664
1665 async fn handle_stage(
1666 this: Entity<Self>,
1667 envelope: TypedEnvelope<proto::Stage>,
1668 mut cx: AsyncApp,
1669 ) -> Result<proto::Ack> {
1670 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1671 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1672
1673 let entries = envelope
1674 .payload
1675 .paths
1676 .into_iter()
1677 .map(PathBuf::from)
1678 .map(RepoPath::new)
1679 .collect();
1680
1681 repository_handle
1682 .update(&mut cx, |repository_handle, cx| {
1683 repository_handle.stage_entries(entries, cx)
1684 })?
1685 .await?;
1686 Ok(proto::Ack {})
1687 }
1688
1689 async fn handle_unstage(
1690 this: Entity<Self>,
1691 envelope: TypedEnvelope<proto::Unstage>,
1692 mut cx: AsyncApp,
1693 ) -> Result<proto::Ack> {
1694 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1695 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1696
1697 let entries = envelope
1698 .payload
1699 .paths
1700 .into_iter()
1701 .map(PathBuf::from)
1702 .map(RepoPath::new)
1703 .collect();
1704
1705 repository_handle
1706 .update(&mut cx, |repository_handle, cx| {
1707 repository_handle.unstage_entries(entries, cx)
1708 })?
1709 .await?;
1710
1711 Ok(proto::Ack {})
1712 }
1713
1714 async fn handle_set_index_text(
1715 this: Entity<Self>,
1716 envelope: TypedEnvelope<proto::SetIndexText>,
1717 mut cx: AsyncApp,
1718 ) -> Result<proto::Ack> {
1719 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1720 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1721
1722 repository_handle
1723 .update(&mut cx, |repository_handle, cx| {
1724 repository_handle.spawn_set_index_text_job(
1725 RepoPath::from_str(&envelope.payload.path),
1726 envelope.payload.text,
1727 cx,
1728 )
1729 })?
1730 .await??;
1731 Ok(proto::Ack {})
1732 }
1733
1734 async fn handle_commit(
1735 this: Entity<Self>,
1736 envelope: TypedEnvelope<proto::Commit>,
1737 mut cx: AsyncApp,
1738 ) -> Result<proto::Ack> {
1739 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1740 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1741
1742 let message = SharedString::from(envelope.payload.message);
1743 let name = envelope.payload.name.map(SharedString::from);
1744 let email = envelope.payload.email.map(SharedString::from);
1745
1746 repository_handle
1747 .update(&mut cx, |repository_handle, cx| {
1748 repository_handle.commit(message, name.zip(email), cx)
1749 })?
1750 .await??;
1751 Ok(proto::Ack {})
1752 }
1753
1754 async fn handle_get_remotes(
1755 this: Entity<Self>,
1756 envelope: TypedEnvelope<proto::GetRemotes>,
1757 mut cx: AsyncApp,
1758 ) -> Result<proto::GetRemotesResponse> {
1759 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1760 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1761
1762 let branch_name = envelope.payload.branch_name;
1763
1764 let remotes = repository_handle
1765 .update(&mut cx, |repository_handle, _| {
1766 repository_handle.get_remotes(branch_name)
1767 })?
1768 .await??;
1769
1770 Ok(proto::GetRemotesResponse {
1771 remotes: remotes
1772 .into_iter()
1773 .map(|remotes| proto::get_remotes_response::Remote {
1774 name: remotes.name.to_string(),
1775 })
1776 .collect::<Vec<_>>(),
1777 })
1778 }
1779
1780 async fn handle_get_branches(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::GitGetBranches>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::GitBranchesResponse> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787
1788 let branches = repository_handle
1789 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1790 .await??;
1791
1792 Ok(proto::GitBranchesResponse {
1793 branches: branches
1794 .into_iter()
1795 .map(|branch| branch_to_proto(&branch))
1796 .collect::<Vec<_>>(),
1797 })
1798 }
1799 async fn handle_create_branch(
1800 this: Entity<Self>,
1801 envelope: TypedEnvelope<proto::GitCreateBranch>,
1802 mut cx: AsyncApp,
1803 ) -> Result<proto::Ack> {
1804 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1805 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1806 let branch_name = envelope.payload.branch_name;
1807
1808 repository_handle
1809 .update(&mut cx, |repository_handle, _| {
1810 repository_handle.create_branch(branch_name)
1811 })?
1812 .await??;
1813
1814 Ok(proto::Ack {})
1815 }
1816
1817 async fn handle_change_branch(
1818 this: Entity<Self>,
1819 envelope: TypedEnvelope<proto::GitChangeBranch>,
1820 mut cx: AsyncApp,
1821 ) -> Result<proto::Ack> {
1822 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1823 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1824 let branch_name = envelope.payload.branch_name;
1825
1826 repository_handle
1827 .update(&mut cx, |repository_handle, _| {
1828 repository_handle.change_branch(branch_name)
1829 })?
1830 .await??;
1831
1832 Ok(proto::Ack {})
1833 }
1834
1835 async fn handle_show(
1836 this: Entity<Self>,
1837 envelope: TypedEnvelope<proto::GitShow>,
1838 mut cx: AsyncApp,
1839 ) -> Result<proto::GitCommitDetails> {
1840 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1841 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1842
1843 let commit = repository_handle
1844 .update(&mut cx, |repository_handle, _| {
1845 repository_handle.show(envelope.payload.commit)
1846 })?
1847 .await??;
1848 Ok(proto::GitCommitDetails {
1849 sha: commit.sha.into(),
1850 message: commit.message.into(),
1851 commit_timestamp: commit.commit_timestamp,
1852 author_email: commit.author_email.into(),
1853 author_name: commit.author_name.into(),
1854 })
1855 }
1856
1857 async fn handle_load_commit_diff(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::LoadCommitDiff>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::LoadCommitDiffResponse> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864
1865 let commit_diff = repository_handle
1866 .update(&mut cx, |repository_handle, _| {
1867 repository_handle.load_commit_diff(envelope.payload.commit)
1868 })?
1869 .await??;
1870 Ok(proto::LoadCommitDiffResponse {
1871 files: commit_diff
1872 .files
1873 .into_iter()
1874 .map(|file| proto::CommitFile {
1875 path: file.path.to_string(),
1876 old_text: file.old_text,
1877 new_text: file.new_text,
1878 })
1879 .collect(),
1880 })
1881 }
1882
1883 async fn handle_reset(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::GitReset>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::Ack> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890
1891 let mode = match envelope.payload.mode() {
1892 git_reset::ResetMode::Soft => ResetMode::Soft,
1893 git_reset::ResetMode::Mixed => ResetMode::Mixed,
1894 };
1895
1896 repository_handle
1897 .update(&mut cx, |repository_handle, cx| {
1898 repository_handle.reset(envelope.payload.commit, mode, cx)
1899 })?
1900 .await??;
1901 Ok(proto::Ack {})
1902 }
1903
1904 async fn handle_checkout_files(
1905 this: Entity<Self>,
1906 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
1907 mut cx: AsyncApp,
1908 ) -> Result<proto::Ack> {
1909 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1910 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1911 let paths = envelope
1912 .payload
1913 .paths
1914 .iter()
1915 .map(|s| RepoPath::from_str(s))
1916 .collect();
1917
1918 repository_handle
1919 .update(&mut cx, |repository_handle, cx| {
1920 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
1921 })?
1922 .await??;
1923 Ok(proto::Ack {})
1924 }
1925
1926 async fn handle_open_commit_message_buffer(
1927 this: Entity<Self>,
1928 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
1929 mut cx: AsyncApp,
1930 ) -> Result<proto::OpenBufferResponse> {
1931 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1932 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1933 let buffer = repository
1934 .update(&mut cx, |repository, cx| {
1935 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
1936 })?
1937 .await?;
1938
1939 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
1940 this.update(&mut cx, |this, cx| {
1941 this.buffer_store.update(cx, |buffer_store, cx| {
1942 buffer_store
1943 .create_buffer_for_peer(
1944 &buffer,
1945 envelope.original_sender_id.unwrap_or(envelope.sender_id),
1946 cx,
1947 )
1948 .detach_and_log_err(cx);
1949 })
1950 })?;
1951
1952 Ok(proto::OpenBufferResponse {
1953 buffer_id: buffer_id.to_proto(),
1954 })
1955 }
1956
1957 async fn handle_askpass(
1958 this: Entity<Self>,
1959 envelope: TypedEnvelope<proto::AskPassRequest>,
1960 mut cx: AsyncApp,
1961 ) -> Result<proto::AskPassResponse> {
1962 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1963 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
1964
1965 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
1966 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
1967 debug_panic!("no askpass found");
1968 return Err(anyhow::anyhow!("no askpass found"));
1969 };
1970
1971 let response = askpass.ask_password(envelope.payload.prompt).await?;
1972
1973 delegates
1974 .lock()
1975 .insert(envelope.payload.askpass_id, askpass);
1976
1977 Ok(proto::AskPassResponse { response })
1978 }
1979
1980 async fn handle_check_for_pushed_commits(
1981 this: Entity<Self>,
1982 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
1983 mut cx: AsyncApp,
1984 ) -> Result<proto::CheckForPushedCommitsResponse> {
1985 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1986 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1987
1988 let branches = repository_handle
1989 .update(&mut cx, |repository_handle, _| {
1990 repository_handle.check_for_pushed_commits()
1991 })?
1992 .await??;
1993 Ok(proto::CheckForPushedCommitsResponse {
1994 pushed_to: branches
1995 .into_iter()
1996 .map(|commit| commit.to_string())
1997 .collect(),
1998 })
1999 }
2000
2001 async fn handle_git_diff(
2002 this: Entity<Self>,
2003 envelope: TypedEnvelope<proto::GitDiff>,
2004 mut cx: AsyncApp,
2005 ) -> Result<proto::GitDiffResponse> {
2006 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2007 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2008 let diff_type = match envelope.payload.diff_type() {
2009 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2010 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2011 };
2012
2013 let mut diff = repository_handle
2014 .update(&mut cx, |repository_handle, cx| {
2015 repository_handle.diff(diff_type, cx)
2016 })?
2017 .await??;
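        // Cap very large diffs before sending them back over RPC; truncating by
        // chars rather than bytes avoids splitting a UTF-8 code point.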
2018 const ONE_MB: usize = 1_000_000;
2019 if diff.len() > ONE_MB {
2020 diff = diff.chars().take(ONE_MB).collect()
2021 }
2022
2023 Ok(proto::GitDiffResponse { diff })
2024 }
2025
2026 async fn handle_open_unstaged_diff(
2027 this: Entity<Self>,
2028 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2029 mut cx: AsyncApp,
2030 ) -> Result<proto::OpenUnstagedDiffResponse> {
2031 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2032 let diff = this
2033 .update(&mut cx, |this, cx| {
2034 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2035 Some(this.open_unstaged_diff(buffer, cx))
2036 })?
2037 .ok_or_else(|| anyhow!("no such buffer"))?
2038 .await?;
2039 this.update(&mut cx, |this, _| {
2040 let shared_diffs = this
2041 .shared_diffs
2042 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2043 .or_default();
2044 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2045 })?;
2046 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2047 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2048 }
2049
2050 async fn handle_open_uncommitted_diff(
2051 this: Entity<Self>,
2052 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2053 mut cx: AsyncApp,
2054 ) -> Result<proto::OpenUncommittedDiffResponse> {
2055 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2056 let diff = this
2057 .update(&mut cx, |this, cx| {
2058 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2059 Some(this.open_uncommitted_diff(buffer, cx))
2060 })?
2061 .ok_or_else(|| anyhow!("no such buffer"))?
2062 .await?;
2063 this.update(&mut cx, |this, _| {
2064 let shared_diffs = this
2065 .shared_diffs
2066 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2067 .or_default();
2068 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2069 })?;
2070 diff.read_with(&cx, |diff, cx| {
2071 use proto::open_uncommitted_diff_response::Mode;
2072
2073 let unstaged_diff = diff.secondary_diff();
2074 let index_snapshot = unstaged_diff.and_then(|diff| {
2075 let diff = diff.read(cx);
2076 diff.base_text_exists().then(|| diff.base_text())
2077 });
2078
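            // Decide how to encode the base texts in the response: when the index
            // matches HEAD, the committed text is sent once and reused for both.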
2079 let mode;
2080 let staged_text;
2081 let committed_text;
2082 if diff.base_text_exists() {
2083 let committed_snapshot = diff.base_text();
2084 committed_text = Some(committed_snapshot.text());
2085 if let Some(index_text) = index_snapshot {
2086 if index_text.remote_id() == committed_snapshot.remote_id() {
2087 mode = Mode::IndexMatchesHead;
2088 staged_text = None;
2089 } else {
2090 mode = Mode::IndexAndHead;
2091 staged_text = Some(index_text.text());
2092 }
2093 } else {
2094 mode = Mode::IndexAndHead;
2095 staged_text = None;
2096 }
2097 } else {
2098 mode = Mode::IndexAndHead;
2099 committed_text = None;
2100 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2101 }
2102
2103 proto::OpenUncommittedDiffResponse {
2104 committed_text,
2105 staged_text,
2106 mode: mode.into(),
2107 }
2108 })
2109 }
2110
2111 async fn handle_update_diff_bases(
2112 this: Entity<Self>,
2113 request: TypedEnvelope<proto::UpdateDiffBases>,
2114 mut cx: AsyncApp,
2115 ) -> Result<()> {
2116 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2117 this.update(&mut cx, |this, cx| {
2118 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2119 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2120 let buffer = buffer.read(cx).text_snapshot();
2121 diff_state.update(cx, |diff_state, cx| {
2122 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2123 })
2124 }
2125 }
2126 })
2127 }
2128
2129 async fn handle_blame_buffer(
2130 this: Entity<Self>,
2131 envelope: TypedEnvelope<proto::BlameBuffer>,
2132 mut cx: AsyncApp,
2133 ) -> Result<proto::BlameBufferResponse> {
2134 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2135 let version = deserialize_version(&envelope.payload.version);
2136 let buffer = this.read_with(&cx, |this, cx| {
2137 this.buffer_store.read(cx).get_existing(buffer_id)
2138 })??;
2139 buffer
2140 .update(&mut cx, |buffer, _| {
2141 buffer.wait_for_version(version.clone())
2142 })?
2143 .await?;
2144 let blame = this
2145 .update(&mut cx, |this, cx| {
2146 this.blame_buffer(&buffer, Some(version), cx)
2147 })?
2148 .await?;
2149 Ok(serialize_blame_buffer_response(blame))
2150 }
2151
2152 async fn handle_get_permalink_to_line(
2153 this: Entity<Self>,
2154 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2155 mut cx: AsyncApp,
2156 ) -> Result<proto::GetPermalinkToLineResponse> {
2157 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2159 let selection = {
2160 let proto_selection = envelope
2161 .payload
2162 .selection
2163                .context("no selection provided to generate a permalink for")?;
2164 proto_selection.start as u32..proto_selection.end as u32
2165 };
2166 let buffer = this.read_with(&cx, |this, cx| {
2167 this.buffer_store.read(cx).get_existing(buffer_id)
2168 })??;
2169 let permalink = this
2170 .update(&mut cx, |this, cx| {
2171 this.get_permalink_to_line(&buffer, selection, cx)
2172 })?
2173 .await?;
2174 Ok(proto::GetPermalinkToLineResponse {
2175 permalink: permalink.to_string(),
2176 })
2177 }
2178
2179 fn repository_for_request(
2180 this: &Entity<Self>,
2181 id: RepositoryId,
2182 cx: &mut AsyncApp,
2183 ) -> Result<Entity<Repository>> {
2184 this.update(cx, |this, _| {
2185 this.repositories
2186 .get(&id)
2187 .context("missing repository handle")
2188 .cloned()
2189 })?
2190 }
2191
2192 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2193 self.repositories
2194 .iter()
2195 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2196 .collect()
2197 }
2198}
2199
2200impl BufferDiffState {
2201 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2202 self.language = buffer.read(cx).language().cloned();
2203 self.language_changed = true;
2204 let _ = self.recalculate_diffs(
2205 buffer.read(cx).text_snapshot(),
2206 self.hunk_staging_operation_count,
2207 cx,
2208 );
2209 }
2210
2211 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2212 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2213 }
2214
2215 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2216 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2217 }
2218
2219 fn handle_base_texts_updated(
2220 &mut self,
2221 buffer: text::BufferSnapshot,
2222 message: proto::UpdateDiffBases,
2223 cx: &mut Context<Self>,
2224 ) {
2225 use proto::update_diff_bases::Mode;
2226
2227 let Some(mode) = Mode::from_i32(message.mode) else {
2228 return;
2229 };
2230
2231 let diff_bases_change = match mode {
2232 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2233 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2234 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2235 Mode::IndexAndHead => DiffBasesChange::SetEach {
2236 index: message.staged_text,
2237 head: message.committed_text,
2238 },
2239 };
2240
2241 let _ = self.diff_bases_changed(
2242 buffer,
2243 diff_bases_change,
2244 self.hunk_staging_operation_count,
2245 cx,
2246 );
2247 }
2248
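    /// Returns a receiver that resolves once the in-flight diff recalculation
    /// completes, or `None` if no recalculation is currently pending.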
2249 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2250 if self.diff_updated_futures.is_empty() {
2251 return None;
2252 }
2253 let (tx, rx) = oneshot::channel();
2254 self.diff_updated_futures.push(tx);
2255 Some(rx)
2256 }
2257
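    /// Applies new index and/or HEAD base texts (normalizing their line endings)
    /// and schedules a diff recalculation.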
2258 fn diff_bases_changed(
2259 &mut self,
2260 buffer: text::BufferSnapshot,
2261 diff_bases_change: DiffBasesChange,
2262 prev_hunk_staging_operation_count: usize,
2263 cx: &mut Context<Self>,
2264 ) -> oneshot::Receiver<()> {
2265 match diff_bases_change {
2266 DiffBasesChange::SetIndex(index) => {
2267 self.index_text = index.map(|mut index| {
2268 text::LineEnding::normalize(&mut index);
2269 Arc::new(index)
2270 });
2271 self.index_changed = true;
2272 }
2273 DiffBasesChange::SetHead(head) => {
2274 self.head_text = head.map(|mut head| {
2275 text::LineEnding::normalize(&mut head);
2276 Arc::new(head)
2277 });
2278 self.head_changed = true;
2279 }
2280 DiffBasesChange::SetBoth(text) => {
2281 let text = text.map(|mut text| {
2282 text::LineEnding::normalize(&mut text);
2283 Arc::new(text)
2284 });
2285 self.head_text = text.clone();
2286 self.index_text = text;
2287 self.head_changed = true;
2288 self.index_changed = true;
2289 }
2290 DiffBasesChange::SetEach { index, head } => {
2291 self.index_text = index.map(|mut index| {
2292 text::LineEnding::normalize(&mut index);
2293 Arc::new(index)
2294 });
2295 self.index_changed = true;
2296 self.head_text = head.map(|mut head| {
2297 text::LineEnding::normalize(&mut head);
2298 Arc::new(head)
2299 });
2300 self.head_changed = true;
2301 }
2302 }
2303
2304 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2305 }
2306
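    /// Recomputes the unstaged and uncommitted diffs against the current base
    /// texts on a background task. When the index matches HEAD, the unstaged
    /// snapshot is reused for the uncommitted diff, and the result is discarded
    /// if a hunk staging operation has occurred in the meantime.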
2307 fn recalculate_diffs(
2308 &mut self,
2309 buffer: text::BufferSnapshot,
2310 prev_hunk_staging_operation_count: usize,
2311 cx: &mut Context<Self>,
2312 ) -> oneshot::Receiver<()> {
2313 log::debug!("recalculate diffs");
2314 let (tx, rx) = oneshot::channel();
2315 self.diff_updated_futures.push(tx);
2316
2317 let language = self.language.clone();
2318 let language_registry = self.language_registry.clone();
2319 let unstaged_diff = self.unstaged_diff();
2320 let uncommitted_diff = self.uncommitted_diff();
2321 let head = self.head_text.clone();
2322 let index = self.index_text.clone();
2323 let index_changed = self.index_changed;
2324 let head_changed = self.head_changed;
2325 let language_changed = self.language_changed;
2326 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2327 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2328 (None, None) => true,
2329 _ => false,
2330 };
2331 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2332 let mut new_unstaged_diff = None;
2333 if let Some(unstaged_diff) = &unstaged_diff {
2334 new_unstaged_diff = Some(
2335 BufferDiff::update_diff(
2336 unstaged_diff.clone(),
2337 buffer.clone(),
2338 index,
2339 index_changed,
2340 language_changed,
2341 language.clone(),
2342 language_registry.clone(),
2343 cx,
2344 )
2345 .await?,
2346 );
2347 }
2348
2349 let mut new_uncommitted_diff = None;
2350 if let Some(uncommitted_diff) = &uncommitted_diff {
2351 new_uncommitted_diff = if index_matches_head {
2352 new_unstaged_diff.clone()
2353 } else {
2354 Some(
2355 BufferDiff::update_diff(
2356 uncommitted_diff.clone(),
2357 buffer.clone(),
2358 head,
2359 head_changed,
2360 language_changed,
2361 language.clone(),
2362 language_registry.clone(),
2363 cx,
2364 )
2365 .await?,
2366 )
2367 }
2368 }
2369
2370 if this.update(cx, |this, _| {
2371 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2372 })? {
2373                log::debug!("discarding stale diff recalculation; hunks were staged while it ran");
2374 return Ok(());
2375 }
2376
2377 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2378 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2379 {
2380 unstaged_diff.update(cx, |diff, cx| {
2381 if language_changed {
2382 diff.language_changed(cx);
2383 }
2384 diff.set_snapshot(new_unstaged_diff, &buffer, None, cx)
2385 })?
2386 } else {
2387 None
2388 };
2389
2390 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2391 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2392 {
2393 uncommitted_diff.update(cx, |diff, cx| {
2394 if language_changed {
2395 diff.language_changed(cx);
2396 }
2397 diff.set_snapshot(new_uncommitted_diff, &buffer, unstaged_changed_range, cx);
2398 })?;
2399 }
2400
2401 if let Some(this) = this.upgrade() {
2402 this.update(cx, |this, _| {
2403 this.index_changed = false;
2404 this.head_changed = false;
2405 this.language_changed = false;
2406 for tx in this.diff_updated_futures.drain(..) {
2407 tx.send(()).ok();
2408 }
2409 })?;
2410 }
2411
2412 Ok(())
2413 }));
2414
2415 rx
2416 }
2417}
2418
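/// Creates an askpass delegate that forwards credential prompts to the
/// downstream client over RPC and relays the response back to the caller.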
2419fn make_remote_delegate(
2420 this: Entity<GitStore>,
2421 project_id: u64,
2422 repository_id: RepositoryId,
2423 askpass_id: u64,
2424 cx: &mut AsyncApp,
2425) -> AskPassDelegate {
2426 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2427 this.update(cx, |this, cx| {
2428 let Some((client, _)) = this.downstream_client() else {
2429 return;
2430 };
2431 let response = client.request(proto::AskPassRequest {
2432 project_id,
2433 repository_id: repository_id.to_proto(),
2434 askpass_id,
2435 prompt,
2436 });
2437 cx.spawn(async move |_, _| {
2438 tx.send(response.await?.response).ok();
2439 anyhow::Ok(())
2440 })
2441 .detach_and_log_err(cx);
2442 })
2443 .log_err();
2444 })
2445}
2446
2447impl RepositoryId {
2448 pub fn to_proto(self) -> u64 {
2449 self.0
2450 }
2451
2452 pub fn from_proto(id: u64) -> Self {
2453 RepositoryId(id)
2454 }
2455}
2456
2457impl RepositorySnapshot {
2458 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>) -> Self {
2459 Self {
2460 id,
2461 merge_message: None,
2462 statuses_by_path: Default::default(),
2463 work_directory_abs_path,
2464 branch: None,
2465 merge_conflicts: Default::default(),
2466 merge_head_shas: Default::default(),
2467 scan_id: 0,
2468 }
2469 }
2470
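    /// Builds the full update sent when this repository is first shared with a
    /// downstream client, including the branch summary and every status entry
    /// and merge conflict.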
2471 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2472 proto::UpdateRepository {
2473 branch_summary: self.branch.as_ref().map(branch_to_proto),
2474 updated_statuses: self
2475 .statuses_by_path
2476 .iter()
2477 .map(|entry| entry.to_proto())
2478 .collect(),
2479 removed_statuses: Default::default(),
2480 current_merge_conflicts: self
2481 .merge_conflicts
2482 .iter()
2483 .map(|repo_path| repo_path.to_proto())
2484 .collect(),
2485 project_id,
2486 id: self.id.to_proto(),
2487 abs_path: self.work_directory_abs_path.to_proto(),
2488 entry_ids: vec![self.id.to_proto()],
2489 scan_id: self.scan_id,
2490 is_last_update: true,
2491 }
2492 }
2493
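    /// Computes an incremental update relative to `old` by merge-walking both
    /// status trees in path order, collecting entries that were added, changed,
    /// or removed.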
2494 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2495 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2496 let mut removed_statuses: Vec<String> = Vec::new();
2497
2498 let mut new_statuses = self.statuses_by_path.iter().peekable();
2499 let mut old_statuses = old.statuses_by_path.iter().peekable();
2500
2501 let mut current_new_entry = new_statuses.next();
2502 let mut current_old_entry = old_statuses.next();
2503 loop {
2504 match (current_new_entry, current_old_entry) {
2505 (Some(new_entry), Some(old_entry)) => {
2506 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2507 Ordering::Less => {
2508 updated_statuses.push(new_entry.to_proto());
2509 current_new_entry = new_statuses.next();
2510 }
2511 Ordering::Equal => {
2512 if new_entry.status != old_entry.status {
2513 updated_statuses.push(new_entry.to_proto());
2514 }
2515 current_old_entry = old_statuses.next();
2516 current_new_entry = new_statuses.next();
2517 }
2518 Ordering::Greater => {
2519 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2520 current_old_entry = old_statuses.next();
2521 }
2522 }
2523 }
2524 (None, Some(old_entry)) => {
2525 removed_statuses.push(old_entry.repo_path.as_ref().to_proto());
2526 current_old_entry = old_statuses.next();
2527 }
2528 (Some(new_entry), None) => {
2529 updated_statuses.push(new_entry.to_proto());
2530 current_new_entry = new_statuses.next();
2531 }
2532 (None, None) => break,
2533 }
2534 }
2535
2536 proto::UpdateRepository {
2537 branch_summary: self.branch.as_ref().map(branch_to_proto),
2538 updated_statuses,
2539 removed_statuses,
2540 current_merge_conflicts: self
2541 .merge_conflicts
2542 .iter()
2543 .map(|path| path.as_ref().to_proto())
2544 .collect(),
2545 project_id,
2546 id: self.id.to_proto(),
2547 abs_path: self.work_directory_abs_path.to_proto(),
2548 entry_ids: vec![],
2549 scan_id: self.scan_id,
2550 is_last_update: true,
2551 }
2552 }
2553
2554 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
2555 self.statuses_by_path.iter().cloned()
2556 }
2557
2558 pub fn status_summary(&self) -> GitSummary {
2559 self.statuses_by_path.summary().item_summary
2560 }
2561
2562 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2563 self.statuses_by_path
2564 .get(&PathKey(path.0.clone()), &())
2565 .cloned()
2566 }
2567
2568 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
2569 abs_path
2570 .strip_prefix(&self.work_directory_abs_path)
2571 .map(RepoPath::from)
2572 .ok()
2573 }
2574
2575 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
2576 self.statuses_by_path
2577 .get(&PathKey(repo_path.0.clone()), &())
2578 .map_or(false, |entry| entry.status.is_conflicted())
2579 }
2580
2581 /// This is the name that will be displayed in the repository selector for this repository.
2582 pub fn display_name(&self) -> SharedString {
2583 self.work_directory_abs_path
2584 .file_name()
2585 .unwrap_or_default()
2586 .to_string_lossy()
2587 .to_string()
2588 .into()
2589 }
2590}
2591
2592impl Repository {
2593 fn local(
2594 id: RepositoryId,
2595 work_directory_abs_path: Arc<Path>,
2596 dot_git_abs_path: Arc<Path>,
2597 project_environment: WeakEntity<ProjectEnvironment>,
2598 fs: Arc<dyn Fs>,
2599 git_store: WeakEntity<GitStore>,
2600 cx: &mut Context<Self>,
2601 ) -> Self {
2602 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path.clone());
2603 Repository {
2604 git_store,
2605 snapshot,
2606 commit_message_buffer: None,
2607 askpass_delegates: Default::default(),
2608 paths_needing_status_update: Default::default(),
2609 latest_askpass_id: 0,
2610 job_sender: Repository::spawn_local_git_worker(
2611 work_directory_abs_path,
2612 dot_git_abs_path,
2613 project_environment,
2614 fs,
2615 cx,
2616 ),
2617 }
2618 }
2619
2620 fn remote(
2621 id: RepositoryId,
2622 work_directory_abs_path: Arc<Path>,
2623 project_id: ProjectId,
2624 client: AnyProtoClient,
2625 git_store: WeakEntity<GitStore>,
2626 cx: &mut Context<Self>,
2627 ) -> Self {
2628 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path);
2629 Self {
2630 snapshot,
2631 commit_message_buffer: None,
2632 git_store,
2633 paths_needing_status_update: Default::default(),
2634 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
2635 askpass_delegates: Default::default(),
2636 latest_askpass_id: 0,
2637 }
2638 }
2639
2640 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2641 self.git_store.upgrade()
2642 }
2643
2644 pub fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
2645 where
2646 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2647 Fut: Future<Output = R> + 'static,
2648 R: Send + 'static,
2649 {
2650 self.send_keyed_job(None, job)
2651 }
2652
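    /// Enqueues a job on the repository's worker. Jobs that share a key are
    /// coalesced: when an older keyed job reaches the front of the queue and a
    /// newer job with the same key is already waiting, the older one is skipped.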
2653 fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
2654 where
2655 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2656 Fut: Future<Output = R> + 'static,
2657 R: Send + 'static,
2658 {
2659 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2660 self.job_sender
2661 .unbounded_send(GitJob {
2662 key,
2663 job: Box::new(|state, cx: &mut AsyncApp| {
2664 let job = job(state, cx.clone());
2665 cx.spawn(async move |_| {
2666 let result = job.await;
2667 result_tx.send(result).ok();
2668 })
2669 }),
2670 })
2671 .ok();
2672 result_rx
2673 }
2674
2675 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2676 let Some(git_store) = self.git_store.upgrade() else {
2677 return;
2678 };
2679 let entity = cx.entity();
2680 git_store.update(cx, |git_store, cx| {
2681 let Some((&id, _)) = git_store
2682 .repositories
2683 .iter()
2684 .find(|(_, handle)| *handle == &entity)
2685 else {
2686 return;
2687 };
2688 git_store.active_repo_id = Some(id);
2689 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
2690 });
2691 }
2692
2693 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2694 self.snapshot.status()
2695 }
2696
2697 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2698 let git_store = self.git_store.upgrade()?;
2699 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2700 let abs_path = self.snapshot.work_directory_abs_path.join(&path.0);
2701 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2702 Some(ProjectPath {
2703 worktree_id: worktree.read(cx).id(),
2704 path: relative_path.into(),
2705 })
2706 }
2707
2708 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2709 let git_store = self.git_store.upgrade()?;
2710 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2711 let abs_path = worktree_store.absolutize(path, cx)?;
2712 self.snapshot.abs_path_to_repo_path(&abs_path)
2713 }
2714
2715 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2716 other
2717 .read(cx)
2718 .snapshot
2719 .work_directory_abs_path
2720 .starts_with(&self.snapshot.work_directory_abs_path)
2721 }
2722
2723 pub fn open_commit_buffer(
2724 &mut self,
2725 languages: Option<Arc<LanguageRegistry>>,
2726 buffer_store: Entity<BufferStore>,
2727 cx: &mut Context<Self>,
2728 ) -> Task<Result<Entity<Buffer>>> {
2729 let id = self.id;
2730 if let Some(buffer) = self.commit_message_buffer.clone() {
2731 return Task::ready(Ok(buffer));
2732 }
2733 let this = cx.weak_entity();
2734
2735 let rx = self.send_job(move |state, mut cx| async move {
2736 let Some(this) = this.upgrade() else {
2737 bail!("git store was dropped");
2738 };
2739 match state {
2740 RepositoryState::Local { .. } => {
2741 this.update(&mut cx, |_, cx| {
2742 Self::open_local_commit_buffer(languages, buffer_store, cx)
2743 })?
2744 .await
2745 }
2746 RepositoryState::Remote { project_id, client } => {
2747 let request = client.request(proto::OpenCommitMessageBuffer {
2748 project_id: project_id.0,
2749 repository_id: id.to_proto(),
2750 });
2751 let response = request.await.context("requesting to open commit buffer")?;
2752 let buffer_id = BufferId::new(response.buffer_id)?;
2753 let buffer = buffer_store
2754 .update(&mut cx, |buffer_store, cx| {
2755 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2756 })?
2757 .await?;
2758 if let Some(language_registry) = languages {
2759 let git_commit_language =
2760 language_registry.language_for_name("Git Commit").await?;
2761 buffer.update(&mut cx, |buffer, cx| {
2762 buffer.set_language(Some(git_commit_language), cx);
2763 })?;
2764 }
2765 this.update(&mut cx, |this, _| {
2766 this.commit_message_buffer = Some(buffer.clone());
2767 })?;
2768 Ok(buffer)
2769 }
2770 }
2771 });
2772
2773 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
2774 }
2775
2776 fn open_local_commit_buffer(
2777 language_registry: Option<Arc<LanguageRegistry>>,
2778 buffer_store: Entity<BufferStore>,
2779 cx: &mut Context<Self>,
2780 ) -> Task<Result<Entity<Buffer>>> {
2781 cx.spawn(async move |repository, cx| {
2782 let buffer = buffer_store
2783 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2784 .await?;
2785
2786 if let Some(language_registry) = language_registry {
2787 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2788 buffer.update(cx, |buffer, cx| {
2789 buffer.set_language(Some(git_commit_language), cx);
2790 })?;
2791 }
2792
2793 repository.update(cx, |repository, _| {
2794 repository.commit_message_buffer = Some(buffer.clone());
2795 })?;
2796 Ok(buffer)
2797 })
2798 }
2799
2800 pub fn checkout_files(
2801 &self,
2802 commit: &str,
2803 paths: Vec<RepoPath>,
2804 _cx: &mut App,
2805 ) -> oneshot::Receiver<Result<()>> {
2806 let commit = commit.to_string();
2807 let id = self.id;
2808
2809 self.send_job(move |git_repo, _| async move {
2810 match git_repo {
2811 RepositoryState::Local {
2812 backend,
2813 environment,
2814 ..
2815 } => {
2816 backend
2817 .checkout_files(commit, paths, environment.clone())
2818 .await
2819 }
2820 RepositoryState::Remote { project_id, client } => {
2821 client
2822 .request(proto::GitCheckoutFiles {
2823 project_id: project_id.0,
2824 repository_id: id.to_proto(),
2825 commit,
2826 paths: paths
2827 .into_iter()
2828 .map(|p| p.to_string_lossy().to_string())
2829 .collect(),
2830 })
2831 .await?;
2832
2833 Ok(())
2834 }
2835 }
2836 })
2837 }
2838
2839 pub fn reset(
2840 &self,
2841 commit: String,
2842 reset_mode: ResetMode,
2843 _cx: &mut App,
2844 ) -> oneshot::Receiver<Result<()>> {
2846 let id = self.id;
2847
2848 self.send_job(move |git_repo, _| async move {
2849 match git_repo {
2850 RepositoryState::Local {
2851 backend,
2852 environment,
2853 ..
2854 } => backend.reset(commit, reset_mode, environment).await,
2855 RepositoryState::Remote { project_id, client } => {
2856 client
2857 .request(proto::GitReset {
2858 project_id: project_id.0,
2859 repository_id: id.to_proto(),
2860 commit,
2861 mode: match reset_mode {
2862 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
2863 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
2864 },
2865 })
2866 .await?;
2867
2868 Ok(())
2869 }
2870 }
2871 })
2872 }
2873
2874 pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
2875 let id = self.id;
2876 self.send_job(move |git_repo, _cx| async move {
2877 match git_repo {
2878 RepositoryState::Local { backend, .. } => backend.show(commit).await,
2879 RepositoryState::Remote { project_id, client } => {
2880 let resp = client
2881 .request(proto::GitShow {
2882 project_id: project_id.0,
2883 repository_id: id.to_proto(),
2884 commit,
2885 })
2886 .await?;
2887
2888 Ok(CommitDetails {
2889 sha: resp.sha.into(),
2890 message: resp.message.into(),
2891 commit_timestamp: resp.commit_timestamp,
2892 author_email: resp.author_email.into(),
2893 author_name: resp.author_name.into(),
2894 })
2895 }
2896 }
2897 })
2898 }
2899
2900 pub fn load_commit_diff(&self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
2901 let id = self.id;
2902 self.send_job(move |git_repo, cx| async move {
2903 match git_repo {
2904 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
2905 RepositoryState::Remote {
2906 client, project_id, ..
2907 } => {
2908 let response = client
2909 .request(proto::LoadCommitDiff {
2910 project_id: project_id.0,
2911 repository_id: id.to_proto(),
2912 commit,
2913 })
2914 .await?;
2915 Ok(CommitDiff {
2916 files: response
2917 .files
2918 .into_iter()
2919 .map(|file| CommitFile {
2920 path: Path::new(&file.path).into(),
2921 old_text: file.old_text,
2922 new_text: file.new_text,
2923 })
2924 .collect(),
2925 })
2926 }
2927 }
2928 })
2929 }
2930
2931 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
2932 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
2933 }
2934
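    /// Stages the given paths, first saving any open buffers for them (when
    /// their files still exist on disk) so that what gets staged matches the
    /// editor contents.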
2935 pub fn stage_entries(
2936 &self,
2937 entries: Vec<RepoPath>,
2938 cx: &mut Context<Self>,
2939 ) -> Task<anyhow::Result<()>> {
2940 if entries.is_empty() {
2941 return Task::ready(Ok(()));
2942 }
2943 let id = self.id;
2944
2945 let mut save_futures = Vec::new();
2946 if let Some(buffer_store) = self.buffer_store(cx) {
2947 buffer_store.update(cx, |buffer_store, cx| {
2948 for path in &entries {
2949 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
2950 continue;
2951 };
2952 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
2953 if buffer
2954 .read(cx)
2955 .file()
2956 .map_or(false, |file| file.disk_state().exists())
2957 {
2958 save_futures.push(buffer_store.save_buffer(buffer, cx));
2959 }
2960 }
2961 }
2962 })
2963 }
2964
2965 cx.spawn(async move |this, cx| {
2966 for save_future in save_futures {
2967 save_future.await?;
2968 }
2969
2970 this.update(cx, |this, _| {
2971 this.send_job(move |git_repo, _cx| async move {
2972 match git_repo {
2973 RepositoryState::Local {
2974 backend,
2975 environment,
2976 ..
2977 } => backend.stage_paths(entries, environment.clone()).await,
2978 RepositoryState::Remote { project_id, client } => {
2979 client
2980 .request(proto::Stage {
2981 project_id: project_id.0,
2982 repository_id: id.to_proto(),
2983 paths: entries
2984 .into_iter()
2985 .map(|repo_path| repo_path.as_ref().to_proto())
2986 .collect(),
2987 })
2988 .await
2989 .context("sending stage request")?;
2990
2991 Ok(())
2992 }
2993 }
2994 })
2995 })?
2996 .await??;
2997
2998 Ok(())
2999 })
3000 }
3001
3002 pub fn unstage_entries(
3003 &self,
3004 entries: Vec<RepoPath>,
3005 cx: &mut Context<Self>,
3006 ) -> Task<anyhow::Result<()>> {
3007 if entries.is_empty() {
3008 return Task::ready(Ok(()));
3009 }
3010 let id = self.id;
3011
3012 let mut save_futures = Vec::new();
3013 if let Some(buffer_store) = self.buffer_store(cx) {
3014 buffer_store.update(cx, |buffer_store, cx| {
3015 for path in &entries {
3016 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3017 continue;
3018 };
3019 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3020 if buffer
3021 .read(cx)
3022 .file()
3023 .map_or(false, |file| file.disk_state().exists())
3024 {
3025 save_futures.push(buffer_store.save_buffer(buffer, cx));
3026 }
3027 }
3028 }
3029 })
3030 }
3031
3032 cx.spawn(async move |this, cx| {
3033 for save_future in save_futures {
3034 save_future.await?;
3035 }
3036
3037 this.update(cx, |this, _| {
3038 this.send_job(move |git_repo, _cx| async move {
3039 match git_repo {
3040 RepositoryState::Local {
3041 backend,
3042 environment,
3043 ..
3044 } => backend.unstage_paths(entries, environment).await,
3045 RepositoryState::Remote { project_id, client } => {
3046 client
3047 .request(proto::Unstage {
3048 project_id: project_id.0,
3049 repository_id: id.to_proto(),
3050 paths: entries
3051 .into_iter()
3052 .map(|repo_path| repo_path.as_ref().to_proto())
3053 .collect(),
3054 })
3055 .await
3056 .context("sending unstage request")?;
3057
3058 Ok(())
3059 }
3060 }
3061 })
3062 })?
3063 .await??;
3064
3065 Ok(())
3066 })
3067 }
3068
3069 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3070 let to_stage = self
3071 .cached_status()
3072 .filter(|entry| !entry.status.staging().is_fully_staged())
3073 .map(|entry| entry.repo_path.clone())
3074 .collect();
3075 self.stage_entries(to_stage, cx)
3076 }
3077
3078 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3079 let to_unstage = self
3080 .cached_status()
3081 .filter(|entry| entry.status.staging().has_staged())
3082 .map(|entry| entry.repo_path.clone())
3083 .collect();
3084 self.unstage_entries(to_unstage, cx)
3085 }
3086
3087 pub fn commit(
3088 &self,
3089 message: SharedString,
3090 name_and_email: Option<(SharedString, SharedString)>,
3091 _cx: &mut App,
3092 ) -> oneshot::Receiver<Result<()>> {
3093 let id = self.id;
3094
3095 self.send_job(move |git_repo, _cx| async move {
3096 match git_repo {
3097 RepositoryState::Local {
3098 backend,
3099 environment,
3100 ..
3101 } => backend.commit(message, name_and_email, environment).await,
3102 RepositoryState::Remote { project_id, client } => {
3103 let (name, email) = name_and_email.unzip();
3104 client
3105 .request(proto::Commit {
3106 project_id: project_id.0,
3107 repository_id: id.to_proto(),
3108 message: String::from(message),
3109 name: name.map(String::from),
3110 email: email.map(String::from),
3111 })
3112 .await
3113 .context("sending commit request")?;
3114
3115 Ok(())
3116 }
3117 }
3118 })
3119 }
3120
3121 pub fn fetch(
3122 &mut self,
3123 askpass: AskPassDelegate,
3124 _cx: &mut App,
3125 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3126 let askpass_delegates = self.askpass_delegates.clone();
3127 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3128 let id = self.id;
3129
3130 self.send_job(move |git_repo, cx| async move {
3131 match git_repo {
3132 RepositoryState::Local {
3133 backend,
3134 environment,
3135 ..
3136 } => backend.fetch(askpass, environment, cx).await,
3137 RepositoryState::Remote { project_id, client } => {
3138 askpass_delegates.lock().insert(askpass_id, askpass);
3139 let _defer = util::defer(|| {
3140 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3141 debug_assert!(askpass_delegate.is_some());
3142 });
3143
3144 let response = client
3145 .request(proto::Fetch {
3146 project_id: project_id.0,
3147 repository_id: id.to_proto(),
3148 askpass_id,
3149 })
3150 .await
3151 .context("sending fetch request")?;
3152
3153 Ok(RemoteCommandOutput {
3154 stdout: response.stdout,
3155 stderr: response.stderr,
3156 })
3157 }
3158 }
3159 })
3160 }
3161
3162 pub fn push(
3163 &mut self,
3164 branch: SharedString,
3165 remote: SharedString,
3166 options: Option<PushOptions>,
3167 askpass: AskPassDelegate,
3168 _cx: &mut App,
3169 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3170 let askpass_delegates = self.askpass_delegates.clone();
3171 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3172 let id = self.id;
3173
3174 self.send_job(move |git_repo, cx| async move {
3175 match git_repo {
3176 RepositoryState::Local {
3177 backend,
3178 environment,
3179 ..
3180 } => {
3181 backend
3182 .push(
3183 branch.to_string(),
3184 remote.to_string(),
3185 options,
3186 askpass,
3187 environment.clone(),
3188 cx,
3189 )
3190 .await
3191 }
3192 RepositoryState::Remote { project_id, client } => {
3193 askpass_delegates.lock().insert(askpass_id, askpass);
3194 let _defer = util::defer(|| {
3195 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3196 debug_assert!(askpass_delegate.is_some());
3197 });
3198 let response = client
3199 .request(proto::Push {
3200 project_id: project_id.0,
3201 repository_id: id.to_proto(),
3202 askpass_id,
3203 branch_name: branch.to_string(),
3204 remote_name: remote.to_string(),
3205 options: options.map(|options| match options {
3206 PushOptions::Force => proto::push::PushOptions::Force,
3207 PushOptions::SetUpstream => proto::push::PushOptions::SetUpstream,
3208 } as i32),
3209 })
3210 .await
3211 .context("sending push request")?;
3212
3213 Ok(RemoteCommandOutput {
3214 stdout: response.stdout,
3215 stderr: response.stderr,
3216 })
3217 }
3218 }
3219 })
3220 }
3221
3222 pub fn pull(
3223 &mut self,
3224 branch: SharedString,
3225 remote: SharedString,
3226 askpass: AskPassDelegate,
3227 _cx: &mut App,
3228 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3229 let askpass_delegates = self.askpass_delegates.clone();
3230 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3231 let id = self.id;
3232
3233 self.send_job(move |git_repo, cx| async move {
3234 match git_repo {
3235 RepositoryState::Local {
3236 backend,
3237 environment,
3238 ..
3239 } => {
3240 backend
3241 .pull(
3242 branch.to_string(),
3243 remote.to_string(),
3244 askpass,
3245 environment.clone(),
3246 cx,
3247 )
3248 .await
3249 }
3250 RepositoryState::Remote { project_id, client } => {
3251 askpass_delegates.lock().insert(askpass_id, askpass);
3252 let _defer = util::defer(|| {
3253 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3254 debug_assert!(askpass_delegate.is_some());
3255 });
3256 let response = client
3257 .request(proto::Pull {
3258 project_id: project_id.0,
3259 repository_id: id.to_proto(),
3260 askpass_id,
3261 branch_name: branch.to_string(),
3262 remote_name: remote.to_string(),
3263 })
3264 .await
3265 .context("sending pull request")?;
3266
3267 Ok(RemoteCommandOutput {
3268 stdout: response.stdout,
3269 stderr: response.stderr,
3270 })
3271 }
3272 }
3273 })
3274 }
3275
3276 fn spawn_set_index_text_job(
3277 &self,
3278 path: RepoPath,
3279 content: Option<String>,
3280 _cx: &mut App,
3281 ) -> oneshot::Receiver<anyhow::Result<()>> {
3282 let id = self.id;
3283
3284 self.send_keyed_job(
3285 Some(GitJobKey::WriteIndex(path.clone())),
3286 move |git_repo, _cx| async move {
3287 match git_repo {
3288 RepositoryState::Local {
3289 backend,
3290 environment,
3291 ..
3292 } => {
3293 backend
3294 .set_index_text(path, content, environment.clone())
3295 .await
3296 }
3297 RepositoryState::Remote { project_id, client } => {
3298 client
3299 .request(proto::SetIndexText {
3300 project_id: project_id.0,
3301 repository_id: id.to_proto(),
3302 path: path.as_ref().to_proto(),
3303 text: content,
3304 })
3305 .await?;
3306 Ok(())
3307 }
3308 }
3309 },
3310 )
3311 }
3312
3313 pub fn get_remotes(
3314 &self,
3315 branch_name: Option<String>,
3316 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3317 let id = self.id;
3318 self.send_job(move |repo, _cx| async move {
3319 match repo {
3320 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
3321 RepositoryState::Remote { project_id, client } => {
3322 let response = client
3323 .request(proto::GetRemotes {
3324 project_id: project_id.0,
3325 repository_id: id.to_proto(),
3326 branch_name,
3327 })
3328 .await?;
3329
3330 let remotes = response
3331 .remotes
3332 .into_iter()
3333                        .map(|remote| git::repository::Remote {
3334                            name: remote.name.into(),
3335                        })
3336 .collect();
3337
3338 Ok(remotes)
3339 }
3340 }
3341 })
3342 }
3343
3344 pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3345 let id = self.id;
3346 self.send_job(move |repo, cx| async move {
3347 match repo {
3348 RepositoryState::Local { backend, .. } => {
3349 let backend = backend.clone();
3350 cx.background_spawn(async move { backend.branches().await })
3351 .await
3352 }
3353 RepositoryState::Remote { project_id, client } => {
3354 let response = client
3355 .request(proto::GitGetBranches {
3356 project_id: project_id.0,
3357 repository_id: id.to_proto(),
3358 })
3359 .await?;
3360
3361 let branches = response
3362 .branches
3363 .into_iter()
3364 .map(|branch| proto_to_branch(&branch))
3365 .collect();
3366
3367 Ok(branches)
3368 }
3369 }
3370 })
3371 }
3372
3373 pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3374 let id = self.id;
3375 self.send_job(move |repo, _cx| async move {
3376 match repo {
3377 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
3378 RepositoryState::Remote { project_id, client } => {
3379 let response = client
3380 .request(proto::GitDiff {
3381 project_id: project_id.0,
3382 repository_id: id.to_proto(),
3383 diff_type: match diff_type {
3384 DiffType::HeadToIndex => {
3385 proto::git_diff::DiffType::HeadToIndex.into()
3386 }
3387 DiffType::HeadToWorktree => {
3388 proto::git_diff::DiffType::HeadToWorktree.into()
3389 }
3390 },
3391 })
3392 .await?;
3393
3394 Ok(response.diff)
3395 }
3396 }
3397 })
3398 }
3399
3400 pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3401 let id = self.id;
3402 self.send_job(move |repo, _cx| async move {
3403 match repo {
3404 RepositoryState::Local { backend, .. } => backend.create_branch(branch_name).await,
3405 RepositoryState::Remote { project_id, client } => {
3406 client
3407 .request(proto::GitCreateBranch {
3408 project_id: project_id.0,
3409 repository_id: id.to_proto(),
3410 branch_name,
3411 })
3412 .await?;
3413
3414 Ok(())
3415 }
3416 }
3417 })
3418 }
3419
3420 pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3421 let id = self.id;
3422 self.send_job(move |repo, _cx| async move {
3423 match repo {
3424 RepositoryState::Local { backend, .. } => backend.change_branch(branch_name).await,
3425 RepositoryState::Remote { project_id, client } => {
3426 client
3427 .request(proto::GitChangeBranch {
3428 project_id: project_id.0,
3429 repository_id: id.to_proto(),
3430 branch_name,
3431 })
3432 .await?;
3433
3434 Ok(())
3435 }
3436 }
3437 })
3438 }
3439
3440 pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3441 let id = self.id;
3442 self.send_job(move |repo, _cx| async move {
3443 match repo {
3444 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
3445 RepositoryState::Remote { project_id, client } => {
3446 let response = client
3447 .request(proto::CheckForPushedCommits {
3448 project_id: project_id.0,
3449 repository_id: id.to_proto(),
3450 })
3451 .await?;
3452
3453 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3454
3455 Ok(branches)
3456 }
3457 }
3458 })
3459 }
3460
3461 pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3462 self.send_job(|repo, _cx| async move {
3463 match repo {
3464 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
3465 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3466 }
3467 })
3468 }
3469
3470 pub fn restore_checkpoint(
3471 &self,
3472 checkpoint: GitRepositoryCheckpoint,
3473 ) -> oneshot::Receiver<Result<()>> {
3474 self.send_job(move |repo, _cx| async move {
3475 match repo {
3476 RepositoryState::Local { backend, .. } => {
3477 backend.restore_checkpoint(checkpoint).await
3478 }
3479 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3480 }
3481 })
3482 }
3483
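    /// Applies an update for this repository received over RPC, replacing the
    /// branch and merge-conflict info and patching the status tree in place.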
3484 pub(crate) fn apply_remote_update(
3485 &mut self,
3486 update: proto::UpdateRepository,
3487 cx: &mut Context<Self>,
3488 ) -> Result<()> {
3489 let conflicted_paths = TreeSet::from_ordered_entries(
3490 update
3491 .current_merge_conflicts
3492 .into_iter()
3493 .map(|path| RepoPath(Path::new(&path).into())),
3494 );
3495 self.snapshot.branch = update.branch_summary.as_ref().map(proto_to_branch);
3496 self.snapshot.merge_conflicts = conflicted_paths;
3497
3498 let edits = update
3499 .removed_statuses
3500 .into_iter()
3501 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3502 .chain(
3503 update
3504 .updated_statuses
3505 .into_iter()
3506 .filter_map(|updated_status| {
3507 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3508 }),
3509 )
3510 .collect::<Vec<_>>();
3511 self.snapshot.statuses_by_path.edit(edits, &());
3512 if update.is_last_update {
3513 self.snapshot.scan_id = update.scan_id;
3514 }
3515 cx.emit(RepositoryEvent::Updated);
3516 Ok(())
3517 }
3518
3519 pub fn compare_checkpoints(
3520 &self,
3521 left: GitRepositoryCheckpoint,
3522 right: GitRepositoryCheckpoint,
3523 ) -> oneshot::Receiver<Result<bool>> {
3524 self.send_job(move |repo, _cx| async move {
3525 match repo {
3526 RepositoryState::Local { backend, .. } => {
3527 backend.compare_checkpoints(left, right).await
3528 }
3529 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3530 }
3531 })
3532 }
3533
3534 pub fn delete_checkpoint(
3535 &self,
3536 checkpoint: GitRepositoryCheckpoint,
3537 ) -> oneshot::Receiver<Result<()>> {
3538 self.send_job(move |repo, _cx| async move {
3539 match repo {
3540 RepositoryState::Local { backend, .. } => {
3541 backend.delete_checkpoint(checkpoint).await
3542 }
3543 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3544 }
3545 })
3546 }
3547
3548 pub fn diff_checkpoints(
3549 &self,
3550 base_checkpoint: GitRepositoryCheckpoint,
3551 target_checkpoint: GitRepositoryCheckpoint,
3552 ) -> oneshot::Receiver<Result<String>> {
3553 self.send_job(move |repo, _cx| async move {
3554 match repo {
3555 RepositoryState::Local { backend, .. } => {
3556 backend
3557 .diff_checkpoints(base_checkpoint, target_checkpoint)
3558 .await
3559 }
3560 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3561 }
3562 })
3563 }
3564
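    /// Schedules a full rescan of the repository (branch, statuses, merge state)
    /// on the git worker and forwards the refreshed snapshot downstream when a
    /// sender is provided.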
3565 fn schedule_scan(
3566 &mut self,
3567 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3568 cx: &mut Context<Self>,
3569 ) {
3570 self.paths_changed(
3571 vec![git::repository::WORK_DIRECTORY_REPO_PATH.clone()],
3572 updates_tx.clone(),
3573 cx,
3574 );
3575
3576 let this = cx.weak_entity();
3577 let _ = self.send_keyed_job(
3578 Some(GitJobKey::ReloadGitState),
3579 |state, mut cx| async move {
3580 let Some(this) = this.upgrade() else {
3581 return Ok(());
3582 };
3583 let RepositoryState::Local { backend, .. } = state else {
3584 bail!("not a local repository")
3585 };
3586 let (snapshot, events) = this
3587 .update(&mut cx, |this, _| {
3588 compute_snapshot(
3589 this.id,
3590 this.work_directory_abs_path.clone(),
3591 this.snapshot.clone(),
3592 backend.clone(),
3593 )
3594 })?
3595 .await?;
3596 this.update(&mut cx, |this, cx| {
3597 this.snapshot = snapshot.clone();
3598 for event in events {
3599 cx.emit(event);
3600 }
3601 })?;
3602 if let Some(updates_tx) = updates_tx {
3603 updates_tx
3604 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
3605 .ok();
3606 }
3607 Ok(())
3608 },
3609 );
3610 }
3611
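    /// Spawns the background loop that owns the local repository: it resolves
    /// the project environment, opens the repository through the filesystem, and
    /// then executes queued git jobs one at a time.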
3612 fn spawn_local_git_worker(
3613 work_directory_abs_path: Arc<Path>,
3614 dot_git_abs_path: Arc<Path>,
3615 project_environment: WeakEntity<ProjectEnvironment>,
3616 fs: Arc<dyn Fs>,
3617 cx: &mut Context<Self>,
3618 ) -> mpsc::UnboundedSender<GitJob> {
3619 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3620
3621 cx.spawn(async move |_, cx| {
3622 let environment = project_environment
3623 .upgrade()
3624 .ok_or_else(|| anyhow!("missing project environment"))?
3625 .update(cx, |project_environment, cx| {
3626 project_environment.get_environment(Some(work_directory_abs_path), cx)
3627 })?
3628 .await
3629 .ok_or_else(|| {
3630 anyhow!("failed to get environment for repository working directory")
3631 })?;
3632 let backend = cx
3633 .background_spawn(async move {
3634 fs.open_repo(&dot_git_abs_path)
3635 .ok_or_else(|| anyhow!("failed to build repository"))
3636 })
3637 .await?;
3638
3639 if let Some(git_hosting_provider_registry) =
3640 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
3641 {
3642 git_hosting_providers::register_additional_providers(
3643 git_hosting_provider_registry,
3644 backend.clone(),
3645 );
3646 }
3647
3648 let state = RepositoryState::Local {
3649 backend,
3650 environment: Arc::new(environment),
3651 };
3652 let mut jobs = VecDeque::new();
3653 loop {
3654 while let Ok(Some(next_job)) = job_rx.try_next() {
3655 jobs.push_back(next_job);
3656 }
3657
3658 if let Some(job) = jobs.pop_front() {
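                    // Skip this job if a newer job with the same key is already
                    // queued; only the most recent keyed job needs to run.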
3659 if let Some(current_key) = &job.key {
3660 if jobs
3661 .iter()
3662 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3663 {
3664 continue;
3665 }
3666 }
3667 (job.job)(state.clone(), cx).await;
3668 } else if let Some(job) = job_rx.next().await {
3669 jobs.push_back(job);
3670 } else {
3671 break;
3672 }
3673 }
3674 anyhow::Ok(())
3675 })
3676 .detach_and_log_err(cx);
3677
3678 job_tx
3679 }
3680
3681 fn spawn_remote_git_worker(
3682 project_id: ProjectId,
3683 client: AnyProtoClient,
3684 cx: &mut Context<Self>,
3685 ) -> mpsc::UnboundedSender<GitJob> {
3686 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
3687
3688 cx.spawn(async move |_, cx| {
3689 let state = RepositoryState::Remote { project_id, client };
3690 let mut jobs = VecDeque::new();
3691 loop {
3692 while let Ok(Some(next_job)) = job_rx.try_next() {
3693 jobs.push_back(next_job);
3694 }
3695
3696 if let Some(job) = jobs.pop_front() {
3697 if let Some(current_key) = &job.key {
3698 if jobs
3699 .iter()
3700 .any(|other_job| other_job.key.as_ref() == Some(current_key))
3701 {
3702 continue;
3703 }
3704 }
3705 (job.job)(state.clone(), cx).await;
3706 } else if let Some(job) = job_rx.next().await {
3707 jobs.push_back(job);
3708 } else {
3709 break;
3710 }
3711 }
3712 anyhow::Ok(())
3713 })
3714 .detach_and_log_err(cx);
3715
3716 job_tx
3717 }
3718
3719 fn load_staged_text(
3720 &self,
3721 buffer_id: BufferId,
3722 repo_path: RepoPath,
3723 cx: &App,
3724 ) -> Task<Result<Option<String>>> {
3725 let rx = self.send_job(move |state, _| async move {
3726 match state {
3727 RepositoryState::Local { backend, .. } => {
3728 anyhow::Ok(backend.load_index_text(repo_path).await)
3729 }
3730 RepositoryState::Remote { project_id, client } => {
3731 let response = client
3732 .request(proto::OpenUnstagedDiff {
3733 project_id: project_id.to_proto(),
3734 buffer_id: buffer_id.to_proto(),
3735 })
3736 .await?;
3737 Ok(response.staged_text)
3738 }
3739 }
3740 });
3741 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3742 }
3743
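    /// Loads the committed (HEAD) and staged (index) base texts for a buffer's
    /// path, collapsing them into a single `SetBoth` change when the index
    /// matches HEAD.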
3744 fn load_committed_text(
3745 &self,
3746 buffer_id: BufferId,
3747 repo_path: RepoPath,
3748 cx: &App,
3749 ) -> Task<Result<DiffBasesChange>> {
3750 let rx = self.send_job(move |state, _| async move {
3751 match state {
3752 RepositoryState::Local { backend, .. } => {
3753 let committed_text = backend.load_committed_text(repo_path.clone()).await;
3754 let staged_text = backend.load_index_text(repo_path).await;
3755 let diff_bases_change = if committed_text == staged_text {
3756 DiffBasesChange::SetBoth(committed_text)
3757 } else {
3758 DiffBasesChange::SetEach {
3759 index: staged_text,
3760 head: committed_text,
3761 }
3762 };
3763 anyhow::Ok(diff_bases_change)
3764 }
3765 RepositoryState::Remote { project_id, client } => {
3766 use proto::open_uncommitted_diff_response::Mode;
3767
3768 let response = client
3769 .request(proto::OpenUncommittedDiff {
3770 project_id: project_id.to_proto(),
3771 buffer_id: buffer_id.to_proto(),
3772 })
3773 .await?;
3774 let mode =
3775 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
3776 let bases = match mode {
3777 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
3778 Mode::IndexAndHead => DiffBasesChange::SetEach {
3779 head: response.committed_text,
3780 index: response.staged_text,
3781 },
3782 };
3783 Ok(bases)
3784 }
3785 }
3786 });
3787
3788 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
3789 }
3790
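    /// Queues a status refresh for the given paths. Pending paths accumulate and
    /// the refresh jobs are keyed, so bursts of file events collapse into a
    /// single status query against the backend.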
3791 fn paths_changed(
3792 &mut self,
3793 paths: Vec<RepoPath>,
3794 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
3795 cx: &mut Context<Self>,
3796 ) {
3797 self.paths_needing_status_update.extend(paths);
3798
3799 let this = cx.weak_entity();
3800 let _ = self.send_keyed_job(
3801 Some(GitJobKey::RefreshStatuses),
3802 |state, mut cx| async move {
3803 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
3804 (
3805 this.snapshot.clone(),
3806 mem::take(&mut this.paths_needing_status_update),
3807 )
3808 })?;
3809 let RepositoryState::Local { backend, .. } = state else {
3810 bail!("not a local repository")
3811 };
3812
3813 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
3814 let statuses = backend.status(&paths).await?;
3815
3816 let changed_path_statuses = cx
3817 .background_spawn(async move {
3818 let mut changed_path_statuses = Vec::new();
3819 let prev_statuses = prev_snapshot.statuses_by_path.clone();
3820 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3821
3822 for (repo_path, status) in &*statuses.entries {
3823 changed_paths.remove(repo_path);
3824 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left, &()) {
3825 if &cursor.item().unwrap().status == status {
3826 continue;
3827 }
3828 }
3829
3830 changed_path_statuses.push(Edit::Insert(StatusEntry {
3831 repo_path: repo_path.clone(),
3832 status: *status,
3833 }));
3834 }
3835 let mut cursor = prev_statuses.cursor::<PathProgress>(&());
3836 for path in changed_paths.iter() {
3837 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left, &()) {
3838 changed_path_statuses.push(Edit::Remove(PathKey(path.0.clone())));
3839 }
3840 }
3841 changed_path_statuses
3842 })
3843 .await;
3844
3845 this.update(&mut cx, |this, cx| {
3846 if !changed_path_statuses.is_empty() {
3847 this.snapshot
3848 .statuses_by_path
3849 .edit(changed_path_statuses, &());
3850 this.snapshot.scan_id += 1;
3851 if let Some(updates_tx) = updates_tx {
3852 updates_tx
3853 .unbounded_send(DownstreamUpdate::UpdateRepository(
3854 this.snapshot.clone(),
3855 ))
3856 .ok();
3857 }
3858 }
3859 cx.emit(RepositoryEvent::Updated);
3860 })
3861 },
3862 );
3863 }
3864}
3865
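/// Builds a permalink for a file inside a vendored crates.io source checkout by
/// reading the `.cargo_vcs_info.json` and `Cargo.toml` that cargo publishes
/// alongside the package sources.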
3866fn get_permalink_in_rust_registry_src(
3867 provider_registry: Arc<GitHostingProviderRegistry>,
3868 path: PathBuf,
3869 selection: Range<u32>,
3870) -> Result<url::Url> {
3871 #[derive(Deserialize)]
3872 struct CargoVcsGit {
3873 sha1: String,
3874 }
3875
3876 #[derive(Deserialize)]
3877 struct CargoVcsInfo {
3878 git: CargoVcsGit,
3879 path_in_vcs: String,
3880 }
3881
3882 #[derive(Deserialize)]
3883 struct CargoPackage {
3884 repository: String,
3885 }
3886
3887 #[derive(Deserialize)]
3888 struct CargoToml {
3889 package: CargoPackage,
3890 }
3891
3892 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
3893 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
3894 Some((dir, json))
3895 }) else {
3896 bail!("No .cargo_vcs_info.json found in parent directories")
3897 };
3898 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
3899 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
3900 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
3901 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
3902 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
3903 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
3904 let permalink = provider.build_permalink(
3905 remote,
3906 BuildPermalinkParams {
3907 sha: &cargo_vcs_info.git.sha1,
3908 path: &path.to_string_lossy(),
3909 selection: Some(selection),
3910 },
3911 );
3912 Ok(permalink)
3913}
3914
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author.clone(),
            author_mail: entry.author_mail.clone(),
            author_time: entry.author_time,
            author_tz: entry.author_tz.clone(),
            committer: entry.committer_name.clone(),
            committer_mail: entry.committer_email.clone(),
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz.clone(),
            summary: entry.summary.clone(),
            previous: entry.previous.clone(),
            filename: entry.filename.clone(),
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

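/// Inverse of [`serialize_blame_buffer_response`]: reconstructs a [`git::blame::Blame`]
/// from the protobuf message, dropping any entries or commit messages whose SHA fails
/// to parse.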
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

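/// Converts a [`git::repository::Branch`] into its protobuf representation, including
/// upstream tracking information and the most recent commit summary.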
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        name: branch.name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
            }),
    }
}

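/// Inverse of [`branch_to_proto`]. A missing `tracking` field is decoded as
/// [`git::repository::UpstreamTracking::Gone`], and deserialized commit summaries
/// default to `has_parent: true`.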
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        name: proto.name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                has_parent: true,
            }
        }),
    }
}

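/// Computes a fresh [`RepositorySnapshot`] from the given backend and compares it with
/// the previous snapshot to decide which [`RepositoryEvent`]s should be emitted.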
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend.status(&[WORK_DIRECTORY_REPO_PATH.clone()]).await?;
    let merge_message = backend
        .merge_message()
        .await
        .and_then(|msg| Some(msg.lines().nth(0)?.to_owned().into()));
    let merge_head_shas = backend
        .merge_head_shas()
        .into_iter()
        .map(SharedString::from)
        .collect();

    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        &(),
    );

    let merge_head_shas_changed = merge_head_shas != prev_snapshot.merge_head_shas;

    if merge_head_shas_changed
        || branch != prev_snapshot.branch
        || statuses_by_path != prev_snapshot.statuses_by_path
    {
        events.push(RepositoryEvent::Updated);
    }

    let mut current_merge_conflicts = TreeSet::default();
    for (repo_path, status) in statuses.entries.iter() {
        if status.is_conflicted() {
            current_merge_conflicts.insert(repo_path.clone());
        }
    }

    // Cache merge conflict paths so they don't change from staging/unstaging,
    // until the merge heads change (at commit time, etc.).
    let mut merge_conflicts = prev_snapshot.merge_conflicts.clone();
    if merge_head_shas_changed {
        merge_conflicts = current_merge_conflicts;
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    let snapshot = RepositorySnapshot {
        id,
        merge_message,
        statuses_by_path,
        work_directory_abs_path,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        merge_conflicts,
        merge_head_shas,
    };

    Ok((snapshot, events))
}

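/// Decodes a [`FileStatus`] from its protobuf representation. If the structured
/// `variant` is absent, the flat `simple_status` code is interpreted instead.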
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .ok_or_else(|| anyhow!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => return Err(anyhow!("Invalid code for simple status: {simple_status}")),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .ok_or_else(|| anyhow!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => return Err(anyhow!("Invalid code for unmerged status: {code:?}")),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .ok_or_else(|| anyhow!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => return Err(anyhow!("Invalid code for tracked status: {code:?}")),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

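/// Encodes a [`FileStatus`] as a [`proto::GitFileStatus`], always populating the
/// structured `variant` field.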
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

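/// Maps an [`UnmergedStatusCode`] to its [`proto::GitStatus`] integer value.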
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

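/// Maps a tracked-file [`StatusCode`] to its [`proto::GitStatus`] integer value.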
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}