1pub mod git_traversal;
2
3use crate::{
4 buffer_store::{BufferStore, BufferStoreEvent},
5 worktree_store::{WorktreeStore, WorktreeStoreEvent},
6 ProjectEnvironment, ProjectItem, ProjectPath,
7};
8use anyhow::{anyhow, bail, Context as _, Result};
9use askpass::{AskPassDelegate, AskPassSession};
10use buffer_diff::{BufferDiff, BufferDiffEvent};
11use client::ProjectId;
12use collections::HashMap;
13use fs::Fs;
14use futures::{
15 channel::{mpsc, oneshot},
16 future::{self, OptionFuture, Shared},
17 FutureExt as _, StreamExt as _,
18};
19use git::{
20 blame::Blame,
21 parse_git_remote_url,
22 repository::{
23 Branch, CommitDetails, DiffType, GitIndex, GitRepository, GitRepositoryCheckpoint,
24 PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
25 },
26 status::{FileStatus, GitStatus},
27 BuildPermalinkParams, GitHostingProviderRegistry,
28};
29use gpui::{
30 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
31 WeakEntity,
32};
33use language::{
34 proto::{deserialize_version, serialize_version},
35 Buffer, BufferEvent, Language, LanguageRegistry,
36};
37use parking_lot::Mutex;
38use rpc::{
39 proto::{self, git_reset, FromProto, ToProto, SSH_PROJECT_ID},
40 AnyProtoClient, TypedEnvelope,
41};
42use serde::Deserialize;
43use settings::WorktreeId;
44use std::{
45 collections::{hash_map, VecDeque},
46 future::Future,
47 ops::Range,
48 path::{Path, PathBuf},
49 sync::Arc,
50};
51use sum_tree::TreeSet;
52use text::BufferId;
53use util::{debug_panic, maybe, ResultExt};
54use worktree::{
55 proto_to_branch, File, PathKey, ProjectEntryId, RepositoryEntry, StatusEntry,
56 UpdatedGitRepositoriesSet, Worktree,
57};
58
59pub struct GitStore {
60 state: GitStoreState,
61 buffer_store: Entity<BufferStore>,
62 worktree_store: Entity<WorktreeStore>,
63 repositories: HashMap<ProjectEntryId, Entity<Repository>>,
64 active_repo_id: Option<ProjectEntryId>,
65 #[allow(clippy::type_complexity)]
66 loading_diffs:
67 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
68 diffs: HashMap<BufferId, Entity<BufferDiffState>>,
69 update_sender: mpsc::UnboundedSender<GitJob>,
70 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
71 _subscriptions: [Subscription; 2],
72}
73
74#[derive(Default)]
75struct SharedDiffs {
76 unstaged: Option<Entity<BufferDiff>>,
77 uncommitted: Option<Entity<BufferDiff>>,
78}
79
80#[derive(Default)]
81struct BufferDiffState {
82 unstaged_diff: Option<WeakEntity<BufferDiff>>,
83 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
84 recalculate_diff_task: Option<Task<Result<()>>>,
85 language: Option<Arc<Language>>,
86 language_registry: Option<Arc<LanguageRegistry>>,
87 diff_updated_futures: Vec<oneshot::Sender<()>>,
88 hunk_staging_operation_count: usize,
89
90 head_text: Option<Arc<String>>,
91 index_text: Option<Arc<String>>,
92 head_changed: bool,
93 index_changed: bool,
94 language_changed: bool,
95}
96
97#[derive(Clone, Debug)]
98enum DiffBasesChange {
99 SetIndex(Option<String>),
100 SetHead(Option<String>),
101 SetEach {
102 index: Option<String>,
103 head: Option<String>,
104 },
105 SetBoth(Option<String>),
106}
107
108#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
109enum DiffKind {
110 Unstaged,
111 Uncommitted,
112}
113
114enum GitStoreState {
115 Local {
116 downstream_client: Option<LocalDownstreamState>,
117 environment: Entity<ProjectEnvironment>,
118 fs: Arc<dyn Fs>,
119 },
120 Ssh {
121 upstream_client: AnyProtoClient,
122 upstream_project_id: ProjectId,
123 downstream_client: Option<(AnyProtoClient, ProjectId)>,
124 environment: Entity<ProjectEnvironment>,
125 },
126 Remote {
127 upstream_client: AnyProtoClient,
128 project_id: ProjectId,
129 },
130}
131
132enum DownstreamUpdate {
133 UpdateRepository(RepositoryEntry),
134 RemoveRepository(ProjectEntryId),
135}
136
137struct LocalDownstreamState {
138 client: AnyProtoClient,
139 project_id: ProjectId,
140 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
141 _task: Task<Result<()>>,
142}
143
144#[derive(Clone)]
145pub struct GitStoreCheckpoint {
146 checkpoints_by_work_dir_abs_path: HashMap<PathBuf, GitRepositoryCheckpoint>,
147}
148
149#[derive(Clone, Debug)]
150pub struct GitStoreDiff {
151 diffs_by_work_dir_abs_path: HashMap<PathBuf, String>,
152}
153
154#[derive(Clone, Debug)]
155pub struct GitStoreIndex {
156 indices_by_work_dir_abs_path: HashMap<PathBuf, GitIndex>,
157}
158
159#[derive(Default)]
160pub struct GitStoreStatus {
161 #[allow(dead_code)]
162 statuses_by_work_dir_abs_path: HashMap<PathBuf, GitStatus>,
163}
164
165pub struct Repository {
166 pub repository_entry: RepositoryEntry,
167 pub merge_message: Option<String>,
168 pub completed_scan_id: usize,
169 commit_message_buffer: Option<Entity<Buffer>>,
170 git_store: WeakEntity<GitStore>,
171 project_environment: Option<WeakEntity<ProjectEnvironment>>,
172 pub worktree_id: Option<WorktreeId>,
173 state: RepositoryState,
174 job_sender: mpsc::UnboundedSender<GitJob>,
175 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
176 latest_askpass_id: u64,
177}
178
179#[derive(Clone)]
180enum RepositoryState {
181 Local(Arc<dyn GitRepository>),
182 Remote {
183 project_id: ProjectId,
184 client: AnyProtoClient,
185 work_directory_id: ProjectEntryId,
186 },
187}
188
189#[derive(Debug)]
190pub enum GitEvent {
191 ActiveRepositoryChanged,
192 FileSystemUpdated,
193 GitStateUpdated,
194 IndexWriteError(anyhow::Error),
195}
196
197struct GitJob {
198 job: Box<dyn FnOnce(&mut AsyncApp) -> Task<()>>,
199 key: Option<GitJobKey>,
200}
201
202#[derive(PartialEq, Eq)]
203enum GitJobKey {
204 WriteIndex(RepoPath),
205 BatchReadIndex(ProjectEntryId),
206}
207
208impl EventEmitter<GitEvent> for GitStore {}
209
210impl GitStore {
211 pub fn local(
212 worktree_store: &Entity<WorktreeStore>,
213 buffer_store: Entity<BufferStore>,
214 environment: Entity<ProjectEnvironment>,
215 fs: Arc<dyn Fs>,
216 cx: &mut Context<Self>,
217 ) -> Self {
218 Self::new(
219 worktree_store.clone(),
220 buffer_store,
221 GitStoreState::Local {
222 downstream_client: None,
223 environment,
224 fs,
225 },
226 cx,
227 )
228 }
229
230 pub fn remote(
231 worktree_store: &Entity<WorktreeStore>,
232 buffer_store: Entity<BufferStore>,
233 upstream_client: AnyProtoClient,
234 project_id: ProjectId,
235 cx: &mut Context<Self>,
236 ) -> Self {
237 Self::new(
238 worktree_store.clone(),
239 buffer_store,
240 GitStoreState::Remote {
241 upstream_client,
242 project_id,
243 },
244 cx,
245 )
246 }
247
248 pub fn ssh(
249 worktree_store: &Entity<WorktreeStore>,
250 buffer_store: Entity<BufferStore>,
251 environment: Entity<ProjectEnvironment>,
252 upstream_client: AnyProtoClient,
253 cx: &mut Context<Self>,
254 ) -> Self {
255 Self::new(
256 worktree_store.clone(),
257 buffer_store,
258 GitStoreState::Ssh {
259 upstream_client,
260 upstream_project_id: ProjectId(SSH_PROJECT_ID),
261 downstream_client: None,
262 environment,
263 },
264 cx,
265 )
266 }
267
268 fn new(
269 worktree_store: Entity<WorktreeStore>,
270 buffer_store: Entity<BufferStore>,
271 state: GitStoreState,
272 cx: &mut Context<Self>,
273 ) -> Self {
274 let update_sender = Self::spawn_git_worker(cx);
275 let _subscriptions = [
276 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
277 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
278 ];
279
280 GitStore {
281 state,
282 buffer_store,
283 worktree_store,
284 repositories: HashMap::default(),
285 active_repo_id: None,
286 update_sender,
287 _subscriptions,
288 loading_diffs: HashMap::default(),
289 shared_diffs: HashMap::default(),
290 diffs: HashMap::default(),
291 }
292 }
293
294 pub fn init(client: &AnyProtoClient) {
295 client.add_entity_request_handler(Self::handle_get_remotes);
296 client.add_entity_request_handler(Self::handle_get_branches);
297 client.add_entity_request_handler(Self::handle_change_branch);
298 client.add_entity_request_handler(Self::handle_create_branch);
299 client.add_entity_request_handler(Self::handle_git_init);
300 client.add_entity_request_handler(Self::handle_push);
301 client.add_entity_request_handler(Self::handle_pull);
302 client.add_entity_request_handler(Self::handle_fetch);
303 client.add_entity_request_handler(Self::handle_stage);
304 client.add_entity_request_handler(Self::handle_unstage);
305 client.add_entity_request_handler(Self::handle_commit);
306 client.add_entity_request_handler(Self::handle_reset);
307 client.add_entity_request_handler(Self::handle_show);
308 client.add_entity_request_handler(Self::handle_checkout_files);
309 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
310 client.add_entity_request_handler(Self::handle_set_index_text);
311 client.add_entity_request_handler(Self::handle_askpass);
312 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
313 client.add_entity_request_handler(Self::handle_git_diff);
314 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
315 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
316 client.add_entity_message_handler(Self::handle_update_diff_bases);
317 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
318 client.add_entity_request_handler(Self::handle_blame_buffer);
319 client.add_entity_message_handler(Self::handle_update_repository);
320 client.add_entity_message_handler(Self::handle_remove_repository);
321 }
322
323 pub fn is_local(&self) -> bool {
324 matches!(self.state, GitStoreState::Local { .. })
325 }
326
327 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
328 match &mut self.state {
329 GitStoreState::Ssh {
330 downstream_client, ..
331 } => {
332 for repo in self.repositories.values() {
333 client
334 .send(repo.read(cx).repository_entry.initial_update(project_id))
335 .log_err();
336 }
337 *downstream_client = Some((client, ProjectId(project_id)));
338 }
339 GitStoreState::Local {
340 downstream_client, ..
341 } => {
342 let mut snapshots = HashMap::default();
343 let (updates_tx, mut updates_rx) = mpsc::unbounded();
344 for repo in self.repositories.values() {
345 updates_tx
346 .unbounded_send(DownstreamUpdate::UpdateRepository(
347 repo.read(cx).repository_entry.clone(),
348 ))
349 .ok();
350 }
351 *downstream_client = Some(LocalDownstreamState {
352 client: client.clone(),
353 project_id: ProjectId(project_id),
354 updates_tx,
355 _task: cx.spawn(async move |this, cx| {
356 cx.background_spawn(async move {
357 while let Some(update) = updates_rx.next().await {
358 match update {
359 DownstreamUpdate::UpdateRepository(snapshot) => {
360 if let Some(old_snapshot) =
361 snapshots.get_mut(&snapshot.work_directory_id)
362 {
363 let update =
364 snapshot.build_update(old_snapshot, project_id);
365 *old_snapshot = snapshot;
366 client.send(update)?;
367 } else {
368 let update = snapshot.initial_update(project_id);
369 client.send(update)?;
370 snapshots.insert(snapshot.work_directory_id, snapshot);
371 }
372 }
373 DownstreamUpdate::RemoveRepository(id) => {
374 client.send(proto::RemoveRepository {
375 project_id,
376 id: id.to_proto(),
377 })?;
378 }
379 }
380 }
381 anyhow::Ok(())
382 })
383 .await
384 .ok();
385 this.update(cx, |this, _| {
386 if let GitStoreState::Local {
387 downstream_client, ..
388 } = &mut this.state
389 {
390 downstream_client.take();
391 } else {
392 unreachable!("unshared called on remote store");
393 }
394 })
395 }),
396 });
397 }
398 GitStoreState::Remote { .. } => {
399 debug_panic!("shared called on remote store");
400 }
401 }
402 }
403
404 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
405 match &mut self.state {
406 GitStoreState::Local {
407 downstream_client, ..
408 } => {
409 downstream_client.take();
410 }
411 GitStoreState::Ssh {
412 downstream_client, ..
413 } => {
414 downstream_client.take();
415 }
416 GitStoreState::Remote { .. } => {
417 debug_panic!("unshared called on remote store");
418 }
419 }
420 self.shared_diffs.clear();
421 }
422
423 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
424 self.shared_diffs.remove(peer_id);
425 }
426
427 pub fn active_repository(&self) -> Option<Entity<Repository>> {
428 self.active_repo_id
429 .as_ref()
430 .map(|id| self.repositories[&id].clone())
431 }
432
433 pub fn open_unstaged_diff(
434 &mut self,
435 buffer: Entity<Buffer>,
436 cx: &mut Context<Self>,
437 ) -> Task<Result<Entity<BufferDiff>>> {
438 let buffer_id = buffer.read(cx).remote_id();
439 if let Some(diff_state) = self.diffs.get(&buffer_id) {
440 if let Some(unstaged_diff) = diff_state
441 .read(cx)
442 .unstaged_diff
443 .as_ref()
444 .and_then(|weak| weak.upgrade())
445 {
446 if let Some(task) =
447 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
448 {
449 return cx.background_executor().spawn(async move {
450 task.await?;
451 Ok(unstaged_diff)
452 });
453 }
454 return Task::ready(Ok(unstaged_diff));
455 }
456 }
457
458 let task = match self.loading_diffs.entry((buffer_id, DiffKind::Unstaged)) {
459 hash_map::Entry::Occupied(e) => e.get().clone(),
460 hash_map::Entry::Vacant(entry) => {
461 let staged_text = self.state.load_staged_text(&buffer, &self.buffer_store, cx);
462 entry
463 .insert(
464 cx.spawn(async move |this, cx| {
465 Self::open_diff_internal(
466 this,
467 DiffKind::Unstaged,
468 staged_text.await.map(DiffBasesChange::SetIndex),
469 buffer,
470 cx,
471 )
472 .await
473 .map_err(Arc::new)
474 })
475 .shared(),
476 )
477 .clone()
478 }
479 };
480
481 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
482 }
483
484 pub fn open_uncommitted_diff(
485 &mut self,
486 buffer: Entity<Buffer>,
487 cx: &mut Context<Self>,
488 ) -> Task<Result<Entity<BufferDiff>>> {
489 let buffer_id = buffer.read(cx).remote_id();
490
491 if let Some(diff_state) = self.diffs.get(&buffer_id) {
492 if let Some(uncommitted_diff) = diff_state
493 .read(cx)
494 .uncommitted_diff
495 .as_ref()
496 .and_then(|weak| weak.upgrade())
497 {
498 if let Some(task) =
499 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
500 {
501 return cx.background_executor().spawn(async move {
502 task.await?;
503 Ok(uncommitted_diff)
504 });
505 }
506 return Task::ready(Ok(uncommitted_diff));
507 }
508 }
509
510 let task = match self.loading_diffs.entry((buffer_id, DiffKind::Uncommitted)) {
511 hash_map::Entry::Occupied(e) => e.get().clone(),
512 hash_map::Entry::Vacant(entry) => {
513 let changes = self
514 .state
515 .load_committed_text(&buffer, &self.buffer_store, cx);
516
517 entry
518 .insert(
519 cx.spawn(async move |this, cx| {
520 Self::open_diff_internal(
521 this,
522 DiffKind::Uncommitted,
523 changes.await,
524 buffer,
525 cx,
526 )
527 .await
528 .map_err(Arc::new)
529 })
530 .shared(),
531 )
532 .clone()
533 }
534 };
535
536 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
537 }
538
539 async fn open_diff_internal(
540 this: WeakEntity<Self>,
541 kind: DiffKind,
542 texts: Result<DiffBasesChange>,
543 buffer_entity: Entity<Buffer>,
544 cx: &mut AsyncApp,
545 ) -> Result<Entity<BufferDiff>> {
546 let diff_bases_change = match texts {
547 Err(e) => {
548 this.update(cx, |this, cx| {
549 let buffer = buffer_entity.read(cx);
550 let buffer_id = buffer.remote_id();
551 this.loading_diffs.remove(&(buffer_id, kind));
552 })?;
553 return Err(e);
554 }
555 Ok(change) => change,
556 };
557
558 this.update(cx, |this, cx| {
559 let buffer = buffer_entity.read(cx);
560 let buffer_id = buffer.remote_id();
561 let language = buffer.language().cloned();
562 let language_registry = buffer.language_registry();
563 let text_snapshot = buffer.text_snapshot();
564 this.loading_diffs.remove(&(buffer_id, kind));
565
566 let diff_state = this
567 .diffs
568 .entry(buffer_id)
569 .or_insert_with(|| cx.new(|_| BufferDiffState::default()));
570
571 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
572
573 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
574 diff_state.update(cx, |diff_state, cx| {
575 diff_state.language = language;
576 diff_state.language_registry = language_registry;
577
578 match kind {
579 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
580 DiffKind::Uncommitted => {
581 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
582 diff
583 } else {
584 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
585 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
586 unstaged_diff
587 };
588
589 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
590 diff_state.uncommitted_diff = Some(diff.downgrade())
591 }
592 }
593
594 let rx = diff_state.diff_bases_changed(text_snapshot, diff_bases_change, 0, cx);
595
596 anyhow::Ok(async move {
597 rx.await.ok();
598 Ok(diff)
599 })
600 })
601 })??
602 .await
603 }
604
605 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
606 let diff_state = self.diffs.get(&buffer_id)?;
607 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
608 }
609
610 pub fn get_uncommitted_diff(
611 &self,
612 buffer_id: BufferId,
613 cx: &App,
614 ) -> Option<Entity<BufferDiff>> {
615 let diff_state = self.diffs.get(&buffer_id)?;
616 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
617 }
618
619 pub fn project_path_git_status(
620 &self,
621 project_path: &ProjectPath,
622 cx: &App,
623 ) -> Option<FileStatus> {
624 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
625 Some(
626 repo.read(cx)
627 .repository_entry
628 .status_for_path(&repo_path)?
629 .status,
630 )
631 }
632
633 pub fn checkpoint(&self, cx: &App) -> Task<Result<GitStoreCheckpoint>> {
634 let mut work_directory_abs_paths = Vec::new();
635 let mut checkpoints = Vec::new();
636 for repository in self.repositories.values() {
637 let repository = repository.read(cx);
638 work_directory_abs_paths
639 .push(repository.repository_entry.work_directory_abs_path.clone());
640 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
641 }
642
643 cx.background_executor().spawn(async move {
644 let checkpoints = future::try_join_all(checkpoints).await?;
645 Ok(GitStoreCheckpoint {
646 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
647 .into_iter()
648 .zip(checkpoints)
649 .collect(),
650 })
651 })
652 }
653
654 pub fn restore_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
655 let repositories_by_work_dir_abs_path = self
656 .repositories
657 .values()
658 .map(|repo| {
659 (
660 repo.read(cx)
661 .repository_entry
662 .work_directory_abs_path
663 .clone(),
664 repo,
665 )
666 })
667 .collect::<HashMap<_, _>>();
668
669 let mut tasks = Vec::new();
670 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
671 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
672 let restore = repository.read(cx).restore_checkpoint(checkpoint);
673 tasks.push(async move { restore.await? });
674 }
675 }
676 cx.background_spawn(async move {
677 future::try_join_all(tasks).await?;
678 Ok(())
679 })
680 }
681
682 /// Compares two checkpoints, returning true if they are equal.
683 pub fn compare_checkpoints(
684 &self,
685 left: GitStoreCheckpoint,
686 mut right: GitStoreCheckpoint,
687 cx: &App,
688 ) -> Task<Result<bool>> {
689 let repositories_by_work_dir_abs_path = self
690 .repositories
691 .values()
692 .map(|repo| {
693 (
694 repo.read(cx)
695 .repository_entry
696 .work_directory_abs_path
697 .clone(),
698 repo,
699 )
700 })
701 .collect::<HashMap<_, _>>();
702
703 let mut tasks = Vec::new();
704 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
705 if let Some(right_checkpoint) = right
706 .checkpoints_by_work_dir_abs_path
707 .remove(&work_dir_abs_path)
708 {
709 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
710 {
711 let compare = repository
712 .read(cx)
713 .compare_checkpoints(left_checkpoint, right_checkpoint);
714 tasks.push(async move { compare.await? });
715 }
716 } else {
717 return Task::ready(Ok(false));
718 }
719 }
720 cx.background_spawn(async move {
721 Ok(future::try_join_all(tasks)
722 .await?
723 .into_iter()
724 .all(|result| result))
725 })
726 }
727
728 pub fn delete_checkpoint(&self, checkpoint: GitStoreCheckpoint, cx: &App) -> Task<Result<()>> {
729 let repositories_by_work_directory_abs_path = self
730 .repositories
731 .values()
732 .map(|repo| {
733 (
734 repo.read(cx)
735 .repository_entry
736 .work_directory_abs_path
737 .clone(),
738 repo,
739 )
740 })
741 .collect::<HashMap<_, _>>();
742
743 let mut tasks = Vec::new();
744 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
745 if let Some(repository) =
746 repositories_by_work_directory_abs_path.get(&work_dir_abs_path)
747 {
748 let delete = repository.read(cx).delete_checkpoint(checkpoint);
749 tasks.push(async move { delete.await? });
750 }
751 }
752 cx.background_spawn(async move {
753 future::try_join_all(tasks).await?;
754 Ok(())
755 })
756 }
757
758 pub fn diff_checkpoints(
759 &self,
760 base_checkpoint: GitStoreCheckpoint,
761 target_checkpoint: GitStoreCheckpoint,
762 cx: &App,
763 ) -> Task<Result<GitStoreDiff>> {
764 let repositories_by_work_dir_abs_path = self
765 .repositories
766 .values()
767 .map(|repo| {
768 (
769 repo.read(cx)
770 .repository_entry
771 .work_directory_abs_path
772 .clone(),
773 repo,
774 )
775 })
776 .collect::<HashMap<_, _>>();
777
778 let mut tasks = Vec::new();
779 for (work_dir_abs_path, base_checkpoint) in base_checkpoint.checkpoints_by_work_dir_abs_path
780 {
781 if let Some(target_checkpoint) = target_checkpoint
782 .checkpoints_by_work_dir_abs_path
783 .get(&work_dir_abs_path)
784 .cloned()
785 {
786 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
787 {
788 let diff = repository
789 .read(cx)
790 .diff_checkpoints(base_checkpoint, target_checkpoint);
791 tasks.push(async move {
792 let diff = diff.await??;
793 anyhow::Ok((work_dir_abs_path, diff))
794 });
795 }
796 }
797 }
798
799 cx.background_spawn(async move {
800 let diffs_by_path = future::try_join_all(tasks).await?;
801 Ok(GitStoreDiff {
802 diffs_by_work_dir_abs_path: diffs_by_path.into_iter().collect(),
803 })
804 })
805 }
806
807 pub fn create_index(&self, cx: &App) -> Task<Result<GitStoreIndex>> {
808 let mut indices = Vec::new();
809 for repository in self.repositories.values() {
810 let repository = repository.read(cx);
811 let work_dir_abs_path = repository.repository_entry.work_directory_abs_path.clone();
812 let index = repository.create_index().map(|index| index?);
813 indices.push(async move {
814 let index = index.await?;
815 anyhow::Ok((work_dir_abs_path, index))
816 });
817 }
818
819 cx.background_executor().spawn(async move {
820 let indices = future::try_join_all(indices).await?;
821 Ok(GitStoreIndex {
822 indices_by_work_dir_abs_path: indices.into_iter().collect(),
823 })
824 })
825 }
826
827 pub fn apply_diff(
828 &self,
829 mut index: GitStoreIndex,
830 diff: GitStoreDiff,
831 cx: &App,
832 ) -> Task<Result<()>> {
833 let repositories_by_work_dir_abs_path = self
834 .repositories
835 .values()
836 .map(|repo| {
837 (
838 repo.read(cx)
839 .repository_entry
840 .work_directory_abs_path
841 .clone(),
842 repo,
843 )
844 })
845 .collect::<HashMap<_, _>>();
846
847 let mut tasks = Vec::new();
848 for (work_dir_abs_path, diff) in diff.diffs_by_work_dir_abs_path {
849 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
850 if let Some(branch) = index
851 .indices_by_work_dir_abs_path
852 .remove(&work_dir_abs_path)
853 {
854 let apply = repository.read(cx).apply_diff(branch, diff);
855 tasks.push(async move { apply.await? });
856 }
857 }
858 }
859 cx.background_spawn(async move {
860 future::try_join_all(tasks).await?;
861 Ok(())
862 })
863 }
864
865 /// Blames a buffer.
866 pub fn blame_buffer(
867 &self,
868 buffer: &Entity<Buffer>,
869 version: Option<clock::Global>,
870 cx: &App,
871 ) -> Task<Result<Option<Blame>>> {
872 let buffer = buffer.read(cx);
873 let Some(file) = File::from_dyn(buffer.file()) else {
874 return Task::ready(Err(anyhow!("buffer has no file")));
875 };
876
877 match file.worktree.clone().read(cx) {
878 Worktree::Local(worktree) => {
879 let worktree = worktree.snapshot();
880 let blame_params = maybe!({
881 let local_repo = match worktree.local_repo_containing_path(&file.path) {
882 Some(repo_for_path) => repo_for_path,
883 None => return Ok(None),
884 };
885
886 let relative_path = local_repo
887 .relativize(&file.path)
888 .context("failed to relativize buffer path")?;
889
890 let repo = local_repo.repo().clone();
891
892 let content = match version {
893 Some(version) => buffer.rope_for_version(&version).clone(),
894 None => buffer.as_rope().clone(),
895 };
896
897 anyhow::Ok(Some((repo, relative_path, content)))
898 });
899
900 cx.spawn(async move |_cx| {
901 let Some((repo, relative_path, content)) = blame_params? else {
902 return Ok(None);
903 };
904 repo.blame(relative_path.clone(), content)
905 .await
906 .with_context(|| format!("Failed to blame {:?}", relative_path.0))
907 .map(Some)
908 })
909 }
910 Worktree::Remote(worktree) => {
911 let buffer_id = buffer.remote_id();
912 let version = buffer.version();
913 let project_id = worktree.project_id();
914 let client = worktree.client();
915 cx.spawn(async move |_| {
916 let response = client
917 .request(proto::BlameBuffer {
918 project_id,
919 buffer_id: buffer_id.into(),
920 version: serialize_version(&version),
921 })
922 .await?;
923 Ok(deserialize_blame_buffer_response(response))
924 })
925 }
926 }
927 }
928
929 pub fn get_permalink_to_line(
930 &self,
931 buffer: &Entity<Buffer>,
932 selection: Range<u32>,
933 cx: &App,
934 ) -> Task<Result<url::Url>> {
935 let buffer = buffer.read(cx);
936 let Some(file) = File::from_dyn(buffer.file()) else {
937 return Task::ready(Err(anyhow!("buffer has no file")));
938 };
939
940 match file.worktree.read(cx) {
941 Worktree::Local(worktree) => {
942 let repository = self
943 .repository_and_path_for_project_path(
944 &(worktree.id(), file.path.clone()).into(),
945 cx,
946 )
947 .map(|(repository, _)| repository);
948 let Some((local_repo_entry, repo_entry)) = repository.and_then(|repository| {
949 let repository = repository.read(cx);
950 let repo_entry = repository.repository_entry.clone();
951 Some((worktree.get_local_repo(&repo_entry)?, repo_entry))
952 }) else {
953 // If we're not in a Git repo, check whether this is a Rust source
954 // file in the Cargo registry (presumably opened with go-to-definition
955 // from a normal Rust file). If so, we can put together a permalink
956 // using crate metadata.
957 if buffer
958 .language()
959 .is_none_or(|lang| lang.name() != "Rust".into())
960 {
961 return Task::ready(Err(anyhow!("no permalink available")));
962 }
963 let Some(file_path) = worktree.absolutize(&file.path).ok() else {
964 return Task::ready(Err(anyhow!("no permalink available")));
965 };
966 return cx.spawn(async move |cx| {
967 let provider_registry =
968 cx.update(GitHostingProviderRegistry::default_global)?;
969 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
970 .map_err(|_| anyhow!("no permalink available"))
971 });
972 };
973
974 let path = match local_repo_entry.relativize(&file.path) {
975 Ok(RepoPath(path)) => path,
976 Err(e) => return Task::ready(Err(e)),
977 };
978
979 let remote = repo_entry
980 .branch()
981 .and_then(|b| b.upstream.as_ref())
982 .and_then(|b| b.remote_name())
983 .unwrap_or("origin")
984 .to_string();
985
986 let repo = local_repo_entry.repo().clone();
987 cx.spawn(async move |cx| {
988 let origin_url = repo
989 .remote_url(&remote)
990 .ok_or_else(|| anyhow!("remote \"{remote}\" not found"))?;
991
992 let sha = repo
993 .head_sha()
994 .ok_or_else(|| anyhow!("failed to read HEAD SHA"))?;
995
996 let provider_registry =
997 cx.update(GitHostingProviderRegistry::default_global)?;
998
999 let (provider, remote) =
1000 parse_git_remote_url(provider_registry, &origin_url)
1001 .ok_or_else(|| anyhow!("failed to parse Git remote URL"))?;
1002
1003 let path = path
1004 .to_str()
1005 .ok_or_else(|| anyhow!("failed to convert path to string"))?;
1006
1007 Ok(provider.build_permalink(
1008 remote,
1009 BuildPermalinkParams {
1010 sha: &sha,
1011 path,
1012 selection: Some(selection),
1013 },
1014 ))
1015 })
1016 }
1017 Worktree::Remote(worktree) => {
1018 let buffer_id = buffer.remote_id();
1019 let project_id = worktree.project_id();
1020 let client = worktree.client();
1021 cx.spawn(async move |_| {
1022 let response = client
1023 .request(proto::GetPermalinkToLine {
1024 project_id,
1025 buffer_id: buffer_id.into(),
1026 selection: Some(proto::Range {
1027 start: selection.start as u64,
1028 end: selection.end as u64,
1029 }),
1030 })
1031 .await?;
1032
1033 url::Url::parse(&response.permalink).context("failed to parse permalink")
1034 })
1035 }
1036 }
1037 }
1038
1039 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1040 match &self.state {
1041 GitStoreState::Local {
1042 downstream_client, ..
1043 } => downstream_client
1044 .as_ref()
1045 .map(|state| (state.client.clone(), state.project_id)),
1046 GitStoreState::Ssh {
1047 downstream_client, ..
1048 } => downstream_client.clone(),
1049 GitStoreState::Remote { .. } => None,
1050 }
1051 }
1052
1053 fn upstream_client(&self) -> Option<AnyProtoClient> {
1054 match &self.state {
1055 GitStoreState::Local { .. } => None,
1056 GitStoreState::Ssh {
1057 upstream_client, ..
1058 }
1059 | GitStoreState::Remote {
1060 upstream_client, ..
1061 } => Some(upstream_client.clone()),
1062 }
1063 }
1064
1065 fn project_environment(&self) -> Option<Entity<ProjectEnvironment>> {
1066 match &self.state {
1067 GitStoreState::Local { environment, .. } => Some(environment.clone()),
1068 GitStoreState::Ssh { environment, .. } => Some(environment.clone()),
1069 GitStoreState::Remote { .. } => None,
1070 }
1071 }
1072
1073 fn project_id(&self) -> Option<ProjectId> {
1074 match &self.state {
1075 GitStoreState::Local { .. } => None,
1076 GitStoreState::Ssh { .. } => Some(ProjectId(proto::SSH_PROJECT_ID)),
1077 GitStoreState::Remote { project_id, .. } => Some(*project_id),
1078 }
1079 }
1080
1081 fn on_worktree_store_event(
1082 &mut self,
1083 worktree_store: Entity<WorktreeStore>,
1084 event: &WorktreeStoreEvent,
1085 cx: &mut Context<Self>,
1086 ) {
1087 match event {
1088 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1089 // We should only get this event for a local project.
1090 self.update_repositories(&worktree_store, cx);
1091 if self.is_local() {
1092 if let Some(worktree) =
1093 worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1094 {
1095 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1096 }
1097 }
1098 cx.emit(GitEvent::GitStateUpdated);
1099 }
1100 WorktreeStoreEvent::WorktreeAdded(_) => {}
1101 _ => {
1102 cx.emit(GitEvent::FileSystemUpdated);
1103 }
1104 }
1105 }
1106
1107 fn update_repositories(
1108 &mut self,
1109 worktree_store: &Entity<WorktreeStore>,
1110 cx: &mut Context<'_, GitStore>,
1111 ) {
1112 let mut new_repositories = HashMap::default();
1113 let git_store = cx.weak_entity();
1114 worktree_store.update(cx, |worktree_store, cx| {
1115 for worktree in worktree_store.worktrees() {
1116 worktree.update(cx, |worktree, cx| {
1117 let snapshot = worktree.snapshot();
1118 for repo_entry in snapshot.repositories().iter() {
1119 let git_repo_and_merge_message = worktree
1120 .as_local()
1121 .and_then(|local_worktree| local_worktree.get_local_repo(repo_entry))
1122 .map(|local_repo| {
1123 (
1124 RepositoryState::Local(local_repo.repo().clone()),
1125 local_repo.merge_message.clone(),
1126 )
1127 })
1128 .or_else(|| {
1129 let git_repo = RepositoryState::Remote {
1130 project_id: self.project_id()?,
1131 client: self
1132 .upstream_client()
1133 .context("no upstream client")
1134 .log_err()?
1135 .clone(),
1136 work_directory_id: repo_entry.work_directory_id(),
1137 };
1138 Some((git_repo, None))
1139 });
1140
1141 let Some((git_repo, merge_message)) = git_repo_and_merge_message else {
1142 continue;
1143 };
1144
1145 let existing_repo = self
1146 .repositories
1147 .values()
1148 .find(|repo| repo.read(cx).id() == repo_entry.work_directory_id());
1149
1150 let repo = if let Some(existing_repo) = existing_repo {
1151 // Update the statuses and merge message but keep everything else.
1152 let existing_repo = existing_repo.clone();
1153 existing_repo.update(cx, |existing_repo, _| {
1154 existing_repo.repository_entry = repo_entry.clone();
1155 if matches!(git_repo, RepositoryState::Local { .. }) {
1156 existing_repo.merge_message = merge_message;
1157 existing_repo.completed_scan_id = worktree.completed_scan_id();
1158 }
1159 });
1160 existing_repo
1161 } else {
1162 cx.new(|_| Repository {
1163 worktree_id: Some(worktree.id()),
1164 project_environment: self
1165 .project_environment()
1166 .as_ref()
1167 .map(|env| env.downgrade()),
1168 git_store: git_store.clone(),
1169 askpass_delegates: Default::default(),
1170 latest_askpass_id: 0,
1171 repository_entry: repo_entry.clone(),
1172 job_sender: self.update_sender.clone(),
1173 merge_message,
1174 commit_message_buffer: None,
1175 completed_scan_id: worktree.completed_scan_id(),
1176 state: git_repo,
1177 })
1178 };
1179
1180 // TODO only send out messages for repository snapshots that have changed
1181 let snapshot = repo.read(cx).repository_entry.clone();
1182 if let GitStoreState::Local {
1183 downstream_client: Some(state),
1184 ..
1185 } = &self.state
1186 {
1187 state
1188 .updates_tx
1189 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
1190 .ok();
1191 }
1192 new_repositories.insert(repo_entry.work_directory_id(), repo);
1193 self.repositories.remove(&repo_entry.work_directory_id());
1194 }
1195 })
1196 }
1197 });
1198
1199 if let GitStoreState::Local {
1200 downstream_client: Some(state),
1201 ..
1202 } = &self.state
1203 {
1204 for id in self.repositories.keys().cloned() {
1205 state
1206 .updates_tx
1207 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1208 .ok();
1209 }
1210 }
1211
1212 self.repositories = new_repositories;
1213 if let Some(id) = self.active_repo_id.as_ref() {
1214 if !self.repositories.contains_key(id) {
1215 self.active_repo_id = None;
1216 }
1217 } else if let Some(&first_id) = self.repositories.keys().next() {
1218 self.active_repo_id = Some(first_id);
1219 }
1220 }
1221
1222 fn on_buffer_store_event(
1223 &mut self,
1224 _: Entity<BufferStore>,
1225 event: &BufferStoreEvent,
1226 cx: &mut Context<Self>,
1227 ) {
1228 match event {
1229 BufferStoreEvent::BufferAdded(buffer) => {
1230 cx.subscribe(&buffer, |this, buffer, event, cx| {
1231 if let BufferEvent::LanguageChanged = event {
1232 let buffer_id = buffer.read(cx).remote_id();
1233 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1234 diff_state.update(cx, |diff_state, cx| {
1235 diff_state.buffer_language_changed(buffer, cx);
1236 });
1237 }
1238 }
1239 })
1240 .detach();
1241 }
1242 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1243 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1244 diffs.remove(buffer_id);
1245 }
1246 }
1247 BufferStoreEvent::BufferDropped(buffer_id) => {
1248 self.diffs.remove(&buffer_id);
1249 for diffs in self.shared_diffs.values_mut() {
1250 diffs.remove(buffer_id);
1251 }
1252 }
1253
1254 _ => {}
1255 }
1256 }
1257
1258 pub fn recalculate_buffer_diffs(
1259 &mut self,
1260 buffers: Vec<Entity<Buffer>>,
1261 cx: &mut Context<Self>,
1262 ) -> impl Future<Output = ()> {
1263 let mut futures = Vec::new();
1264 for buffer in buffers {
1265 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1266 let buffer = buffer.read(cx).text_snapshot();
1267 futures.push(diff_state.update(cx, |diff_state, cx| {
1268 diff_state.recalculate_diffs(
1269 buffer,
1270 diff_state.hunk_staging_operation_count,
1271 cx,
1272 )
1273 }));
1274 }
1275 }
1276 async move {
1277 futures::future::join_all(futures).await;
1278 }
1279 }
1280
1281 fn on_buffer_diff_event(
1282 &mut self,
1283 diff: Entity<buffer_diff::BufferDiff>,
1284 event: &BufferDiffEvent,
1285 cx: &mut Context<Self>,
1286 ) {
1287 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1288 let buffer_id = diff.read(cx).buffer_id;
1289 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1290 diff_state.update(cx, |diff_state, _| {
1291 diff_state.hunk_staging_operation_count += 1;
1292 });
1293 }
1294 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1295 let recv = repo.update(cx, |repo, cx| {
1296 log::debug!("updating index text for buffer {}", path.display());
1297 repo.spawn_set_index_text_job(
1298 path,
1299 new_index_text.as_ref().map(|rope| rope.to_string()),
1300 cx,
1301 )
1302 });
1303 let diff = diff.downgrade();
1304 cx.spawn(async move |this, cx| {
1305 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1306 diff.update(cx, |diff, cx| {
1307 diff.clear_pending_hunks(cx);
1308 })
1309 .ok();
1310 this.update(cx, |_, cx| cx.emit(GitEvent::IndexWriteError(error)))
1311 .ok();
1312 }
1313 })
1314 .detach();
1315 }
1316 }
1317 }
1318
1319 fn local_worktree_git_repos_changed(
1320 &mut self,
1321 worktree: Entity<Worktree>,
1322 changed_repos: &UpdatedGitRepositoriesSet,
1323 cx: &mut Context<Self>,
1324 ) {
1325 debug_assert!(worktree.read(cx).is_local());
1326
1327 let Some(active_repo) = self.active_repository() else {
1328 log::error!("local worktree changed but we have no active repository");
1329 return;
1330 };
1331
1332 let mut diff_state_updates = HashMap::<ProjectEntryId, Vec<_>>::default();
1333 for (buffer_id, diff_state) in &self.diffs {
1334 let Some(buffer) = self.buffer_store.read(cx).get(*buffer_id) else {
1335 continue;
1336 };
1337 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1338 continue;
1339 };
1340 if file.worktree != worktree {
1341 continue;
1342 }
1343 let Some(repo_id) = changed_repos
1344 .iter()
1345 .map(|(entry, _)| entry.id)
1346 .find(|repo_id| self.repositories().contains_key(&repo_id))
1347 else {
1348 continue;
1349 };
1350
1351 let diff_state = diff_state.read(cx);
1352 let has_unstaged_diff = diff_state
1353 .unstaged_diff
1354 .as_ref()
1355 .is_some_and(|diff| diff.is_upgradable());
1356 let has_uncommitted_diff = diff_state
1357 .uncommitted_diff
1358 .as_ref()
1359 .is_some_and(|set| set.is_upgradable());
1360
1361 let update = (
1362 buffer,
1363 file.path.clone(),
1364 has_unstaged_diff.then(|| diff_state.index_text.clone()),
1365 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
1366 diff_state.hunk_staging_operation_count,
1367 );
1368 diff_state_updates.entry(repo_id).or_default().push(update);
1369 }
1370
1371 if diff_state_updates.is_empty() {
1372 return;
1373 }
1374
1375 for (repo_id, repo_diff_state_updates) in diff_state_updates.into_iter() {
1376 let worktree = worktree.downgrade();
1377 let git_store = cx.weak_entity();
1378
1379 let _ = active_repo.read(cx).send_keyed_job(
1380 Some(GitJobKey::BatchReadIndex(repo_id)),
1381 |_, mut cx| async move {
1382 let snapshot = worktree.update(&mut cx, |tree, _| {
1383 tree.as_local().map(|local_tree| local_tree.snapshot())
1384 });
1385 let Ok(Some(snapshot)) = snapshot else {
1386 return;
1387 };
1388
1389 let mut diff_bases_changes_by_buffer = Vec::new();
1390 for (
1391 buffer,
1392 path,
1393 current_index_text,
1394 current_head_text,
1395 hunk_staging_operation_count,
1396 ) in &repo_diff_state_updates
1397 {
1398 let Some(local_repo) = snapshot.local_repo_containing_path(&path) else {
1399 continue;
1400 };
1401 let Some(relative_path) = local_repo.relativize(&path).ok() else {
1402 continue;
1403 };
1404
1405 log::debug!("reloading git state for buffer {}", path.display());
1406 let index_text = if current_index_text.is_some() {
1407 local_repo
1408 .repo()
1409 .load_index_text(None, relative_path.clone())
1410 .await
1411 } else {
1412 None
1413 };
1414 let head_text = if current_head_text.is_some() {
1415 local_repo.repo().load_committed_text(relative_path).await
1416 } else {
1417 None
1418 };
1419
1420 // Avoid triggering a diff update if the base text has not changed.
1421 if let Some((current_index, current_head)) =
1422 current_index_text.as_ref().zip(current_head_text.as_ref())
1423 {
1424 if current_index.as_deref() == index_text.as_ref()
1425 && current_head.as_deref() == head_text.as_ref()
1426 {
1427 continue;
1428 }
1429 }
1430
1431 let diff_bases_change =
1432 match (current_index_text.is_some(), current_head_text.is_some()) {
1433 (true, true) => Some(if index_text == head_text {
1434 DiffBasesChange::SetBoth(head_text)
1435 } else {
1436 DiffBasesChange::SetEach {
1437 index: index_text,
1438 head: head_text,
1439 }
1440 }),
1441 (true, false) => Some(DiffBasesChange::SetIndex(index_text)),
1442 (false, true) => Some(DiffBasesChange::SetHead(head_text)),
1443 (false, false) => None,
1444 };
1445
1446 diff_bases_changes_by_buffer.push((
1447 buffer,
1448 diff_bases_change,
1449 *hunk_staging_operation_count,
1450 ))
1451 }
1452
1453 git_store
1454 .update(&mut cx, |git_store, cx| {
1455 for (buffer, diff_bases_change, hunk_staging_operation_count) in
1456 diff_bases_changes_by_buffer
1457 {
1458 let Some(diff_state) =
1459 git_store.diffs.get(&buffer.read(cx).remote_id())
1460 else {
1461 continue;
1462 };
1463 let Some(diff_bases_change) = diff_bases_change else {
1464 continue;
1465 };
1466
1467 let downstream_client = git_store.downstream_client();
1468 diff_state.update(cx, |diff_state, cx| {
1469 use proto::update_diff_bases::Mode;
1470
1471 let buffer = buffer.read(cx);
1472 if let Some((client, project_id)) = downstream_client {
1473 let (staged_text, committed_text, mode) =
1474 match diff_bases_change.clone() {
1475 DiffBasesChange::SetIndex(index) => {
1476 (index, None, Mode::IndexOnly)
1477 }
1478 DiffBasesChange::SetHead(head) => {
1479 (None, head, Mode::HeadOnly)
1480 }
1481 DiffBasesChange::SetEach { index, head } => {
1482 (index, head, Mode::IndexAndHead)
1483 }
1484 DiffBasesChange::SetBoth(text) => {
1485 (None, text, Mode::IndexMatchesHead)
1486 }
1487 };
1488 let message = proto::UpdateDiffBases {
1489 project_id: project_id.to_proto(),
1490 buffer_id: buffer.remote_id().to_proto(),
1491 staged_text,
1492 committed_text,
1493 mode: mode as i32,
1494 };
1495
1496 client.send(message).log_err();
1497 }
1498
1499 let _ = diff_state.diff_bases_changed(
1500 buffer.text_snapshot(),
1501 diff_bases_change,
1502 hunk_staging_operation_count,
1503 cx,
1504 );
1505 });
1506 }
1507 })
1508 .ok();
1509 },
1510 );
1511 }
1512 }
1513
1514 pub fn repositories(&self) -> &HashMap<ProjectEntryId, Entity<Repository>> {
1515 &self.repositories
1516 }
1517
1518 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1519 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1520 let status = repo.read(cx).repository_entry.status_for_path(&path)?;
1521 Some(status.status)
1522 }
1523
1524 pub fn status(&self, index: Option<GitStoreIndex>, cx: &App) -> Task<Result<GitStoreStatus>> {
1525 let repositories_by_work_dir_abs_path = self
1526 .repositories
1527 .values()
1528 .map(|repo| {
1529 (
1530 repo.read(cx)
1531 .repository_entry
1532 .work_directory_abs_path
1533 .clone(),
1534 repo,
1535 )
1536 })
1537 .collect::<HashMap<_, _>>();
1538
1539 let mut tasks = Vec::new();
1540
1541 if let Some(index) = index {
1542 // When we have an index, just check the repositories that are part of it
1543 for (work_dir_abs_path, git_index) in index.indices_by_work_dir_abs_path {
1544 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
1545 {
1546 let status = repository.read(cx).status(Some(git_index));
1547 tasks.push(
1548 async move {
1549 let status = status.await??;
1550 anyhow::Ok((work_dir_abs_path, status))
1551 }
1552 .boxed(),
1553 );
1554 }
1555 }
1556 } else {
1557 // Otherwise, check all repositories
1558 for repository in self.repositories.values() {
1559 let repository = repository.read(cx);
1560 let work_dir_abs_path = repository.repository_entry.work_directory_abs_path.clone();
1561 let status = repository.status(None);
1562 tasks.push(
1563 async move {
1564 let status = status.await??;
1565 anyhow::Ok((work_dir_abs_path, status))
1566 }
1567 .boxed(),
1568 );
1569 }
1570 }
1571
1572 cx.background_executor().spawn(async move {
1573 let statuses = future::try_join_all(tasks).await?;
1574 Ok(GitStoreStatus {
1575 statuses_by_work_dir_abs_path: statuses.into_iter().collect(),
1576 })
1577 })
1578 }
1579
1580 pub fn load_index_text(
1581 &self,
1582 index: Option<GitStoreIndex>,
1583 buffer: &Entity<Buffer>,
1584 cx: &App,
1585 ) -> Task<Option<String>> {
1586 let Some((repository, path)) =
1587 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
1588 else {
1589 return Task::ready(None);
1590 };
1591
1592 let git_index = index.and_then(|index| {
1593 index
1594 .indices_by_work_dir_abs_path
1595 .get(&repository.read(cx).repository_entry.work_directory_abs_path)
1596 .copied()
1597 });
1598 let text = repository.read(cx).load_index_text(git_index, path);
1599 cx.background_spawn(async move {
1600 let text = text.await;
1601 text.ok().flatten()
1602 })
1603 }
1604
1605 pub fn repository_and_path_for_buffer_id(
1606 &self,
1607 buffer_id: BufferId,
1608 cx: &App,
1609 ) -> Option<(Entity<Repository>, RepoPath)> {
1610 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1611 let project_path = buffer.read(cx).project_path(cx)?;
1612 self.repository_and_path_for_project_path(&project_path, cx)
1613 }
1614
1615 pub fn repository_and_path_for_project_path(
1616 &self,
1617 path: &ProjectPath,
1618 cx: &App,
1619 ) -> Option<(Entity<Repository>, RepoPath)> {
1620 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1621 self.repositories
1622 .values()
1623 .filter_map(|repo_handle| {
1624 let repo = repo_handle.read(cx);
1625 let relative_path = repo.repository_entry.relativize_abs_path(&abs_path)?;
1626 Some((repo_handle.clone(), relative_path))
1627 })
1628 .max_by_key(|(repo, _)| {
1629 repo.read(cx)
1630 .repository_entry
1631 .work_directory_abs_path
1632 .clone()
1633 })
1634 }
1635
1636 fn spawn_git_worker(cx: &mut Context<GitStore>) -> mpsc::UnboundedSender<GitJob> {
1637 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
1638
1639 cx.spawn(async move |_, cx| {
1640 let mut jobs = VecDeque::new();
1641 loop {
1642 while let Ok(Some(next_job)) = job_rx.try_next() {
1643 jobs.push_back(next_job);
1644 }
1645
1646 if let Some(job) = jobs.pop_front() {
1647 if let Some(current_key) = &job.key {
1648 if jobs
1649 .iter()
1650 .any(|other_job| other_job.key.as_ref() == Some(current_key))
1651 {
1652 continue;
1653 }
1654 }
1655 (job.job)(cx).await;
1656 } else if let Some(job) = job_rx.next().await {
1657 jobs.push_back(job);
1658 } else {
1659 break;
1660 }
1661 }
1662 })
1663 .detach();
1664 job_tx
1665 }
1666
1667 pub fn git_init(
1668 &self,
1669 path: Arc<Path>,
1670 fallback_branch_name: String,
1671 cx: &App,
1672 ) -> Task<Result<()>> {
1673 match &self.state {
1674 GitStoreState::Local { fs, .. } => {
1675 let fs = fs.clone();
1676 cx.background_executor()
1677 .spawn(async move { fs.git_init(&path, fallback_branch_name) })
1678 }
1679 GitStoreState::Ssh {
1680 upstream_client,
1681 upstream_project_id: project_id,
1682 ..
1683 }
1684 | GitStoreState::Remote {
1685 upstream_client,
1686 project_id,
1687 ..
1688 } => {
1689 let client = upstream_client.clone();
1690 let project_id = *project_id;
1691 cx.background_executor().spawn(async move {
1692 client
1693 .request(proto::GitInit {
1694 project_id: project_id.0,
1695 abs_path: path.to_string_lossy().to_string(),
1696 fallback_branch_name,
1697 })
1698 .await?;
1699 Ok(())
1700 })
1701 }
1702 }
1703 }
1704
1705 async fn handle_update_repository(
1706 this: Entity<Self>,
1707 envelope: TypedEnvelope<proto::UpdateRepository>,
1708 mut cx: AsyncApp,
1709 ) -> Result<()> {
1710 this.update(&mut cx, |this, cx| {
1711 let mut update = envelope.payload;
1712
1713 let work_directory_id = ProjectEntryId::from_proto(update.id);
1714 let client = this
1715 .upstream_client()
1716 .context("no upstream client")?
1717 .clone();
1718
1719 let repo = this
1720 .repositories
1721 .entry(work_directory_id)
1722 .or_insert_with(|| {
1723 let git_store = cx.weak_entity();
1724
1725 cx.new(|_| Repository {
1726 commit_message_buffer: None,
1727 git_store,
1728 project_environment: None,
1729 worktree_id: None,
1730 repository_entry: RepositoryEntry {
1731 work_directory_id,
1732 current_branch: None,
1733 statuses_by_path: Default::default(),
1734 current_merge_conflicts: Default::default(),
1735 work_directory_abs_path: update.abs_path.clone().into(),
1736 worktree_scan_id: update.scan_id as usize,
1737 },
1738 merge_message: None,
1739 completed_scan_id: update.scan_id as usize,
1740 state: RepositoryState::Remote {
1741 project_id: ProjectId(update.project_id),
1742 client,
1743 work_directory_id,
1744 },
1745 job_sender: this.update_sender.clone(),
1746 askpass_delegates: Default::default(),
1747 latest_askpass_id: 0,
1748 })
1749 });
1750
1751 repo.update(cx, |repo, _cx| repo.apply_remote_update(update.clone()))?;
1752 cx.emit(GitEvent::GitStateUpdated);
1753 this.active_repo_id.get_or_insert_with(|| {
1754 cx.emit(GitEvent::ActiveRepositoryChanged);
1755 work_directory_id
1756 });
1757
1758 if let Some((client, project_id)) = this.downstream_client() {
1759 update.project_id = project_id.to_proto();
1760 client.send(update).log_err();
1761 }
1762 Ok(())
1763 })?
1764 }
1765
1766 async fn handle_remove_repository(
1767 this: Entity<Self>,
1768 envelope: TypedEnvelope<proto::RemoveRepository>,
1769 mut cx: AsyncApp,
1770 ) -> Result<()> {
1771 this.update(&mut cx, |this, cx| {
1772 let mut update = envelope.payload;
1773 let id = ProjectEntryId::from_proto(update.id);
1774 this.repositories.remove(&id);
1775 if let Some((client, project_id)) = this.downstream_client() {
1776 update.project_id = project_id.to_proto();
1777 client.send(update).log_err();
1778 }
1779 if this.active_repo_id == Some(id) {
1780 this.active_repo_id = None;
1781 cx.emit(GitEvent::ActiveRepositoryChanged);
1782 }
1783 cx.emit(GitEvent::GitStateUpdated);
1784 })
1785 }
1786
1787 async fn handle_git_init(
1788 this: Entity<Self>,
1789 envelope: TypedEnvelope<proto::GitInit>,
1790 cx: AsyncApp,
1791 ) -> Result<proto::Ack> {
1792 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1793 let name = envelope.payload.fallback_branch_name;
1794 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1795 .await?;
1796
1797 Ok(proto::Ack {})
1798 }
1799
1800 async fn handle_fetch(
1801 this: Entity<Self>,
1802 envelope: TypedEnvelope<proto::Fetch>,
1803 mut cx: AsyncApp,
1804 ) -> Result<proto::RemoteMessageResponse> {
1805 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1806 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1807 let askpass_id = envelope.payload.askpass_id;
1808
1809 let askpass = make_remote_delegate(
1810 this,
1811 envelope.payload.project_id,
1812 work_directory_id,
1813 askpass_id,
1814 &mut cx,
1815 );
1816
1817 let remote_output = repository_handle
1818 .update(&mut cx, |repository_handle, cx| {
1819 repository_handle.fetch(askpass, cx)
1820 })?
1821 .await??;
1822
1823 Ok(proto::RemoteMessageResponse {
1824 stdout: remote_output.stdout,
1825 stderr: remote_output.stderr,
1826 })
1827 }
1828
1829 async fn handle_push(
1830 this: Entity<Self>,
1831 envelope: TypedEnvelope<proto::Push>,
1832 mut cx: AsyncApp,
1833 ) -> Result<proto::RemoteMessageResponse> {
1834 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1835 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1836
1837 let askpass_id = envelope.payload.askpass_id;
1838 let askpass = make_remote_delegate(
1839 this,
1840 envelope.payload.project_id,
1841 work_directory_id,
1842 askpass_id,
1843 &mut cx,
1844 );
1845
1846 let options = envelope
1847 .payload
1848 .options
1849 .as_ref()
1850 .map(|_| match envelope.payload.options() {
1851 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1852 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1853 });
1854
1855 let branch_name = envelope.payload.branch_name.into();
1856 let remote_name = envelope.payload.remote_name.into();
1857
1858 let remote_output = repository_handle
1859 .update(&mut cx, |repository_handle, cx| {
1860 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1861 })?
1862 .await??;
1863 Ok(proto::RemoteMessageResponse {
1864 stdout: remote_output.stdout,
1865 stderr: remote_output.stderr,
1866 })
1867 }
1868
1869 async fn handle_pull(
1870 this: Entity<Self>,
1871 envelope: TypedEnvelope<proto::Pull>,
1872 mut cx: AsyncApp,
1873 ) -> Result<proto::RemoteMessageResponse> {
1874 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1875 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1876 let askpass_id = envelope.payload.askpass_id;
1877 let askpass = make_remote_delegate(
1878 this,
1879 envelope.payload.project_id,
1880 work_directory_id,
1881 askpass_id,
1882 &mut cx,
1883 );
1884
1885 let branch_name = envelope.payload.branch_name.into();
1886 let remote_name = envelope.payload.remote_name.into();
1887
1888 let remote_message = repository_handle
1889 .update(&mut cx, |repository_handle, cx| {
1890 repository_handle.pull(branch_name, remote_name, askpass, cx)
1891 })?
1892 .await??;
1893
1894 Ok(proto::RemoteMessageResponse {
1895 stdout: remote_message.stdout,
1896 stderr: remote_message.stderr,
1897 })
1898 }
1899
1900 async fn handle_stage(
1901 this: Entity<Self>,
1902 envelope: TypedEnvelope<proto::Stage>,
1903 mut cx: AsyncApp,
1904 ) -> Result<proto::Ack> {
1905 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1906 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1907
1908 let entries = envelope
1909 .payload
1910 .paths
1911 .into_iter()
1912 .map(PathBuf::from)
1913 .map(RepoPath::new)
1914 .collect();
1915
1916 repository_handle
1917 .update(&mut cx, |repository_handle, cx| {
1918 repository_handle.stage_entries(entries, cx)
1919 })?
1920 .await?;
1921 Ok(proto::Ack {})
1922 }
1923
1924 async fn handle_unstage(
1925 this: Entity<Self>,
1926 envelope: TypedEnvelope<proto::Unstage>,
1927 mut cx: AsyncApp,
1928 ) -> Result<proto::Ack> {
1929 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1930 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1931
1932 let entries = envelope
1933 .payload
1934 .paths
1935 .into_iter()
1936 .map(PathBuf::from)
1937 .map(RepoPath::new)
1938 .collect();
1939
1940 repository_handle
1941 .update(&mut cx, |repository_handle, cx| {
1942 repository_handle.unstage_entries(entries, cx)
1943 })?
1944 .await?;
1945
1946 Ok(proto::Ack {})
1947 }
1948
1949 async fn handle_set_index_text(
1950 this: Entity<Self>,
1951 envelope: TypedEnvelope<proto::SetIndexText>,
1952 mut cx: AsyncApp,
1953 ) -> Result<proto::Ack> {
1954 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1955 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1956
1957 repository_handle
1958 .update(&mut cx, |repository_handle, cx| {
1959 repository_handle.spawn_set_index_text_job(
1960 RepoPath::from_str(&envelope.payload.path),
1961 envelope.payload.text,
1962 cx,
1963 )
1964 })?
1965 .await??;
1966 Ok(proto::Ack {})
1967 }
1968
1969 async fn handle_commit(
1970 this: Entity<Self>,
1971 envelope: TypedEnvelope<proto::Commit>,
1972 mut cx: AsyncApp,
1973 ) -> Result<proto::Ack> {
1974 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1975 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1976
1977 let message = SharedString::from(envelope.payload.message);
1978 let name = envelope.payload.name.map(SharedString::from);
1979 let email = envelope.payload.email.map(SharedString::from);
1980
1981 repository_handle
1982 .update(&mut cx, |repository_handle, cx| {
1983 repository_handle.commit(message, name.zip(email), cx)
1984 })?
1985 .await??;
1986 Ok(proto::Ack {})
1987 }
1988
1989 async fn handle_get_remotes(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::GetRemotes>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::GetRemotesResponse> {
1994 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
1995 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
1996
1997 let branch_name = envelope.payload.branch_name;
1998
1999 let remotes = repository_handle
2000 .update(&mut cx, |repository_handle, _| {
2001 repository_handle.get_remotes(branch_name)
2002 })?
2003 .await??;
2004
2005 Ok(proto::GetRemotesResponse {
2006 remotes: remotes
2007 .into_iter()
2008 .map(|remotes| proto::get_remotes_response::Remote {
2009 name: remotes.name.to_string(),
2010 })
2011 .collect::<Vec<_>>(),
2012 })
2013 }
2014
2015 async fn handle_get_branches(
2016 this: Entity<Self>,
2017 envelope: TypedEnvelope<proto::GitGetBranches>,
2018 mut cx: AsyncApp,
2019 ) -> Result<proto::GitBranchesResponse> {
2020 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2021 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2022
2023 let branches = repository_handle
2024 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2025 .await??;
2026
2027 Ok(proto::GitBranchesResponse {
2028 branches: branches
2029 .into_iter()
2030 .map(|branch| worktree::branch_to_proto(&branch))
2031 .collect::<Vec<_>>(),
2032 })
2033 }
2034 async fn handle_create_branch(
2035 this: Entity<Self>,
2036 envelope: TypedEnvelope<proto::GitCreateBranch>,
2037 mut cx: AsyncApp,
2038 ) -> Result<proto::Ack> {
2039 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2040 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2041 let branch_name = envelope.payload.branch_name;
2042
2043 repository_handle
2044 .update(&mut cx, |repository_handle, _| {
2045 repository_handle.create_branch(branch_name)
2046 })?
2047 .await??;
2048
2049 Ok(proto::Ack {})
2050 }
2051
2052 async fn handle_change_branch(
2053 this: Entity<Self>,
2054 envelope: TypedEnvelope<proto::GitChangeBranch>,
2055 mut cx: AsyncApp,
2056 ) -> Result<proto::Ack> {
2057 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2058 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2059 let branch_name = envelope.payload.branch_name;
2060
2061 repository_handle
2062 .update(&mut cx, |repository_handle, _| {
2063 repository_handle.change_branch(branch_name)
2064 })?
2065 .await??;
2066
2067 Ok(proto::Ack {})
2068 }
2069
2070 async fn handle_show(
2071 this: Entity<Self>,
2072 envelope: TypedEnvelope<proto::GitShow>,
2073 mut cx: AsyncApp,
2074 ) -> Result<proto::GitCommitDetails> {
2075 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2076 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2077
2078 let commit = repository_handle
2079 .update(&mut cx, |repository_handle, _| {
2080 repository_handle.show(envelope.payload.commit)
2081 })?
2082 .await??;
2083 Ok(proto::GitCommitDetails {
2084 sha: commit.sha.into(),
2085 message: commit.message.into(),
2086 commit_timestamp: commit.commit_timestamp,
2087 committer_email: commit.committer_email.into(),
2088 committer_name: commit.committer_name.into(),
2089 })
2090 }
2091
2092 async fn handle_reset(
2093 this: Entity<Self>,
2094 envelope: TypedEnvelope<proto::GitReset>,
2095 mut cx: AsyncApp,
2096 ) -> Result<proto::Ack> {
2097 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2098 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2099
2100 let mode = match envelope.payload.mode() {
2101 git_reset::ResetMode::Soft => ResetMode::Soft,
2102 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2103 };
2104
2105 repository_handle
2106 .update(&mut cx, |repository_handle, cx| {
2107 repository_handle.reset(envelope.payload.commit, mode, cx)
2108 })?
2109 .await??;
2110 Ok(proto::Ack {})
2111 }
2112
2113 async fn handle_checkout_files(
2114 this: Entity<Self>,
2115 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2116 mut cx: AsyncApp,
2117 ) -> Result<proto::Ack> {
2118 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2119 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2120 let paths = envelope
2121 .payload
2122 .paths
2123 .iter()
2124 .map(|s| RepoPath::from_str(s))
2125 .collect();
2126
2127 repository_handle
2128 .update(&mut cx, |repository_handle, cx| {
2129 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2130 })?
2131 .await??;
2132 Ok(proto::Ack {})
2133 }
2134
2135 async fn handle_open_commit_message_buffer(
2136 this: Entity<Self>,
2137 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2138 mut cx: AsyncApp,
2139 ) -> Result<proto::OpenBufferResponse> {
2140 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2141 let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2142 let buffer = repository
2143 .update(&mut cx, |repository, cx| {
2144 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2145 })?
2146 .await?;
2147
2148 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2149 this.update(&mut cx, |this, cx| {
2150 this.buffer_store.update(cx, |buffer_store, cx| {
2151 buffer_store
2152 .create_buffer_for_peer(
2153 &buffer,
2154 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2155 cx,
2156 )
2157 .detach_and_log_err(cx);
2158 })
2159 })?;
2160
2161 Ok(proto::OpenBufferResponse {
2162 buffer_id: buffer_id.to_proto(),
2163 })
2164 }
2165
2166 async fn handle_askpass(
2167 this: Entity<Self>,
2168 envelope: TypedEnvelope<proto::AskPassRequest>,
2169 mut cx: AsyncApp,
2170 ) -> Result<proto::AskPassResponse> {
2171 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2172 let repository = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2173
2174 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2175 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2176 debug_panic!("no askpass found");
2177 return Err(anyhow::anyhow!("no askpass found"));
2178 };
2179
2180 let response = askpass.ask_password(envelope.payload.prompt).await?;
2181
2182 delegates
2183 .lock()
2184 .insert(envelope.payload.askpass_id, askpass);
2185
2186 Ok(proto::AskPassResponse { response })
2187 }
2188
2189 async fn handle_check_for_pushed_commits(
2190 this: Entity<Self>,
2191 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2192 mut cx: AsyncApp,
2193 ) -> Result<proto::CheckForPushedCommitsResponse> {
2194 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2195 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2196
2197 let branches = repository_handle
2198 .update(&mut cx, |repository_handle, _| {
2199 repository_handle.check_for_pushed_commits()
2200 })?
2201 .await??;
2202 Ok(proto::CheckForPushedCommitsResponse {
2203 pushed_to: branches
2204 .into_iter()
2205 .map(|commit| commit.to_string())
2206 .collect(),
2207 })
2208 }
2209
2210 async fn handle_git_diff(
2211 this: Entity<Self>,
2212 envelope: TypedEnvelope<proto::GitDiff>,
2213 mut cx: AsyncApp,
2214 ) -> Result<proto::GitDiffResponse> {
2215 let work_directory_id = ProjectEntryId::from_proto(envelope.payload.work_directory_id);
2216 let repository_handle = Self::repository_for_request(&this, work_directory_id, &mut cx)?;
2217 let diff_type = match envelope.payload.diff_type() {
2218 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2219 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2220 };
2221
2222 let mut diff = repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.diff(diff_type, cx)
2225 })?
2226 .await??;
2227 const ONE_MB: usize = 1_000_000;
2228 if diff.len() > ONE_MB {
2229 diff = diff.chars().take(ONE_MB).collect()
2230 }
2231
2232 Ok(proto::GitDiffResponse { diff })
2233 }
2234
2235 async fn handle_open_unstaged_diff(
2236 this: Entity<Self>,
2237 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2238 mut cx: AsyncApp,
2239 ) -> Result<proto::OpenUnstagedDiffResponse> {
2240 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2241 let diff = this
2242 .update(&mut cx, |this, cx| {
2243 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2244 Some(this.open_unstaged_diff(buffer, cx))
2245 })?
2246 .ok_or_else(|| anyhow!("no such buffer"))?
2247 .await?;
2248 this.update(&mut cx, |this, _| {
2249 let shared_diffs = this
2250 .shared_diffs
2251 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2252 .or_default();
2253 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2254 })?;
2255 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2256 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2257 }
2258
2259 async fn handle_open_uncommitted_diff(
2260 this: Entity<Self>,
2261 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2262 mut cx: AsyncApp,
2263 ) -> Result<proto::OpenUncommittedDiffResponse> {
2264 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2265 let diff = this
2266 .update(&mut cx, |this, cx| {
2267 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2268 Some(this.open_uncommitted_diff(buffer, cx))
2269 })?
2270 .ok_or_else(|| anyhow!("no such buffer"))?
2271 .await?;
2272 this.update(&mut cx, |this, _| {
2273 let shared_diffs = this
2274 .shared_diffs
2275 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2276 .or_default();
2277 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2278 })?;
2279 diff.read_with(&cx, |diff, cx| {
2280 use proto::open_uncommitted_diff_response::Mode;
2281
2282 let unstaged_diff = diff.secondary_diff();
2283 let index_snapshot = unstaged_diff.and_then(|diff| {
2284 let diff = diff.read(cx);
2285 diff.base_text_exists().then(|| diff.base_text())
2286 });
2287
2288 let mode;
2289 let staged_text;
2290 let committed_text;
2291 if diff.base_text_exists() {
2292 let committed_snapshot = diff.base_text();
2293 committed_text = Some(committed_snapshot.text());
2294 if let Some(index_text) = index_snapshot {
2295 if index_text.remote_id() == committed_snapshot.remote_id() {
2296 mode = Mode::IndexMatchesHead;
2297 staged_text = None;
2298 } else {
2299 mode = Mode::IndexAndHead;
2300 staged_text = Some(index_text.text());
2301 }
2302 } else {
2303 mode = Mode::IndexAndHead;
2304 staged_text = None;
2305 }
2306 } else {
2307 mode = Mode::IndexAndHead;
2308 committed_text = None;
2309 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2310 }
2311
2312 proto::OpenUncommittedDiffResponse {
2313 committed_text,
2314 staged_text,
2315 mode: mode.into(),
2316 }
2317 })
2318 }
2319
2320 async fn handle_update_diff_bases(
2321 this: Entity<Self>,
2322 request: TypedEnvelope<proto::UpdateDiffBases>,
2323 mut cx: AsyncApp,
2324 ) -> Result<()> {
2325 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2326 this.update(&mut cx, |this, cx| {
2327 if let Some(diff_state) = this.diffs.get_mut(&buffer_id) {
2328 if let Some(buffer) = this.buffer_store.read(cx).get(buffer_id) {
2329 let buffer = buffer.read(cx).text_snapshot();
2330 diff_state.update(cx, |diff_state, cx| {
2331 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2332 })
2333 }
2334 }
2335 })
2336 }
2337
2338 async fn handle_blame_buffer(
2339 this: Entity<Self>,
2340 envelope: TypedEnvelope<proto::BlameBuffer>,
2341 mut cx: AsyncApp,
2342 ) -> Result<proto::BlameBufferResponse> {
2343 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2344 let version = deserialize_version(&envelope.payload.version);
2345 let buffer = this.read_with(&cx, |this, cx| {
2346 this.buffer_store.read(cx).get_existing(buffer_id)
2347 })??;
2348 buffer
2349 .update(&mut cx, |buffer, _| {
2350 buffer.wait_for_version(version.clone())
2351 })?
2352 .await?;
2353 let blame = this
2354 .update(&mut cx, |this, cx| {
2355 this.blame_buffer(&buffer, Some(version), cx)
2356 })?
2357 .await?;
2358 Ok(serialize_blame_buffer_response(blame))
2359 }
2360
2361 async fn handle_get_permalink_to_line(
2362 this: Entity<Self>,
2363 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2364 mut cx: AsyncApp,
2365 ) -> Result<proto::GetPermalinkToLineResponse> {
2366 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2367 // let version = deserialize_version(&envelope.payload.version);
2368 let selection = {
2369 let proto_selection = envelope
2370 .payload
2371 .selection
2372 .context("no selection to get permalink for defined")?;
2373 proto_selection.start as u32..proto_selection.end as u32
2374 };
2375 let buffer = this.read_with(&cx, |this, cx| {
2376 this.buffer_store.read(cx).get_existing(buffer_id)
2377 })??;
2378 let permalink = this
2379 .update(&mut cx, |this, cx| {
2380 this.get_permalink_to_line(&buffer, selection, cx)
2381 })?
2382 .await?;
2383 Ok(proto::GetPermalinkToLineResponse {
2384 permalink: permalink.to_string(),
2385 })
2386 }
2387
2388 fn repository_for_request(
2389 this: &Entity<Self>,
2390 work_directory_id: ProjectEntryId,
2391 cx: &mut AsyncApp,
2392 ) -> Result<Entity<Repository>> {
2393 this.update(cx, |this, cx| {
2394 this.repositories
2395 .values()
2396 .find(|repository_handle| {
2397 repository_handle
2398 .read(cx)
2399 .repository_entry
2400 .work_directory_id()
2401 == work_directory_id
2402 })
2403 .context("missing repository handle")
2404 .cloned()
2405 })?
2406 }
2407
2408 pub fn repo_snapshots(&self, cx: &App) -> HashMap<ProjectEntryId, RepositoryEntry> {
2409 self.repositories
2410 .iter()
2411 .map(|(id, repo)| (*id, repo.read(cx).repository_entry.clone()))
2412 .collect()
2413 }
2414}
2415
2416impl BufferDiffState {
2417 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2418 self.language = buffer.read(cx).language().cloned();
2419 self.language_changed = true;
2420 let _ = self.recalculate_diffs(
2421 buffer.read(cx).text_snapshot(),
2422 self.hunk_staging_operation_count,
2423 cx,
2424 );
2425 }
2426
2427 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2428 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2429 }
2430
2431 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2432 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2433 }
2434
2435 fn handle_base_texts_updated(
2436 &mut self,
2437 buffer: text::BufferSnapshot,
2438 message: proto::UpdateDiffBases,
2439 cx: &mut Context<Self>,
2440 ) {
2441 use proto::update_diff_bases::Mode;
2442
2443 let Some(mode) = Mode::from_i32(message.mode) else {
2444 return;
2445 };
2446
2447 let diff_bases_change = match mode {
2448 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2449 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2450 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2451 Mode::IndexAndHead => DiffBasesChange::SetEach {
2452 index: message.staged_text,
2453 head: message.committed_text,
2454 },
2455 };
2456
2457 let _ = self.diff_bases_changed(
2458 buffer,
2459 diff_bases_change,
2460 self.hunk_staging_operation_count,
2461 cx,
2462 );
2463 }
2464
2465 pub fn wait_for_recalculation(&mut self) -> Option<oneshot::Receiver<()>> {
2466 if self.diff_updated_futures.is_empty() {
2467 return None;
2468 }
2469 let (tx, rx) = oneshot::channel();
2470 self.diff_updated_futures.push(tx);
2471 Some(rx)
2472 }
2473
2474 fn diff_bases_changed(
2475 &mut self,
2476 buffer: text::BufferSnapshot,
2477 diff_bases_change: DiffBasesChange,
2478 prev_hunk_staging_operation_count: usize,
2479 cx: &mut Context<Self>,
2480 ) -> oneshot::Receiver<()> {
2481 match diff_bases_change {
2482 DiffBasesChange::SetIndex(index) => {
2483 self.index_text = index.map(|mut index| {
2484 text::LineEnding::normalize(&mut index);
2485 Arc::new(index)
2486 });
2487 self.index_changed = true;
2488 }
2489 DiffBasesChange::SetHead(head) => {
2490 self.head_text = head.map(|mut head| {
2491 text::LineEnding::normalize(&mut head);
2492 Arc::new(head)
2493 });
2494 self.head_changed = true;
2495 }
2496 DiffBasesChange::SetBoth(text) => {
2497 let text = text.map(|mut text| {
2498 text::LineEnding::normalize(&mut text);
2499 Arc::new(text)
2500 });
2501 self.head_text = text.clone();
2502 self.index_text = text;
2503 self.head_changed = true;
2504 self.index_changed = true;
2505 }
2506 DiffBasesChange::SetEach { index, head } => {
2507 self.index_text = index.map(|mut index| {
2508 text::LineEnding::normalize(&mut index);
2509 Arc::new(index)
2510 });
2511 self.index_changed = true;
2512 self.head_text = head.map(|mut head| {
2513 text::LineEnding::normalize(&mut head);
2514 Arc::new(head)
2515 });
2516 self.head_changed = true;
2517 }
2518 }
2519
2520 self.recalculate_diffs(buffer, prev_hunk_staging_operation_count, cx)
2521 }
2522
2523 fn recalculate_diffs(
2524 &mut self,
2525 buffer: text::BufferSnapshot,
2526 prev_hunk_staging_operation_count: usize,
2527 cx: &mut Context<Self>,
2528 ) -> oneshot::Receiver<()> {
2529 log::debug!("recalculate diffs");
2530 let (tx, rx) = oneshot::channel();
2531 self.diff_updated_futures.push(tx);
2532
2533 let language = self.language.clone();
2534 let language_registry = self.language_registry.clone();
2535 let unstaged_diff = self.unstaged_diff();
2536 let uncommitted_diff = self.uncommitted_diff();
2537 let head = self.head_text.clone();
2538 let index = self.index_text.clone();
2539 let index_changed = self.index_changed;
2540 let head_changed = self.head_changed;
2541 let language_changed = self.language_changed;
2542 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2543 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2544 (None, None) => true,
2545 _ => false,
2546 };
2547 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2548 let mut new_unstaged_diff = None;
2549 if let Some(unstaged_diff) = &unstaged_diff {
2550 new_unstaged_diff = Some(
2551 BufferDiff::update_diff(
2552 unstaged_diff.clone(),
2553 buffer.clone(),
2554 index,
2555 index_changed,
2556 language_changed,
2557 language.clone(),
2558 language_registry.clone(),
2559 cx,
2560 )
2561 .await?,
2562 );
2563 }
2564
2565 let mut new_uncommitted_diff = None;
2566 if let Some(uncommitted_diff) = &uncommitted_diff {
2567 new_uncommitted_diff = if index_matches_head {
2568 new_unstaged_diff.clone()
2569 } else {
2570 Some(
2571 BufferDiff::update_diff(
2572 uncommitted_diff.clone(),
2573 buffer.clone(),
2574 head,
2575 head_changed,
2576 language_changed,
2577 language.clone(),
2578 language_registry.clone(),
2579 cx,
2580 )
2581 .await?,
2582 )
2583 }
2584 }
2585
2586 if this.update(cx, |this, _| {
2587 this.hunk_staging_operation_count > prev_hunk_staging_operation_count
2588 })? {
2589 return Ok(());
2590 }
2591
2592 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2593 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2594 {
2595 unstaged_diff.update(cx, |diff, cx| {
2596 diff.set_snapshot(&buffer, new_unstaged_diff, language_changed, None, cx)
2597 })?
2598 } else {
2599 None
2600 };
2601
2602 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2603 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2604 {
2605 uncommitted_diff.update(cx, |uncommitted_diff, cx| {
2606 uncommitted_diff.set_snapshot(
2607 &buffer,
2608 new_uncommitted_diff,
2609 language_changed,
2610 unstaged_changed_range,
2611 cx,
2612 );
2613 })?;
2614 }
2615
2616 if let Some(this) = this.upgrade() {
2617 this.update(cx, |this, _| {
2618 this.index_changed = false;
2619 this.head_changed = false;
2620 this.language_changed = false;
2621 for tx in this.diff_updated_futures.drain(..) {
2622 tx.send(()).ok();
2623 }
2624 })?;
2625 }
2626
2627 Ok(())
2628 }));
2629
2630 rx
2631 }
2632}
2633
2634fn make_remote_delegate(
2635 this: Entity<GitStore>,
2636 project_id: u64,
2637 work_directory_id: ProjectEntryId,
2638 askpass_id: u64,
2639 cx: &mut AsyncApp,
2640) -> AskPassDelegate {
2641 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2642 this.update(cx, |this, cx| {
2643 let Some((client, _)) = this.downstream_client() else {
2644 return;
2645 };
2646 let response = client.request(proto::AskPassRequest {
2647 project_id,
2648 work_directory_id: work_directory_id.to_proto(),
2649 askpass_id,
2650 prompt,
2651 });
2652 cx.spawn(async move |_, _| {
2653 tx.send(response.await?.response).ok();
2654 anyhow::Ok(())
2655 })
2656 .detach_and_log_err(cx);
2657 })
2658 .log_err();
2659 })
2660}
2661
2662impl GitStoreState {
2663 fn load_staged_text(
2664 &self,
2665 buffer: &Entity<Buffer>,
2666 buffer_store: &Entity<BufferStore>,
2667 cx: &App,
2668 ) -> Task<Result<Option<String>>> {
2669 match self {
2670 GitStoreState::Local { .. } => {
2671 if let Some((worktree, path)) =
2672 buffer_store.read(cx).worktree_for_buffer(buffer, cx)
2673 {
2674 worktree.read(cx).load_staged_file(path.as_ref(), cx)
2675 } else {
2676 return Task::ready(Err(anyhow!("no such worktree")));
2677 }
2678 }
2679 GitStoreState::Ssh {
2680 upstream_client,
2681 upstream_project_id: project_id,
2682 ..
2683 }
2684 | GitStoreState::Remote {
2685 upstream_client,
2686 project_id,
2687 } => {
2688 let buffer_id = buffer.read(cx).remote_id();
2689 let project_id = *project_id;
2690 let client = upstream_client.clone();
2691 cx.background_spawn(async move {
2692 let response = client
2693 .request(proto::OpenUnstagedDiff {
2694 project_id: project_id.to_proto(),
2695 buffer_id: buffer_id.to_proto(),
2696 })
2697 .await?;
2698 Ok(response.staged_text)
2699 })
2700 }
2701 }
2702 }
2703
2704 fn load_committed_text(
2705 &self,
2706 buffer: &Entity<Buffer>,
2707 buffer_store: &Entity<BufferStore>,
2708 cx: &App,
2709 ) -> Task<Result<DiffBasesChange>> {
2710 match self {
2711 GitStoreState::Local { .. } => {
2712 if let Some((worktree, path)) =
2713 buffer_store.read(cx).worktree_for_buffer(buffer, cx)
2714 {
2715 let worktree = worktree.read(cx);
2716 let committed_text = worktree.load_committed_file(&path, cx);
2717 let staged_text = worktree.load_staged_file(&path, cx);
2718 cx.background_spawn(async move {
2719 let committed_text = committed_text.await?;
2720 let staged_text = staged_text.await?;
2721 let diff_bases_change = if committed_text == staged_text {
2722 DiffBasesChange::SetBoth(committed_text)
2723 } else {
2724 DiffBasesChange::SetEach {
2725 index: staged_text,
2726 head: committed_text,
2727 }
2728 };
2729 Ok(diff_bases_change)
2730 })
2731 } else {
2732 Task::ready(Err(anyhow!("no such worktree")))
2733 }
2734 }
2735 GitStoreState::Ssh {
2736 upstream_client,
2737 upstream_project_id: project_id,
2738 ..
2739 }
2740 | GitStoreState::Remote {
2741 upstream_client,
2742 project_id,
2743 } => {
2744 use proto::open_uncommitted_diff_response::Mode;
2745
2746 let buffer_id = buffer.read(cx).remote_id();
2747 let project_id = *project_id;
2748 let client = upstream_client.clone();
2749 cx.background_spawn(async move {
2750 let response = client
2751 .request(proto::OpenUncommittedDiff {
2752 project_id: project_id.to_proto(),
2753 buffer_id: buffer_id.to_proto(),
2754 })
2755 .await?;
2756 let mode =
2757 Mode::from_i32(response.mode).ok_or_else(|| anyhow!("Invalid mode"))?;
2758 let bases = match mode {
2759 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
2760 Mode::IndexAndHead => DiffBasesChange::SetEach {
2761 head: response.committed_text,
2762 index: response.staged_text,
2763 },
2764 };
2765 Ok(bases)
2766 })
2767 }
2768 }
2769 }
2770}
2771
2772impl Repository {
2773 pub fn git_store(&self) -> Option<Entity<GitStore>> {
2774 self.git_store.upgrade()
2775 }
2776
2777 fn id(&self) -> ProjectEntryId {
2778 self.repository_entry.work_directory_id()
2779 }
2780
2781 pub fn current_branch(&self) -> Option<&Branch> {
2782 self.repository_entry.branch()
2783 }
2784
2785 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
2786 self.repository_entry.status_for_path(path)
2787 }
2788
2789 fn send_job<F, Fut, R>(&self, job: F) -> oneshot::Receiver<R>
2790 where
2791 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2792 Fut: Future<Output = R> + 'static,
2793 R: Send + 'static,
2794 {
2795 self.send_keyed_job(None, job)
2796 }
2797
2798 fn send_keyed_job<F, Fut, R>(&self, key: Option<GitJobKey>, job: F) -> oneshot::Receiver<R>
2799 where
2800 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
2801 Fut: Future<Output = R> + 'static,
2802 R: Send + 'static,
2803 {
2804 let (result_tx, result_rx) = futures::channel::oneshot::channel();
2805 let git_repo = self.state.clone();
2806 self.job_sender
2807 .unbounded_send(GitJob {
2808 key,
2809 job: Box::new(|cx: &mut AsyncApp| {
2810 let job = job(git_repo, cx.clone());
2811 cx.spawn(async move |_| {
2812 let result = job.await;
2813 result_tx.send(result).ok();
2814 })
2815 }),
2816 })
2817 .ok();
2818 result_rx
2819 }
2820
2821 /// This is the name that will be displayed in the repository selector for this repository.
2822 pub fn display_name(&self) -> SharedString {
2823 self.repository_entry
2824 .work_directory_abs_path
2825 .file_name()
2826 .unwrap_or_default()
2827 .to_string_lossy()
2828 .to_string()
2829 .into()
2830 }
2831
2832 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
2833 let Some(git_store) = self.git_store.upgrade() else {
2834 return;
2835 };
2836 let entity = cx.entity();
2837 git_store.update(cx, |git_store, cx| {
2838 let Some((&id, _)) = git_store
2839 .repositories
2840 .iter()
2841 .find(|(_, handle)| *handle == &entity)
2842 else {
2843 return;
2844 };
2845 git_store.active_repo_id = Some(id);
2846 cx.emit(GitEvent::ActiveRepositoryChanged);
2847 });
2848 }
2849
2850 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
2851 self.repository_entry.status()
2852 }
2853
2854 pub fn status(&self, index: Option<GitIndex>) -> oneshot::Receiver<Result<GitStatus>> {
2855 self.send_job(move |repo, _cx| async move {
2856 match repo {
2857 RepositoryState::Local(git_repository) => git_repository.status(index, &[]).await,
2858 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
2859 }
2860 })
2861 }
2862
2863 pub fn load_index_text(
2864 &self,
2865 index: Option<GitIndex>,
2866 path: RepoPath,
2867 ) -> oneshot::Receiver<Option<String>> {
2868 self.send_job(move |repo, _cx| async move {
2869 match repo {
2870 RepositoryState::Local(git_repository) => {
2871 git_repository.load_index_text(index, path).await
2872 }
2873 RepositoryState::Remote { .. } => None,
2874 }
2875 })
2876 }
2877
2878 pub fn has_conflict(&self, path: &RepoPath) -> bool {
2879 self.repository_entry
2880 .current_merge_conflicts
2881 .contains(&path)
2882 }
2883
2884 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
2885 let git_store = self.git_store.upgrade()?;
2886 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2887 let abs_path = self.repository_entry.work_directory_abs_path.join(&path.0);
2888 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
2889 Some(ProjectPath {
2890 worktree_id: worktree.read(cx).id(),
2891 path: relative_path.into(),
2892 })
2893 }
2894
2895 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
2896 let git_store = self.git_store.upgrade()?;
2897 let worktree_store = git_store.read(cx).worktree_store.read(cx);
2898 let abs_path = worktree_store.absolutize(path, cx)?;
2899 self.repository_entry.relativize_abs_path(&abs_path)
2900 }
2901
2902 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
2903 other
2904 .read(cx)
2905 .repository_entry
2906 .work_directory_abs_path
2907 .starts_with(&self.repository_entry.work_directory_abs_path)
2908 }
2909
2910 pub fn local_repository(&self) -> Option<Arc<dyn GitRepository>> {
2911 match &self.state {
2912 RepositoryState::Local(git_repository) => Some(git_repository.clone()),
2913 RepositoryState::Remote { .. } => None,
2914 }
2915 }
2916
2917 pub fn open_commit_buffer(
2918 &mut self,
2919 languages: Option<Arc<LanguageRegistry>>,
2920 buffer_store: Entity<BufferStore>,
2921 cx: &mut Context<Self>,
2922 ) -> Task<Result<Entity<Buffer>>> {
2923 if let Some(buffer) = self.commit_message_buffer.clone() {
2924 return Task::ready(Ok(buffer));
2925 }
2926
2927 if let RepositoryState::Remote {
2928 project_id,
2929 client,
2930 work_directory_id,
2931 } = self.state.clone()
2932 {
2933 let client = client.clone();
2934 cx.spawn(async move |repository, cx| {
2935 let request = client.request(proto::OpenCommitMessageBuffer {
2936 project_id: project_id.0,
2937 work_directory_id: work_directory_id.to_proto(),
2938 });
2939 let response = request.await.context("requesting to open commit buffer")?;
2940 let buffer_id = BufferId::new(response.buffer_id)?;
2941 let buffer = buffer_store
2942 .update(cx, |buffer_store, cx| {
2943 buffer_store.wait_for_remote_buffer(buffer_id, cx)
2944 })?
2945 .await?;
2946 if let Some(language_registry) = languages {
2947 let git_commit_language =
2948 language_registry.language_for_name("Git Commit").await?;
2949 buffer.update(cx, |buffer, cx| {
2950 buffer.set_language(Some(git_commit_language), cx);
2951 })?;
2952 }
2953 repository.update(cx, |repository, _| {
2954 repository.commit_message_buffer = Some(buffer.clone());
2955 })?;
2956 Ok(buffer)
2957 })
2958 } else {
2959 self.open_local_commit_buffer(languages, buffer_store, cx)
2960 }
2961 }
2962
2963 fn open_local_commit_buffer(
2964 &mut self,
2965 language_registry: Option<Arc<LanguageRegistry>>,
2966 buffer_store: Entity<BufferStore>,
2967 cx: &mut Context<Self>,
2968 ) -> Task<Result<Entity<Buffer>>> {
2969 cx.spawn(async move |repository, cx| {
2970 let buffer = buffer_store
2971 .update(cx, |buffer_store, cx| buffer_store.create_buffer(cx))?
2972 .await?;
2973
2974 if let Some(language_registry) = language_registry {
2975 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
2976 buffer.update(cx, |buffer, cx| {
2977 buffer.set_language(Some(git_commit_language), cx);
2978 })?;
2979 }
2980
2981 repository.update(cx, |repository, _| {
2982 repository.commit_message_buffer = Some(buffer.clone());
2983 })?;
2984 Ok(buffer)
2985 })
2986 }
2987
2988 pub fn checkout_files(
2989 &self,
2990 commit: &str,
2991 paths: Vec<RepoPath>,
2992 cx: &mut App,
2993 ) -> oneshot::Receiver<Result<()>> {
2994 let commit = commit.to_string();
2995 let env = self.worktree_environment(cx);
2996
2997 self.send_job(|git_repo, _| async move {
2998 match git_repo {
2999 RepositoryState::Local(repo) => repo.checkout_files(commit, paths, env.await).await,
3000 RepositoryState::Remote {
3001 project_id,
3002 client,
3003 work_directory_id,
3004 } => {
3005 client
3006 .request(proto::GitCheckoutFiles {
3007 project_id: project_id.0,
3008 work_directory_id: work_directory_id.to_proto(),
3009 commit,
3010 paths: paths
3011 .into_iter()
3012 .map(|p| p.to_string_lossy().to_string())
3013 .collect(),
3014 })
3015 .await?;
3016
3017 Ok(())
3018 }
3019 }
3020 })
3021 }
3022
3023 pub fn reset(
3024 &self,
3025 commit: String,
3026 reset_mode: ResetMode,
3027 cx: &mut App,
3028 ) -> oneshot::Receiver<Result<()>> {
3029 let commit = commit.to_string();
3030 let env = self.worktree_environment(cx);
3031 self.send_job(|git_repo, _| async move {
3032 match git_repo {
3033 RepositoryState::Local(git_repo) => {
3034 let env = env.await;
3035 git_repo.reset(commit, reset_mode, env).await
3036 }
3037 RepositoryState::Remote {
3038 project_id,
3039 client,
3040 work_directory_id,
3041 } => {
3042 client
3043 .request(proto::GitReset {
3044 project_id: project_id.0,
3045 work_directory_id: work_directory_id.to_proto(),
3046 commit,
3047 mode: match reset_mode {
3048 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3049 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3050 },
3051 })
3052 .await?;
3053
3054 Ok(())
3055 }
3056 }
3057 })
3058 }
3059
3060 pub fn show(&self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3061 self.send_job(|git_repo, _cx| async move {
3062 match git_repo {
3063 RepositoryState::Local(git_repository) => git_repository.show(commit).await,
3064 RepositoryState::Remote {
3065 project_id,
3066 client,
3067 work_directory_id,
3068 } => {
3069 let resp = client
3070 .request(proto::GitShow {
3071 project_id: project_id.0,
3072 work_directory_id: work_directory_id.to_proto(),
3073 commit,
3074 })
3075 .await?;
3076
3077 Ok(CommitDetails {
3078 sha: resp.sha.into(),
3079 message: resp.message.into(),
3080 commit_timestamp: resp.commit_timestamp,
3081 committer_email: resp.committer_email.into(),
3082 committer_name: resp.committer_name.into(),
3083 })
3084 }
3085 }
3086 })
3087 }
3088
3089 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3090 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3091 }
3092
3093 pub fn stage_entries(
3094 &self,
3095 entries: Vec<RepoPath>,
3096 cx: &mut Context<Self>,
3097 ) -> Task<anyhow::Result<()>> {
3098 if entries.is_empty() {
3099 return Task::ready(Ok(()));
3100 }
3101 let env = self.worktree_environment(cx);
3102
3103 let mut save_futures = Vec::new();
3104 if let Some(buffer_store) = self.buffer_store(cx) {
3105 buffer_store.update(cx, |buffer_store, cx| {
3106 for path in &entries {
3107 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3108 continue;
3109 };
3110 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3111 if buffer
3112 .read(cx)
3113 .file()
3114 .map_or(false, |file| file.disk_state().exists())
3115 {
3116 save_futures.push(buffer_store.save_buffer(buffer, cx));
3117 }
3118 }
3119 }
3120 })
3121 }
3122
3123 cx.spawn(async move |this, cx| {
3124 for save_future in save_futures {
3125 save_future.await?;
3126 }
3127 let env = env.await;
3128
3129 this.update(cx, |this, _| {
3130 this.send_job(|git_repo, _cx| async move {
3131 match git_repo {
3132 RepositoryState::Local(repo) => repo.stage_paths(entries, env).await,
3133 RepositoryState::Remote {
3134 project_id,
3135 client,
3136 work_directory_id,
3137 } => {
3138 client
3139 .request(proto::Stage {
3140 project_id: project_id.0,
3141 work_directory_id: work_directory_id.to_proto(),
3142 paths: entries
3143 .into_iter()
3144 .map(|repo_path| repo_path.as_ref().to_proto())
3145 .collect(),
3146 })
3147 .await
3148 .context("sending stage request")?;
3149
3150 Ok(())
3151 }
3152 }
3153 })
3154 })?
3155 .await??;
3156
3157 Ok(())
3158 })
3159 }
3160
3161 pub fn unstage_entries(
3162 &self,
3163 entries: Vec<RepoPath>,
3164 cx: &mut Context<Self>,
3165 ) -> Task<anyhow::Result<()>> {
3166 if entries.is_empty() {
3167 return Task::ready(Ok(()));
3168 }
3169 let env = self.worktree_environment(cx);
3170
3171 let mut save_futures = Vec::new();
3172 if let Some(buffer_store) = self.buffer_store(cx) {
3173 buffer_store.update(cx, |buffer_store, cx| {
3174 for path in &entries {
3175 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3176 continue;
3177 };
3178 if let Some(buffer) = buffer_store.get_by_path(&project_path, cx) {
3179 if buffer
3180 .read(cx)
3181 .file()
3182 .map_or(false, |file| file.disk_state().exists())
3183 {
3184 save_futures.push(buffer_store.save_buffer(buffer, cx));
3185 }
3186 }
3187 }
3188 })
3189 }
3190
3191 cx.spawn(async move |this, cx| {
3192 for save_future in save_futures {
3193 save_future.await?;
3194 }
3195 let env = env.await;
3196
3197 this.update(cx, |this, _| {
3198 this.send_job(|git_repo, _cx| async move {
3199 match git_repo {
3200 RepositoryState::Local(repo) => repo.unstage_paths(entries, env).await,
3201 RepositoryState::Remote {
3202 project_id,
3203 client,
3204 work_directory_id,
3205 } => {
3206 client
3207 .request(proto::Unstage {
3208 project_id: project_id.0,
3209 work_directory_id: work_directory_id.to_proto(),
3210 paths: entries
3211 .into_iter()
3212 .map(|repo_path| repo_path.as_ref().to_proto())
3213 .collect(),
3214 })
3215 .await
3216 .context("sending unstage request")?;
3217
3218 Ok(())
3219 }
3220 }
3221 })
3222 })?
3223 .await??;
3224
3225 Ok(())
3226 })
3227 }
3228
3229 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3230 let to_stage = self
3231 .repository_entry
3232 .status()
3233 .filter(|entry| !entry.status.staging().is_fully_staged())
3234 .map(|entry| entry.repo_path.clone())
3235 .collect();
3236 self.stage_entries(to_stage, cx)
3237 }
3238
3239 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3240 let to_unstage = self
3241 .repository_entry
3242 .status()
3243 .filter(|entry| entry.status.staging().has_staged())
3244 .map(|entry| entry.repo_path.clone())
3245 .collect();
3246 self.unstage_entries(to_unstage, cx)
3247 }
3248
3249 /// Get a count of all entries in the active repository, including
3250 /// untracked files.
3251 pub fn entry_count(&self) -> usize {
3252 self.repository_entry.status_len()
3253 }
3254
3255 fn worktree_environment(
3256 &self,
3257 cx: &mut App,
3258 ) -> impl Future<Output = HashMap<String, String>> + 'static {
3259 let task = self.project_environment.as_ref().and_then(|env| {
3260 env.update(cx, |env, cx| {
3261 env.get_environment(
3262 self.worktree_id,
3263 Some(
3264 self.repository_entry
3265 .work_directory_abs_path
3266 .as_path()
3267 .into(),
3268 ),
3269 cx,
3270 )
3271 })
3272 .ok()
3273 });
3274 async move { OptionFuture::from(task).await.flatten().unwrap_or_default() }
3275 }
3276
3277 pub fn commit(
3278 &self,
3279 message: SharedString,
3280 name_and_email: Option<(SharedString, SharedString)>,
3281 cx: &mut App,
3282 ) -> oneshot::Receiver<Result<()>> {
3283 let env = self.worktree_environment(cx);
3284 self.send_job(|git_repo, _cx| async move {
3285 match git_repo {
3286 RepositoryState::Local(repo) => {
3287 let env = env.await;
3288 repo.commit(message, name_and_email, env).await
3289 }
3290 RepositoryState::Remote {
3291 project_id,
3292 client,
3293 work_directory_id,
3294 } => {
3295 let (name, email) = name_and_email.unzip();
3296 client
3297 .request(proto::Commit {
3298 project_id: project_id.0,
3299 work_directory_id: work_directory_id.to_proto(),
3300 message: String::from(message),
3301 name: name.map(String::from),
3302 email: email.map(String::from),
3303 })
3304 .await
3305 .context("sending commit request")?;
3306
3307 Ok(())
3308 }
3309 }
3310 })
3311 }
3312
3313 pub fn fetch(
3314 &mut self,
3315 askpass: AskPassDelegate,
3316 cx: &mut App,
3317 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3318 let executor = cx.background_executor().clone();
3319 let askpass_delegates = self.askpass_delegates.clone();
3320 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3321 let env = self.worktree_environment(cx);
3322
3323 self.send_job(move |git_repo, cx| async move {
3324 match git_repo {
3325 RepositoryState::Local(git_repository) => {
3326 let askpass = AskPassSession::new(&executor, askpass).await?;
3327 let env = env.await;
3328 git_repository.fetch(askpass, env, cx).await
3329 }
3330 RepositoryState::Remote {
3331 project_id,
3332 client,
3333 work_directory_id,
3334 } => {
3335 askpass_delegates.lock().insert(askpass_id, askpass);
3336 let _defer = util::defer(|| {
3337 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3338 debug_assert!(askpass_delegate.is_some());
3339 });
3340
3341 let response = client
3342 .request(proto::Fetch {
3343 project_id: project_id.0,
3344 work_directory_id: work_directory_id.to_proto(),
3345 askpass_id,
3346 })
3347 .await
3348 .context("sending fetch request")?;
3349
3350 Ok(RemoteCommandOutput {
3351 stdout: response.stdout,
3352 stderr: response.stderr,
3353 })
3354 }
3355 }
3356 })
3357 }
3358
3359 pub fn push(
3360 &mut self,
3361 branch: SharedString,
3362 remote: SharedString,
3363 options: Option<PushOptions>,
3364 askpass: AskPassDelegate,
3365 cx: &mut App,
3366 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3367 let executor = cx.background_executor().clone();
3368 let askpass_delegates = self.askpass_delegates.clone();
3369 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3370 let env = self.worktree_environment(cx);
3371
3372 self.send_job(move |git_repo, cx| async move {
3373 match git_repo {
3374 RepositoryState::Local(git_repository) => {
3375 let env = env.await;
3376 let askpass = AskPassSession::new(&executor, askpass).await?;
3377 git_repository
3378 .push(
3379 branch.to_string(),
3380 remote.to_string(),
3381 options,
3382 askpass,
3383 env,
3384 cx,
3385 )
3386 .await
3387 }
3388 RepositoryState::Remote {
3389 project_id,
3390 client,
3391 work_directory_id,
3392 } => {
3393 askpass_delegates.lock().insert(askpass_id, askpass);
3394 let _defer = util::defer(|| {
3395 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3396 debug_assert!(askpass_delegate.is_some());
3397 });
3398 let response = client
3399 .request(proto::Push {
3400 project_id: project_id.0,
3401 work_directory_id: work_directory_id.to_proto(),
3402 askpass_id,
3403 branch_name: branch.to_string(),
3404 remote_name: remote.to_string(),
3405 options: options.map(|options| match options {
3406 PushOptions::Force => proto::push::PushOptions::Force,
3407 PushOptions::SetUpstream => proto::push::PushOptions::SetUpstream,
3408 } as i32),
3409 })
3410 .await
3411 .context("sending push request")?;
3412
3413 Ok(RemoteCommandOutput {
3414 stdout: response.stdout,
3415 stderr: response.stderr,
3416 })
3417 }
3418 }
3419 })
3420 }
3421
3422 pub fn pull(
3423 &mut self,
3424 branch: SharedString,
3425 remote: SharedString,
3426 askpass: AskPassDelegate,
3427 cx: &mut App,
3428 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
3429 let executor = cx.background_executor().clone();
3430 let askpass_delegates = self.askpass_delegates.clone();
3431 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
3432 let env = self.worktree_environment(cx);
3433
3434 self.send_job(move |git_repo, cx| async move {
3435 match git_repo {
3436 RepositoryState::Local(git_repository) => {
3437 let askpass = AskPassSession::new(&executor, askpass).await?;
3438 let env = env.await;
3439 git_repository
3440 .pull(branch.to_string(), remote.to_string(), askpass, env, cx)
3441 .await
3442 }
3443 RepositoryState::Remote {
3444 project_id,
3445 client,
3446 work_directory_id,
3447 } => {
3448 askpass_delegates.lock().insert(askpass_id, askpass);
3449 let _defer = util::defer(|| {
3450 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
3451 debug_assert!(askpass_delegate.is_some());
3452 });
3453 let response = client
3454 .request(proto::Pull {
3455 project_id: project_id.0,
3456 work_directory_id: work_directory_id.to_proto(),
3457 askpass_id,
3458 branch_name: branch.to_string(),
3459 remote_name: remote.to_string(),
3460 })
3461 .await
3462 .context("sending pull request")?;
3463
3464 Ok(RemoteCommandOutput {
3465 stdout: response.stdout,
3466 stderr: response.stderr,
3467 })
3468 }
3469 }
3470 })
3471 }
3472
3473 fn spawn_set_index_text_job(
3474 &self,
3475 path: RepoPath,
3476 content: Option<String>,
3477 cx: &mut App,
3478 ) -> oneshot::Receiver<anyhow::Result<()>> {
3479 let env = self.worktree_environment(cx);
3480
3481 self.send_keyed_job(
3482 Some(GitJobKey::WriteIndex(path.clone())),
3483 |git_repo, _cx| async {
3484 match git_repo {
3485 RepositoryState::Local(repo) => {
3486 repo.set_index_text(path, content, env.await).await
3487 }
3488 RepositoryState::Remote {
3489 project_id,
3490 client,
3491 work_directory_id,
3492 } => {
3493 client
3494 .request(proto::SetIndexText {
3495 project_id: project_id.0,
3496 work_directory_id: work_directory_id.to_proto(),
3497 path: path.as_ref().to_proto(),
3498 text: content,
3499 })
3500 .await?;
3501 Ok(())
3502 }
3503 }
3504 },
3505 )
3506 }
3507
3508 pub fn get_remotes(
3509 &self,
3510 branch_name: Option<String>,
3511 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
3512 self.send_job(|repo, _cx| async move {
3513 match repo {
3514 RepositoryState::Local(git_repository) => {
3515 git_repository.get_remotes(branch_name).await
3516 }
3517 RepositoryState::Remote {
3518 project_id,
3519 client,
3520 work_directory_id,
3521 } => {
3522 let response = client
3523 .request(proto::GetRemotes {
3524 project_id: project_id.0,
3525 work_directory_id: work_directory_id.to_proto(),
3526 branch_name,
3527 })
3528 .await?;
3529
3530 let remotes = response
3531 .remotes
3532 .into_iter()
3533 .map(|remotes| git::repository::Remote {
3534 name: remotes.name.into(),
3535 })
3536 .collect();
3537
3538 Ok(remotes)
3539 }
3540 }
3541 })
3542 }
3543
3544 pub fn branch(&self) -> Option<&Branch> {
3545 self.repository_entry.branch()
3546 }
3547
3548 pub fn branches(&self) -> oneshot::Receiver<Result<Vec<Branch>>> {
3549 self.send_job(|repo, cx| async move {
3550 match repo {
3551 RepositoryState::Local(git_repository) => {
3552 let git_repository = git_repository.clone();
3553 cx.background_spawn(async move { git_repository.branches().await })
3554 .await
3555 }
3556 RepositoryState::Remote {
3557 project_id,
3558 client,
3559 work_directory_id,
3560 } => {
3561 let response = client
3562 .request(proto::GitGetBranches {
3563 project_id: project_id.0,
3564 work_directory_id: work_directory_id.to_proto(),
3565 })
3566 .await?;
3567
3568 let branches = response
3569 .branches
3570 .into_iter()
3571 .map(|branch| worktree::proto_to_branch(&branch))
3572 .collect();
3573
3574 Ok(branches)
3575 }
3576 }
3577 })
3578 }
3579
3580 pub fn diff(&self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
3581 self.send_job(|repo, _cx| async move {
3582 match repo {
3583 RepositoryState::Local(git_repository) => git_repository.diff(diff_type).await,
3584 RepositoryState::Remote {
3585 project_id,
3586 client,
3587 work_directory_id,
3588 ..
3589 } => {
3590 let response = client
3591 .request(proto::GitDiff {
3592 project_id: project_id.0,
3593 work_directory_id: work_directory_id.to_proto(),
3594 diff_type: match diff_type {
3595 DiffType::HeadToIndex => {
3596 proto::git_diff::DiffType::HeadToIndex.into()
3597 }
3598 DiffType::HeadToWorktree => {
3599 proto::git_diff::DiffType::HeadToWorktree.into()
3600 }
3601 },
3602 })
3603 .await?;
3604
3605 Ok(response.diff)
3606 }
3607 }
3608 })
3609 }
3610
3611 pub fn create_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3612 self.send_job(|repo, _cx| async move {
3613 match repo {
3614 RepositoryState::Local(git_repository) => {
3615 git_repository.create_branch(branch_name).await
3616 }
3617 RepositoryState::Remote {
3618 project_id,
3619 client,
3620 work_directory_id,
3621 } => {
3622 client
3623 .request(proto::GitCreateBranch {
3624 project_id: project_id.0,
3625 work_directory_id: work_directory_id.to_proto(),
3626 branch_name,
3627 })
3628 .await?;
3629
3630 Ok(())
3631 }
3632 }
3633 })
3634 }
3635
3636 pub fn change_branch(&self, branch_name: String) -> oneshot::Receiver<Result<()>> {
3637 self.send_job(|repo, _cx| async move {
3638 match repo {
3639 RepositoryState::Local(git_repository) => {
3640 git_repository.change_branch(branch_name).await
3641 }
3642 RepositoryState::Remote {
3643 project_id,
3644 client,
3645 work_directory_id,
3646 } => {
3647 client
3648 .request(proto::GitChangeBranch {
3649 project_id: project_id.0,
3650 work_directory_id: work_directory_id.to_proto(),
3651 branch_name,
3652 })
3653 .await?;
3654
3655 Ok(())
3656 }
3657 }
3658 })
3659 }
3660
3661 pub fn check_for_pushed_commits(&self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
3662 self.send_job(|repo, _cx| async move {
3663 match repo {
3664 RepositoryState::Local(git_repository) => {
3665 git_repository.check_for_pushed_commit().await
3666 }
3667 RepositoryState::Remote {
3668 project_id,
3669 client,
3670 work_directory_id,
3671 } => {
3672 let response = client
3673 .request(proto::CheckForPushedCommits {
3674 project_id: project_id.0,
3675 work_directory_id: work_directory_id.to_proto(),
3676 })
3677 .await?;
3678
3679 let branches = response.pushed_to.into_iter().map(Into::into).collect();
3680
3681 Ok(branches)
3682 }
3683 }
3684 })
3685 }
3686
3687 pub fn checkpoint(&self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
3688 self.send_job(|repo, _cx| async move {
3689 match repo {
3690 RepositoryState::Local(git_repository) => git_repository.checkpoint().await,
3691 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3692 }
3693 })
3694 }
3695
3696 pub fn restore_checkpoint(
3697 &self,
3698 checkpoint: GitRepositoryCheckpoint,
3699 ) -> oneshot::Receiver<Result<()>> {
3700 self.send_job(move |repo, _cx| async move {
3701 match repo {
3702 RepositoryState::Local(git_repository) => {
3703 git_repository.restore_checkpoint(checkpoint).await
3704 }
3705 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3706 }
3707 })
3708 }
3709
3710 pub(crate) fn apply_remote_update(&mut self, update: proto::UpdateRepository) -> Result<()> {
3711 let conflicted_paths = TreeSet::from_ordered_entries(
3712 update
3713 .current_merge_conflicts
3714 .into_iter()
3715 .map(|path| RepoPath(Path::new(&path).into())),
3716 );
3717 self.repository_entry.current_branch = update.branch_summary.as_ref().map(proto_to_branch);
3718 self.repository_entry.current_merge_conflicts = conflicted_paths;
3719
3720 let edits = update
3721 .removed_statuses
3722 .into_iter()
3723 .map(|path| sum_tree::Edit::Remove(PathKey(FromProto::from_proto(path))))
3724 .chain(
3725 update
3726 .updated_statuses
3727 .into_iter()
3728 .filter_map(|updated_status| {
3729 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
3730 }),
3731 )
3732 .collect::<Vec<_>>();
3733 self.repository_entry.statuses_by_path.edit(edits, &());
3734 Ok(())
3735 }
3736
3737 pub fn compare_checkpoints(
3738 &self,
3739 left: GitRepositoryCheckpoint,
3740 right: GitRepositoryCheckpoint,
3741 ) -> oneshot::Receiver<Result<bool>> {
3742 self.send_job(move |repo, _cx| async move {
3743 match repo {
3744 RepositoryState::Local(git_repository) => {
3745 git_repository.compare_checkpoints(left, right).await
3746 }
3747 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3748 }
3749 })
3750 }
3751
3752 pub fn delete_checkpoint(
3753 &self,
3754 checkpoint: GitRepositoryCheckpoint,
3755 ) -> oneshot::Receiver<Result<()>> {
3756 self.send_job(move |repo, _cx| async move {
3757 match repo {
3758 RepositoryState::Local(git_repository) => {
3759 git_repository.delete_checkpoint(checkpoint).await
3760 }
3761 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3762 }
3763 })
3764 }
3765
3766 pub fn diff_checkpoints(
3767 &self,
3768 base_checkpoint: GitRepositoryCheckpoint,
3769 target_checkpoint: GitRepositoryCheckpoint,
3770 ) -> oneshot::Receiver<Result<String>> {
3771 self.send_job(move |repo, _cx| async move {
3772 match repo {
3773 RepositoryState::Local(git_repository) => {
3774 git_repository
3775 .diff_checkpoints(base_checkpoint, target_checkpoint)
3776 .await
3777 }
3778 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3779 }
3780 })
3781 }
3782
3783 pub fn create_index(&self) -> oneshot::Receiver<Result<GitIndex>> {
3784 self.send_job(move |repo, _cx| async move {
3785 match repo {
3786 RepositoryState::Local(git_repository) => git_repository.create_index().await,
3787 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3788 }
3789 })
3790 }
3791
3792 pub fn apply_diff(&self, index: GitIndex, diff: String) -> oneshot::Receiver<Result<()>> {
3793 self.send_job(move |repo, _cx| async move {
3794 match repo {
3795 RepositoryState::Local(git_repository) => {
3796 git_repository.apply_diff(index, diff).await
3797 }
3798 RepositoryState::Remote { .. } => Err(anyhow!("not implemented yet")),
3799 }
3800 })
3801 }
3802}
3803
3804fn get_permalink_in_rust_registry_src(
3805 provider_registry: Arc<GitHostingProviderRegistry>,
3806 path: PathBuf,
3807 selection: Range<u32>,
3808) -> Result<url::Url> {
3809 #[derive(Deserialize)]
3810 struct CargoVcsGit {
3811 sha1: String,
3812 }
3813
3814 #[derive(Deserialize)]
3815 struct CargoVcsInfo {
3816 git: CargoVcsGit,
3817 path_in_vcs: String,
3818 }
3819
3820 #[derive(Deserialize)]
3821 struct CargoPackage {
3822 repository: String,
3823 }
3824
3825 #[derive(Deserialize)]
3826 struct CargoToml {
3827 package: CargoPackage,
3828 }
3829
3830 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
3831 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
3832 Some((dir, json))
3833 }) else {
3834 bail!("No .cargo_vcs_info.json found in parent directories")
3835 };
3836 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
3837 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
3838 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
3839 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
3840 .ok_or_else(|| anyhow!("Failed to parse package.repository field of manifest"))?;
3841 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
3842 let permalink = provider.build_permalink(
3843 remote,
3844 BuildPermalinkParams {
3845 sha: &cargo_vcs_info.git.sha1,
3846 path: &path.to_string_lossy(),
3847 selection: Some(selection),
3848 },
3849 );
3850 Ok(permalink)
3851}
3852
3853fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
3854 let Some(blame) = blame else {
3855 return proto::BlameBufferResponse {
3856 blame_response: None,
3857 };
3858 };
3859
3860 let entries = blame
3861 .entries
3862 .into_iter()
3863 .map(|entry| proto::BlameEntry {
3864 sha: entry.sha.as_bytes().into(),
3865 start_line: entry.range.start,
3866 end_line: entry.range.end,
3867 original_line_number: entry.original_line_number,
3868 author: entry.author.clone(),
3869 author_mail: entry.author_mail.clone(),
3870 author_time: entry.author_time,
3871 author_tz: entry.author_tz.clone(),
3872 committer: entry.committer_name.clone(),
3873 committer_mail: entry.committer_email.clone(),
3874 committer_time: entry.committer_time,
3875 committer_tz: entry.committer_tz.clone(),
3876 summary: entry.summary.clone(),
3877 previous: entry.previous.clone(),
3878 filename: entry.filename.clone(),
3879 })
3880 .collect::<Vec<_>>();
3881
3882 let messages = blame
3883 .messages
3884 .into_iter()
3885 .map(|(oid, message)| proto::CommitMessage {
3886 oid: oid.as_bytes().into(),
3887 message,
3888 })
3889 .collect::<Vec<_>>();
3890
3891 proto::BlameBufferResponse {
3892 blame_response: Some(proto::blame_buffer_response::BlameResponse {
3893 entries,
3894 messages,
3895 remote_url: blame.remote_url,
3896 }),
3897 }
3898}
3899
3900fn deserialize_blame_buffer_response(
3901 response: proto::BlameBufferResponse,
3902) -> Option<git::blame::Blame> {
3903 let response = response.blame_response?;
3904 let entries = response
3905 .entries
3906 .into_iter()
3907 .filter_map(|entry| {
3908 Some(git::blame::BlameEntry {
3909 sha: git::Oid::from_bytes(&entry.sha).ok()?,
3910 range: entry.start_line..entry.end_line,
3911 original_line_number: entry.original_line_number,
3912 committer_name: entry.committer,
3913 committer_time: entry.committer_time,
3914 committer_tz: entry.committer_tz,
3915 committer_email: entry.committer_mail,
3916 author: entry.author,
3917 author_mail: entry.author_mail,
3918 author_time: entry.author_time,
3919 author_tz: entry.author_tz,
3920 summary: entry.summary,
3921 previous: entry.previous,
3922 filename: entry.filename,
3923 })
3924 })
3925 .collect::<Vec<_>>();
3926
3927 let messages = response
3928 .messages
3929 .into_iter()
3930 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
3931 .collect::<HashMap<_, _>>();
3932
3933 Some(Blame {
3934 entries,
3935 messages,
3936 remote_url: response.remote_url,
3937 })
3938}