1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4
5use crate::{
6 ProjectEnvironment, ProjectItem, ProjectPath,
7 buffer_store::{BufferStore, BufferStoreEvent},
8 worktree_store::{WorktreeStore, WorktreeStoreEvent},
9};
10use anyhow::{Context as _, Result, anyhow, bail};
11use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
12use buffer_diff::{BufferDiff, BufferDiffEvent};
13use client::ProjectId;
14use collections::HashMap;
15pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
16use fs::Fs;
17use futures::{
18 FutureExt, StreamExt,
19 channel::{mpsc, oneshot},
20 future::{self, Shared},
21 stream::FuturesOrdered,
22};
23use git::{
24 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
25 blame::Blame,
26 parse_git_remote_url,
27 repository::{
28 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
29 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
30 ResetMode, UpstreamTrackingStatus,
31 },
32 stash::{GitStash, StashEntry},
33 status::{
34 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
35 UnmergedStatus, UnmergedStatusCode,
36 },
37};
38use gpui::{
39 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
40 WeakEntity,
41};
42use language::{
43 Buffer, BufferEvent, Language, LanguageRegistry,
44 proto::{deserialize_version, serialize_version},
45};
46use parking_lot::Mutex;
47use postage::stream::Stream as _;
48use rpc::{
49 AnyProtoClient, TypedEnvelope,
50 proto::{self, git_reset, split_repository_update},
51};
52use serde::Deserialize;
53use std::{
54 cmp::Ordering,
55 collections::{BTreeSet, VecDeque},
56 future::Future,
57 mem,
58 ops::Range,
59 path::{Path, PathBuf},
60 str::FromStr,
61 sync::{
62 Arc,
63 atomic::{self, AtomicU64},
64 },
65 time::Instant,
66};
67use sum_tree::{Edit, SumTree, TreeSet};
68use task::Shell;
69use text::{Bias, BufferId};
70use util::{
71 ResultExt, debug_panic,
72 paths::{PathStyle, SanitizedPath},
73 post_inc,
74 rel_path::RelPath,
75};
76use worktree::{
77 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
78 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
79};
80use zeroize::Zeroize;
81
82pub struct GitStore {
83 state: GitStoreState,
84 buffer_store: Entity<BufferStore>,
85 worktree_store: Entity<WorktreeStore>,
86 repositories: HashMap<RepositoryId, Entity<Repository>>,
87 active_repo_id: Option<RepositoryId>,
88 #[allow(clippy::type_complexity)]
89 loading_diffs:
90 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
91 diffs: HashMap<BufferId, Entity<BufferGitState>>,
92 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
93 _subscriptions: Vec<Subscription>,
94}
95
96#[derive(Default)]
97struct SharedDiffs {
98 unstaged: Option<Entity<BufferDiff>>,
99 uncommitted: Option<Entity<BufferDiff>>,
100}
101
102struct BufferGitState {
103 unstaged_diff: Option<WeakEntity<BufferDiff>>,
104 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
105 conflict_set: Option<WeakEntity<ConflictSet>>,
106 recalculate_diff_task: Option<Task<Result<()>>>,
107 reparse_conflict_markers_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 language_registry: Option<Arc<LanguageRegistry>>,
110 conflict_updated_futures: Vec<oneshot::Sender<()>>,
111 recalculating_tx: postage::watch::Sender<bool>,
112
113 /// These operation counts are used to ensure that head and index text
114 /// values read from the git repository are up-to-date with any hunk staging
115 /// operations that have been performed on the BufferDiff.
116 ///
117 /// The operation count is incremented immediately when the user initiates a
118 /// hunk stage/unstage operation. Then, upon finishing writing the new index
119 /// text do disk, the `operation count as of write` is updated to reflect
120 /// the operation count that prompted the write.
121 hunk_staging_operation_count: usize,
122 hunk_staging_operation_count_as_of_write: usize,
123
124 head_text: Option<Arc<String>>,
125 index_text: Option<Arc<String>>,
126 head_changed: bool,
127 index_changed: bool,
128 language_changed: bool,
129}
130
131#[derive(Clone, Debug)]
132enum DiffBasesChange {
133 SetIndex(Option<String>),
134 SetHead(Option<String>),
135 SetEach {
136 index: Option<String>,
137 head: Option<String>,
138 },
139 SetBoth(Option<String>),
140}
141
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
143enum DiffKind {
144 Unstaged,
145 Uncommitted,
146}
147
148enum GitStoreState {
149 Local {
150 next_repository_id: Arc<AtomicU64>,
151 downstream: Option<LocalDownstreamState>,
152 project_environment: Entity<ProjectEnvironment>,
153 fs: Arc<dyn Fs>,
154 },
155 Remote {
156 upstream_client: AnyProtoClient,
157 upstream_project_id: u64,
158 downstream: Option<(AnyProtoClient, ProjectId)>,
159 },
160}
161
162enum DownstreamUpdate {
163 UpdateRepository(RepositorySnapshot),
164 RemoveRepository(RepositoryId),
165}
166
167struct LocalDownstreamState {
168 client: AnyProtoClient,
169 project_id: ProjectId,
170 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
171 _task: Task<Result<()>>,
172}
173
174#[derive(Clone, Debug)]
175pub struct GitStoreCheckpoint {
176 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
177}
178
179#[derive(Clone, Debug, PartialEq, Eq)]
180pub struct StatusEntry {
181 pub repo_path: RepoPath,
182 pub status: FileStatus,
183}
184
185impl StatusEntry {
186 fn to_proto(&self) -> proto::StatusEntry {
187 let simple_status = match self.status {
188 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
189 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
190 FileStatus::Tracked(TrackedStatus {
191 index_status,
192 worktree_status,
193 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
194 worktree_status
195 } else {
196 index_status
197 }),
198 };
199
200 proto::StatusEntry {
201 repo_path: self.repo_path.to_proto(),
202 simple_status,
203 status: Some(status_to_proto(self.status)),
204 }
205 }
206}
207
208impl TryFrom<proto::StatusEntry> for StatusEntry {
209 type Error = anyhow::Error;
210
211 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
212 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
213 let status = status_from_proto(value.simple_status, value.status)?;
214 Ok(Self { repo_path, status })
215 }
216}
217
218impl sum_tree::Item for StatusEntry {
219 type Summary = PathSummary<GitSummary>;
220
221 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
222 PathSummary {
223 max_path: self.repo_path.0.clone(),
224 item_summary: self.status.summary(),
225 }
226 }
227}
228
229impl sum_tree::KeyedItem for StatusEntry {
230 type Key = PathKey;
231
232 fn key(&self) -> Self::Key {
233 PathKey(self.repo_path.0.clone())
234 }
235}
236
237#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
238pub struct RepositoryId(pub u64);
239
240#[derive(Clone, Debug, Default, PartialEq, Eq)]
241pub struct MergeDetails {
242 pub conflicted_paths: TreeSet<RepoPath>,
243 pub message: Option<SharedString>,
244 pub heads: Vec<Option<SharedString>>,
245}
246
247#[derive(Clone, Debug, PartialEq, Eq)]
248pub struct RepositorySnapshot {
249 pub id: RepositoryId,
250 pub statuses_by_path: SumTree<StatusEntry>,
251 pub work_directory_abs_path: Arc<Path>,
252 pub path_style: PathStyle,
253 pub branch: Option<Branch>,
254 pub head_commit: Option<CommitDetails>,
255 pub scan_id: u64,
256 pub merge: MergeDetails,
257 pub remote_origin_url: Option<String>,
258 pub remote_upstream_url: Option<String>,
259 pub stash_entries: GitStash,
260}
261
262type JobId = u64;
263
264#[derive(Clone, Debug, PartialEq, Eq)]
265pub struct JobInfo {
266 pub start: Instant,
267 pub message: SharedString,
268}
269
270pub struct Repository {
271 this: WeakEntity<Self>,
272 snapshot: RepositorySnapshot,
273 commit_message_buffer: Option<Entity<Buffer>>,
274 git_store: WeakEntity<GitStore>,
275 // For a local repository, holds paths that have had worktree events since the last status scan completed,
276 // and that should be examined during the next status scan.
277 paths_needing_status_update: BTreeSet<RepoPath>,
278 job_sender: mpsc::UnboundedSender<GitJob>,
279 active_jobs: HashMap<JobId, JobInfo>,
280 job_id: JobId,
281 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
282 latest_askpass_id: u64,
283}
284
285impl std::ops::Deref for Repository {
286 type Target = RepositorySnapshot;
287
288 fn deref(&self) -> &Self::Target {
289 &self.snapshot
290 }
291}
292
293#[derive(Clone)]
294pub enum RepositoryState {
295 Local {
296 backend: Arc<dyn GitRepository>,
297 environment: Arc<HashMap<String, String>>,
298 },
299 Remote {
300 project_id: ProjectId,
301 client: AnyProtoClient,
302 },
303}
304
305#[derive(Clone, Debug, PartialEq, Eq)]
306pub enum RepositoryEvent {
307 StatusesChanged {
308 // TODO could report which statuses changed here
309 full_scan: bool,
310 },
311 MergeHeadsChanged,
312 BranchChanged,
313 StashEntriesChanged,
314}
315
316#[derive(Clone, Debug)]
317pub struct JobsUpdated;
318
319#[derive(Debug)]
320pub enum GitStoreEvent {
321 ActiveRepositoryChanged(Option<RepositoryId>),
322 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
323 RepositoryAdded,
324 RepositoryRemoved(RepositoryId),
325 IndexWriteError(anyhow::Error),
326 JobsUpdated,
327 ConflictsUpdated,
328}
329
330impl EventEmitter<RepositoryEvent> for Repository {}
331impl EventEmitter<JobsUpdated> for Repository {}
332impl EventEmitter<GitStoreEvent> for GitStore {}
333
334pub struct GitJob {
335 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
336 key: Option<GitJobKey>,
337}
338
339#[derive(PartialEq, Eq)]
340enum GitJobKey {
341 WriteIndex(RepoPath),
342 ReloadBufferDiffBases,
343 RefreshStatuses,
344 ReloadGitState,
345}
346
347impl GitStore {
348 pub fn local(
349 worktree_store: &Entity<WorktreeStore>,
350 buffer_store: Entity<BufferStore>,
351 environment: Entity<ProjectEnvironment>,
352 fs: Arc<dyn Fs>,
353 cx: &mut Context<Self>,
354 ) -> Self {
355 Self::new(
356 worktree_store.clone(),
357 buffer_store,
358 GitStoreState::Local {
359 next_repository_id: Arc::new(AtomicU64::new(1)),
360 downstream: None,
361 project_environment: environment,
362 fs,
363 },
364 cx,
365 )
366 }
367
368 pub fn remote(
369 worktree_store: &Entity<WorktreeStore>,
370 buffer_store: Entity<BufferStore>,
371 upstream_client: AnyProtoClient,
372 project_id: u64,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 Self::new(
376 worktree_store.clone(),
377 buffer_store,
378 GitStoreState::Remote {
379 upstream_client,
380 upstream_project_id: project_id,
381 downstream: None,
382 },
383 cx,
384 )
385 }
386
387 fn new(
388 worktree_store: Entity<WorktreeStore>,
389 buffer_store: Entity<BufferStore>,
390 state: GitStoreState,
391 cx: &mut Context<Self>,
392 ) -> Self {
393 let _subscriptions = vec![
394 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
395 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
396 ];
397
398 GitStore {
399 state,
400 buffer_store,
401 worktree_store,
402 repositories: HashMap::default(),
403 active_repo_id: None,
404 _subscriptions,
405 loading_diffs: HashMap::default(),
406 shared_diffs: HashMap::default(),
407 diffs: HashMap::default(),
408 }
409 }
410
411 pub fn init(client: &AnyProtoClient) {
412 client.add_entity_request_handler(Self::handle_get_remotes);
413 client.add_entity_request_handler(Self::handle_get_branches);
414 client.add_entity_request_handler(Self::handle_get_default_branch);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_rename_branch);
418 client.add_entity_request_handler(Self::handle_git_init);
419 client.add_entity_request_handler(Self::handle_push);
420 client.add_entity_request_handler(Self::handle_pull);
421 client.add_entity_request_handler(Self::handle_fetch);
422 client.add_entity_request_handler(Self::handle_stage);
423 client.add_entity_request_handler(Self::handle_unstage);
424 client.add_entity_request_handler(Self::handle_stash);
425 client.add_entity_request_handler(Self::handle_stash_pop);
426 client.add_entity_request_handler(Self::handle_stash_apply);
427 client.add_entity_request_handler(Self::handle_stash_drop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_tree_diff);
439 client.add_entity_request_handler(Self::handle_get_blob_content);
440 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
441 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
442 client.add_entity_message_handler(Self::handle_update_diff_bases);
443 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
444 client.add_entity_request_handler(Self::handle_blame_buffer);
445 client.add_entity_message_handler(Self::handle_update_repository);
446 client.add_entity_message_handler(Self::handle_remove_repository);
447 client.add_entity_request_handler(Self::handle_git_clone);
448 }
449
450 pub fn is_local(&self) -> bool {
451 matches!(self.state, GitStoreState::Local { .. })
452 }
453 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
454 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
455 let id = repo.read(cx).id;
456 if self.active_repo_id != Some(id) {
457 self.active_repo_id = Some(id);
458 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
459 }
460 }
461 }
462
463 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
464 match &mut self.state {
465 GitStoreState::Remote {
466 downstream: downstream_client,
467 ..
468 } => {
469 for repo in self.repositories.values() {
470 let update = repo.read(cx).snapshot.initial_update(project_id);
471 for update in split_repository_update(update) {
472 client.send(update).log_err();
473 }
474 }
475 *downstream_client = Some((client, ProjectId(project_id)));
476 }
477 GitStoreState::Local {
478 downstream: downstream_client,
479 ..
480 } => {
481 let mut snapshots = HashMap::default();
482 let (updates_tx, mut updates_rx) = mpsc::unbounded();
483 for repo in self.repositories.values() {
484 updates_tx
485 .unbounded_send(DownstreamUpdate::UpdateRepository(
486 repo.read(cx).snapshot.clone(),
487 ))
488 .ok();
489 }
490 *downstream_client = Some(LocalDownstreamState {
491 client: client.clone(),
492 project_id: ProjectId(project_id),
493 updates_tx,
494 _task: cx.spawn(async move |this, cx| {
495 cx.background_spawn(async move {
496 while let Some(update) = updates_rx.next().await {
497 match update {
498 DownstreamUpdate::UpdateRepository(snapshot) => {
499 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
500 {
501 let update =
502 snapshot.build_update(old_snapshot, project_id);
503 *old_snapshot = snapshot;
504 for update in split_repository_update(update) {
505 client.send(update)?;
506 }
507 } else {
508 let update = snapshot.initial_update(project_id);
509 for update in split_repository_update(update) {
510 client.send(update)?;
511 }
512 snapshots.insert(snapshot.id, snapshot);
513 }
514 }
515 DownstreamUpdate::RemoveRepository(id) => {
516 client.send(proto::RemoveRepository {
517 project_id,
518 id: id.to_proto(),
519 })?;
520 }
521 }
522 }
523 anyhow::Ok(())
524 })
525 .await
526 .ok();
527 this.update(cx, |this, _| {
528 if let GitStoreState::Local {
529 downstream: downstream_client,
530 ..
531 } = &mut this.state
532 {
533 downstream_client.take();
534 } else {
535 unreachable!("unshared called on remote store");
536 }
537 })
538 }),
539 });
540 }
541 }
542 }
543
544 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
545 match &mut self.state {
546 GitStoreState::Local {
547 downstream: downstream_client,
548 ..
549 } => {
550 downstream_client.take();
551 }
552 GitStoreState::Remote {
553 downstream: downstream_client,
554 ..
555 } => {
556 downstream_client.take();
557 }
558 }
559 self.shared_diffs.clear();
560 }
561
562 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
563 self.shared_diffs.remove(peer_id);
564 }
565
566 pub fn active_repository(&self) -> Option<Entity<Repository>> {
567 self.active_repo_id
568 .as_ref()
569 .map(|id| self.repositories[id].clone())
570 }
571
572 pub fn open_unstaged_diff(
573 &mut self,
574 buffer: Entity<Buffer>,
575 cx: &mut Context<Self>,
576 ) -> Task<Result<Entity<BufferDiff>>> {
577 let buffer_id = buffer.read(cx).remote_id();
578 if let Some(diff_state) = self.diffs.get(&buffer_id)
579 && let Some(unstaged_diff) = diff_state
580 .read(cx)
581 .unstaged_diff
582 .as_ref()
583 .and_then(|weak| weak.upgrade())
584 {
585 if let Some(task) =
586 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
587 {
588 return cx.background_executor().spawn(async move {
589 task.await;
590 Ok(unstaged_diff)
591 });
592 }
593 return Task::ready(Ok(unstaged_diff));
594 }
595
596 let Some((repo, repo_path)) =
597 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
598 else {
599 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
600 };
601
602 let task = self
603 .loading_diffs
604 .entry((buffer_id, DiffKind::Unstaged))
605 .or_insert_with(|| {
606 let staged_text = repo.update(cx, |repo, cx| {
607 repo.load_staged_text(buffer_id, repo_path, cx)
608 });
609 cx.spawn(async move |this, cx| {
610 Self::open_diff_internal(
611 this,
612 DiffKind::Unstaged,
613 staged_text.await.map(DiffBasesChange::SetIndex),
614 buffer,
615 cx,
616 )
617 .await
618 .map_err(Arc::new)
619 })
620 .shared()
621 })
622 .clone();
623
624 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
625 }
626
627 pub fn open_diff_since(
628 &mut self,
629 oid: Option<git::Oid>,
630 buffer: Entity<Buffer>,
631 repo: Entity<Repository>,
632 languages: Arc<LanguageRegistry>,
633 cx: &mut Context<Self>,
634 ) -> Task<Result<Entity<BufferDiff>>> {
635 cx.spawn(async move |this, cx| {
636 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
637 let content = match oid {
638 None => None,
639 Some(oid) => Some(
640 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
641 .await?,
642 ),
643 };
644 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
645
646 buffer_diff
647 .update(cx, |buffer_diff, cx| {
648 buffer_diff.set_base_text(
649 content.map(Arc::new),
650 buffer_snapshot.language().cloned(),
651 Some(languages.clone()),
652 buffer_snapshot.text,
653 cx,
654 )
655 })?
656 .await?;
657 let unstaged_diff = this
658 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
659 .await?;
660 buffer_diff.update(cx, |buffer_diff, _| {
661 buffer_diff.set_secondary_diff(unstaged_diff);
662 })?;
663
664 this.update(cx, |_, cx| {
665 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
666 .detach();
667 })?;
668
669 Ok(buffer_diff)
670 })
671 }
672
673 pub fn open_uncommitted_diff(
674 &mut self,
675 buffer: Entity<Buffer>,
676 cx: &mut Context<Self>,
677 ) -> Task<Result<Entity<BufferDiff>>> {
678 let buffer_id = buffer.read(cx).remote_id();
679
680 if let Some(diff_state) = self.diffs.get(&buffer_id)
681 && let Some(uncommitted_diff) = diff_state
682 .read(cx)
683 .uncommitted_diff
684 .as_ref()
685 .and_then(|weak| weak.upgrade())
686 {
687 if let Some(task) =
688 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
689 {
690 return cx.background_executor().spawn(async move {
691 task.await;
692 Ok(uncommitted_diff)
693 });
694 }
695 return Task::ready(Ok(uncommitted_diff));
696 }
697
698 let Some((repo, repo_path)) =
699 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
700 else {
701 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
702 };
703
704 let task = self
705 .loading_diffs
706 .entry((buffer_id, DiffKind::Uncommitted))
707 .or_insert_with(|| {
708 let changes = repo.update(cx, |repo, cx| {
709 repo.load_committed_text(buffer_id, repo_path, cx)
710 });
711
712 // todo(lw): hot foreground spawn
713 cx.spawn(async move |this, cx| {
714 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
715 .await
716 .map_err(Arc::new)
717 })
718 .shared()
719 })
720 .clone();
721
722 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
723 }
724
725 async fn open_diff_internal(
726 this: WeakEntity<Self>,
727 kind: DiffKind,
728 texts: Result<DiffBasesChange>,
729 buffer_entity: Entity<Buffer>,
730 cx: &mut AsyncApp,
731 ) -> Result<Entity<BufferDiff>> {
732 let diff_bases_change = match texts {
733 Err(e) => {
734 this.update(cx, |this, cx| {
735 let buffer = buffer_entity.read(cx);
736 let buffer_id = buffer.remote_id();
737 this.loading_diffs.remove(&(buffer_id, kind));
738 })?;
739 return Err(e);
740 }
741 Ok(change) => change,
742 };
743
744 this.update(cx, |this, cx| {
745 let buffer = buffer_entity.read(cx);
746 let buffer_id = buffer.remote_id();
747 let language = buffer.language().cloned();
748 let language_registry = buffer.language_registry();
749 let text_snapshot = buffer.text_snapshot();
750 this.loading_diffs.remove(&(buffer_id, kind));
751
752 let git_store = cx.weak_entity();
753 let diff_state = this
754 .diffs
755 .entry(buffer_id)
756 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
757
758 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
759
760 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
761 diff_state.update(cx, |diff_state, cx| {
762 diff_state.language = language;
763 diff_state.language_registry = language_registry;
764
765 match kind {
766 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
767 DiffKind::Uncommitted => {
768 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
769 diff
770 } else {
771 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
772 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
773 unstaged_diff
774 };
775
776 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
777 diff_state.uncommitted_diff = Some(diff.downgrade())
778 }
779 }
780
781 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
782 let rx = diff_state.wait_for_recalculation();
783
784 anyhow::Ok(async move {
785 if let Some(rx) = rx {
786 rx.await;
787 }
788 Ok(diff)
789 })
790 })
791 })??
792 .await
793 }
794
795 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
796 let diff_state = self.diffs.get(&buffer_id)?;
797 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
798 }
799
800 pub fn get_uncommitted_diff(
801 &self,
802 buffer_id: BufferId,
803 cx: &App,
804 ) -> Option<Entity<BufferDiff>> {
805 let diff_state = self.diffs.get(&buffer_id)?;
806 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
807 }
808
809 pub fn open_conflict_set(
810 &mut self,
811 buffer: Entity<Buffer>,
812 cx: &mut Context<Self>,
813 ) -> Entity<ConflictSet> {
814 log::debug!("open conflict set");
815 let buffer_id = buffer.read(cx).remote_id();
816
817 if let Some(git_state) = self.diffs.get(&buffer_id)
818 && let Some(conflict_set) = git_state
819 .read(cx)
820 .conflict_set
821 .as_ref()
822 .and_then(|weak| weak.upgrade())
823 {
824 let conflict_set = conflict_set;
825 let buffer_snapshot = buffer.read(cx).text_snapshot();
826
827 git_state.update(cx, |state, cx| {
828 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
829 });
830
831 return conflict_set;
832 }
833
834 let is_unmerged = self
835 .repository_and_path_for_buffer_id(buffer_id, cx)
836 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
837 let git_store = cx.weak_entity();
838 let buffer_git_state = self
839 .diffs
840 .entry(buffer_id)
841 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
842 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
843
844 self._subscriptions
845 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
846 cx.emit(GitStoreEvent::ConflictsUpdated);
847 }));
848
849 buffer_git_state.update(cx, |state, cx| {
850 state.conflict_set = Some(conflict_set.downgrade());
851 let buffer_snapshot = buffer.read(cx).text_snapshot();
852 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
853 });
854
855 conflict_set
856 }
857
858 pub fn project_path_git_status(
859 &self,
860 project_path: &ProjectPath,
861 cx: &App,
862 ) -> Option<FileStatus> {
863 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
864 Some(repo.read(cx).status_for_path(&repo_path)?.status)
865 }
866
867 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
868 let mut work_directory_abs_paths = Vec::new();
869 let mut checkpoints = Vec::new();
870 for repository in self.repositories.values() {
871 repository.update(cx, |repository, _| {
872 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
873 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
874 });
875 }
876
877 cx.background_executor().spawn(async move {
878 let checkpoints = future::try_join_all(checkpoints).await?;
879 Ok(GitStoreCheckpoint {
880 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
881 .into_iter()
882 .zip(checkpoints)
883 .collect(),
884 })
885 })
886 }
887
888 pub fn restore_checkpoint(
889 &self,
890 checkpoint: GitStoreCheckpoint,
891 cx: &mut App,
892 ) -> Task<Result<()>> {
893 let repositories_by_work_dir_abs_path = self
894 .repositories
895 .values()
896 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
897 .collect::<HashMap<_, _>>();
898
899 let mut tasks = Vec::new();
900 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
901 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
902 let restore = repository.update(cx, |repository, _| {
903 repository.restore_checkpoint(checkpoint)
904 });
905 tasks.push(async move { restore.await? });
906 }
907 }
908 cx.background_spawn(async move {
909 future::try_join_all(tasks).await?;
910 Ok(())
911 })
912 }
913
914 /// Compares two checkpoints, returning true if they are equal.
915 pub fn compare_checkpoints(
916 &self,
917 left: GitStoreCheckpoint,
918 mut right: GitStoreCheckpoint,
919 cx: &mut App,
920 ) -> Task<Result<bool>> {
921 let repositories_by_work_dir_abs_path = self
922 .repositories
923 .values()
924 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
925 .collect::<HashMap<_, _>>();
926
927 let mut tasks = Vec::new();
928 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
929 if let Some(right_checkpoint) = right
930 .checkpoints_by_work_dir_abs_path
931 .remove(&work_dir_abs_path)
932 {
933 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
934 {
935 let compare = repository.update(cx, |repository, _| {
936 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
937 });
938
939 tasks.push(async move { compare.await? });
940 }
941 } else {
942 return Task::ready(Ok(false));
943 }
944 }
945 cx.background_spawn(async move {
946 Ok(future::try_join_all(tasks)
947 .await?
948 .into_iter()
949 .all(|result| result))
950 })
951 }
952
953 /// Blames a buffer.
954 pub fn blame_buffer(
955 &self,
956 buffer: &Entity<Buffer>,
957 version: Option<clock::Global>,
958 cx: &mut App,
959 ) -> Task<Result<Option<Blame>>> {
960 let buffer = buffer.read(cx);
961 let Some((repo, repo_path)) =
962 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
963 else {
964 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
965 };
966 let content = match &version {
967 Some(version) => buffer.rope_for_version(version),
968 None => buffer.as_rope().clone(),
969 };
970 let version = version.unwrap_or(buffer.version());
971 let buffer_id = buffer.remote_id();
972
973 let rx = repo.update(cx, |repo, _| {
974 repo.send_job(None, move |state, _| async move {
975 match state {
976 RepositoryState::Local { backend, .. } => backend
977 .blame(repo_path.clone(), content)
978 .await
979 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
980 .map(Some),
981 RepositoryState::Remote { project_id, client } => {
982 let response = client
983 .request(proto::BlameBuffer {
984 project_id: project_id.to_proto(),
985 buffer_id: buffer_id.into(),
986 version: serialize_version(&version),
987 })
988 .await?;
989 Ok(deserialize_blame_buffer_response(response))
990 }
991 }
992 })
993 });
994
995 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
996 }
997
998 pub fn get_permalink_to_line(
999 &self,
1000 buffer: &Entity<Buffer>,
1001 selection: Range<u32>,
1002 cx: &mut App,
1003 ) -> Task<Result<url::Url>> {
1004 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1005 return Task::ready(Err(anyhow!("buffer has no file")));
1006 };
1007
1008 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1009 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1010 cx,
1011 ) else {
1012 // If we're not in a Git repo, check whether this is a Rust source
1013 // file in the Cargo registry (presumably opened with go-to-definition
1014 // from a normal Rust file). If so, we can put together a permalink
1015 // using crate metadata.
1016 if buffer
1017 .read(cx)
1018 .language()
1019 .is_none_or(|lang| lang.name() != "Rust".into())
1020 {
1021 return Task::ready(Err(anyhow!("no permalink available")));
1022 }
1023 let file_path = file.worktree.read(cx).absolutize(&file.path);
1024 return cx.spawn(async move |cx| {
1025 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1026 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1027 .context("no permalink available")
1028 });
1029 };
1030
1031 let buffer_id = buffer.read(cx).remote_id();
1032 let branch = repo.read(cx).branch.clone();
1033 let remote = branch
1034 .as_ref()
1035 .and_then(|b| b.upstream.as_ref())
1036 .and_then(|b| b.remote_name())
1037 .unwrap_or("origin")
1038 .to_string();
1039
1040 let rx = repo.update(cx, |repo, _| {
1041 repo.send_job(None, move |state, cx| async move {
1042 match state {
1043 RepositoryState::Local { backend, .. } => {
1044 let origin_url = backend
1045 .remote_url(&remote)
1046 .with_context(|| format!("remote \"{remote}\" not found"))?;
1047
1048 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1049
1050 let provider_registry =
1051 cx.update(GitHostingProviderRegistry::default_global)?;
1052
1053 let (provider, remote) =
1054 parse_git_remote_url(provider_registry, &origin_url)
1055 .context("parsing Git remote URL")?;
1056
1057 Ok(provider.build_permalink(
1058 remote,
1059 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1060 ))
1061 }
1062 RepositoryState::Remote { project_id, client } => {
1063 let response = client
1064 .request(proto::GetPermalinkToLine {
1065 project_id: project_id.to_proto(),
1066 buffer_id: buffer_id.into(),
1067 selection: Some(proto::Range {
1068 start: selection.start as u64,
1069 end: selection.end as u64,
1070 }),
1071 })
1072 .await?;
1073
1074 url::Url::parse(&response.permalink).context("failed to parse permalink")
1075 }
1076 }
1077 })
1078 });
1079 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1080 }
1081
1082 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1083 match &self.state {
1084 GitStoreState::Local {
1085 downstream: downstream_client,
1086 ..
1087 } => downstream_client
1088 .as_ref()
1089 .map(|state| (state.client.clone(), state.project_id)),
1090 GitStoreState::Remote {
1091 downstream: downstream_client,
1092 ..
1093 } => downstream_client.clone(),
1094 }
1095 }
1096
1097 fn upstream_client(&self) -> Option<AnyProtoClient> {
1098 match &self.state {
1099 GitStoreState::Local { .. } => None,
1100 GitStoreState::Remote {
1101 upstream_client, ..
1102 } => Some(upstream_client.clone()),
1103 }
1104 }
1105
1106 fn on_worktree_store_event(
1107 &mut self,
1108 worktree_store: Entity<WorktreeStore>,
1109 event: &WorktreeStoreEvent,
1110 cx: &mut Context<Self>,
1111 ) {
1112 let GitStoreState::Local {
1113 project_environment,
1114 downstream,
1115 next_repository_id,
1116 fs,
1117 } = &self.state
1118 else {
1119 return;
1120 };
1121
1122 match event {
1123 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1124 if let Some(worktree) = self
1125 .worktree_store
1126 .read(cx)
1127 .worktree_for_id(*worktree_id, cx)
1128 {
1129 let paths_by_git_repo =
1130 self.process_updated_entries(&worktree, updated_entries, cx);
1131 let downstream = downstream
1132 .as_ref()
1133 .map(|downstream| downstream.updates_tx.clone());
1134 cx.spawn(async move |_, cx| {
1135 let paths_by_git_repo = paths_by_git_repo.await;
1136 for (repo, paths) in paths_by_git_repo {
1137 repo.update(cx, |repo, cx| {
1138 repo.paths_changed(paths, downstream.clone(), cx);
1139 })
1140 .ok();
1141 }
1142 })
1143 .detach();
1144 }
1145 }
1146 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1147 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1148 else {
1149 return;
1150 };
1151 if !worktree.read(cx).is_visible() {
1152 log::debug!(
1153 "not adding repositories for local worktree {:?} because it's not visible",
1154 worktree.read(cx).abs_path()
1155 );
1156 return;
1157 }
1158 self.update_repositories_from_worktree(
1159 project_environment.clone(),
1160 next_repository_id.clone(),
1161 downstream
1162 .as_ref()
1163 .map(|downstream| downstream.updates_tx.clone()),
1164 changed_repos.clone(),
1165 fs.clone(),
1166 cx,
1167 );
1168 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1169 }
1170 _ => {}
1171 }
1172 }
1173 fn on_repository_event(
1174 &mut self,
1175 repo: Entity<Repository>,
1176 event: &RepositoryEvent,
1177 cx: &mut Context<Self>,
1178 ) {
1179 let id = repo.read(cx).id;
1180 let repo_snapshot = repo.read(cx).snapshot.clone();
1181 for (buffer_id, diff) in self.diffs.iter() {
1182 if let Some((buffer_repo, repo_path)) =
1183 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1184 && buffer_repo == repo
1185 {
1186 diff.update(cx, |diff, cx| {
1187 if let Some(conflict_set) = &diff.conflict_set {
1188 let conflict_status_changed =
1189 conflict_set.update(cx, |conflict_set, cx| {
1190 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1191 conflict_set.set_has_conflict(has_conflict, cx)
1192 })?;
1193 if conflict_status_changed {
1194 let buffer_store = self.buffer_store.read(cx);
1195 if let Some(buffer) = buffer_store.get(*buffer_id) {
1196 let _ = diff
1197 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1198 }
1199 }
1200 }
1201 anyhow::Ok(())
1202 })
1203 .ok();
1204 }
1205 }
1206 cx.emit(GitStoreEvent::RepositoryUpdated(
1207 id,
1208 event.clone(),
1209 self.active_repo_id == Some(id),
1210 ))
1211 }
1212
1213 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1214 cx.emit(GitStoreEvent::JobsUpdated)
1215 }
1216
1217 /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
1218 fn update_repositories_from_worktree(
1219 &mut self,
1220 project_environment: Entity<ProjectEnvironment>,
1221 next_repository_id: Arc<AtomicU64>,
1222 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1223 updated_git_repositories: UpdatedGitRepositoriesSet,
1224 fs: Arc<dyn Fs>,
1225 cx: &mut Context<Self>,
1226 ) {
1227 let mut removed_ids = Vec::new();
1228 for update in updated_git_repositories.iter() {
1229 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1230 let existing_work_directory_abs_path =
1231 repo.read(cx).work_directory_abs_path.clone();
1232 Some(&existing_work_directory_abs_path)
1233 == update.old_work_directory_abs_path.as_ref()
1234 || Some(&existing_work_directory_abs_path)
1235 == update.new_work_directory_abs_path.as_ref()
1236 }) {
1237 if let Some(new_work_directory_abs_path) =
1238 update.new_work_directory_abs_path.clone()
1239 {
1240 existing.update(cx, |existing, cx| {
1241 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1242 existing.schedule_scan(updates_tx.clone(), cx);
1243 });
1244 } else {
1245 removed_ids.push(*id);
1246 }
1247 } else if let UpdatedGitRepository {
1248 new_work_directory_abs_path: Some(work_directory_abs_path),
1249 dot_git_abs_path: Some(dot_git_abs_path),
1250 repository_dir_abs_path: Some(repository_dir_abs_path),
1251 common_dir_abs_path: Some(common_dir_abs_path),
1252 ..
1253 } = update
1254 {
1255 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1256 let git_store = cx.weak_entity();
1257 let repo = cx.new(|cx| {
1258 let mut repo = Repository::local(
1259 id,
1260 work_directory_abs_path.clone(),
1261 dot_git_abs_path.clone(),
1262 repository_dir_abs_path.clone(),
1263 common_dir_abs_path.clone(),
1264 project_environment.downgrade(),
1265 fs.clone(),
1266 git_store,
1267 cx,
1268 );
1269 repo.schedule_scan(updates_tx.clone(), cx);
1270 repo
1271 });
1272 self._subscriptions
1273 .push(cx.subscribe(&repo, Self::on_repository_event));
1274 self._subscriptions
1275 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1276 self.repositories.insert(id, repo);
1277 cx.emit(GitStoreEvent::RepositoryAdded);
1278 self.active_repo_id.get_or_insert_with(|| {
1279 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1280 id
1281 });
1282 }
1283 }
1284
1285 for id in removed_ids {
1286 if self.active_repo_id == Some(id) {
1287 self.active_repo_id = None;
1288 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1289 }
1290 self.repositories.remove(&id);
1291 if let Some(updates_tx) = updates_tx.as_ref() {
1292 updates_tx
1293 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1294 .ok();
1295 }
1296 }
1297 }
1298
1299 fn on_buffer_store_event(
1300 &mut self,
1301 _: Entity<BufferStore>,
1302 event: &BufferStoreEvent,
1303 cx: &mut Context<Self>,
1304 ) {
1305 match event {
1306 BufferStoreEvent::BufferAdded(buffer) => {
1307 cx.subscribe(buffer, |this, buffer, event, cx| {
1308 if let BufferEvent::LanguageChanged = event {
1309 let buffer_id = buffer.read(cx).remote_id();
1310 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1311 diff_state.update(cx, |diff_state, cx| {
1312 diff_state.buffer_language_changed(buffer, cx);
1313 });
1314 }
1315 }
1316 })
1317 .detach();
1318 }
1319 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1320 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1321 diffs.remove(buffer_id);
1322 }
1323 }
1324 BufferStoreEvent::BufferDropped(buffer_id) => {
1325 self.diffs.remove(buffer_id);
1326 for diffs in self.shared_diffs.values_mut() {
1327 diffs.remove(buffer_id);
1328 }
1329 }
1330
1331 _ => {}
1332 }
1333 }
1334
1335 pub fn recalculate_buffer_diffs(
1336 &mut self,
1337 buffers: Vec<Entity<Buffer>>,
1338 cx: &mut Context<Self>,
1339 ) -> impl Future<Output = ()> + use<> {
1340 let mut futures = Vec::new();
1341 for buffer in buffers {
1342 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1343 let buffer = buffer.read(cx).text_snapshot();
1344 diff_state.update(cx, |diff_state, cx| {
1345 diff_state.recalculate_diffs(buffer.clone(), cx);
1346 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1347 });
1348 futures.push(diff_state.update(cx, |diff_state, cx| {
1349 diff_state
1350 .reparse_conflict_markers(buffer, cx)
1351 .map(|_| {})
1352 .boxed()
1353 }));
1354 }
1355 }
1356 async move {
1357 futures::future::join_all(futures).await;
1358 }
1359 }
1360
1361 fn on_buffer_diff_event(
1362 &mut self,
1363 diff: Entity<buffer_diff::BufferDiff>,
1364 event: &BufferDiffEvent,
1365 cx: &mut Context<Self>,
1366 ) {
1367 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1368 let buffer_id = diff.read(cx).buffer_id;
1369 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1370 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1371 diff_state.hunk_staging_operation_count += 1;
1372 diff_state.hunk_staging_operation_count
1373 });
1374 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1375 let recv = repo.update(cx, |repo, cx| {
1376 log::debug!("hunks changed for {}", path.as_unix_str());
1377 repo.spawn_set_index_text_job(
1378 path,
1379 new_index_text.as_ref().map(|rope| rope.to_string()),
1380 Some(hunk_staging_operation_count),
1381 cx,
1382 )
1383 });
1384 let diff = diff.downgrade();
1385 cx.spawn(async move |this, cx| {
1386 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1387 diff.update(cx, |diff, cx| {
1388 diff.clear_pending_hunks(cx);
1389 })
1390 .ok();
1391 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1392 .ok();
1393 }
1394 })
1395 .detach();
1396 }
1397 }
1398 }
1399 }
1400
1401 fn local_worktree_git_repos_changed(
1402 &mut self,
1403 worktree: Entity<Worktree>,
1404 changed_repos: &UpdatedGitRepositoriesSet,
1405 cx: &mut Context<Self>,
1406 ) {
1407 log::debug!("local worktree repos changed");
1408 debug_assert!(worktree.read(cx).is_local());
1409
1410 for repository in self.repositories.values() {
1411 repository.update(cx, |repository, cx| {
1412 let repo_abs_path = &repository.work_directory_abs_path;
1413 if changed_repos.iter().any(|update| {
1414 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1415 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1416 }) {
1417 repository.reload_buffer_diff_bases(cx);
1418 }
1419 });
1420 }
1421 }
1422
1423 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1424 &self.repositories
1425 }
1426
1427 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1428 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1429 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1430 Some(status.status)
1431 }
1432
1433 pub fn repository_and_path_for_buffer_id(
1434 &self,
1435 buffer_id: BufferId,
1436 cx: &App,
1437 ) -> Option<(Entity<Repository>, RepoPath)> {
1438 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1439 let project_path = buffer.read(cx).project_path(cx)?;
1440 self.repository_and_path_for_project_path(&project_path, cx)
1441 }
1442
1443 pub fn repository_and_path_for_project_path(
1444 &self,
1445 path: &ProjectPath,
1446 cx: &App,
1447 ) -> Option<(Entity<Repository>, RepoPath)> {
1448 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1449 self.repositories
1450 .values()
1451 .filter_map(|repo| {
1452 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1453 Some((repo.clone(), repo_path))
1454 })
1455 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1456 }
1457
1458 pub fn git_init(
1459 &self,
1460 path: Arc<Path>,
1461 fallback_branch_name: String,
1462 cx: &App,
1463 ) -> Task<Result<()>> {
1464 match &self.state {
1465 GitStoreState::Local { fs, .. } => {
1466 let fs = fs.clone();
1467 cx.background_executor()
1468 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1469 }
1470 GitStoreState::Remote {
1471 upstream_client,
1472 upstream_project_id: project_id,
1473 ..
1474 } => {
1475 let client = upstream_client.clone();
1476 let project_id = *project_id;
1477 cx.background_executor().spawn(async move {
1478 client
1479 .request(proto::GitInit {
1480 project_id: project_id,
1481 abs_path: path.to_string_lossy().into_owned(),
1482 fallback_branch_name,
1483 })
1484 .await?;
1485 Ok(())
1486 })
1487 }
1488 }
1489 }
1490
1491 pub fn git_clone(
1492 &self,
1493 repo: String,
1494 path: impl Into<Arc<std::path::Path>>,
1495 cx: &App,
1496 ) -> Task<Result<()>> {
1497 let path = path.into();
1498 match &self.state {
1499 GitStoreState::Local { fs, .. } => {
1500 let fs = fs.clone();
1501 cx.background_executor()
1502 .spawn(async move { fs.git_clone(&repo, &path).await })
1503 }
1504 GitStoreState::Remote {
1505 upstream_client,
1506 upstream_project_id,
1507 ..
1508 } => {
1509 if upstream_client.is_via_collab() {
1510 return Task::ready(Err(anyhow!(
1511 "Git Clone isn't supported for project guests"
1512 )));
1513 }
1514 let request = upstream_client.request(proto::GitClone {
1515 project_id: *upstream_project_id,
1516 abs_path: path.to_string_lossy().into_owned(),
1517 remote_repo: repo,
1518 });
1519
1520 cx.background_spawn(async move {
1521 let result = request.await?;
1522
1523 match result.success {
1524 true => Ok(()),
1525 false => Err(anyhow!("Git Clone failed")),
1526 }
1527 })
1528 }
1529 }
1530 }
1531
1532 async fn handle_update_repository(
1533 this: Entity<Self>,
1534 envelope: TypedEnvelope<proto::UpdateRepository>,
1535 mut cx: AsyncApp,
1536 ) -> Result<()> {
1537 this.update(&mut cx, |this, cx| {
1538 let path_style = this.worktree_store.read(cx).path_style();
1539 let mut update = envelope.payload;
1540
1541 let id = RepositoryId::from_proto(update.id);
1542 let client = this.upstream_client().context("no upstream client")?;
1543
1544 let mut repo_subscription = None;
1545 let repo = this.repositories.entry(id).or_insert_with(|| {
1546 let git_store = cx.weak_entity();
1547 let repo = cx.new(|cx| {
1548 Repository::remote(
1549 id,
1550 Path::new(&update.abs_path).into(),
1551 path_style,
1552 ProjectId(update.project_id),
1553 client,
1554 git_store,
1555 cx,
1556 )
1557 });
1558 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1559 cx.emit(GitStoreEvent::RepositoryAdded);
1560 repo
1561 });
1562 this._subscriptions.extend(repo_subscription);
1563
1564 repo.update(cx, {
1565 let update = update.clone();
1566 |repo, cx| repo.apply_remote_update(update, cx)
1567 })?;
1568
1569 this.active_repo_id.get_or_insert_with(|| {
1570 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1571 id
1572 });
1573
1574 if let Some((client, project_id)) = this.downstream_client() {
1575 update.project_id = project_id.to_proto();
1576 client.send(update).log_err();
1577 }
1578 Ok(())
1579 })?
1580 }
1581
1582 async fn handle_remove_repository(
1583 this: Entity<Self>,
1584 envelope: TypedEnvelope<proto::RemoveRepository>,
1585 mut cx: AsyncApp,
1586 ) -> Result<()> {
1587 this.update(&mut cx, |this, cx| {
1588 let mut update = envelope.payload;
1589 let id = RepositoryId::from_proto(update.id);
1590 this.repositories.remove(&id);
1591 if let Some((client, project_id)) = this.downstream_client() {
1592 update.project_id = project_id.to_proto();
1593 client.send(update).log_err();
1594 }
1595 if this.active_repo_id == Some(id) {
1596 this.active_repo_id = None;
1597 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1598 }
1599 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1600 })
1601 }
1602
1603 async fn handle_git_init(
1604 this: Entity<Self>,
1605 envelope: TypedEnvelope<proto::GitInit>,
1606 cx: AsyncApp,
1607 ) -> Result<proto::Ack> {
1608 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1609 let name = envelope.payload.fallback_branch_name;
1610 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1611 .await?;
1612
1613 Ok(proto::Ack {})
1614 }
1615
1616 async fn handle_git_clone(
1617 this: Entity<Self>,
1618 envelope: TypedEnvelope<proto::GitClone>,
1619 cx: AsyncApp,
1620 ) -> Result<proto::GitCloneResponse> {
1621 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1622 let repo_name = envelope.payload.remote_repo;
1623 let result = cx
1624 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1625 .await;
1626
1627 Ok(proto::GitCloneResponse {
1628 success: result.is_ok(),
1629 })
1630 }
1631
1632 async fn handle_fetch(
1633 this: Entity<Self>,
1634 envelope: TypedEnvelope<proto::Fetch>,
1635 mut cx: AsyncApp,
1636 ) -> Result<proto::RemoteMessageResponse> {
1637 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1638 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1639 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1640 let askpass_id = envelope.payload.askpass_id;
1641
1642 let askpass = make_remote_delegate(
1643 this,
1644 envelope.payload.project_id,
1645 repository_id,
1646 askpass_id,
1647 &mut cx,
1648 );
1649
1650 let remote_output = repository_handle
1651 .update(&mut cx, |repository_handle, cx| {
1652 repository_handle.fetch(fetch_options, askpass, cx)
1653 })?
1654 .await??;
1655
1656 Ok(proto::RemoteMessageResponse {
1657 stdout: remote_output.stdout,
1658 stderr: remote_output.stderr,
1659 })
1660 }
1661
1662 async fn handle_push(
1663 this: Entity<Self>,
1664 envelope: TypedEnvelope<proto::Push>,
1665 mut cx: AsyncApp,
1666 ) -> Result<proto::RemoteMessageResponse> {
1667 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1668 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1669
1670 let askpass_id = envelope.payload.askpass_id;
1671 let askpass = make_remote_delegate(
1672 this,
1673 envelope.payload.project_id,
1674 repository_id,
1675 askpass_id,
1676 &mut cx,
1677 );
1678
1679 let options = envelope
1680 .payload
1681 .options
1682 .as_ref()
1683 .map(|_| match envelope.payload.options() {
1684 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1685 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1686 });
1687
1688 let branch_name = envelope.payload.branch_name.into();
1689 let remote_name = envelope.payload.remote_name.into();
1690
1691 let remote_output = repository_handle
1692 .update(&mut cx, |repository_handle, cx| {
1693 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1694 })?
1695 .await??;
1696 Ok(proto::RemoteMessageResponse {
1697 stdout: remote_output.stdout,
1698 stderr: remote_output.stderr,
1699 })
1700 }
1701
1702 async fn handle_pull(
1703 this: Entity<Self>,
1704 envelope: TypedEnvelope<proto::Pull>,
1705 mut cx: AsyncApp,
1706 ) -> Result<proto::RemoteMessageResponse> {
1707 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1708 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1709 let askpass_id = envelope.payload.askpass_id;
1710 let askpass = make_remote_delegate(
1711 this,
1712 envelope.payload.project_id,
1713 repository_id,
1714 askpass_id,
1715 &mut cx,
1716 );
1717
1718 let branch_name = envelope.payload.branch_name.into();
1719 let remote_name = envelope.payload.remote_name.into();
1720
1721 let remote_message = repository_handle
1722 .update(&mut cx, |repository_handle, cx| {
1723 repository_handle.pull(branch_name, remote_name, askpass, cx)
1724 })?
1725 .await??;
1726
1727 Ok(proto::RemoteMessageResponse {
1728 stdout: remote_message.stdout,
1729 stderr: remote_message.stderr,
1730 })
1731 }
1732
1733 async fn handle_stage(
1734 this: Entity<Self>,
1735 envelope: TypedEnvelope<proto::Stage>,
1736 mut cx: AsyncApp,
1737 ) -> Result<proto::Ack> {
1738 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1739 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1740
1741 let entries = envelope
1742 .payload
1743 .paths
1744 .into_iter()
1745 .map(|path| RepoPath::new(&path))
1746 .collect::<Result<Vec<_>>>()?;
1747
1748 repository_handle
1749 .update(&mut cx, |repository_handle, cx| {
1750 repository_handle.stage_entries(entries, cx)
1751 })?
1752 .await?;
1753 Ok(proto::Ack {})
1754 }
1755
1756 async fn handle_unstage(
1757 this: Entity<Self>,
1758 envelope: TypedEnvelope<proto::Unstage>,
1759 mut cx: AsyncApp,
1760 ) -> Result<proto::Ack> {
1761 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1762 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1763
1764 let entries = envelope
1765 .payload
1766 .paths
1767 .into_iter()
1768 .map(|path| RepoPath::new(&path))
1769 .collect::<Result<Vec<_>>>()?;
1770
1771 repository_handle
1772 .update(&mut cx, |repository_handle, cx| {
1773 repository_handle.unstage_entries(entries, cx)
1774 })?
1775 .await?;
1776
1777 Ok(proto::Ack {})
1778 }
1779
1780 async fn handle_stash(
1781 this: Entity<Self>,
1782 envelope: TypedEnvelope<proto::Stash>,
1783 mut cx: AsyncApp,
1784 ) -> Result<proto::Ack> {
1785 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1786 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1787
1788 let entries = envelope
1789 .payload
1790 .paths
1791 .into_iter()
1792 .map(|path| RepoPath::new(&path))
1793 .collect::<Result<Vec<_>>>()?;
1794
1795 repository_handle
1796 .update(&mut cx, |repository_handle, cx| {
1797 repository_handle.stash_entries(entries, cx)
1798 })?
1799 .await?;
1800
1801 Ok(proto::Ack {})
1802 }
1803
1804 async fn handle_stash_pop(
1805 this: Entity<Self>,
1806 envelope: TypedEnvelope<proto::StashPop>,
1807 mut cx: AsyncApp,
1808 ) -> Result<proto::Ack> {
1809 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1810 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1811 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1812
1813 repository_handle
1814 .update(&mut cx, |repository_handle, cx| {
1815 repository_handle.stash_pop(stash_index, cx)
1816 })?
1817 .await?;
1818
1819 Ok(proto::Ack {})
1820 }
1821
1822 async fn handle_stash_apply(
1823 this: Entity<Self>,
1824 envelope: TypedEnvelope<proto::StashApply>,
1825 mut cx: AsyncApp,
1826 ) -> Result<proto::Ack> {
1827 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1828 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1829 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1830
1831 repository_handle
1832 .update(&mut cx, |repository_handle, cx| {
1833 repository_handle.stash_apply(stash_index, cx)
1834 })?
1835 .await?;
1836
1837 Ok(proto::Ack {})
1838 }
1839
1840 async fn handle_stash_drop(
1841 this: Entity<Self>,
1842 envelope: TypedEnvelope<proto::StashDrop>,
1843 mut cx: AsyncApp,
1844 ) -> Result<proto::Ack> {
1845 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1846 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1847 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1848
1849 repository_handle
1850 .update(&mut cx, |repository_handle, cx| {
1851 repository_handle.stash_drop(stash_index, cx)
1852 })?
1853 .await??;
1854
1855 Ok(proto::Ack {})
1856 }
1857
1858 async fn handle_set_index_text(
1859 this: Entity<Self>,
1860 envelope: TypedEnvelope<proto::SetIndexText>,
1861 mut cx: AsyncApp,
1862 ) -> Result<proto::Ack> {
1863 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1864 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1865 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1866
1867 repository_handle
1868 .update(&mut cx, |repository_handle, cx| {
1869 repository_handle.spawn_set_index_text_job(
1870 repo_path,
1871 envelope.payload.text,
1872 None,
1873 cx,
1874 )
1875 })?
1876 .await??;
1877 Ok(proto::Ack {})
1878 }
1879
1880 async fn handle_commit(
1881 this: Entity<Self>,
1882 envelope: TypedEnvelope<proto::Commit>,
1883 mut cx: AsyncApp,
1884 ) -> Result<proto::Ack> {
1885 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1886 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1887
1888 let message = SharedString::from(envelope.payload.message);
1889 let name = envelope.payload.name.map(SharedString::from);
1890 let email = envelope.payload.email.map(SharedString::from);
1891 let options = envelope.payload.options.unwrap_or_default();
1892
1893 repository_handle
1894 .update(&mut cx, |repository_handle, cx| {
1895 repository_handle.commit(
1896 message,
1897 name.zip(email),
1898 CommitOptions {
1899 amend: options.amend,
1900 signoff: options.signoff,
1901 },
1902 cx,
1903 )
1904 })?
1905 .await??;
1906 Ok(proto::Ack {})
1907 }
1908
1909 async fn handle_get_remotes(
1910 this: Entity<Self>,
1911 envelope: TypedEnvelope<proto::GetRemotes>,
1912 mut cx: AsyncApp,
1913 ) -> Result<proto::GetRemotesResponse> {
1914 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1915 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1916
1917 let branch_name = envelope.payload.branch_name;
1918
1919 let remotes = repository_handle
1920 .update(&mut cx, |repository_handle, _| {
1921 repository_handle.get_remotes(branch_name)
1922 })?
1923 .await??;
1924
1925 Ok(proto::GetRemotesResponse {
1926 remotes: remotes
1927 .into_iter()
1928 .map(|remotes| proto::get_remotes_response::Remote {
1929 name: remotes.name.to_string(),
1930 })
1931 .collect::<Vec<_>>(),
1932 })
1933 }
1934
1935 async fn handle_get_branches(
1936 this: Entity<Self>,
1937 envelope: TypedEnvelope<proto::GitGetBranches>,
1938 mut cx: AsyncApp,
1939 ) -> Result<proto::GitBranchesResponse> {
1940 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1941 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1942
1943 let branches = repository_handle
1944 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1945 .await??;
1946
1947 Ok(proto::GitBranchesResponse {
1948 branches: branches
1949 .into_iter()
1950 .map(|branch| branch_to_proto(&branch))
1951 .collect::<Vec<_>>(),
1952 })
1953 }
1954 async fn handle_get_default_branch(
1955 this: Entity<Self>,
1956 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1957 mut cx: AsyncApp,
1958 ) -> Result<proto::GetDefaultBranchResponse> {
1959 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1960 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1961
1962 let branch = repository_handle
1963 .update(&mut cx, |repository_handle, _| {
1964 repository_handle.default_branch()
1965 })?
1966 .await??
1967 .map(Into::into);
1968
1969 Ok(proto::GetDefaultBranchResponse { branch })
1970 }
1971 async fn handle_create_branch(
1972 this: Entity<Self>,
1973 envelope: TypedEnvelope<proto::GitCreateBranch>,
1974 mut cx: AsyncApp,
1975 ) -> Result<proto::Ack> {
1976 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1977 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1978 let branch_name = envelope.payload.branch_name;
1979
1980 repository_handle
1981 .update(&mut cx, |repository_handle, _| {
1982 repository_handle.create_branch(branch_name)
1983 })?
1984 .await??;
1985
1986 Ok(proto::Ack {})
1987 }
1988
1989 async fn handle_change_branch(
1990 this: Entity<Self>,
1991 envelope: TypedEnvelope<proto::GitChangeBranch>,
1992 mut cx: AsyncApp,
1993 ) -> Result<proto::Ack> {
1994 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1995 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1996 let branch_name = envelope.payload.branch_name;
1997
1998 repository_handle
1999 .update(&mut cx, |repository_handle, _| {
2000 repository_handle.change_branch(branch_name)
2001 })?
2002 .await??;
2003
2004 Ok(proto::Ack {})
2005 }
2006
2007 async fn handle_rename_branch(
2008 this: Entity<Self>,
2009 envelope: TypedEnvelope<proto::GitRenameBranch>,
2010 mut cx: AsyncApp,
2011 ) -> Result<proto::Ack> {
2012 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2013 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2014 let branch = envelope.payload.branch;
2015 let new_name = envelope.payload.new_name;
2016
2017 repository_handle
2018 .update(&mut cx, |repository_handle, _| {
2019 repository_handle.rename_branch(branch, new_name)
2020 })?
2021 .await??;
2022
2023 Ok(proto::Ack {})
2024 }
2025
2026 async fn handle_show(
2027 this: Entity<Self>,
2028 envelope: TypedEnvelope<proto::GitShow>,
2029 mut cx: AsyncApp,
2030 ) -> Result<proto::GitCommitDetails> {
2031 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2032 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2033
2034 let commit = repository_handle
2035 .update(&mut cx, |repository_handle, _| {
2036 repository_handle.show(envelope.payload.commit)
2037 })?
2038 .await??;
2039 Ok(proto::GitCommitDetails {
2040 sha: commit.sha.into(),
2041 message: commit.message.into(),
2042 commit_timestamp: commit.commit_timestamp,
2043 author_email: commit.author_email.into(),
2044 author_name: commit.author_name.into(),
2045 })
2046 }
2047
2048 async fn handle_load_commit_diff(
2049 this: Entity<Self>,
2050 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2051 mut cx: AsyncApp,
2052 ) -> Result<proto::LoadCommitDiffResponse> {
2053 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2054 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2055
2056 let commit_diff = repository_handle
2057 .update(&mut cx, |repository_handle, _| {
2058 repository_handle.load_commit_diff(envelope.payload.commit)
2059 })?
2060 .await??;
2061 Ok(proto::LoadCommitDiffResponse {
2062 files: commit_diff
2063 .files
2064 .into_iter()
2065 .map(|file| proto::CommitFile {
2066 path: file.path.to_proto(),
2067 old_text: file.old_text,
2068 new_text: file.new_text,
2069 })
2070 .collect(),
2071 })
2072 }
2073
2074 async fn handle_reset(
2075 this: Entity<Self>,
2076 envelope: TypedEnvelope<proto::GitReset>,
2077 mut cx: AsyncApp,
2078 ) -> Result<proto::Ack> {
2079 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2080 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2081
2082 let mode = match envelope.payload.mode() {
2083 git_reset::ResetMode::Soft => ResetMode::Soft,
2084 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2085 };
2086
2087 repository_handle
2088 .update(&mut cx, |repository_handle, cx| {
2089 repository_handle.reset(envelope.payload.commit, mode, cx)
2090 })?
2091 .await??;
2092 Ok(proto::Ack {})
2093 }
2094
2095 async fn handle_checkout_files(
2096 this: Entity<Self>,
2097 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2098 mut cx: AsyncApp,
2099 ) -> Result<proto::Ack> {
2100 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2101 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2102 let paths = envelope
2103 .payload
2104 .paths
2105 .iter()
2106 .map(|s| RepoPath::from_proto(s))
2107 .collect::<Result<Vec<_>>>()?;
2108
2109 repository_handle
2110 .update(&mut cx, |repository_handle, cx| {
2111 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2112 })?
2113 .await??;
2114 Ok(proto::Ack {})
2115 }
2116
2117 async fn handle_open_commit_message_buffer(
2118 this: Entity<Self>,
2119 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2120 mut cx: AsyncApp,
2121 ) -> Result<proto::OpenBufferResponse> {
2122 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2123 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2124 let buffer = repository
2125 .update(&mut cx, |repository, cx| {
2126 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2127 })?
2128 .await?;
2129
2130 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2131 this.update(&mut cx, |this, cx| {
2132 this.buffer_store.update(cx, |buffer_store, cx| {
2133 buffer_store
2134 .create_buffer_for_peer(
2135 &buffer,
2136 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2137 cx,
2138 )
2139 .detach_and_log_err(cx);
2140 })
2141 })?;
2142
2143 Ok(proto::OpenBufferResponse {
2144 buffer_id: buffer_id.to_proto(),
2145 })
2146 }
2147
2148 async fn handle_askpass(
2149 this: Entity<Self>,
2150 envelope: TypedEnvelope<proto::AskPassRequest>,
2151 mut cx: AsyncApp,
2152 ) -> Result<proto::AskPassResponse> {
2153 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2154 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2155
2156 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2157 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2158 debug_panic!("no askpass found");
2159 anyhow::bail!("no askpass found");
2160 };
2161
2162 let response = askpass
2163 .ask_password(envelope.payload.prompt)
2164 .await
2165 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2166
2167 delegates
2168 .lock()
2169 .insert(envelope.payload.askpass_id, askpass);
2170
2171 // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
2172 Ok(proto::AskPassResponse {
2173 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2174 })
2175 }
2176
2177 async fn handle_check_for_pushed_commits(
2178 this: Entity<Self>,
2179 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2180 mut cx: AsyncApp,
2181 ) -> Result<proto::CheckForPushedCommitsResponse> {
2182 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2183 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2184
2185 let branches = repository_handle
2186 .update(&mut cx, |repository_handle, _| {
2187 repository_handle.check_for_pushed_commits()
2188 })?
2189 .await??;
2190 Ok(proto::CheckForPushedCommitsResponse {
2191 pushed_to: branches
2192 .into_iter()
2193 .map(|commit| commit.to_string())
2194 .collect(),
2195 })
2196 }
2197
2198 async fn handle_git_diff(
2199 this: Entity<Self>,
2200 envelope: TypedEnvelope<proto::GitDiff>,
2201 mut cx: AsyncApp,
2202 ) -> Result<proto::GitDiffResponse> {
2203 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2204 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2205 let diff_type = match envelope.payload.diff_type() {
2206 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2207 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2208 };
2209
2210 let mut diff = repository_handle
2211 .update(&mut cx, |repository_handle, cx| {
2212 repository_handle.diff(diff_type, cx)
2213 })?
2214 .await??;
2215 const ONE_MB: usize = 1_000_000;
2216 if diff.len() > ONE_MB {
2217 diff = diff.chars().take(ONE_MB).collect()
2218 }
2219
2220 Ok(proto::GitDiffResponse { diff })
2221 }
2222
2223 async fn handle_tree_diff(
2224 this: Entity<Self>,
2225 request: TypedEnvelope<proto::GetTreeDiff>,
2226 mut cx: AsyncApp,
2227 ) -> Result<proto::GetTreeDiffResponse> {
2228 let repository_id = RepositoryId(request.payload.repository_id);
2229 let diff_type = if request.payload.is_merge {
2230 DiffTreeType::MergeBase {
2231 base: request.payload.base.into(),
2232 head: request.payload.head.into(),
2233 }
2234 } else {
2235 DiffTreeType::Since {
2236 base: request.payload.base.into(),
2237 head: request.payload.head.into(),
2238 }
2239 };
2240
2241 let diff = this
2242 .update(&mut cx, |this, cx| {
2243 let repository = this.repositories().get(&repository_id)?;
2244 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2245 })?
2246 .context("missing repository")?
2247 .await??;
2248
2249 Ok(proto::GetTreeDiffResponse {
2250 entries: diff
2251 .entries
2252 .into_iter()
2253 .map(|(path, status)| proto::TreeDiffStatus {
2254 path: path.0.to_proto(),
2255 status: match status {
2256 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2257 TreeDiffStatus::Modified { .. } => {
2258 proto::tree_diff_status::Status::Modified.into()
2259 }
2260 TreeDiffStatus::Deleted { .. } => {
2261 proto::tree_diff_status::Status::Deleted.into()
2262 }
2263 },
2264 oid: match status {
2265 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2266 Some(old.to_string())
2267 }
2268 TreeDiffStatus::Added => None,
2269 },
2270 })
2271 .collect(),
2272 })
2273 }
2274
2275 async fn handle_get_blob_content(
2276 this: Entity<Self>,
2277 request: TypedEnvelope<proto::GetBlobContent>,
2278 mut cx: AsyncApp,
2279 ) -> Result<proto::GetBlobContentResponse> {
2280 let oid = git::Oid::from_str(&request.payload.oid)?;
2281 let repository_id = RepositoryId(request.payload.repository_id);
2282 let content = this
2283 .update(&mut cx, |this, cx| {
2284 let repository = this.repositories().get(&repository_id)?;
2285 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2286 })?
2287 .context("missing repository")?
2288 .await?;
2289 Ok(proto::GetBlobContentResponse { content })
2290 }
2291
2292 async fn handle_open_unstaged_diff(
2293 this: Entity<Self>,
2294 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2295 mut cx: AsyncApp,
2296 ) -> Result<proto::OpenUnstagedDiffResponse> {
2297 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2298 let diff = this
2299 .update(&mut cx, |this, cx| {
2300 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2301 Some(this.open_unstaged_diff(buffer, cx))
2302 })?
2303 .context("missing buffer")?
2304 .await?;
2305 this.update(&mut cx, |this, _| {
2306 let shared_diffs = this
2307 .shared_diffs
2308 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2309 .or_default();
2310 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2311 })?;
2312 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2313 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2314 }
2315
2316 async fn handle_open_uncommitted_diff(
2317 this: Entity<Self>,
2318 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2319 mut cx: AsyncApp,
2320 ) -> Result<proto::OpenUncommittedDiffResponse> {
2321 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2322 let diff = this
2323 .update(&mut cx, |this, cx| {
2324 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2325 Some(this.open_uncommitted_diff(buffer, cx))
2326 })?
2327 .context("missing buffer")?
2328 .await?;
2329 this.update(&mut cx, |this, _| {
2330 let shared_diffs = this
2331 .shared_diffs
2332 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2333 .or_default();
2334 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2335 })?;
2336 diff.read_with(&cx, |diff, cx| {
2337 use proto::open_uncommitted_diff_response::Mode;
2338
2339 let unstaged_diff = diff.secondary_diff();
2340 let index_snapshot = unstaged_diff.and_then(|diff| {
2341 let diff = diff.read(cx);
2342 diff.base_text_exists().then(|| diff.base_text())
2343 });
2344
2345 let mode;
2346 let staged_text;
2347 let committed_text;
2348 if diff.base_text_exists() {
2349 let committed_snapshot = diff.base_text();
2350 committed_text = Some(committed_snapshot.text());
2351 if let Some(index_text) = index_snapshot {
2352 if index_text.remote_id() == committed_snapshot.remote_id() {
2353 mode = Mode::IndexMatchesHead;
2354 staged_text = None;
2355 } else {
2356 mode = Mode::IndexAndHead;
2357 staged_text = Some(index_text.text());
2358 }
2359 } else {
2360 mode = Mode::IndexAndHead;
2361 staged_text = None;
2362 }
2363 } else {
2364 mode = Mode::IndexAndHead;
2365 committed_text = None;
2366 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2367 }
2368
2369 proto::OpenUncommittedDiffResponse {
2370 committed_text,
2371 staged_text,
2372 mode: mode.into(),
2373 }
2374 })
2375 }
2376
2377 async fn handle_update_diff_bases(
2378 this: Entity<Self>,
2379 request: TypedEnvelope<proto::UpdateDiffBases>,
2380 mut cx: AsyncApp,
2381 ) -> Result<()> {
2382 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2383 this.update(&mut cx, |this, cx| {
2384 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2385 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2386 {
2387 let buffer = buffer.read(cx).text_snapshot();
2388 diff_state.update(cx, |diff_state, cx| {
2389 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2390 })
2391 }
2392 })
2393 }
2394
2395 async fn handle_blame_buffer(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::BlameBuffer>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::BlameBufferResponse> {
2400 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2401 let version = deserialize_version(&envelope.payload.version);
2402 let buffer = this.read_with(&cx, |this, cx| {
2403 this.buffer_store.read(cx).get_existing(buffer_id)
2404 })??;
2405 buffer
2406 .update(&mut cx, |buffer, _| {
2407 buffer.wait_for_version(version.clone())
2408 })?
2409 .await?;
2410 let blame = this
2411 .update(&mut cx, |this, cx| {
2412 this.blame_buffer(&buffer, Some(version), cx)
2413 })?
2414 .await?;
2415 Ok(serialize_blame_buffer_response(blame))
2416 }
2417
2418 async fn handle_get_permalink_to_line(
2419 this: Entity<Self>,
2420 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2421 mut cx: AsyncApp,
2422 ) -> Result<proto::GetPermalinkToLineResponse> {
2423 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2424 // let version = deserialize_version(&envelope.payload.version);
2425 let selection = {
2426 let proto_selection = envelope
2427 .payload
2428 .selection
2429 .context("no selection to get permalink for defined")?;
2430 proto_selection.start as u32..proto_selection.end as u32
2431 };
2432 let buffer = this.read_with(&cx, |this, cx| {
2433 this.buffer_store.read(cx).get_existing(buffer_id)
2434 })??;
2435 let permalink = this
2436 .update(&mut cx, |this, cx| {
2437 this.get_permalink_to_line(&buffer, selection, cx)
2438 })?
2439 .await?;
2440 Ok(proto::GetPermalinkToLineResponse {
2441 permalink: permalink.to_string(),
2442 })
2443 }
2444
2445 fn repository_for_request(
2446 this: &Entity<Self>,
2447 id: RepositoryId,
2448 cx: &mut AsyncApp,
2449 ) -> Result<Entity<Repository>> {
2450 this.read_with(cx, |this, _| {
2451 this.repositories
2452 .get(&id)
2453 .context("missing repository handle")
2454 .cloned()
2455 })?
2456 }
2457
2458 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2459 self.repositories
2460 .iter()
2461 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2462 .collect()
2463 }
2464
2465 fn process_updated_entries(
2466 &self,
2467 worktree: &Entity<Worktree>,
2468 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2469 cx: &mut App,
2470 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2471 let path_style = worktree.read(cx).path_style();
2472 let mut repo_paths = self
2473 .repositories
2474 .values()
2475 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2476 .collect::<Vec<_>>();
2477 let mut entries: Vec<_> = updated_entries
2478 .iter()
2479 .map(|(path, _, _)| path.clone())
2480 .collect();
2481 entries.sort();
2482 let worktree = worktree.read(cx);
2483
2484 let entries = entries
2485 .into_iter()
2486 .map(|path| worktree.absolutize(&path))
2487 .collect::<Arc<[_]>>();
2488
2489 let executor = cx.background_executor().clone();
2490 cx.background_executor().spawn(async move {
2491 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2492 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2493 let mut tasks = FuturesOrdered::new();
2494 for (repo_path, repo) in repo_paths.into_iter().rev() {
2495 let entries = entries.clone();
2496 let task = executor.spawn(async move {
2497 // Find all repository paths that belong to this repo
2498 let mut ix = entries.partition_point(|path| path < &*repo_path);
2499 if ix == entries.len() {
2500 return None;
2501 };
2502
2503 let mut paths = Vec::new();
2504 // All paths prefixed by a given repo will constitute a continuous range.
2505 while let Some(path) = entries.get(ix)
2506 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2507 &repo_path, path, path_style,
2508 )
2509 {
2510 paths.push((repo_path, ix));
2511 ix += 1;
2512 }
2513 if paths.is_empty() {
2514 None
2515 } else {
2516 Some((repo, paths))
2517 }
2518 });
2519 tasks.push_back(task);
2520 }
2521
2522 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2523 let mut path_was_used = vec![false; entries.len()];
2524 let tasks = tasks.collect::<Vec<_>>().await;
2525 // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
2526 // We always want to assign a path to it's innermost repository.
2527 for t in tasks {
2528 let Some((repo, paths)) = t else {
2529 continue;
2530 };
2531 let entry = paths_by_git_repo.entry(repo).or_default();
2532 for (repo_path, ix) in paths {
2533 if path_was_used[ix] {
2534 continue;
2535 }
2536 path_was_used[ix] = true;
2537 entry.push(repo_path);
2538 }
2539 }
2540
2541 paths_by_git_repo
2542 })
2543 }
2544}
2545
2546impl BufferGitState {
2547 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2548 Self {
2549 unstaged_diff: Default::default(),
2550 uncommitted_diff: Default::default(),
2551 recalculate_diff_task: Default::default(),
2552 language: Default::default(),
2553 language_registry: Default::default(),
2554 recalculating_tx: postage::watch::channel_with(false).0,
2555 hunk_staging_operation_count: 0,
2556 hunk_staging_operation_count_as_of_write: 0,
2557 head_text: Default::default(),
2558 index_text: Default::default(),
2559 head_changed: Default::default(),
2560 index_changed: Default::default(),
2561 language_changed: Default::default(),
2562 conflict_updated_futures: Default::default(),
2563 conflict_set: Default::default(),
2564 reparse_conflict_markers_task: Default::default(),
2565 }
2566 }
2567
2568 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2569 self.language = buffer.read(cx).language().cloned();
2570 self.language_changed = true;
2571 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2572 }
2573
2574 fn reparse_conflict_markers(
2575 &mut self,
2576 buffer: text::BufferSnapshot,
2577 cx: &mut Context<Self>,
2578 ) -> oneshot::Receiver<()> {
2579 let (tx, rx) = oneshot::channel();
2580
2581 let Some(conflict_set) = self
2582 .conflict_set
2583 .as_ref()
2584 .and_then(|conflict_set| conflict_set.upgrade())
2585 else {
2586 return rx;
2587 };
2588
2589 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2590 if conflict_set.has_conflict {
2591 Some(conflict_set.snapshot())
2592 } else {
2593 None
2594 }
2595 });
2596
2597 if let Some(old_snapshot) = old_snapshot {
2598 self.conflict_updated_futures.push(tx);
2599 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2600 let (snapshot, changed_range) = cx
2601 .background_spawn(async move {
2602 let new_snapshot = ConflictSet::parse(&buffer);
2603 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2604 (new_snapshot, changed_range)
2605 })
2606 .await;
2607 this.update(cx, |this, cx| {
2608 if let Some(conflict_set) = &this.conflict_set {
2609 conflict_set
2610 .update(cx, |conflict_set, cx| {
2611 conflict_set.set_snapshot(snapshot, changed_range, cx);
2612 })
2613 .ok();
2614 }
2615 let futures = std::mem::take(&mut this.conflict_updated_futures);
2616 for tx in futures {
2617 tx.send(()).ok();
2618 }
2619 })
2620 }))
2621 }
2622
2623 rx
2624 }
2625
2626 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2627 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2628 }
2629
2630 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2631 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2632 }
2633
2634 fn handle_base_texts_updated(
2635 &mut self,
2636 buffer: text::BufferSnapshot,
2637 message: proto::UpdateDiffBases,
2638 cx: &mut Context<Self>,
2639 ) {
2640 use proto::update_diff_bases::Mode;
2641
2642 let Some(mode) = Mode::from_i32(message.mode) else {
2643 return;
2644 };
2645
2646 let diff_bases_change = match mode {
2647 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2648 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2649 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2650 Mode::IndexAndHead => DiffBasesChange::SetEach {
2651 index: message.staged_text,
2652 head: message.committed_text,
2653 },
2654 };
2655
2656 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2657 }
2658
2659 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2660 if *self.recalculating_tx.borrow() {
2661 let mut rx = self.recalculating_tx.subscribe();
2662 Some(async move {
2663 loop {
2664 let is_recalculating = rx.recv().await;
2665 if is_recalculating != Some(true) {
2666 break;
2667 }
2668 }
2669 })
2670 } else {
2671 None
2672 }
2673 }
2674
2675 fn diff_bases_changed(
2676 &mut self,
2677 buffer: text::BufferSnapshot,
2678 diff_bases_change: Option<DiffBasesChange>,
2679 cx: &mut Context<Self>,
2680 ) {
2681 match diff_bases_change {
2682 Some(DiffBasesChange::SetIndex(index)) => {
2683 self.index_text = index.map(|mut index| {
2684 text::LineEnding::normalize(&mut index);
2685 Arc::new(index)
2686 });
2687 self.index_changed = true;
2688 }
2689 Some(DiffBasesChange::SetHead(head)) => {
2690 self.head_text = head.map(|mut head| {
2691 text::LineEnding::normalize(&mut head);
2692 Arc::new(head)
2693 });
2694 self.head_changed = true;
2695 }
2696 Some(DiffBasesChange::SetBoth(text)) => {
2697 let text = text.map(|mut text| {
2698 text::LineEnding::normalize(&mut text);
2699 Arc::new(text)
2700 });
2701 self.head_text = text.clone();
2702 self.index_text = text;
2703 self.head_changed = true;
2704 self.index_changed = true;
2705 }
2706 Some(DiffBasesChange::SetEach { index, head }) => {
2707 self.index_text = index.map(|mut index| {
2708 text::LineEnding::normalize(&mut index);
2709 Arc::new(index)
2710 });
2711 self.index_changed = true;
2712 self.head_text = head.map(|mut head| {
2713 text::LineEnding::normalize(&mut head);
2714 Arc::new(head)
2715 });
2716 self.head_changed = true;
2717 }
2718 None => {}
2719 }
2720
2721 self.recalculate_diffs(buffer, cx)
2722 }
2723
2724 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2725 *self.recalculating_tx.borrow_mut() = true;
2726
2727 let language = self.language.clone();
2728 let language_registry = self.language_registry.clone();
2729 let unstaged_diff = self.unstaged_diff();
2730 let uncommitted_diff = self.uncommitted_diff();
2731 let head = self.head_text.clone();
2732 let index = self.index_text.clone();
2733 let index_changed = self.index_changed;
2734 let head_changed = self.head_changed;
2735 let language_changed = self.language_changed;
2736 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2737 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2738 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2739 (None, None) => true,
2740 _ => false,
2741 };
2742 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2743 log::debug!(
2744 "start recalculating diffs for buffer {}",
2745 buffer.remote_id()
2746 );
2747
2748 let mut new_unstaged_diff = None;
2749 if let Some(unstaged_diff) = &unstaged_diff {
2750 new_unstaged_diff = Some(
2751 BufferDiff::update_diff(
2752 unstaged_diff.clone(),
2753 buffer.clone(),
2754 index,
2755 index_changed,
2756 language_changed,
2757 language.clone(),
2758 language_registry.clone(),
2759 cx,
2760 )
2761 .await?,
2762 );
2763 }
2764
2765 let mut new_uncommitted_diff = None;
2766 if let Some(uncommitted_diff) = &uncommitted_diff {
2767 new_uncommitted_diff = if index_matches_head {
2768 new_unstaged_diff.clone()
2769 } else {
2770 Some(
2771 BufferDiff::update_diff(
2772 uncommitted_diff.clone(),
2773 buffer.clone(),
2774 head,
2775 head_changed,
2776 language_changed,
2777 language.clone(),
2778 language_registry.clone(),
2779 cx,
2780 )
2781 .await?,
2782 )
2783 }
2784 }
2785
2786 let cancel = this.update(cx, |this, _| {
2787 // This checks whether all pending stage/unstage operations
2788 // have quiesced (i.e. both the corresponding write and the
2789 // read of that write have completed). If not, then we cancel
2790 // this recalculation attempt to avoid invalidating pending
2791 // state too quickly; another recalculation will come along
2792 // later and clear the pending state once the state of the index has settled.
2793 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2794 *this.recalculating_tx.borrow_mut() = false;
2795 true
2796 } else {
2797 false
2798 }
2799 })?;
2800 if cancel {
2801 log::debug!(
2802 concat!(
2803 "aborting recalculating diffs for buffer {}",
2804 "due to subsequent hunk operations",
2805 ),
2806 buffer.remote_id()
2807 );
2808 return Ok(());
2809 }
2810
2811 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2812 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2813 {
2814 unstaged_diff.update(cx, |diff, cx| {
2815 if language_changed {
2816 diff.language_changed(cx);
2817 }
2818 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2819 })?
2820 } else {
2821 None
2822 };
2823
2824 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2825 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2826 {
2827 uncommitted_diff.update(cx, |diff, cx| {
2828 if language_changed {
2829 diff.language_changed(cx);
2830 }
2831 diff.set_snapshot_with_secondary(
2832 new_uncommitted_diff,
2833 &buffer,
2834 unstaged_changed_range,
2835 true,
2836 cx,
2837 );
2838 })?;
2839 }
2840
2841 log::debug!(
2842 "finished recalculating diffs for buffer {}",
2843 buffer.remote_id()
2844 );
2845
2846 if let Some(this) = this.upgrade() {
2847 this.update(cx, |this, _| {
2848 this.index_changed = false;
2849 this.head_changed = false;
2850 this.language_changed = false;
2851 *this.recalculating_tx.borrow_mut() = false;
2852 })?;
2853 }
2854
2855 Ok(())
2856 }));
2857 }
2858}
2859
2860fn make_remote_delegate(
2861 this: Entity<GitStore>,
2862 project_id: u64,
2863 repository_id: RepositoryId,
2864 askpass_id: u64,
2865 cx: &mut AsyncApp,
2866) -> AskPassDelegate {
2867 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2868 this.update(cx, |this, cx| {
2869 let Some((client, _)) = this.downstream_client() else {
2870 return;
2871 };
2872 let response = client.request(proto::AskPassRequest {
2873 project_id,
2874 repository_id: repository_id.to_proto(),
2875 askpass_id,
2876 prompt,
2877 });
2878 cx.spawn(async move |_, _| {
2879 let mut response = response.await?.response;
2880 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2881 .ok();
2882 response.zeroize();
2883 anyhow::Ok(())
2884 })
2885 .detach_and_log_err(cx);
2886 })
2887 .log_err();
2888 })
2889}
2890
2891impl RepositoryId {
2892 pub fn to_proto(self) -> u64 {
2893 self.0
2894 }
2895
2896 pub fn from_proto(id: u64) -> Self {
2897 RepositoryId(id)
2898 }
2899}
2900
2901impl RepositorySnapshot {
2902 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2903 Self {
2904 id,
2905 statuses_by_path: Default::default(),
2906 work_directory_abs_path,
2907 branch: None,
2908 head_commit: None,
2909 scan_id: 0,
2910 merge: Default::default(),
2911 remote_origin_url: None,
2912 remote_upstream_url: None,
2913 stash_entries: Default::default(),
2914 path_style,
2915 }
2916 }
2917
2918 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2919 proto::UpdateRepository {
2920 branch_summary: self.branch.as_ref().map(branch_to_proto),
2921 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2922 updated_statuses: self
2923 .statuses_by_path
2924 .iter()
2925 .map(|entry| entry.to_proto())
2926 .collect(),
2927 removed_statuses: Default::default(),
2928 current_merge_conflicts: self
2929 .merge
2930 .conflicted_paths
2931 .iter()
2932 .map(|repo_path| repo_path.to_proto())
2933 .collect(),
2934 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2935 project_id,
2936 id: self.id.to_proto(),
2937 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2938 entry_ids: vec![self.id.to_proto()],
2939 scan_id: self.scan_id,
2940 is_last_update: true,
2941 stash_entries: self
2942 .stash_entries
2943 .entries
2944 .iter()
2945 .map(stash_to_proto)
2946 .collect(),
2947 }
2948 }
2949
2950 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2951 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2952 let mut removed_statuses: Vec<String> = Vec::new();
2953
2954 let mut new_statuses = self.statuses_by_path.iter().peekable();
2955 let mut old_statuses = old.statuses_by_path.iter().peekable();
2956
2957 let mut current_new_entry = new_statuses.next();
2958 let mut current_old_entry = old_statuses.next();
2959 loop {
2960 match (current_new_entry, current_old_entry) {
2961 (Some(new_entry), Some(old_entry)) => {
2962 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2963 Ordering::Less => {
2964 updated_statuses.push(new_entry.to_proto());
2965 current_new_entry = new_statuses.next();
2966 }
2967 Ordering::Equal => {
2968 if new_entry.status != old_entry.status {
2969 updated_statuses.push(new_entry.to_proto());
2970 }
2971 current_old_entry = old_statuses.next();
2972 current_new_entry = new_statuses.next();
2973 }
2974 Ordering::Greater => {
2975 removed_statuses.push(old_entry.repo_path.to_proto());
2976 current_old_entry = old_statuses.next();
2977 }
2978 }
2979 }
2980 (None, Some(old_entry)) => {
2981 removed_statuses.push(old_entry.repo_path.to_proto());
2982 current_old_entry = old_statuses.next();
2983 }
2984 (Some(new_entry), None) => {
2985 updated_statuses.push(new_entry.to_proto());
2986 current_new_entry = new_statuses.next();
2987 }
2988 (None, None) => break,
2989 }
2990 }
2991
2992 proto::UpdateRepository {
2993 branch_summary: self.branch.as_ref().map(branch_to_proto),
2994 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2995 updated_statuses,
2996 removed_statuses,
2997 current_merge_conflicts: self
2998 .merge
2999 .conflicted_paths
3000 .iter()
3001 .map(|path| path.to_proto())
3002 .collect(),
3003 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3004 project_id,
3005 id: self.id.to_proto(),
3006 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3007 entry_ids: vec![],
3008 scan_id: self.scan_id,
3009 is_last_update: true,
3010 stash_entries: self
3011 .stash_entries
3012 .entries
3013 .iter()
3014 .map(stash_to_proto)
3015 .collect(),
3016 }
3017 }
3018
3019 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3020 self.statuses_by_path.iter().cloned()
3021 }
3022
3023 pub fn status_summary(&self) -> GitSummary {
3024 self.statuses_by_path.summary().item_summary
3025 }
3026
3027 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3028 self.statuses_by_path
3029 .get(&PathKey(path.0.clone()), ())
3030 .cloned()
3031 }
3032
3033 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3034 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3035 }
3036
3037 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3038 self.path_style
3039 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3040 .unwrap()
3041 .into()
3042 }
3043
3044 #[inline]
3045 fn abs_path_to_repo_path_inner(
3046 work_directory_abs_path: &Path,
3047 abs_path: &Path,
3048 path_style: PathStyle,
3049 ) -> Option<RepoPath> {
3050 abs_path
3051 .strip_prefix(&work_directory_abs_path)
3052 .ok()
3053 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3054 }
3055
3056 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3057 self.merge.conflicted_paths.contains(repo_path)
3058 }
3059
3060 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3061 let had_conflict_on_last_merge_head_change =
3062 self.merge.conflicted_paths.contains(repo_path);
3063 let has_conflict_currently = self
3064 .status_for_path(repo_path)
3065 .is_some_and(|entry| entry.status.is_conflicted());
3066 had_conflict_on_last_merge_head_change || has_conflict_currently
3067 }
3068
3069 /// This is the name that will be displayed in the repository selector for this repository.
3070 pub fn display_name(&self) -> SharedString {
3071 self.work_directory_abs_path
3072 .file_name()
3073 .unwrap_or_default()
3074 .to_string_lossy()
3075 .to_string()
3076 .into()
3077 }
3078}
3079
3080pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3081 proto::StashEntry {
3082 oid: entry.oid.as_bytes().to_vec(),
3083 message: entry.message.clone(),
3084 branch: entry.branch.clone(),
3085 index: entry.index as u64,
3086 timestamp: entry.timestamp,
3087 }
3088}
3089
3090pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3091 Ok(StashEntry {
3092 oid: Oid::from_bytes(&entry.oid)?,
3093 message: entry.message.clone(),
3094 index: entry.index as usize,
3095 branch: entry.branch.clone(),
3096 timestamp: entry.timestamp,
3097 })
3098}
3099
3100impl MergeDetails {
3101 async fn load(
3102 backend: &Arc<dyn GitRepository>,
3103 status: &SumTree<StatusEntry>,
3104 prev_snapshot: &RepositorySnapshot,
3105 ) -> Result<(MergeDetails, bool)> {
3106 log::debug!("load merge details");
3107 let message = backend.merge_message().await;
3108 let heads = backend
3109 .revparse_batch(vec![
3110 "MERGE_HEAD".into(),
3111 "CHERRY_PICK_HEAD".into(),
3112 "REBASE_HEAD".into(),
3113 "REVERT_HEAD".into(),
3114 "APPLY_HEAD".into(),
3115 ])
3116 .await
3117 .log_err()
3118 .unwrap_or_default()
3119 .into_iter()
3120 .map(|opt| opt.map(SharedString::from))
3121 .collect::<Vec<_>>();
3122 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3123 let conflicted_paths = if merge_heads_changed {
3124 let current_conflicted_paths = TreeSet::from_ordered_entries(
3125 status
3126 .iter()
3127 .filter(|entry| entry.status.is_conflicted())
3128 .map(|entry| entry.repo_path.clone()),
3129 );
3130
3131 // It can happen that we run a scan while a lengthy merge is in progress
3132 // that will eventually result in conflicts, but before those conflicts
3133 // are reported by `git status`. Since for the moment we only care about
3134 // the merge heads state for the purposes of tracking conflicts, don't update
3135 // this state until we see some conflicts.
3136 if heads.iter().any(Option::is_some)
3137 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3138 && current_conflicted_paths.is_empty()
3139 {
3140 log::debug!("not updating merge heads because no conflicts found");
3141 return Ok((
3142 MergeDetails {
3143 message: message.map(SharedString::from),
3144 ..prev_snapshot.merge.clone()
3145 },
3146 false,
3147 ));
3148 }
3149
3150 current_conflicted_paths
3151 } else {
3152 prev_snapshot.merge.conflicted_paths.clone()
3153 };
3154 let details = MergeDetails {
3155 conflicted_paths,
3156 message: message.map(SharedString::from),
3157 heads,
3158 };
3159 Ok((details, merge_heads_changed))
3160 }
3161}
3162
3163impl Repository {
3164 pub fn snapshot(&self) -> RepositorySnapshot {
3165 self.snapshot.clone()
3166 }
3167
3168 fn local(
3169 id: RepositoryId,
3170 work_directory_abs_path: Arc<Path>,
3171 dot_git_abs_path: Arc<Path>,
3172 repository_dir_abs_path: Arc<Path>,
3173 common_dir_abs_path: Arc<Path>,
3174 project_environment: WeakEntity<ProjectEnvironment>,
3175 fs: Arc<dyn Fs>,
3176 git_store: WeakEntity<GitStore>,
3177 cx: &mut Context<Self>,
3178 ) -> Self {
3179 let snapshot =
3180 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3181 Repository {
3182 this: cx.weak_entity(),
3183 git_store,
3184 snapshot,
3185 commit_message_buffer: None,
3186 askpass_delegates: Default::default(),
3187 paths_needing_status_update: Default::default(),
3188 latest_askpass_id: 0,
3189 job_sender: Repository::spawn_local_git_worker(
3190 work_directory_abs_path,
3191 dot_git_abs_path,
3192 repository_dir_abs_path,
3193 common_dir_abs_path,
3194 project_environment,
3195 fs,
3196 cx,
3197 ),
3198 job_id: 0,
3199 active_jobs: Default::default(),
3200 }
3201 }
3202
3203 fn remote(
3204 id: RepositoryId,
3205 work_directory_abs_path: Arc<Path>,
3206 path_style: PathStyle,
3207 project_id: ProjectId,
3208 client: AnyProtoClient,
3209 git_store: WeakEntity<GitStore>,
3210 cx: &mut Context<Self>,
3211 ) -> Self {
3212 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3213 Self {
3214 this: cx.weak_entity(),
3215 snapshot,
3216 commit_message_buffer: None,
3217 git_store,
3218 paths_needing_status_update: Default::default(),
3219 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3220 askpass_delegates: Default::default(),
3221 latest_askpass_id: 0,
3222 active_jobs: Default::default(),
3223 job_id: 0,
3224 }
3225 }
3226
3227 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3228 self.git_store.upgrade()
3229 }
3230
3231 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3232 let this = cx.weak_entity();
3233 let git_store = self.git_store.clone();
3234 let _ = self.send_keyed_job(
3235 Some(GitJobKey::ReloadBufferDiffBases),
3236 None,
3237 |state, mut cx| async move {
3238 let RepositoryState::Local { backend, .. } = state else {
3239 log::error!("tried to recompute diffs for a non-local repository");
3240 return Ok(());
3241 };
3242
3243 let Some(this) = this.upgrade() else {
3244 return Ok(());
3245 };
3246
3247 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3248 git_store.update(cx, |git_store, cx| {
3249 git_store
3250 .diffs
3251 .iter()
3252 .filter_map(|(buffer_id, diff_state)| {
3253 let buffer_store = git_store.buffer_store.read(cx);
3254 let buffer = buffer_store.get(*buffer_id)?;
3255 let file = File::from_dyn(buffer.read(cx).file())?;
3256 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3257 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3258 log::debug!(
3259 "start reload diff bases for repo path {}",
3260 repo_path.as_unix_str()
3261 );
3262 diff_state.update(cx, |diff_state, _| {
3263 let has_unstaged_diff = diff_state
3264 .unstaged_diff
3265 .as_ref()
3266 .is_some_and(|diff| diff.is_upgradable());
3267 let has_uncommitted_diff = diff_state
3268 .uncommitted_diff
3269 .as_ref()
3270 .is_some_and(|set| set.is_upgradable());
3271
3272 Some((
3273 buffer,
3274 repo_path,
3275 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3276 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3277 ))
3278 })
3279 })
3280 .collect::<Vec<_>>()
3281 })
3282 })??;
3283
3284 let buffer_diff_base_changes = cx
3285 .background_spawn(async move {
3286 let mut changes = Vec::new();
3287 for (buffer, repo_path, current_index_text, current_head_text) in
3288 &repo_diff_state_updates
3289 {
3290 let index_text = if current_index_text.is_some() {
3291 backend.load_index_text(repo_path.clone()).await
3292 } else {
3293 None
3294 };
3295 let head_text = if current_head_text.is_some() {
3296 backend.load_committed_text(repo_path.clone()).await
3297 } else {
3298 None
3299 };
3300
3301 let change =
3302 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3303 (Some(current_index), Some(current_head)) => {
3304 let index_changed =
3305 index_text.as_ref() != current_index.as_deref();
3306 let head_changed =
3307 head_text.as_ref() != current_head.as_deref();
3308 if index_changed && head_changed {
3309 if index_text == head_text {
3310 Some(DiffBasesChange::SetBoth(head_text))
3311 } else {
3312 Some(DiffBasesChange::SetEach {
3313 index: index_text,
3314 head: head_text,
3315 })
3316 }
3317 } else if index_changed {
3318 Some(DiffBasesChange::SetIndex(index_text))
3319 } else if head_changed {
3320 Some(DiffBasesChange::SetHead(head_text))
3321 } else {
3322 None
3323 }
3324 }
3325 (Some(current_index), None) => {
3326 let index_changed =
3327 index_text.as_ref() != current_index.as_deref();
3328 index_changed
3329 .then_some(DiffBasesChange::SetIndex(index_text))
3330 }
3331 (None, Some(current_head)) => {
3332 let head_changed =
3333 head_text.as_ref() != current_head.as_deref();
3334 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3335 }
3336 (None, None) => None,
3337 };
3338
3339 changes.push((buffer.clone(), change))
3340 }
3341 changes
3342 })
3343 .await;
3344
3345 git_store.update(&mut cx, |git_store, cx| {
3346 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3347 let buffer_snapshot = buffer.read(cx).text_snapshot();
3348 let buffer_id = buffer_snapshot.remote_id();
3349 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3350 continue;
3351 };
3352
3353 let downstream_client = git_store.downstream_client();
3354 diff_state.update(cx, |diff_state, cx| {
3355 use proto::update_diff_bases::Mode;
3356
3357 if let Some((diff_bases_change, (client, project_id))) =
3358 diff_bases_change.clone().zip(downstream_client)
3359 {
3360 let (staged_text, committed_text, mode) = match diff_bases_change {
3361 DiffBasesChange::SetIndex(index) => {
3362 (index, None, Mode::IndexOnly)
3363 }
3364 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3365 DiffBasesChange::SetEach { index, head } => {
3366 (index, head, Mode::IndexAndHead)
3367 }
3368 DiffBasesChange::SetBoth(text) => {
3369 (None, text, Mode::IndexMatchesHead)
3370 }
3371 };
3372 client
3373 .send(proto::UpdateDiffBases {
3374 project_id: project_id.to_proto(),
3375 buffer_id: buffer_id.to_proto(),
3376 staged_text,
3377 committed_text,
3378 mode: mode as i32,
3379 })
3380 .log_err();
3381 }
3382
3383 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3384 });
3385 }
3386 })
3387 },
3388 );
3389 }
3390
3391 pub fn send_job<F, Fut, R>(
3392 &mut self,
3393 status: Option<SharedString>,
3394 job: F,
3395 ) -> oneshot::Receiver<R>
3396 where
3397 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3398 Fut: Future<Output = R> + 'static,
3399 R: Send + 'static,
3400 {
3401 self.send_keyed_job(None, status, job)
3402 }
3403
3404 fn send_keyed_job<F, Fut, R>(
3405 &mut self,
3406 key: Option<GitJobKey>,
3407 status: Option<SharedString>,
3408 job: F,
3409 ) -> oneshot::Receiver<R>
3410 where
3411 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3412 Fut: Future<Output = R> + 'static,
3413 R: Send + 'static,
3414 {
3415 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3416 let job_id = post_inc(&mut self.job_id);
3417 let this = self.this.clone();
3418 self.job_sender
3419 .unbounded_send(GitJob {
3420 key,
3421 job: Box::new(move |state, cx: &mut AsyncApp| {
3422 let job = job(state, cx.clone());
3423 cx.spawn(async move |cx| {
3424 if let Some(s) = status.clone() {
3425 this.update(cx, |this, cx| {
3426 this.active_jobs.insert(
3427 job_id,
3428 JobInfo {
3429 start: Instant::now(),
3430 message: s.clone(),
3431 },
3432 );
3433
3434 cx.notify();
3435 })
3436 .ok();
3437 }
3438 let result = job.await;
3439
3440 this.update(cx, |this, cx| {
3441 this.active_jobs.remove(&job_id);
3442 cx.notify();
3443 })
3444 .ok();
3445
3446 result_tx.send(result).ok();
3447 })
3448 }),
3449 })
3450 .ok();
3451 result_rx
3452 }
3453
3454 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3455 let Some(git_store) = self.git_store.upgrade() else {
3456 return;
3457 };
3458 let entity = cx.entity();
3459 git_store.update(cx, |git_store, cx| {
3460 let Some((&id, _)) = git_store
3461 .repositories
3462 .iter()
3463 .find(|(_, handle)| *handle == &entity)
3464 else {
3465 return;
3466 };
3467 git_store.active_repo_id = Some(id);
3468 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3469 });
3470 }
3471
3472 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3473 self.snapshot.status()
3474 }
3475
3476 pub fn cached_stash(&self) -> GitStash {
3477 self.snapshot.stash_entries.clone()
3478 }
3479
3480 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3481 let git_store = self.git_store.upgrade()?;
3482 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3483 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3484 let abs_path = SanitizedPath::new(&abs_path);
3485 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3486 Some(ProjectPath {
3487 worktree_id: worktree.read(cx).id(),
3488 path: relative_path,
3489 })
3490 }
3491
3492 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3493 let git_store = self.git_store.upgrade()?;
3494 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3495 let abs_path = worktree_store.absolutize(path, cx)?;
3496 self.snapshot.abs_path_to_repo_path(&abs_path)
3497 }
3498
3499 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3500 other
3501 .read(cx)
3502 .snapshot
3503 .work_directory_abs_path
3504 .starts_with(&self.snapshot.work_directory_abs_path)
3505 }
3506
3507 pub fn open_commit_buffer(
3508 &mut self,
3509 languages: Option<Arc<LanguageRegistry>>,
3510 buffer_store: Entity<BufferStore>,
3511 cx: &mut Context<Self>,
3512 ) -> Task<Result<Entity<Buffer>>> {
3513 let id = self.id;
3514 if let Some(buffer) = self.commit_message_buffer.clone() {
3515 return Task::ready(Ok(buffer));
3516 }
3517 let this = cx.weak_entity();
3518
3519 let rx = self.send_job(None, move |state, mut cx| async move {
3520 let Some(this) = this.upgrade() else {
3521 bail!("git store was dropped");
3522 };
3523 match state {
3524 RepositoryState::Local { .. } => {
3525 this.update(&mut cx, |_, cx| {
3526 Self::open_local_commit_buffer(languages, buffer_store, cx)
3527 })?
3528 .await
3529 }
3530 RepositoryState::Remote { project_id, client } => {
3531 let request = client.request(proto::OpenCommitMessageBuffer {
3532 project_id: project_id.0,
3533 repository_id: id.to_proto(),
3534 });
3535 let response = request.await.context("requesting to open commit buffer")?;
3536 let buffer_id = BufferId::new(response.buffer_id)?;
3537 let buffer = buffer_store
3538 .update(&mut cx, |buffer_store, cx| {
3539 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3540 })?
3541 .await?;
3542 if let Some(language_registry) = languages {
3543 let git_commit_language =
3544 language_registry.language_for_name("Git Commit").await?;
3545 buffer.update(&mut cx, |buffer, cx| {
3546 buffer.set_language(Some(git_commit_language), cx);
3547 })?;
3548 }
3549 this.update(&mut cx, |this, _| {
3550 this.commit_message_buffer = Some(buffer.clone());
3551 })?;
3552 Ok(buffer)
3553 }
3554 }
3555 });
3556
3557 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3558 }
3559
3560 fn open_local_commit_buffer(
3561 language_registry: Option<Arc<LanguageRegistry>>,
3562 buffer_store: Entity<BufferStore>,
3563 cx: &mut Context<Self>,
3564 ) -> Task<Result<Entity<Buffer>>> {
3565 cx.spawn(async move |repository, cx| {
3566 let buffer = buffer_store
3567 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3568 .await?;
3569
3570 if let Some(language_registry) = language_registry {
3571 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3572 buffer.update(cx, |buffer, cx| {
3573 buffer.set_language(Some(git_commit_language), cx);
3574 })?;
3575 }
3576
3577 repository.update(cx, |repository, _| {
3578 repository.commit_message_buffer = Some(buffer.clone());
3579 })?;
3580 Ok(buffer)
3581 })
3582 }
3583
3584 pub fn checkout_files(
3585 &mut self,
3586 commit: &str,
3587 paths: Vec<RepoPath>,
3588 _cx: &mut App,
3589 ) -> oneshot::Receiver<Result<()>> {
3590 let commit = commit.to_string();
3591 let id = self.id;
3592
3593 self.send_job(
3594 Some(format!("git checkout {}", commit).into()),
3595 move |git_repo, _| async move {
3596 match git_repo {
3597 RepositoryState::Local {
3598 backend,
3599 environment,
3600 ..
3601 } => {
3602 backend
3603 .checkout_files(commit, paths, environment.clone())
3604 .await
3605 }
3606 RepositoryState::Remote { project_id, client } => {
3607 client
3608 .request(proto::GitCheckoutFiles {
3609 project_id: project_id.0,
3610 repository_id: id.to_proto(),
3611 commit,
3612 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3613 })
3614 .await?;
3615
3616 Ok(())
3617 }
3618 }
3619 },
3620 )
3621 }
3622
3623 pub fn reset(
3624 &mut self,
3625 commit: String,
3626 reset_mode: ResetMode,
3627 _cx: &mut App,
3628 ) -> oneshot::Receiver<Result<()>> {
3629 let id = self.id;
3630
3631 self.send_job(None, move |git_repo, _| async move {
3632 match git_repo {
3633 RepositoryState::Local {
3634 backend,
3635 environment,
3636 ..
3637 } => backend.reset(commit, reset_mode, environment).await,
3638 RepositoryState::Remote { project_id, client } => {
3639 client
3640 .request(proto::GitReset {
3641 project_id: project_id.0,
3642 repository_id: id.to_proto(),
3643 commit,
3644 mode: match reset_mode {
3645 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3646 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3647 },
3648 })
3649 .await?;
3650
3651 Ok(())
3652 }
3653 }
3654 })
3655 }
3656
3657 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3658 let id = self.id;
3659 self.send_job(None, move |git_repo, _cx| async move {
3660 match git_repo {
3661 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3662 RepositoryState::Remote { project_id, client } => {
3663 let resp = client
3664 .request(proto::GitShow {
3665 project_id: project_id.0,
3666 repository_id: id.to_proto(),
3667 commit,
3668 })
3669 .await?;
3670
3671 Ok(CommitDetails {
3672 sha: resp.sha.into(),
3673 message: resp.message.into(),
3674 commit_timestamp: resp.commit_timestamp,
3675 author_email: resp.author_email.into(),
3676 author_name: resp.author_name.into(),
3677 })
3678 }
3679 }
3680 })
3681 }
3682
3683 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3684 let id = self.id;
3685 self.send_job(None, move |git_repo, cx| async move {
3686 match git_repo {
3687 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3688 RepositoryState::Remote {
3689 client, project_id, ..
3690 } => {
3691 let response = client
3692 .request(proto::LoadCommitDiff {
3693 project_id: project_id.0,
3694 repository_id: id.to_proto(),
3695 commit,
3696 })
3697 .await?;
3698 Ok(CommitDiff {
3699 files: response
3700 .files
3701 .into_iter()
3702 .map(|file| {
3703 Ok(CommitFile {
3704 path: RepoPath::from_proto(&file.path)?,
3705 old_text: file.old_text,
3706 new_text: file.new_text,
3707 })
3708 })
3709 .collect::<Result<Vec<_>>>()?,
3710 })
3711 }
3712 }
3713 })
3714 }
3715
3716 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3717 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3718 }
3719
3720 fn save_buffers<'a>(
3721 &self,
3722 entries: impl IntoIterator<Item = &'a RepoPath>,
3723 cx: &mut Context<Self>,
3724 ) -> Vec<Task<anyhow::Result<()>>> {
3725 let mut save_futures = Vec::new();
3726 if let Some(buffer_store) = self.buffer_store(cx) {
3727 buffer_store.update(cx, |buffer_store, cx| {
3728 for path in entries {
3729 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3730 continue;
3731 };
3732 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3733 && buffer
3734 .read(cx)
3735 .file()
3736 .is_some_and(|file| file.disk_state().exists())
3737 && buffer.read(cx).has_unsaved_edits()
3738 {
3739 save_futures.push(buffer_store.save_buffer(buffer, cx));
3740 }
3741 }
3742 })
3743 }
3744 save_futures
3745 }
3746
3747 pub fn stage_entries(
3748 &self,
3749 entries: Vec<RepoPath>,
3750 cx: &mut Context<Self>,
3751 ) -> Task<anyhow::Result<()>> {
3752 if entries.is_empty() {
3753 return Task::ready(Ok(()));
3754 }
3755 let id = self.id;
3756 let save_tasks = self.save_buffers(&entries, cx);
3757 let paths = entries
3758 .iter()
3759 .map(|p| p.as_unix_str())
3760 .collect::<Vec<_>>()
3761 .join(" ");
3762 let status = format!("git add {paths}");
3763 let job_key = match entries.len() {
3764 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3765 _ => None,
3766 };
3767
3768 cx.spawn(async move |this, cx| {
3769 for save_task in save_tasks {
3770 save_task.await?;
3771 }
3772
3773 this.update(cx, |this, _| {
3774 this.send_keyed_job(
3775 job_key,
3776 Some(status.into()),
3777 move |git_repo, _cx| async move {
3778 match git_repo {
3779 RepositoryState::Local {
3780 backend,
3781 environment,
3782 ..
3783 } => backend.stage_paths(entries, environment.clone()).await,
3784 RepositoryState::Remote { project_id, client } => {
3785 client
3786 .request(proto::Stage {
3787 project_id: project_id.0,
3788 repository_id: id.to_proto(),
3789 paths: entries
3790 .into_iter()
3791 .map(|repo_path| repo_path.to_proto())
3792 .collect(),
3793 })
3794 .await
3795 .context("sending stage request")?;
3796
3797 Ok(())
3798 }
3799 }
3800 },
3801 )
3802 })?
3803 .await??;
3804
3805 Ok(())
3806 })
3807 }
3808
3809 pub fn unstage_entries(
3810 &self,
3811 entries: Vec<RepoPath>,
3812 cx: &mut Context<Self>,
3813 ) -> Task<anyhow::Result<()>> {
3814 if entries.is_empty() {
3815 return Task::ready(Ok(()));
3816 }
3817 let id = self.id;
3818 let save_tasks = self.save_buffers(&entries, cx);
3819 let paths = entries
3820 .iter()
3821 .map(|p| p.as_unix_str())
3822 .collect::<Vec<_>>()
3823 .join(" ");
3824 let status = format!("git reset {paths}");
3825 let job_key = match entries.len() {
3826 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3827 _ => None,
3828 };
3829
3830 cx.spawn(async move |this, cx| {
3831 for save_task in save_tasks {
3832 save_task.await?;
3833 }
3834
3835 this.update(cx, |this, _| {
3836 this.send_keyed_job(
3837 job_key,
3838 Some(status.into()),
3839 move |git_repo, _cx| async move {
3840 match git_repo {
3841 RepositoryState::Local {
3842 backend,
3843 environment,
3844 ..
3845 } => backend.unstage_paths(entries, environment).await,
3846 RepositoryState::Remote { project_id, client } => {
3847 client
3848 .request(proto::Unstage {
3849 project_id: project_id.0,
3850 repository_id: id.to_proto(),
3851 paths: entries
3852 .into_iter()
3853 .map(|repo_path| repo_path.to_proto())
3854 .collect(),
3855 })
3856 .await
3857 .context("sending unstage request")?;
3858
3859 Ok(())
3860 }
3861 }
3862 },
3863 )
3864 })?
3865 .await??;
3866
3867 Ok(())
3868 })
3869 }
3870
3871 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3872 let to_stage = self
3873 .cached_status()
3874 .filter(|entry| !entry.status.staging().is_fully_staged())
3875 .map(|entry| entry.repo_path)
3876 .collect();
3877 self.stage_entries(to_stage, cx)
3878 }
3879
3880 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3881 let to_unstage = self
3882 .cached_status()
3883 .filter(|entry| entry.status.staging().has_staged())
3884 .map(|entry| entry.repo_path)
3885 .collect();
3886 self.unstage_entries(to_unstage, cx)
3887 }
3888
3889 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3890 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3891
3892 self.stash_entries(to_stash, cx)
3893 }
3894
3895 pub fn stash_entries(
3896 &mut self,
3897 entries: Vec<RepoPath>,
3898 cx: &mut Context<Self>,
3899 ) -> Task<anyhow::Result<()>> {
3900 let id = self.id;
3901
3902 cx.spawn(async move |this, cx| {
3903 this.update(cx, |this, _| {
3904 this.send_job(None, move |git_repo, _cx| async move {
3905 match git_repo {
3906 RepositoryState::Local {
3907 backend,
3908 environment,
3909 ..
3910 } => backend.stash_paths(entries, environment).await,
3911 RepositoryState::Remote { project_id, client } => {
3912 client
3913 .request(proto::Stash {
3914 project_id: project_id.0,
3915 repository_id: id.to_proto(),
3916 paths: entries
3917 .into_iter()
3918 .map(|repo_path| repo_path.to_proto())
3919 .collect(),
3920 })
3921 .await
3922 .context("sending stash request")?;
3923 Ok(())
3924 }
3925 }
3926 })
3927 })?
3928 .await??;
3929 Ok(())
3930 })
3931 }
3932
3933 pub fn stash_pop(
3934 &mut self,
3935 index: Option<usize>,
3936 cx: &mut Context<Self>,
3937 ) -> Task<anyhow::Result<()>> {
3938 let id = self.id;
3939 cx.spawn(async move |this, cx| {
3940 this.update(cx, |this, _| {
3941 this.send_job(None, move |git_repo, _cx| async move {
3942 match git_repo {
3943 RepositoryState::Local {
3944 backend,
3945 environment,
3946 ..
3947 } => backend.stash_pop(index, environment).await,
3948 RepositoryState::Remote { project_id, client } => {
3949 client
3950 .request(proto::StashPop {
3951 project_id: project_id.0,
3952 repository_id: id.to_proto(),
3953 stash_index: index.map(|i| i as u64),
3954 })
3955 .await
3956 .context("sending stash pop request")?;
3957 Ok(())
3958 }
3959 }
3960 })
3961 })?
3962 .await??;
3963 Ok(())
3964 })
3965 }
3966
3967 pub fn stash_apply(
3968 &mut self,
3969 index: Option<usize>,
3970 cx: &mut Context<Self>,
3971 ) -> Task<anyhow::Result<()>> {
3972 let id = self.id;
3973 cx.spawn(async move |this, cx| {
3974 this.update(cx, |this, _| {
3975 this.send_job(None, move |git_repo, _cx| async move {
3976 match git_repo {
3977 RepositoryState::Local {
3978 backend,
3979 environment,
3980 ..
3981 } => backend.stash_apply(index, environment).await,
3982 RepositoryState::Remote { project_id, client } => {
3983 client
3984 .request(proto::StashApply {
3985 project_id: project_id.0,
3986 repository_id: id.to_proto(),
3987 stash_index: index.map(|i| i as u64),
3988 })
3989 .await
3990 .context("sending stash apply request")?;
3991 Ok(())
3992 }
3993 }
3994 })
3995 })?
3996 .await??;
3997 Ok(())
3998 })
3999 }
4000
4001 pub fn stash_drop(
4002 &mut self,
4003 index: Option<usize>,
4004 cx: &mut Context<Self>,
4005 ) -> oneshot::Receiver<anyhow::Result<()>> {
4006 let id = self.id;
4007 let updates_tx = self
4008 .git_store()
4009 .and_then(|git_store| match &git_store.read(cx).state {
4010 GitStoreState::Local { downstream, .. } => downstream
4011 .as_ref()
4012 .map(|downstream| downstream.updates_tx.clone()),
4013 _ => None,
4014 });
4015 let this = cx.weak_entity();
4016 self.send_job(None, move |git_repo, mut cx| async move {
4017 match git_repo {
4018 RepositoryState::Local {
4019 backend,
4020 environment,
4021 ..
4022 } => {
4023 // TODO would be nice to not have to do this manually
4024 let result = backend.stash_drop(index, environment).await;
4025 if result.is_ok()
4026 && let Ok(stash_entries) = backend.stash_entries().await
4027 {
4028 let snapshot = this.update(&mut cx, |this, cx| {
4029 this.snapshot.stash_entries = stash_entries;
4030 cx.emit(RepositoryEvent::StashEntriesChanged);
4031 this.snapshot.clone()
4032 })?;
4033 if let Some(updates_tx) = updates_tx {
4034 updates_tx
4035 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4036 .ok();
4037 }
4038 }
4039
4040 result
4041 }
4042 RepositoryState::Remote { project_id, client } => {
4043 client
4044 .request(proto::StashDrop {
4045 project_id: project_id.0,
4046 repository_id: id.to_proto(),
4047 stash_index: index.map(|i| i as u64),
4048 })
4049 .await
4050 .context("sending stash pop request")?;
4051 Ok(())
4052 }
4053 }
4054 })
4055 }
4056
4057 pub fn commit(
4058 &mut self,
4059 message: SharedString,
4060 name_and_email: Option<(SharedString, SharedString)>,
4061 options: CommitOptions,
4062 _cx: &mut App,
4063 ) -> oneshot::Receiver<Result<()>> {
4064 let id = self.id;
4065
4066 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4067 match git_repo {
4068 RepositoryState::Local {
4069 backend,
4070 environment,
4071 ..
4072 } => {
4073 backend
4074 .commit(message, name_and_email, options, environment)
4075 .await
4076 }
4077 RepositoryState::Remote { project_id, client } => {
4078 let (name, email) = name_and_email.unzip();
4079 client
4080 .request(proto::Commit {
4081 project_id: project_id.0,
4082 repository_id: id.to_proto(),
4083 message: String::from(message),
4084 name: name.map(String::from),
4085 email: email.map(String::from),
4086 options: Some(proto::commit::CommitOptions {
4087 amend: options.amend,
4088 signoff: options.signoff,
4089 }),
4090 })
4091 .await
4092 .context("sending commit request")?;
4093
4094 Ok(())
4095 }
4096 }
4097 })
4098 }
4099
4100 pub fn fetch(
4101 &mut self,
4102 fetch_options: FetchOptions,
4103 askpass: AskPassDelegate,
4104 _cx: &mut App,
4105 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4106 let askpass_delegates = self.askpass_delegates.clone();
4107 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4108 let id = self.id;
4109
4110 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4111 match git_repo {
4112 RepositoryState::Local {
4113 backend,
4114 environment,
4115 ..
4116 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4117 RepositoryState::Remote { project_id, client } => {
4118 askpass_delegates.lock().insert(askpass_id, askpass);
4119 let _defer = util::defer(|| {
4120 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4121 debug_assert!(askpass_delegate.is_some());
4122 });
4123
4124 let response = client
4125 .request(proto::Fetch {
4126 project_id: project_id.0,
4127 repository_id: id.to_proto(),
4128 askpass_id,
4129 remote: fetch_options.to_proto(),
4130 })
4131 .await
4132 .context("sending fetch request")?;
4133
4134 Ok(RemoteCommandOutput {
4135 stdout: response.stdout,
4136 stderr: response.stderr,
4137 })
4138 }
4139 }
4140 })
4141 }
4142
4143 pub fn push(
4144 &mut self,
4145 branch: SharedString,
4146 remote: SharedString,
4147 options: Option<PushOptions>,
4148 askpass: AskPassDelegate,
4149 cx: &mut Context<Self>,
4150 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4151 let askpass_delegates = self.askpass_delegates.clone();
4152 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4153 let id = self.id;
4154
4155 let args = options
4156 .map(|option| match option {
4157 PushOptions::SetUpstream => " --set-upstream",
4158 PushOptions::Force => " --force-with-lease",
4159 })
4160 .unwrap_or("");
4161
4162 let updates_tx = self
4163 .git_store()
4164 .and_then(|git_store| match &git_store.read(cx).state {
4165 GitStoreState::Local { downstream, .. } => downstream
4166 .as_ref()
4167 .map(|downstream| downstream.updates_tx.clone()),
4168 _ => None,
4169 });
4170
4171 let this = cx.weak_entity();
4172 self.send_job(
4173 Some(format!("git push {} {} {}", args, remote, branch).into()),
4174 move |git_repo, mut cx| async move {
4175 match git_repo {
4176 RepositoryState::Local {
4177 backend,
4178 environment,
4179 ..
4180 } => {
4181 let result = backend
4182 .push(
4183 branch.to_string(),
4184 remote.to_string(),
4185 options,
4186 askpass,
4187 environment.clone(),
4188 cx.clone(),
4189 )
4190 .await;
4191 // TODO would be nice to not have to do this manually
4192 if result.is_ok() {
4193 let branches = backend.branches().await?;
4194 let branch = branches.into_iter().find(|branch| branch.is_head);
4195 log::info!("head branch after scan is {branch:?}");
4196 let snapshot = this.update(&mut cx, |this, cx| {
4197 this.snapshot.branch = branch;
4198 cx.emit(RepositoryEvent::BranchChanged);
4199 this.snapshot.clone()
4200 })?;
4201 if let Some(updates_tx) = updates_tx {
4202 updates_tx
4203 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4204 .ok();
4205 }
4206 }
4207 result
4208 }
4209 RepositoryState::Remote { project_id, client } => {
4210 askpass_delegates.lock().insert(askpass_id, askpass);
4211 let _defer = util::defer(|| {
4212 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4213 debug_assert!(askpass_delegate.is_some());
4214 });
4215 let response = client
4216 .request(proto::Push {
4217 project_id: project_id.0,
4218 repository_id: id.to_proto(),
4219 askpass_id,
4220 branch_name: branch.to_string(),
4221 remote_name: remote.to_string(),
4222 options: options.map(|options| match options {
4223 PushOptions::Force => proto::push::PushOptions::Force,
4224 PushOptions::SetUpstream => {
4225 proto::push::PushOptions::SetUpstream
4226 }
4227 }
4228 as i32),
4229 })
4230 .await
4231 .context("sending push request")?;
4232
4233 Ok(RemoteCommandOutput {
4234 stdout: response.stdout,
4235 stderr: response.stderr,
4236 })
4237 }
4238 }
4239 },
4240 )
4241 }
4242
4243 pub fn pull(
4244 &mut self,
4245 branch: SharedString,
4246 remote: SharedString,
4247 askpass: AskPassDelegate,
4248 _cx: &mut App,
4249 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4250 let askpass_delegates = self.askpass_delegates.clone();
4251 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4252 let id = self.id;
4253
4254 self.send_job(
4255 Some(format!("git pull {} {}", remote, branch).into()),
4256 move |git_repo, cx| async move {
4257 match git_repo {
4258 RepositoryState::Local {
4259 backend,
4260 environment,
4261 ..
4262 } => {
4263 backend
4264 .pull(
4265 branch.to_string(),
4266 remote.to_string(),
4267 askpass,
4268 environment.clone(),
4269 cx,
4270 )
4271 .await
4272 }
4273 RepositoryState::Remote { project_id, client } => {
4274 askpass_delegates.lock().insert(askpass_id, askpass);
4275 let _defer = util::defer(|| {
4276 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4277 debug_assert!(askpass_delegate.is_some());
4278 });
4279 let response = client
4280 .request(proto::Pull {
4281 project_id: project_id.0,
4282 repository_id: id.to_proto(),
4283 askpass_id,
4284 branch_name: branch.to_string(),
4285 remote_name: remote.to_string(),
4286 })
4287 .await
4288 .context("sending pull request")?;
4289
4290 Ok(RemoteCommandOutput {
4291 stdout: response.stdout,
4292 stderr: response.stderr,
4293 })
4294 }
4295 }
4296 },
4297 )
4298 }
4299
4300 fn spawn_set_index_text_job(
4301 &mut self,
4302 path: RepoPath,
4303 content: Option<String>,
4304 hunk_staging_operation_count: Option<usize>,
4305 cx: &mut Context<Self>,
4306 ) -> oneshot::Receiver<anyhow::Result<()>> {
4307 let id = self.id;
4308 let this = cx.weak_entity();
4309 let git_store = self.git_store.clone();
4310 self.send_keyed_job(
4311 Some(GitJobKey::WriteIndex(path.clone())),
4312 None,
4313 move |git_repo, mut cx| async move {
4314 log::debug!(
4315 "start updating index text for buffer {}",
4316 path.as_unix_str()
4317 );
4318 match git_repo {
4319 RepositoryState::Local {
4320 backend,
4321 environment,
4322 ..
4323 } => {
4324 backend
4325 .set_index_text(path.clone(), content, environment.clone())
4326 .await?;
4327 }
4328 RepositoryState::Remote { project_id, client } => {
4329 client
4330 .request(proto::SetIndexText {
4331 project_id: project_id.0,
4332 repository_id: id.to_proto(),
4333 path: path.to_proto(),
4334 text: content,
4335 })
4336 .await?;
4337 }
4338 }
4339 log::debug!(
4340 "finish updating index text for buffer {}",
4341 path.as_unix_str()
4342 );
4343
4344 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4345 let project_path = this
4346 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4347 .ok()
4348 .flatten();
4349 git_store.update(&mut cx, |git_store, cx| {
4350 let buffer_id = git_store
4351 .buffer_store
4352 .read(cx)
4353 .get_by_path(&project_path?)?
4354 .read(cx)
4355 .remote_id();
4356 let diff_state = git_store.diffs.get(&buffer_id)?;
4357 diff_state.update(cx, |diff_state, _| {
4358 diff_state.hunk_staging_operation_count_as_of_write =
4359 hunk_staging_operation_count;
4360 });
4361 Some(())
4362 })?;
4363 }
4364 Ok(())
4365 },
4366 )
4367 }
4368
4369 pub fn get_remotes(
4370 &mut self,
4371 branch_name: Option<String>,
4372 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4373 let id = self.id;
4374 self.send_job(None, move |repo, _cx| async move {
4375 match repo {
4376 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4377 RepositoryState::Remote { project_id, client } => {
4378 let response = client
4379 .request(proto::GetRemotes {
4380 project_id: project_id.0,
4381 repository_id: id.to_proto(),
4382 branch_name,
4383 })
4384 .await?;
4385
4386 let remotes = response
4387 .remotes
4388 .into_iter()
4389 .map(|remotes| git::repository::Remote {
4390 name: remotes.name.into(),
4391 })
4392 .collect();
4393
4394 Ok(remotes)
4395 }
4396 }
4397 })
4398 }
4399
4400 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4401 let id = self.id;
4402 self.send_job(None, move |repo, _| async move {
4403 match repo {
4404 RepositoryState::Local { backend, .. } => backend.branches().await,
4405 RepositoryState::Remote { project_id, client } => {
4406 let response = client
4407 .request(proto::GitGetBranches {
4408 project_id: project_id.0,
4409 repository_id: id.to_proto(),
4410 })
4411 .await?;
4412
4413 let branches = response
4414 .branches
4415 .into_iter()
4416 .map(|branch| proto_to_branch(&branch))
4417 .collect();
4418
4419 Ok(branches)
4420 }
4421 }
4422 })
4423 }
4424
4425 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4426 let id = self.id;
4427 self.send_job(None, move |repo, _| async move {
4428 match repo {
4429 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4430 RepositoryState::Remote { project_id, client } => {
4431 let response = client
4432 .request(proto::GetDefaultBranch {
4433 project_id: project_id.0,
4434 repository_id: id.to_proto(),
4435 })
4436 .await?;
4437
4438 anyhow::Ok(response.branch.map(SharedString::from))
4439 }
4440 }
4441 })
4442 }
4443
4444 pub fn diff_tree(
4445 &mut self,
4446 diff_type: DiffTreeType,
4447 _cx: &App,
4448 ) -> oneshot::Receiver<Result<TreeDiff>> {
4449 let repository_id = self.snapshot.id;
4450 self.send_job(None, move |repo, _cx| async move {
4451 match repo {
4452 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4453 RepositoryState::Remote { client, project_id } => {
4454 let response = client
4455 .request(proto::GetTreeDiff {
4456 project_id: project_id.0,
4457 repository_id: repository_id.0,
4458 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4459 base: diff_type.base().to_string(),
4460 head: diff_type.head().to_string(),
4461 })
4462 .await?;
4463
4464 let entries = response
4465 .entries
4466 .into_iter()
4467 .filter_map(|entry| {
4468 let status = match entry.status() {
4469 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4470 proto::tree_diff_status::Status::Modified => {
4471 TreeDiffStatus::Modified {
4472 old: git::Oid::from_str(
4473 &entry.oid.context("missing oid").log_err()?,
4474 )
4475 .log_err()?,
4476 }
4477 }
4478 proto::tree_diff_status::Status::Deleted => {
4479 TreeDiffStatus::Deleted {
4480 old: git::Oid::from_str(
4481 &entry.oid.context("missing oid").log_err()?,
4482 )
4483 .log_err()?,
4484 }
4485 }
4486 };
4487 Some((
4488 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4489 status,
4490 ))
4491 })
4492 .collect();
4493
4494 Ok(TreeDiff { entries })
4495 }
4496 }
4497 })
4498 }
4499
4500 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4501 let id = self.id;
4502 self.send_job(None, move |repo, _cx| async move {
4503 match repo {
4504 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4505 RepositoryState::Remote { project_id, client } => {
4506 let response = client
4507 .request(proto::GitDiff {
4508 project_id: project_id.0,
4509 repository_id: id.to_proto(),
4510 diff_type: match diff_type {
4511 DiffType::HeadToIndex => {
4512 proto::git_diff::DiffType::HeadToIndex.into()
4513 }
4514 DiffType::HeadToWorktree => {
4515 proto::git_diff::DiffType::HeadToWorktree.into()
4516 }
4517 },
4518 })
4519 .await?;
4520
4521 Ok(response.diff)
4522 }
4523 }
4524 })
4525 }
4526
4527 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4528 let id = self.id;
4529 self.send_job(
4530 Some(format!("git switch -c {branch_name}").into()),
4531 move |repo, _cx| async move {
4532 match repo {
4533 RepositoryState::Local { backend, .. } => {
4534 backend.create_branch(branch_name).await
4535 }
4536 RepositoryState::Remote { project_id, client } => {
4537 client
4538 .request(proto::GitCreateBranch {
4539 project_id: project_id.0,
4540 repository_id: id.to_proto(),
4541 branch_name,
4542 })
4543 .await?;
4544
4545 Ok(())
4546 }
4547 }
4548 },
4549 )
4550 }
4551
4552 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4553 let id = self.id;
4554 self.send_job(
4555 Some(format!("git switch {branch_name}").into()),
4556 move |repo, _cx| async move {
4557 match repo {
4558 RepositoryState::Local { backend, .. } => {
4559 backend.change_branch(branch_name).await
4560 }
4561 RepositoryState::Remote { project_id, client } => {
4562 client
4563 .request(proto::GitChangeBranch {
4564 project_id: project_id.0,
4565 repository_id: id.to_proto(),
4566 branch_name,
4567 })
4568 .await?;
4569
4570 Ok(())
4571 }
4572 }
4573 },
4574 )
4575 }
4576
4577 pub fn rename_branch(
4578 &mut self,
4579 branch: String,
4580 new_name: String,
4581 ) -> oneshot::Receiver<Result<()>> {
4582 let id = self.id;
4583 self.send_job(
4584 Some(format!("git branch -m {branch} {new_name}").into()),
4585 move |repo, _cx| async move {
4586 match repo {
4587 RepositoryState::Local { backend, .. } => {
4588 backend.rename_branch(branch, new_name).await
4589 }
4590 RepositoryState::Remote { project_id, client } => {
4591 client
4592 .request(proto::GitRenameBranch {
4593 project_id: project_id.0,
4594 repository_id: id.to_proto(),
4595 branch,
4596 new_name,
4597 })
4598 .await?;
4599
4600 Ok(())
4601 }
4602 }
4603 },
4604 )
4605 }
4606
4607 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4608 let id = self.id;
4609 self.send_job(None, move |repo, _cx| async move {
4610 match repo {
4611 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4612 RepositoryState::Remote { project_id, client } => {
4613 let response = client
4614 .request(proto::CheckForPushedCommits {
4615 project_id: project_id.0,
4616 repository_id: id.to_proto(),
4617 })
4618 .await?;
4619
4620 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4621
4622 Ok(branches)
4623 }
4624 }
4625 })
4626 }
4627
4628 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4629 self.send_job(None, |repo, _cx| async move {
4630 match repo {
4631 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4632 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4633 }
4634 })
4635 }
4636
4637 pub fn restore_checkpoint(
4638 &mut self,
4639 checkpoint: GitRepositoryCheckpoint,
4640 ) -> oneshot::Receiver<Result<()>> {
4641 self.send_job(None, move |repo, _cx| async move {
4642 match repo {
4643 RepositoryState::Local { backend, .. } => {
4644 backend.restore_checkpoint(checkpoint).await
4645 }
4646 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4647 }
4648 })
4649 }
4650
4651 pub(crate) fn apply_remote_update(
4652 &mut self,
4653 update: proto::UpdateRepository,
4654 cx: &mut Context<Self>,
4655 ) -> Result<()> {
4656 let conflicted_paths = TreeSet::from_ordered_entries(
4657 update
4658 .current_merge_conflicts
4659 .into_iter()
4660 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4661 );
4662 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4663 let new_head_commit = update
4664 .head_commit_details
4665 .as_ref()
4666 .map(proto_to_commit_details);
4667 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4668 cx.emit(RepositoryEvent::BranchChanged)
4669 }
4670 self.snapshot.branch = new_branch;
4671 self.snapshot.head_commit = new_head_commit;
4672
4673 self.snapshot.merge.conflicted_paths = conflicted_paths;
4674 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4675 let new_stash_entries = GitStash {
4676 entries: update
4677 .stash_entries
4678 .iter()
4679 .filter_map(|entry| proto_to_stash(entry).ok())
4680 .collect(),
4681 };
4682 if self.snapshot.stash_entries != new_stash_entries {
4683 cx.emit(RepositoryEvent::StashEntriesChanged)
4684 }
4685 self.snapshot.stash_entries = new_stash_entries;
4686
4687 let edits = update
4688 .removed_statuses
4689 .into_iter()
4690 .filter_map(|path| {
4691 Some(sum_tree::Edit::Remove(PathKey(
4692 RelPath::from_proto(&path).log_err()?,
4693 )))
4694 })
4695 .chain(
4696 update
4697 .updated_statuses
4698 .into_iter()
4699 .filter_map(|updated_status| {
4700 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4701 }),
4702 )
4703 .collect::<Vec<_>>();
4704 if !edits.is_empty() {
4705 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4706 }
4707 self.snapshot.statuses_by_path.edit(edits, ());
4708 if update.is_last_update {
4709 self.snapshot.scan_id = update.scan_id;
4710 }
4711 Ok(())
4712 }
4713
4714 pub fn compare_checkpoints(
4715 &mut self,
4716 left: GitRepositoryCheckpoint,
4717 right: GitRepositoryCheckpoint,
4718 ) -> oneshot::Receiver<Result<bool>> {
4719 self.send_job(None, move |repo, _cx| async move {
4720 match repo {
4721 RepositoryState::Local { backend, .. } => {
4722 backend.compare_checkpoints(left, right).await
4723 }
4724 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4725 }
4726 })
4727 }
4728
4729 pub fn diff_checkpoints(
4730 &mut self,
4731 base_checkpoint: GitRepositoryCheckpoint,
4732 target_checkpoint: GitRepositoryCheckpoint,
4733 ) -> oneshot::Receiver<Result<String>> {
4734 self.send_job(None, move |repo, _cx| async move {
4735 match repo {
4736 RepositoryState::Local { backend, .. } => {
4737 backend
4738 .diff_checkpoints(base_checkpoint, target_checkpoint)
4739 .await
4740 }
4741 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4742 }
4743 })
4744 }
4745
4746 fn schedule_scan(
4747 &mut self,
4748 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4749 cx: &mut Context<Self>,
4750 ) {
4751 let this = cx.weak_entity();
4752 let _ = self.send_keyed_job(
4753 Some(GitJobKey::ReloadGitState),
4754 None,
4755 |state, mut cx| async move {
4756 log::debug!("run scheduled git status scan");
4757
4758 let Some(this) = this.upgrade() else {
4759 return Ok(());
4760 };
4761 let RepositoryState::Local { backend, .. } = state else {
4762 bail!("not a local repository")
4763 };
4764 let (snapshot, events) = this
4765 .update(&mut cx, |this, _| {
4766 this.paths_needing_status_update.clear();
4767 compute_snapshot(
4768 this.id,
4769 this.work_directory_abs_path.clone(),
4770 this.snapshot.clone(),
4771 backend.clone(),
4772 )
4773 })?
4774 .await?;
4775 this.update(&mut cx, |this, cx| {
4776 this.snapshot = snapshot.clone();
4777 for event in events {
4778 cx.emit(event);
4779 }
4780 })?;
4781 if let Some(updates_tx) = updates_tx {
4782 updates_tx
4783 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4784 .ok();
4785 }
4786 Ok(())
4787 },
4788 );
4789 }
4790
4791 fn spawn_local_git_worker(
4792 work_directory_abs_path: Arc<Path>,
4793 dot_git_abs_path: Arc<Path>,
4794 _repository_dir_abs_path: Arc<Path>,
4795 _common_dir_abs_path: Arc<Path>,
4796 project_environment: WeakEntity<ProjectEnvironment>,
4797 fs: Arc<dyn Fs>,
4798 cx: &mut Context<Self>,
4799 ) -> mpsc::UnboundedSender<GitJob> {
4800 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4801
4802 cx.spawn(async move |_, cx| {
4803 let environment = project_environment
4804 .upgrade()
4805 .context("missing project environment")?
4806 .update(cx, |project_environment, cx| {
4807 project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4808 })?
4809 .await
4810 .unwrap_or_else(|| {
4811 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4812 HashMap::default()
4813 });
4814 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4815 let backend = cx
4816 .background_spawn(async move {
4817 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4818 .or_else(|| which::which("git").ok());
4819 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4820 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4821 })
4822 .await?;
4823
4824 if let Some(git_hosting_provider_registry) =
4825 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4826 {
4827 git_hosting_providers::register_additional_providers(
4828 git_hosting_provider_registry,
4829 backend.clone(),
4830 );
4831 }
4832
4833 let state = RepositoryState::Local {
4834 backend,
4835 environment: Arc::new(environment),
4836 };
4837 let mut jobs = VecDeque::new();
4838 loop {
4839 while let Ok(Some(next_job)) = job_rx.try_next() {
4840 jobs.push_back(next_job);
4841 }
4842
4843 if let Some(job) = jobs.pop_front() {
4844 if let Some(current_key) = &job.key
4845 && jobs
4846 .iter()
4847 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4848 {
4849 continue;
4850 }
4851 (job.job)(state.clone(), cx).await;
4852 } else if let Some(job) = job_rx.next().await {
4853 jobs.push_back(job);
4854 } else {
4855 break;
4856 }
4857 }
4858 anyhow::Ok(())
4859 })
4860 .detach_and_log_err(cx);
4861
4862 job_tx
4863 }
4864
4865 fn spawn_remote_git_worker(
4866 project_id: ProjectId,
4867 client: AnyProtoClient,
4868 cx: &mut Context<Self>,
4869 ) -> mpsc::UnboundedSender<GitJob> {
4870 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4871
4872 cx.spawn(async move |_, cx| {
4873 let state = RepositoryState::Remote { project_id, client };
4874 let mut jobs = VecDeque::new();
4875 loop {
4876 while let Ok(Some(next_job)) = job_rx.try_next() {
4877 jobs.push_back(next_job);
4878 }
4879
4880 if let Some(job) = jobs.pop_front() {
4881 if let Some(current_key) = &job.key
4882 && jobs
4883 .iter()
4884 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4885 {
4886 continue;
4887 }
4888 (job.job)(state.clone(), cx).await;
4889 } else if let Some(job) = job_rx.next().await {
4890 jobs.push_back(job);
4891 } else {
4892 break;
4893 }
4894 }
4895 anyhow::Ok(())
4896 })
4897 .detach_and_log_err(cx);
4898
4899 job_tx
4900 }
4901
4902 fn load_staged_text(
4903 &mut self,
4904 buffer_id: BufferId,
4905 repo_path: RepoPath,
4906 cx: &App,
4907 ) -> Task<Result<Option<String>>> {
4908 let rx = self.send_job(None, move |state, _| async move {
4909 match state {
4910 RepositoryState::Local { backend, .. } => {
4911 anyhow::Ok(backend.load_index_text(repo_path).await)
4912 }
4913 RepositoryState::Remote { project_id, client } => {
4914 let response = client
4915 .request(proto::OpenUnstagedDiff {
4916 project_id: project_id.to_proto(),
4917 buffer_id: buffer_id.to_proto(),
4918 })
4919 .await?;
4920 Ok(response.staged_text)
4921 }
4922 }
4923 });
4924 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4925 }
4926
4927 fn load_committed_text(
4928 &mut self,
4929 buffer_id: BufferId,
4930 repo_path: RepoPath,
4931 cx: &App,
4932 ) -> Task<Result<DiffBasesChange>> {
4933 let rx = self.send_job(None, move |state, _| async move {
4934 match state {
4935 RepositoryState::Local { backend, .. } => {
4936 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4937 let staged_text = backend.load_index_text(repo_path).await;
4938 let diff_bases_change = if committed_text == staged_text {
4939 DiffBasesChange::SetBoth(committed_text)
4940 } else {
4941 DiffBasesChange::SetEach {
4942 index: staged_text,
4943 head: committed_text,
4944 }
4945 };
4946 anyhow::Ok(diff_bases_change)
4947 }
4948 RepositoryState::Remote { project_id, client } => {
4949 use proto::open_uncommitted_diff_response::Mode;
4950
4951 let response = client
4952 .request(proto::OpenUncommittedDiff {
4953 project_id: project_id.to_proto(),
4954 buffer_id: buffer_id.to_proto(),
4955 })
4956 .await?;
4957 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4958 let bases = match mode {
4959 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4960 Mode::IndexAndHead => DiffBasesChange::SetEach {
4961 head: response.committed_text,
4962 index: response.staged_text,
4963 },
4964 };
4965 Ok(bases)
4966 }
4967 }
4968 });
4969
4970 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4971 }
4972 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
4973 let repository_id = self.snapshot.id;
4974 let rx = self.send_job(None, move |state, _| async move {
4975 match state {
4976 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
4977 RepositoryState::Remote { client, project_id } => {
4978 let response = client
4979 .request(proto::GetBlobContent {
4980 project_id: project_id.to_proto(),
4981 repository_id: repository_id.0,
4982 oid: oid.to_string(),
4983 })
4984 .await?;
4985 Ok(response.content)
4986 }
4987 }
4988 });
4989 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4990 }
4991
4992 fn paths_changed(
4993 &mut self,
4994 paths: Vec<RepoPath>,
4995 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4996 cx: &mut Context<Self>,
4997 ) {
4998 self.paths_needing_status_update.extend(paths);
4999
5000 let this = cx.weak_entity();
5001 let _ = self.send_keyed_job(
5002 Some(GitJobKey::RefreshStatuses),
5003 None,
5004 |state, mut cx| async move {
5005 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5006 (
5007 this.snapshot.clone(),
5008 mem::take(&mut this.paths_needing_status_update),
5009 )
5010 })?;
5011 let RepositoryState::Local { backend, .. } = state else {
5012 bail!("not a local repository")
5013 };
5014
5015 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5016 if paths.is_empty() {
5017 return Ok(());
5018 }
5019 let statuses = backend.status(&paths).await?;
5020 let stash_entries = backend.stash_entries().await?;
5021
5022 let changed_path_statuses = cx
5023 .background_spawn(async move {
5024 let mut changed_path_statuses = Vec::new();
5025 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5026 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5027
5028 for (repo_path, status) in &*statuses.entries {
5029 changed_paths.remove(repo_path);
5030 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5031 && cursor.item().is_some_and(|entry| entry.status == *status)
5032 {
5033 continue;
5034 }
5035
5036 changed_path_statuses.push(Edit::Insert(StatusEntry {
5037 repo_path: repo_path.clone(),
5038 status: *status,
5039 }));
5040 }
5041 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5042 for path in changed_paths.into_iter() {
5043 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5044 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5045 }
5046 }
5047 changed_path_statuses
5048 })
5049 .await;
5050
5051 this.update(&mut cx, |this, cx| {
5052 if this.snapshot.stash_entries != stash_entries {
5053 cx.emit(RepositoryEvent::StashEntriesChanged);
5054 this.snapshot.stash_entries = stash_entries;
5055 }
5056
5057 if !changed_path_statuses.is_empty() {
5058 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5059 this.snapshot
5060 .statuses_by_path
5061 .edit(changed_path_statuses, ());
5062 this.snapshot.scan_id += 1;
5063 }
5064
5065 if let Some(updates_tx) = updates_tx {
5066 updates_tx
5067 .unbounded_send(DownstreamUpdate::UpdateRepository(
5068 this.snapshot.clone(),
5069 ))
5070 .ok();
5071 }
5072 })
5073 },
5074 );
5075 }
5076
5077 /// currently running git command and when it started
5078 pub fn current_job(&self) -> Option<JobInfo> {
5079 self.active_jobs.values().next().cloned()
5080 }
5081
5082 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5083 self.send_job(None, |_, _| async {})
5084 }
5085}
5086
5087fn get_permalink_in_rust_registry_src(
5088 provider_registry: Arc<GitHostingProviderRegistry>,
5089 path: PathBuf,
5090 selection: Range<u32>,
5091) -> Result<url::Url> {
5092 #[derive(Deserialize)]
5093 struct CargoVcsGit {
5094 sha1: String,
5095 }
5096
5097 #[derive(Deserialize)]
5098 struct CargoVcsInfo {
5099 git: CargoVcsGit,
5100 path_in_vcs: String,
5101 }
5102
5103 #[derive(Deserialize)]
5104 struct CargoPackage {
5105 repository: String,
5106 }
5107
5108 #[derive(Deserialize)]
5109 struct CargoToml {
5110 package: CargoPackage,
5111 }
5112
5113 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5114 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5115 Some((dir, json))
5116 }) else {
5117 bail!("No .cargo_vcs_info.json found in parent directories")
5118 };
5119 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5120 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5121 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5122 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5123 .context("parsing package.repository field of manifest")?;
5124 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5125 let permalink = provider.build_permalink(
5126 remote,
5127 BuildPermalinkParams::new(
5128 &cargo_vcs_info.git.sha1,
5129 &RepoPath(
5130 RelPath::new(&path, PathStyle::local())
5131 .context("invalid path")?
5132 .into_arc(),
5133 ),
5134 Some(selection),
5135 ),
5136 );
5137 Ok(permalink)
5138}
5139
5140fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5141 let Some(blame) = blame else {
5142 return proto::BlameBufferResponse {
5143 blame_response: None,
5144 };
5145 };
5146
5147 let entries = blame
5148 .entries
5149 .into_iter()
5150 .map(|entry| proto::BlameEntry {
5151 sha: entry.sha.as_bytes().into(),
5152 start_line: entry.range.start,
5153 end_line: entry.range.end,
5154 original_line_number: entry.original_line_number,
5155 author: entry.author,
5156 author_mail: entry.author_mail,
5157 author_time: entry.author_time,
5158 author_tz: entry.author_tz,
5159 committer: entry.committer_name,
5160 committer_mail: entry.committer_email,
5161 committer_time: entry.committer_time,
5162 committer_tz: entry.committer_tz,
5163 summary: entry.summary,
5164 previous: entry.previous,
5165 filename: entry.filename,
5166 })
5167 .collect::<Vec<_>>();
5168
5169 let messages = blame
5170 .messages
5171 .into_iter()
5172 .map(|(oid, message)| proto::CommitMessage {
5173 oid: oid.as_bytes().into(),
5174 message,
5175 })
5176 .collect::<Vec<_>>();
5177
5178 proto::BlameBufferResponse {
5179 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5180 entries,
5181 messages,
5182 remote_url: blame.remote_url,
5183 }),
5184 }
5185}
5186
5187fn deserialize_blame_buffer_response(
5188 response: proto::BlameBufferResponse,
5189) -> Option<git::blame::Blame> {
5190 let response = response.blame_response?;
5191 let entries = response
5192 .entries
5193 .into_iter()
5194 .filter_map(|entry| {
5195 Some(git::blame::BlameEntry {
5196 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5197 range: entry.start_line..entry.end_line,
5198 original_line_number: entry.original_line_number,
5199 committer_name: entry.committer,
5200 committer_time: entry.committer_time,
5201 committer_tz: entry.committer_tz,
5202 committer_email: entry.committer_mail,
5203 author: entry.author,
5204 author_mail: entry.author_mail,
5205 author_time: entry.author_time,
5206 author_tz: entry.author_tz,
5207 summary: entry.summary,
5208 previous: entry.previous,
5209 filename: entry.filename,
5210 })
5211 })
5212 .collect::<Vec<_>>();
5213
5214 let messages = response
5215 .messages
5216 .into_iter()
5217 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5218 .collect::<HashMap<_, _>>();
5219
5220 Some(Blame {
5221 entries,
5222 messages,
5223 remote_url: response.remote_url,
5224 })
5225}
5226
5227fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5228 proto::Branch {
5229 is_head: branch.is_head,
5230 ref_name: branch.ref_name.to_string(),
5231 unix_timestamp: branch
5232 .most_recent_commit
5233 .as_ref()
5234 .map(|commit| commit.commit_timestamp as u64),
5235 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5236 ref_name: upstream.ref_name.to_string(),
5237 tracking: upstream
5238 .tracking
5239 .status()
5240 .map(|upstream| proto::UpstreamTracking {
5241 ahead: upstream.ahead as u64,
5242 behind: upstream.behind as u64,
5243 }),
5244 }),
5245 most_recent_commit: branch
5246 .most_recent_commit
5247 .as_ref()
5248 .map(|commit| proto::CommitSummary {
5249 sha: commit.sha.to_string(),
5250 subject: commit.subject.to_string(),
5251 commit_timestamp: commit.commit_timestamp,
5252 author_name: commit.author_name.to_string(),
5253 }),
5254 }
5255}
5256
5257fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5258 git::repository::Branch {
5259 is_head: proto.is_head,
5260 ref_name: proto.ref_name.clone().into(),
5261 upstream: proto
5262 .upstream
5263 .as_ref()
5264 .map(|upstream| git::repository::Upstream {
5265 ref_name: upstream.ref_name.to_string().into(),
5266 tracking: upstream
5267 .tracking
5268 .as_ref()
5269 .map(|tracking| {
5270 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5271 ahead: tracking.ahead as u32,
5272 behind: tracking.behind as u32,
5273 })
5274 })
5275 .unwrap_or(git::repository::UpstreamTracking::Gone),
5276 }),
5277 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5278 git::repository::CommitSummary {
5279 sha: commit.sha.to_string().into(),
5280 subject: commit.subject.to_string().into(),
5281 commit_timestamp: commit.commit_timestamp,
5282 author_name: commit.author_name.to_string().into(),
5283 has_parent: true,
5284 }
5285 }),
5286 }
5287}
5288
5289fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5290 proto::GitCommitDetails {
5291 sha: commit.sha.to_string(),
5292 message: commit.message.to_string(),
5293 commit_timestamp: commit.commit_timestamp,
5294 author_email: commit.author_email.to_string(),
5295 author_name: commit.author_name.to_string(),
5296 }
5297}
5298
5299fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5300 CommitDetails {
5301 sha: proto.sha.clone().into(),
5302 message: proto.message.clone().into(),
5303 commit_timestamp: proto.commit_timestamp,
5304 author_email: proto.author_email.clone().into(),
5305 author_name: proto.author_name.clone().into(),
5306 }
5307}
5308
5309async fn compute_snapshot(
5310 id: RepositoryId,
5311 work_directory_abs_path: Arc<Path>,
5312 prev_snapshot: RepositorySnapshot,
5313 backend: Arc<dyn GitRepository>,
5314) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5315 let mut events = Vec::new();
5316 let branches = backend.branches().await?;
5317 let branch = branches.into_iter().find(|branch| branch.is_head);
5318 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5319 let stash_entries = backend.stash_entries().await?;
5320 let statuses_by_path = SumTree::from_iter(
5321 statuses
5322 .entries
5323 .iter()
5324 .map(|(repo_path, status)| StatusEntry {
5325 repo_path: repo_path.clone(),
5326 status: *status,
5327 }),
5328 (),
5329 );
5330 let (merge_details, merge_heads_changed) =
5331 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5332 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5333
5334 if merge_heads_changed {
5335 events.push(RepositoryEvent::MergeHeadsChanged);
5336 }
5337
5338 if statuses_by_path != prev_snapshot.statuses_by_path {
5339 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5340 }
5341
5342 // Useful when branch is None in detached head state
5343 let head_commit = match backend.head_sha().await {
5344 Some(head_sha) => backend.show(head_sha).await.log_err(),
5345 None => None,
5346 };
5347
5348 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5349 events.push(RepositoryEvent::BranchChanged);
5350 }
5351
5352 // Used by edit prediction data collection
5353 let remote_origin_url = backend.remote_url("origin");
5354 let remote_upstream_url = backend.remote_url("upstream");
5355
5356 let snapshot = RepositorySnapshot {
5357 id,
5358 statuses_by_path,
5359 work_directory_abs_path,
5360 path_style: prev_snapshot.path_style,
5361 scan_id: prev_snapshot.scan_id + 1,
5362 branch,
5363 head_commit,
5364 merge: merge_details,
5365 remote_origin_url,
5366 remote_upstream_url,
5367 stash_entries,
5368 };
5369
5370 Ok((snapshot, events))
5371}
5372
5373fn status_from_proto(
5374 simple_status: i32,
5375 status: Option<proto::GitFileStatus>,
5376) -> anyhow::Result<FileStatus> {
5377 use proto::git_file_status::Variant;
5378
5379 let Some(variant) = status.and_then(|status| status.variant) else {
5380 let code = proto::GitStatus::from_i32(simple_status)
5381 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5382 let result = match code {
5383 proto::GitStatus::Added => TrackedStatus {
5384 worktree_status: StatusCode::Added,
5385 index_status: StatusCode::Unmodified,
5386 }
5387 .into(),
5388 proto::GitStatus::Modified => TrackedStatus {
5389 worktree_status: StatusCode::Modified,
5390 index_status: StatusCode::Unmodified,
5391 }
5392 .into(),
5393 proto::GitStatus::Conflict => UnmergedStatus {
5394 first_head: UnmergedStatusCode::Updated,
5395 second_head: UnmergedStatusCode::Updated,
5396 }
5397 .into(),
5398 proto::GitStatus::Deleted => TrackedStatus {
5399 worktree_status: StatusCode::Deleted,
5400 index_status: StatusCode::Unmodified,
5401 }
5402 .into(),
5403 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5404 };
5405 return Ok(result);
5406 };
5407
5408 let result = match variant {
5409 Variant::Untracked(_) => FileStatus::Untracked,
5410 Variant::Ignored(_) => FileStatus::Ignored,
5411 Variant::Unmerged(unmerged) => {
5412 let [first_head, second_head] =
5413 [unmerged.first_head, unmerged.second_head].map(|head| {
5414 let code = proto::GitStatus::from_i32(head)
5415 .with_context(|| format!("Invalid git status code: {head}"))?;
5416 let result = match code {
5417 proto::GitStatus::Added => UnmergedStatusCode::Added,
5418 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5419 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5420 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5421 };
5422 Ok(result)
5423 });
5424 let [first_head, second_head] = [first_head?, second_head?];
5425 UnmergedStatus {
5426 first_head,
5427 second_head,
5428 }
5429 .into()
5430 }
5431 Variant::Tracked(tracked) => {
5432 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5433 .map(|status| {
5434 let code = proto::GitStatus::from_i32(status)
5435 .with_context(|| format!("Invalid git status code: {status}"))?;
5436 let result = match code {
5437 proto::GitStatus::Modified => StatusCode::Modified,
5438 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5439 proto::GitStatus::Added => StatusCode::Added,
5440 proto::GitStatus::Deleted => StatusCode::Deleted,
5441 proto::GitStatus::Renamed => StatusCode::Renamed,
5442 proto::GitStatus::Copied => StatusCode::Copied,
5443 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5444 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5445 };
5446 Ok(result)
5447 });
5448 let [index_status, worktree_status] = [index_status?, worktree_status?];
5449 TrackedStatus {
5450 index_status,
5451 worktree_status,
5452 }
5453 .into()
5454 }
5455 };
5456 Ok(result)
5457}
5458
5459fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5460 use proto::git_file_status::{Tracked, Unmerged, Variant};
5461
5462 let variant = match status {
5463 FileStatus::Untracked => Variant::Untracked(Default::default()),
5464 FileStatus::Ignored => Variant::Ignored(Default::default()),
5465 FileStatus::Unmerged(UnmergedStatus {
5466 first_head,
5467 second_head,
5468 }) => Variant::Unmerged(Unmerged {
5469 first_head: unmerged_status_to_proto(first_head),
5470 second_head: unmerged_status_to_proto(second_head),
5471 }),
5472 FileStatus::Tracked(TrackedStatus {
5473 index_status,
5474 worktree_status,
5475 }) => Variant::Tracked(Tracked {
5476 index_status: tracked_status_to_proto(index_status),
5477 worktree_status: tracked_status_to_proto(worktree_status),
5478 }),
5479 };
5480 proto::GitFileStatus {
5481 variant: Some(variant),
5482 }
5483}
5484
5485fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5486 match code {
5487 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5488 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5489 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5490 }
5491}
5492
5493fn tracked_status_to_proto(code: StatusCode) -> i32 {
5494 match code {
5495 StatusCode::Added => proto::GitStatus::Added as _,
5496 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5497 StatusCode::Modified => proto::GitStatus::Modified as _,
5498 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5499 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5500 StatusCode::Copied => proto::GitStatus::Copied as _,
5501 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5502 }
5503}