1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4
5use crate::{
6 ProjectEnvironment, ProjectItem, ProjectPath,
7 buffer_store::{BufferStore, BufferStoreEvent},
8 worktree_store::{WorktreeStore, WorktreeStoreEvent},
9};
10use anyhow::{Context as _, Result, anyhow, bail};
11use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
12use buffer_diff::{BufferDiff, BufferDiffEvent};
13use client::ProjectId;
14use collections::HashMap;
15pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
16use fs::Fs;
17use futures::{
18 FutureExt, StreamExt,
19 channel::{mpsc, oneshot},
20 future::{self, Shared},
21 stream::FuturesOrdered,
22};
23use git::{
24 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
25 blame::Blame,
26 parse_git_remote_url,
27 repository::{
28 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
29 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
30 ResetMode, UpstreamTrackingStatus,
31 },
32 stash::{GitStash, StashEntry},
33 status::{
34 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
35 UnmergedStatus, UnmergedStatusCode,
36 },
37};
38use gpui::{
39 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
40 WeakEntity,
41};
42use language::{
43 Buffer, BufferEvent, Language, LanguageRegistry,
44 proto::{deserialize_version, serialize_version},
45};
46use parking_lot::Mutex;
47use postage::stream::Stream as _;
48use rpc::{
49 AnyProtoClient, TypedEnvelope,
50 proto::{self, git_reset, split_repository_update},
51};
52use serde::Deserialize;
53use std::{
54 cmp::Ordering,
55 collections::{BTreeSet, VecDeque},
56 future::Future,
57 mem,
58 ops::Range,
59 path::{Path, PathBuf},
60 str::FromStr,
61 sync::{
62 Arc,
63 atomic::{self, AtomicU64},
64 },
65 time::Instant,
66};
67use sum_tree::{Edit, SumTree, TreeSet};
68use task::Shell;
69use text::{Bias, BufferId};
70use util::{
71 ResultExt, debug_panic,
72 paths::{PathStyle, SanitizedPath},
73 post_inc,
74 rel_path::RelPath,
75};
76use worktree::{
77 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
78 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
79};
80use zeroize::Zeroize;
81
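/// Tracks the git repositories associated with a project's worktrees, along
/// with per-buffer diff and merge-conflict state.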
82pub struct GitStore {
83 state: GitStoreState,
84 buffer_store: Entity<BufferStore>,
85 worktree_store: Entity<WorktreeStore>,
86 repositories: HashMap<RepositoryId, Entity<Repository>>,
87 active_repo_id: Option<RepositoryId>,
88 #[allow(clippy::type_complexity)]
89 loading_diffs:
90 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
91 diffs: HashMap<BufferId, Entity<BufferGitState>>,
92 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
93 _subscriptions: Vec<Subscription>,
94}
95
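/// The diffs for a single buffer that have been shared with a downstream peer.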
96#[derive(Default)]
97struct SharedDiffs {
98 unstaged: Option<Entity<BufferDiff>>,
99 uncommitted: Option<Entity<BufferDiff>>,
100}
101
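/// Git-related state for a single buffer: weak handles to its unstaged and
/// uncommitted diffs and its conflict set, plus the base texts and tasks used
/// to keep them up to date.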
102struct BufferGitState {
103 unstaged_diff: Option<WeakEntity<BufferDiff>>,
104 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
105 conflict_set: Option<WeakEntity<ConflictSet>>,
106 recalculate_diff_task: Option<Task<Result<()>>>,
107 reparse_conflict_markers_task: Option<Task<Result<()>>>,
108 language: Option<Arc<Language>>,
109 language_registry: Option<Arc<LanguageRegistry>>,
110 conflict_updated_futures: Vec<oneshot::Sender<()>>,
111 recalculating_tx: postage::watch::Sender<bool>,
112
113 /// These operation counts are used to ensure that head and index text
114 /// values read from the git repository are up-to-date with any hunk staging
115 /// operations that have been performed on the BufferDiff.
116 ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, once the new index text has been
    /// written to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// the operation count that prompted the write.
121 hunk_staging_operation_count: usize,
122 hunk_staging_operation_count_as_of_write: usize,
123
124 head_text: Option<Arc<String>>,
125 index_text: Option<Arc<String>>,
126 head_changed: bool,
127 index_changed: bool,
128 language_changed: bool,
129}
130
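/// Describes which of a buffer's diff base texts (index and/or HEAD) changed.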
131#[derive(Clone, Debug)]
132enum DiffBasesChange {
133 SetIndex(Option<String>),
134 SetHead(Option<String>),
135 SetEach {
136 index: Option<String>,
137 head: Option<String>,
138 },
139 SetBoth(Option<String>),
140}
141
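/// The kind of diff tracked for a buffer: against the index (unstaged) or
/// against HEAD (uncommitted).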
142#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
143enum DiffKind {
144 Unstaged,
145 Uncommitted,
146}
147
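/// Whether this store manages local repositories directly or proxies git
/// operations to an upstream (remote) project over RPC.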
148enum GitStoreState {
149 Local {
150 next_repository_id: Arc<AtomicU64>,
151 downstream: Option<LocalDownstreamState>,
152 project_environment: Entity<ProjectEnvironment>,
153 fs: Arc<dyn Fs>,
154 },
155 Remote {
156 upstream_client: AnyProtoClient,
157 upstream_project_id: u64,
158 downstream: Option<(AnyProtoClient, ProjectId)>,
159 },
160}
161
162enum DownstreamUpdate {
163 UpdateRepository(RepositorySnapshot),
164 RemoveRepository(RepositoryId),
165}
166
167struct LocalDownstreamState {
168 client: AnyProtoClient,
169 project_id: ProjectId,
170 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
171 _task: Task<Result<()>>,
172}
173
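/// A checkpoint of every repository in the store, keyed by working-directory
/// path.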
174#[derive(Clone, Debug)]
175pub struct GitStoreCheckpoint {
176 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
177}
178
179#[derive(Clone, Debug, PartialEq, Eq)]
180pub struct StatusEntry {
181 pub repo_path: RepoPath,
182 pub status: FileStatus,
183}
184
185impl StatusEntry {
186 fn to_proto(&self) -> proto::StatusEntry {
187 let simple_status = match self.status {
188 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
189 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
190 FileStatus::Tracked(TrackedStatus {
191 index_status,
192 worktree_status,
193 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
194 worktree_status
195 } else {
196 index_status
197 }),
198 };
199
200 proto::StatusEntry {
201 repo_path: self.repo_path.to_proto(),
202 simple_status,
203 status: Some(status_to_proto(self.status)),
204 }
205 }
206}
207
208impl TryFrom<proto::StatusEntry> for StatusEntry {
209 type Error = anyhow::Error;
210
211 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
212 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
213 let status = status_from_proto(value.simple_status, value.status)?;
214 Ok(Self { repo_path, status })
215 }
216}
217
218impl sum_tree::Item for StatusEntry {
219 type Summary = PathSummary<GitSummary>;
220
221 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
222 PathSummary {
223 max_path: self.repo_path.0.clone(),
224 item_summary: self.status.summary(),
225 }
226 }
227}
228
229impl sum_tree::KeyedItem for StatusEntry {
230 type Key = PathKey;
231
232 fn key(&self) -> Self::Key {
233 PathKey(self.repo_path.0.clone())
234 }
235}
236
237#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
238pub struct RepositoryId(pub u64);
239
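/// Details of an in-progress merge: the conflicted paths, the merge message,
/// and the merge heads.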
240#[derive(Clone, Debug, Default, PartialEq, Eq)]
241pub struct MergeDetails {
242 pub conflicted_paths: TreeSet<RepoPath>,
243 pub message: Option<SharedString>,
244 pub heads: Vec<Option<SharedString>>,
245}
246
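/// An immutable snapshot of a repository's state: file statuses, branch, head
/// commit, merge details, remote URLs, and stash entries.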
247#[derive(Clone, Debug, PartialEq, Eq)]
248pub struct RepositorySnapshot {
249 pub id: RepositoryId,
250 pub statuses_by_path: SumTree<StatusEntry>,
251 pub work_directory_abs_path: Arc<Path>,
252 pub path_style: PathStyle,
253 pub branch: Option<Branch>,
254 pub head_commit: Option<CommitDetails>,
255 pub scan_id: u64,
256 pub merge: MergeDetails,
257 pub remote_origin_url: Option<String>,
258 pub remote_upstream_url: Option<String>,
259 pub stash_entries: GitStash,
260}
261
262type JobId = u64;
263
264#[derive(Clone, Debug, PartialEq, Eq)]
265pub struct JobInfo {
266 pub start: Instant,
267 pub message: SharedString,
268}
269
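/// A live handle to a single git repository, combining the latest
/// [`RepositorySnapshot`] with a queue of git jobs that operate on it.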
270pub struct Repository {
271 this: WeakEntity<Self>,
272 snapshot: RepositorySnapshot,
273 commit_message_buffer: Option<Entity<Buffer>>,
274 git_store: WeakEntity<GitStore>,
275 // For a local repository, holds paths that have had worktree events since the last status scan completed,
276 // and that should be examined during the next status scan.
277 paths_needing_status_update: BTreeSet<RepoPath>,
278 job_sender: mpsc::UnboundedSender<GitJob>,
279 active_jobs: HashMap<JobId, JobInfo>,
280 job_id: JobId,
281 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
282 latest_askpass_id: u64,
283}
284
285impl std::ops::Deref for Repository {
286 type Target = RepositorySnapshot;
287
288 fn deref(&self) -> &Self::Target {
289 &self.snapshot
290 }
291}
292
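/// How git operations are performed for a repository: via a local git backend
/// and its environment, or via an RPC client for a remote project.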
293#[derive(Clone)]
294pub enum RepositoryState {
295 Local {
296 backend: Arc<dyn GitRepository>,
297 environment: Arc<HashMap<String, String>>,
298 },
299 Remote {
300 project_id: ProjectId,
301 client: AnyProtoClient,
302 },
303}
304
305#[derive(Clone, Debug, PartialEq, Eq)]
306pub enum RepositoryEvent {
307 StatusesChanged {
308 // TODO could report which statuses changed here
309 full_scan: bool,
310 },
311 MergeHeadsChanged,
312 BranchChanged,
313 StashEntriesChanged,
314}
315
316#[derive(Clone, Debug)]
317pub struct JobsUpdated;
318
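/// Events emitted by the [`GitStore`] as repositories are added, removed, or
/// updated.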
319#[derive(Debug)]
320pub enum GitStoreEvent {
321 ActiveRepositoryChanged(Option<RepositoryId>),
322 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
323 RepositoryAdded,
324 RepositoryRemoved(RepositoryId),
325 IndexWriteError(anyhow::Error),
326 JobsUpdated,
327 ConflictsUpdated,
328}
329
330impl EventEmitter<RepositoryEvent> for Repository {}
331impl EventEmitter<JobsUpdated> for Repository {}
332impl EventEmitter<GitStoreEvent> for GitStore {}
333
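/// A unit of git work to be run against a repository's [`RepositoryState`].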
334pub struct GitJob {
335 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
336 key: Option<GitJobKey>,
337}
338
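/// Identifies a class of git job, used to avoid queuing redundant work.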
339#[derive(Debug, PartialEq, Eq)]
340enum GitJobKey {
341 WriteIndex(RepoPath),
342 ReloadBufferDiffBases,
343 RefreshStatuses,
344 ReloadGitState,
345}
346
347impl GitStore {
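    /// Creates a [`GitStore`] that manages the local repositories discovered in
    /// the project's worktrees.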
348 pub fn local(
349 worktree_store: &Entity<WorktreeStore>,
350 buffer_store: Entity<BufferStore>,
351 environment: Entity<ProjectEnvironment>,
352 fs: Arc<dyn Fs>,
353 cx: &mut Context<Self>,
354 ) -> Self {
355 Self::new(
356 worktree_store.clone(),
357 buffer_store,
358 GitStoreState::Local {
359 next_repository_id: Arc::new(AtomicU64::new(1)),
360 downstream: None,
361 project_environment: environment,
362 fs,
363 },
364 cx,
365 )
366 }
367
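    /// Creates a [`GitStore`] that mirrors the repositories of an upstream
    /// (remote) project, forwarding git operations over the given client.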
368 pub fn remote(
369 worktree_store: &Entity<WorktreeStore>,
370 buffer_store: Entity<BufferStore>,
371 upstream_client: AnyProtoClient,
372 project_id: u64,
373 cx: &mut Context<Self>,
374 ) -> Self {
375 Self::new(
376 worktree_store.clone(),
377 buffer_store,
378 GitStoreState::Remote {
379 upstream_client,
380 upstream_project_id: project_id,
381 downstream: None,
382 },
383 cx,
384 )
385 }
386
387 fn new(
388 worktree_store: Entity<WorktreeStore>,
389 buffer_store: Entity<BufferStore>,
390 state: GitStoreState,
391 cx: &mut Context<Self>,
392 ) -> Self {
393 let _subscriptions = vec![
394 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
395 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
396 ];
397
398 GitStore {
399 state,
400 buffer_store,
401 worktree_store,
402 repositories: HashMap::default(),
403 active_repo_id: None,
404 _subscriptions,
405 loading_diffs: HashMap::default(),
406 shared_diffs: HashMap::default(),
407 diffs: HashMap::default(),
408 }
409 }
410
411 pub fn init(client: &AnyProtoClient) {
412 client.add_entity_request_handler(Self::handle_get_remotes);
413 client.add_entity_request_handler(Self::handle_get_branches);
414 client.add_entity_request_handler(Self::handle_get_default_branch);
415 client.add_entity_request_handler(Self::handle_change_branch);
416 client.add_entity_request_handler(Self::handle_create_branch);
417 client.add_entity_request_handler(Self::handle_rename_branch);
418 client.add_entity_request_handler(Self::handle_git_init);
419 client.add_entity_request_handler(Self::handle_push);
420 client.add_entity_request_handler(Self::handle_pull);
421 client.add_entity_request_handler(Self::handle_fetch);
422 client.add_entity_request_handler(Self::handle_stage);
423 client.add_entity_request_handler(Self::handle_unstage);
424 client.add_entity_request_handler(Self::handle_stash);
425 client.add_entity_request_handler(Self::handle_stash_pop);
426 client.add_entity_request_handler(Self::handle_stash_apply);
427 client.add_entity_request_handler(Self::handle_stash_drop);
428 client.add_entity_request_handler(Self::handle_commit);
429 client.add_entity_request_handler(Self::handle_reset);
430 client.add_entity_request_handler(Self::handle_show);
431 client.add_entity_request_handler(Self::handle_load_commit_diff);
432 client.add_entity_request_handler(Self::handle_checkout_files);
433 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
434 client.add_entity_request_handler(Self::handle_set_index_text);
435 client.add_entity_request_handler(Self::handle_askpass);
436 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
437 client.add_entity_request_handler(Self::handle_git_diff);
438 client.add_entity_request_handler(Self::handle_tree_diff);
439 client.add_entity_request_handler(Self::handle_get_blob_content);
440 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
441 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
442 client.add_entity_message_handler(Self::handle_update_diff_bases);
443 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
444 client.add_entity_request_handler(Self::handle_blame_buffer);
445 client.add_entity_message_handler(Self::handle_update_repository);
446 client.add_entity_message_handler(Self::handle_remove_repository);
447 client.add_entity_request_handler(Self::handle_git_clone);
448 }
449
450 pub fn is_local(&self) -> bool {
451 matches!(self.state, GitStoreState::Local { .. })
452 }
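
    /// Makes the repository containing the given project path, if any, the
    /// store's active repository.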
453 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
454 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
455 let id = repo.read(cx).id;
456 if self.active_repo_id != Some(id) {
457 self.active_repo_id = Some(id);
458 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
459 }
460 }
461 }
462
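    /// Starts relaying repository state to a downstream client for the shared
    /// project with the given id.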
463 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
464 match &mut self.state {
465 GitStoreState::Remote {
466 downstream: downstream_client,
467 ..
468 } => {
469 for repo in self.repositories.values() {
470 let update = repo.read(cx).snapshot.initial_update(project_id);
471 for update in split_repository_update(update) {
472 client.send(update).log_err();
473 }
474 }
475 *downstream_client = Some((client, ProjectId(project_id)));
476 }
477 GitStoreState::Local {
478 downstream: downstream_client,
479 ..
480 } => {
481 let mut snapshots = HashMap::default();
482 let (updates_tx, mut updates_rx) = mpsc::unbounded();
483 for repo in self.repositories.values() {
484 updates_tx
485 .unbounded_send(DownstreamUpdate::UpdateRepository(
486 repo.read(cx).snapshot.clone(),
487 ))
488 .ok();
489 }
490 *downstream_client = Some(LocalDownstreamState {
491 client: client.clone(),
492 project_id: ProjectId(project_id),
493 updates_tx,
494 _task: cx.spawn(async move |this, cx| {
495 cx.background_spawn(async move {
496 while let Some(update) = updates_rx.next().await {
497 match update {
498 DownstreamUpdate::UpdateRepository(snapshot) => {
499 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
500 {
501 let update =
502 snapshot.build_update(old_snapshot, project_id);
503 *old_snapshot = snapshot;
504 for update in split_repository_update(update) {
505 client.send(update)?;
506 }
507 } else {
508 let update = snapshot.initial_update(project_id);
509 for update in split_repository_update(update) {
510 client.send(update)?;
511 }
512 snapshots.insert(snapshot.id, snapshot);
513 }
514 }
515 DownstreamUpdate::RemoveRepository(id) => {
516 client.send(proto::RemoveRepository {
517 project_id,
518 id: id.to_proto(),
519 })?;
520 }
521 }
522 }
523 anyhow::Ok(())
524 })
525 .await
526 .ok();
527 this.update(cx, |this, _| {
528 if let GitStoreState::Local {
529 downstream: downstream_client,
530 ..
531 } = &mut this.state
532 {
533 downstream_client.take();
534 } else {
535 unreachable!("unshared called on remote store");
536 }
537 })
538 }),
539 });
540 }
541 }
542 }
543
544 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
545 match &mut self.state {
546 GitStoreState::Local {
547 downstream: downstream_client,
548 ..
549 } => {
550 downstream_client.take();
551 }
552 GitStoreState::Remote {
553 downstream: downstream_client,
554 ..
555 } => {
556 downstream_client.take();
557 }
558 }
559 self.shared_diffs.clear();
560 }
561
562 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
563 self.shared_diffs.remove(peer_id);
564 }
565
566 pub fn active_repository(&self) -> Option<Entity<Repository>> {
567 self.active_repo_id
568 .as_ref()
569 .map(|id| self.repositories[id].clone())
570 }
571
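    /// Returns the buffer's unstaged diff (buffer contents vs. the index),
    /// loading the staged text and creating the diff if necessary.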
572 pub fn open_unstaged_diff(
573 &mut self,
574 buffer: Entity<Buffer>,
575 cx: &mut Context<Self>,
576 ) -> Task<Result<Entity<BufferDiff>>> {
577 let buffer_id = buffer.read(cx).remote_id();
578 if let Some(diff_state) = self.diffs.get(&buffer_id)
579 && let Some(unstaged_diff) = diff_state
580 .read(cx)
581 .unstaged_diff
582 .as_ref()
583 .and_then(|weak| weak.upgrade())
584 {
585 if let Some(task) =
586 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
587 {
588 return cx.background_executor().spawn(async move {
589 task.await;
590 Ok(unstaged_diff)
591 });
592 }
593 return Task::ready(Ok(unstaged_diff));
594 }
595
596 let Some((repo, repo_path)) =
597 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
598 else {
599 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
600 };
601
602 let task = self
603 .loading_diffs
604 .entry((buffer_id, DiffKind::Unstaged))
605 .or_insert_with(|| {
606 let staged_text = repo.update(cx, |repo, cx| {
607 repo.load_staged_text(buffer_id, repo_path, cx)
608 });
609 cx.spawn(async move |this, cx| {
610 Self::open_diff_internal(
611 this,
612 DiffKind::Unstaged,
613 staged_text.await.map(DiffBasesChange::SetIndex),
614 buffer,
615 cx,
616 )
617 .await
618 .map_err(Arc::new)
619 })
620 .shared()
621 })
622 .clone();
623
624 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
625 }
626
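    /// Returns a diff of the buffer against the blob with the given object id
    /// (or no base text if `None`), with the buffer's unstaged diff attached as
    /// its secondary diff.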
627 pub fn open_diff_since(
628 &mut self,
629 oid: Option<git::Oid>,
630 buffer: Entity<Buffer>,
631 repo: Entity<Repository>,
632 languages: Arc<LanguageRegistry>,
633 cx: &mut Context<Self>,
634 ) -> Task<Result<Entity<BufferDiff>>> {
635 cx.spawn(async move |this, cx| {
636 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
637 let content = match oid {
638 None => None,
639 Some(oid) => Some(
640 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
641 .await?,
642 ),
643 };
644 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
645
646 buffer_diff
647 .update(cx, |buffer_diff, cx| {
648 buffer_diff.set_base_text(
649 content.map(Arc::new),
650 buffer_snapshot.language().cloned(),
651 Some(languages.clone()),
652 buffer_snapshot.text,
653 cx,
654 )
655 })?
656 .await?;
657 let unstaged_diff = this
658 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
659 .await?;
660 buffer_diff.update(cx, |buffer_diff, _| {
661 buffer_diff.set_secondary_diff(unstaged_diff);
662 })?;
663
664 this.update(cx, |_, cx| {
665 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
666 .detach();
667 })?;
668
669 Ok(buffer_diff)
670 })
671 }
672
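    /// Returns the buffer's uncommitted diff (buffer contents vs. HEAD),
    /// loading the committed text and creating the diff if necessary.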
673 pub fn open_uncommitted_diff(
674 &mut self,
675 buffer: Entity<Buffer>,
676 cx: &mut Context<Self>,
677 ) -> Task<Result<Entity<BufferDiff>>> {
678 let buffer_id = buffer.read(cx).remote_id();
679
680 if let Some(diff_state) = self.diffs.get(&buffer_id)
681 && let Some(uncommitted_diff) = diff_state
682 .read(cx)
683 .uncommitted_diff
684 .as_ref()
685 .and_then(|weak| weak.upgrade())
686 {
687 if let Some(task) =
688 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
689 {
690 return cx.background_executor().spawn(async move {
691 task.await;
692 Ok(uncommitted_diff)
693 });
694 }
695 return Task::ready(Ok(uncommitted_diff));
696 }
697
698 let Some((repo, repo_path)) =
699 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
700 else {
701 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
702 };
703
704 let task = self
705 .loading_diffs
706 .entry((buffer_id, DiffKind::Uncommitted))
707 .or_insert_with(|| {
708 let changes = repo.update(cx, |repo, cx| {
709 repo.load_committed_text(buffer_id, repo_path, cx)
710 });
711
712 cx.spawn(async move |this, cx| {
713 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
714 .await
715 .map_err(Arc::new)
716 })
717 .shared()
718 })
719 .clone();
720
721 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
722 }
723
724 async fn open_diff_internal(
725 this: WeakEntity<Self>,
726 kind: DiffKind,
727 texts: Result<DiffBasesChange>,
728 buffer_entity: Entity<Buffer>,
729 cx: &mut AsyncApp,
730 ) -> Result<Entity<BufferDiff>> {
731 let diff_bases_change = match texts {
732 Err(e) => {
733 this.update(cx, |this, cx| {
734 let buffer = buffer_entity.read(cx);
735 let buffer_id = buffer.remote_id();
736 this.loading_diffs.remove(&(buffer_id, kind));
737 })?;
738 return Err(e);
739 }
740 Ok(change) => change,
741 };
742
743 this.update(cx, |this, cx| {
744 let buffer = buffer_entity.read(cx);
745 let buffer_id = buffer.remote_id();
746 let language = buffer.language().cloned();
747 let language_registry = buffer.language_registry();
748 let text_snapshot = buffer.text_snapshot();
749 this.loading_diffs.remove(&(buffer_id, kind));
750
751 let git_store = cx.weak_entity();
752 let diff_state = this
753 .diffs
754 .entry(buffer_id)
755 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
756
757 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
758
759 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
760 diff_state.update(cx, |diff_state, cx| {
761 diff_state.language = language;
762 diff_state.language_registry = language_registry;
763
764 match kind {
765 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
766 DiffKind::Uncommitted => {
767 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
768 diff
769 } else {
770 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
771 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
772 unstaged_diff
773 };
774
775 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
776 diff_state.uncommitted_diff = Some(diff.downgrade())
777 }
778 }
779
780 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
781 let rx = diff_state.wait_for_recalculation();
782
783 anyhow::Ok(async move {
784 if let Some(rx) = rx {
785 rx.await;
786 }
787 Ok(diff)
788 })
789 })
790 })??
791 .await
792 }
793
794 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
795 let diff_state = self.diffs.get(&buffer_id)?;
796 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
797 }
798
799 pub fn get_uncommitted_diff(
800 &self,
801 buffer_id: BufferId,
802 cx: &App,
803 ) -> Option<Entity<BufferDiff>> {
804 let diff_state = self.diffs.get(&buffer_id)?;
805 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
806 }
807
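    /// Returns the conflict set for the buffer, creating it and reparsing the
    /// buffer's conflict markers if needed.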
808 pub fn open_conflict_set(
809 &mut self,
810 buffer: Entity<Buffer>,
811 cx: &mut Context<Self>,
812 ) -> Entity<ConflictSet> {
813 log::debug!("open conflict set");
814 let buffer_id = buffer.read(cx).remote_id();
815
816 if let Some(git_state) = self.diffs.get(&buffer_id)
817 && let Some(conflict_set) = git_state
818 .read(cx)
819 .conflict_set
820 .as_ref()
821 .and_then(|weak| weak.upgrade())
822 {
823 let conflict_set = conflict_set;
824 let buffer_snapshot = buffer.read(cx).text_snapshot();
825
826 git_state.update(cx, |state, cx| {
827 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
828 });
829
830 return conflict_set;
831 }
832
833 let is_unmerged = self
834 .repository_and_path_for_buffer_id(buffer_id, cx)
835 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
836 let git_store = cx.weak_entity();
837 let buffer_git_state = self
838 .diffs
839 .entry(buffer_id)
840 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
841 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
842
843 self._subscriptions
844 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
845 cx.emit(GitStoreEvent::ConflictsUpdated);
846 }));
847
848 buffer_git_state.update(cx, |state, cx| {
849 state.conflict_set = Some(conflict_set.downgrade());
850 let buffer_snapshot = buffer.read(cx).text_snapshot();
851 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
852 });
853
854 conflict_set
855 }
856
857 pub fn project_path_git_status(
858 &self,
859 project_path: &ProjectPath,
860 cx: &App,
861 ) -> Option<FileStatus> {
862 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
863 Some(repo.read(cx).status_for_path(&repo_path)?.status)
864 }
865
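    /// Takes a checkpoint of every repository in the store.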
866 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
867 let mut work_directory_abs_paths = Vec::new();
868 let mut checkpoints = Vec::new();
869 for repository in self.repositories.values() {
870 repository.update(cx, |repository, _| {
871 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
872 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
873 });
874 }
875
876 cx.background_executor().spawn(async move {
877 let checkpoints = future::try_join_all(checkpoints).await?;
878 Ok(GitStoreCheckpoint {
879 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
880 .into_iter()
881 .zip(checkpoints)
882 .collect(),
883 })
884 })
885 }
886
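    /// Restores the repositories recorded in the given checkpoint to their
    /// checkpointed states.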
887 pub fn restore_checkpoint(
888 &self,
889 checkpoint: GitStoreCheckpoint,
890 cx: &mut App,
891 ) -> Task<Result<()>> {
892 let repositories_by_work_dir_abs_path = self
893 .repositories
894 .values()
895 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
896 .collect::<HashMap<_, _>>();
897
898 let mut tasks = Vec::new();
899 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
900 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
901 let restore = repository.update(cx, |repository, _| {
902 repository.restore_checkpoint(checkpoint)
903 });
904 tasks.push(async move { restore.await? });
905 }
906 }
907 cx.background_spawn(async move {
908 future::try_join_all(tasks).await?;
909 Ok(())
910 })
911 }
912
913 /// Compares two checkpoints, returning true if they are equal.
914 pub fn compare_checkpoints(
915 &self,
916 left: GitStoreCheckpoint,
917 mut right: GitStoreCheckpoint,
918 cx: &mut App,
919 ) -> Task<Result<bool>> {
920 let repositories_by_work_dir_abs_path = self
921 .repositories
922 .values()
923 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
924 .collect::<HashMap<_, _>>();
925
926 let mut tasks = Vec::new();
927 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
928 if let Some(right_checkpoint) = right
929 .checkpoints_by_work_dir_abs_path
930 .remove(&work_dir_abs_path)
931 {
932 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
933 {
934 let compare = repository.update(cx, |repository, _| {
935 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
936 });
937
938 tasks.push(async move { compare.await? });
939 }
940 } else {
941 return Task::ready(Ok(false));
942 }
943 }
944 cx.background_spawn(async move {
945 Ok(future::try_join_all(tasks)
946 .await?
947 .into_iter()
948 .all(|result| result))
949 })
950 }
951
952 /// Blames a buffer.
953 pub fn blame_buffer(
954 &self,
955 buffer: &Entity<Buffer>,
956 version: Option<clock::Global>,
957 cx: &mut App,
958 ) -> Task<Result<Option<Blame>>> {
959 let buffer = buffer.read(cx);
960 let Some((repo, repo_path)) =
961 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
962 else {
963 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
964 };
965 let content = match &version {
966 Some(version) => buffer.rope_for_version(version),
967 None => buffer.as_rope().clone(),
968 };
969 let version = version.unwrap_or(buffer.version());
970 let buffer_id = buffer.remote_id();
971
972 let rx = repo.update(cx, |repo, _| {
973 repo.send_job(None, move |state, _| async move {
974 match state {
975 RepositoryState::Local { backend, .. } => backend
976 .blame(repo_path.clone(), content)
977 .await
978 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
979 .map(Some),
980 RepositoryState::Remote { project_id, client } => {
981 let response = client
982 .request(proto::BlameBuffer {
983 project_id: project_id.to_proto(),
984 buffer_id: buffer_id.into(),
985 version: serialize_version(&version),
986 })
987 .await?;
988 Ok(deserialize_blame_buffer_response(response))
989 }
990 }
991 })
992 });
993
994 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
995 }
996
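    /// Builds a permalink to the given line range of the buffer on its git
    /// hosting provider.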
997 pub fn get_permalink_to_line(
998 &self,
999 buffer: &Entity<Buffer>,
1000 selection: Range<u32>,
1001 cx: &mut App,
1002 ) -> Task<Result<url::Url>> {
1003 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1004 return Task::ready(Err(anyhow!("buffer has no file")));
1005 };
1006
1007 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1008 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1009 cx,
1010 ) else {
1011 // If we're not in a Git repo, check whether this is a Rust source
1012 // file in the Cargo registry (presumably opened with go-to-definition
1013 // from a normal Rust file). If so, we can put together a permalink
1014 // using crate metadata.
1015 if buffer
1016 .read(cx)
1017 .language()
1018 .is_none_or(|lang| lang.name() != "Rust".into())
1019 {
1020 return Task::ready(Err(anyhow!("no permalink available")));
1021 }
1022 let file_path = file.worktree.read(cx).absolutize(&file.path);
1023 return cx.spawn(async move |cx| {
1024 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1025 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1026 .context("no permalink available")
1027 });
1028 };
1029
1030 let buffer_id = buffer.read(cx).remote_id();
1031 let branch = repo.read(cx).branch.clone();
1032 let remote = branch
1033 .as_ref()
1034 .and_then(|b| b.upstream.as_ref())
1035 .and_then(|b| b.remote_name())
1036 .unwrap_or("origin")
1037 .to_string();
1038
1039 let rx = repo.update(cx, |repo, _| {
1040 repo.send_job(None, move |state, cx| async move {
1041 match state {
1042 RepositoryState::Local { backend, .. } => {
1043 let origin_url = backend
1044 .remote_url(&remote)
1045 .with_context(|| format!("remote \"{remote}\" not found"))?;
1046
1047 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1048
1049 let provider_registry =
1050 cx.update(GitHostingProviderRegistry::default_global)?;
1051
1052 let (provider, remote) =
1053 parse_git_remote_url(provider_registry, &origin_url)
1054 .context("parsing Git remote URL")?;
1055
1056 Ok(provider.build_permalink(
1057 remote,
1058 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1059 ))
1060 }
1061 RepositoryState::Remote { project_id, client } => {
1062 let response = client
1063 .request(proto::GetPermalinkToLine {
1064 project_id: project_id.to_proto(),
1065 buffer_id: buffer_id.into(),
1066 selection: Some(proto::Range {
1067 start: selection.start as u64,
1068 end: selection.end as u64,
1069 }),
1070 })
1071 .await?;
1072
1073 url::Url::parse(&response.permalink).context("failed to parse permalink")
1074 }
1075 }
1076 })
1077 });
1078 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1079 }
1080
1081 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1082 match &self.state {
1083 GitStoreState::Local {
1084 downstream: downstream_client,
1085 ..
1086 } => downstream_client
1087 .as_ref()
1088 .map(|state| (state.client.clone(), state.project_id)),
1089 GitStoreState::Remote {
1090 downstream: downstream_client,
1091 ..
1092 } => downstream_client.clone(),
1093 }
1094 }
1095
1096 fn upstream_client(&self) -> Option<AnyProtoClient> {
1097 match &self.state {
1098 GitStoreState::Local { .. } => None,
1099 GitStoreState::Remote {
1100 upstream_client, ..
1101 } => Some(upstream_client.clone()),
1102 }
1103 }
1104
1105 fn on_worktree_store_event(
1106 &mut self,
1107 worktree_store: Entity<WorktreeStore>,
1108 event: &WorktreeStoreEvent,
1109 cx: &mut Context<Self>,
1110 ) {
1111 let GitStoreState::Local {
1112 project_environment,
1113 downstream,
1114 next_repository_id,
1115 fs,
1116 } = &self.state
1117 else {
1118 return;
1119 };
1120
1121 match event {
1122 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1123 if let Some(worktree) = self
1124 .worktree_store
1125 .read(cx)
1126 .worktree_for_id(*worktree_id, cx)
1127 {
1128 let paths_by_git_repo =
1129 self.process_updated_entries(&worktree, updated_entries, cx);
1130 let downstream = downstream
1131 .as_ref()
1132 .map(|downstream| downstream.updates_tx.clone());
1133 cx.spawn(async move |_, cx| {
1134 let paths_by_git_repo = paths_by_git_repo.await;
1135 for (repo, paths) in paths_by_git_repo {
1136 repo.update(cx, |repo, cx| {
1137 repo.paths_changed(paths, downstream.clone(), cx);
1138 })
1139 .ok();
1140 }
1141 })
1142 .detach();
1143 }
1144 }
1145 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1146 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1147 else {
1148 return;
1149 };
1150 if !worktree.read(cx).is_visible() {
1151 log::debug!(
1152 "not adding repositories for local worktree {:?} because it's not visible",
1153 worktree.read(cx).abs_path()
1154 );
1155 return;
1156 }
1157 self.update_repositories_from_worktree(
1158 project_environment.clone(),
1159 next_repository_id.clone(),
1160 downstream
1161 .as_ref()
1162 .map(|downstream| downstream.updates_tx.clone()),
1163 changed_repos.clone(),
1164 fs.clone(),
1165 cx,
1166 );
1167 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1168 }
1169 _ => {}
1170 }
    }

    fn on_repository_event(
1173 &mut self,
1174 repo: Entity<Repository>,
1175 event: &RepositoryEvent,
1176 cx: &mut Context<Self>,
1177 ) {
1178 let id = repo.read(cx).id;
1179 let repo_snapshot = repo.read(cx).snapshot.clone();
1180 for (buffer_id, diff) in self.diffs.iter() {
1181 if let Some((buffer_repo, repo_path)) =
1182 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1183 && buffer_repo == repo
1184 {
1185 diff.update(cx, |diff, cx| {
1186 if let Some(conflict_set) = &diff.conflict_set {
1187 let conflict_status_changed =
1188 conflict_set.update(cx, |conflict_set, cx| {
1189 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1190 conflict_set.set_has_conflict(has_conflict, cx)
1191 })?;
1192 if conflict_status_changed {
1193 let buffer_store = self.buffer_store.read(cx);
1194 if let Some(buffer) = buffer_store.get(*buffer_id) {
1195 let _ = diff
1196 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1197 }
1198 }
1199 }
1200 anyhow::Ok(())
1201 })
1202 .ok();
1203 }
1204 }
1205 cx.emit(GitStoreEvent::RepositoryUpdated(
1206 id,
1207 event.clone(),
1208 self.active_repo_id == Some(id),
1209 ))
1210 }
1211
1212 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1213 cx.emit(GitStoreEvent::JobsUpdated)
1214 }
1215
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1217 fn update_repositories_from_worktree(
1218 &mut self,
1219 project_environment: Entity<ProjectEnvironment>,
1220 next_repository_id: Arc<AtomicU64>,
1221 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1222 updated_git_repositories: UpdatedGitRepositoriesSet,
1223 fs: Arc<dyn Fs>,
1224 cx: &mut Context<Self>,
1225 ) {
1226 let mut removed_ids = Vec::new();
1227 for update in updated_git_repositories.iter() {
1228 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1229 let existing_work_directory_abs_path =
1230 repo.read(cx).work_directory_abs_path.clone();
1231 Some(&existing_work_directory_abs_path)
1232 == update.old_work_directory_abs_path.as_ref()
1233 || Some(&existing_work_directory_abs_path)
1234 == update.new_work_directory_abs_path.as_ref()
1235 }) {
1236 if let Some(new_work_directory_abs_path) =
1237 update.new_work_directory_abs_path.clone()
1238 {
1239 existing.update(cx, |existing, cx| {
1240 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1241 existing.schedule_scan(updates_tx.clone(), cx);
1242 });
1243 } else {
1244 removed_ids.push(*id);
1245 }
1246 } else if let UpdatedGitRepository {
1247 new_work_directory_abs_path: Some(work_directory_abs_path),
1248 dot_git_abs_path: Some(dot_git_abs_path),
1249 repository_dir_abs_path: Some(repository_dir_abs_path),
1250 common_dir_abs_path: Some(common_dir_abs_path),
1251 ..
1252 } = update
1253 {
1254 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1255 let git_store = cx.weak_entity();
1256 let repo = cx.new(|cx| {
1257 let mut repo = Repository::local(
1258 id,
1259 work_directory_abs_path.clone(),
1260 dot_git_abs_path.clone(),
1261 repository_dir_abs_path.clone(),
1262 common_dir_abs_path.clone(),
1263 project_environment.downgrade(),
1264 fs.clone(),
1265 git_store,
1266 cx,
1267 );
1268 repo.schedule_scan(updates_tx.clone(), cx);
1269 repo
1270 });
1271 self._subscriptions
1272 .push(cx.subscribe(&repo, Self::on_repository_event));
1273 self._subscriptions
1274 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1275 self.repositories.insert(id, repo);
1276 cx.emit(GitStoreEvent::RepositoryAdded);
1277 self.active_repo_id.get_or_insert_with(|| {
1278 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1279 id
1280 });
1281 }
1282 }
1283
1284 for id in removed_ids {
1285 if self.active_repo_id == Some(id) {
1286 self.active_repo_id = None;
1287 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1288 }
1289 self.repositories.remove(&id);
1290 if let Some(updates_tx) = updates_tx.as_ref() {
1291 updates_tx
1292 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1293 .ok();
1294 }
1295 }
1296 }
1297
1298 fn on_buffer_store_event(
1299 &mut self,
1300 _: Entity<BufferStore>,
1301 event: &BufferStoreEvent,
1302 cx: &mut Context<Self>,
1303 ) {
1304 match event {
1305 BufferStoreEvent::BufferAdded(buffer) => {
1306 cx.subscribe(buffer, |this, buffer, event, cx| {
1307 if let BufferEvent::LanguageChanged = event {
1308 let buffer_id = buffer.read(cx).remote_id();
1309 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1310 diff_state.update(cx, |diff_state, cx| {
1311 diff_state.buffer_language_changed(buffer, cx);
1312 });
1313 }
1314 }
1315 })
1316 .detach();
1317 }
1318 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1319 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1320 diffs.remove(buffer_id);
1321 }
1322 }
1323 BufferStoreEvent::BufferDropped(buffer_id) => {
1324 self.diffs.remove(buffer_id);
1325 for diffs in self.shared_diffs.values_mut() {
1326 diffs.remove(buffer_id);
1327 }
1328 }
1329
1330 _ => {}
1331 }
1332 }
1333
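    /// Recalculates diffs and conflict markers for the given buffers, returning
    /// a future that resolves once the recalculations have completed.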
1334 pub fn recalculate_buffer_diffs(
1335 &mut self,
1336 buffers: Vec<Entity<Buffer>>,
1337 cx: &mut Context<Self>,
1338 ) -> impl Future<Output = ()> + use<> {
1339 let mut futures = Vec::new();
1340 for buffer in buffers {
1341 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1342 let buffer = buffer.read(cx).text_snapshot();
1343 diff_state.update(cx, |diff_state, cx| {
1344 diff_state.recalculate_diffs(buffer.clone(), cx);
1345 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1346 });
1347 futures.push(diff_state.update(cx, |diff_state, cx| {
1348 diff_state
1349 .reparse_conflict_markers(buffer, cx)
1350 .map(|_| {})
1351 .boxed()
1352 }));
1353 }
1354 }
1355 async move {
1356 futures::future::join_all(futures).await;
1357 }
1358 }
1359
1360 fn on_buffer_diff_event(
1361 &mut self,
1362 diff: Entity<buffer_diff::BufferDiff>,
1363 event: &BufferDiffEvent,
1364 cx: &mut Context<Self>,
1365 ) {
1366 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1367 let buffer_id = diff.read(cx).buffer_id;
1368 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1369 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1370 diff_state.hunk_staging_operation_count += 1;
1371 diff_state.hunk_staging_operation_count
1372 });
1373 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1374 let recv = repo.update(cx, |repo, cx| {
1375 log::debug!("hunks changed for {}", path.as_unix_str());
1376 repo.spawn_set_index_text_job(
1377 path,
1378 new_index_text.as_ref().map(|rope| rope.to_string()),
1379 Some(hunk_staging_operation_count),
1380 cx,
1381 )
1382 });
1383 let diff = diff.downgrade();
1384 cx.spawn(async move |this, cx| {
1385 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1386 diff.update(cx, |diff, cx| {
1387 diff.clear_pending_hunks(cx);
1388 })
1389 .ok();
1390 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1391 .ok();
1392 }
1393 })
1394 .detach();
1395 }
1396 }
1397 }
1398 }
1399
1400 fn local_worktree_git_repos_changed(
1401 &mut self,
1402 worktree: Entity<Worktree>,
1403 changed_repos: &UpdatedGitRepositoriesSet,
1404 cx: &mut Context<Self>,
1405 ) {
1406 log::debug!("local worktree repos changed");
1407 debug_assert!(worktree.read(cx).is_local());
1408
1409 for repository in self.repositories.values() {
1410 repository.update(cx, |repository, cx| {
1411 let repo_abs_path = &repository.work_directory_abs_path;
1412 if changed_repos.iter().any(|update| {
1413 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1414 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1415 }) {
1416 repository.reload_buffer_diff_bases(cx);
1417 }
1418 });
1419 }
1420 }
1421
1422 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1423 &self.repositories
1424 }
1425
1426 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1427 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1428 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1429 Some(status.status)
1430 }
1431
1432 pub fn repository_and_path_for_buffer_id(
1433 &self,
1434 buffer_id: BufferId,
1435 cx: &App,
1436 ) -> Option<(Entity<Repository>, RepoPath)> {
1437 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1438 let project_path = buffer.read(cx).project_path(cx)?;
1439 self.repository_and_path_for_project_path(&project_path, cx)
1440 }
1441
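    /// Returns the innermost repository containing the given project path, along
    /// with that path expressed relative to the repository's working directory.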
1442 pub fn repository_and_path_for_project_path(
1443 &self,
1444 path: &ProjectPath,
1445 cx: &App,
1446 ) -> Option<(Entity<Repository>, RepoPath)> {
1447 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1448 self.repositories
1449 .values()
1450 .filter_map(|repo| {
1451 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1452 Some((repo.clone(), repo_path))
1453 })
1454 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1455 }
1456
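    /// Initializes a new git repository at the given path.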
1457 pub fn git_init(
1458 &self,
1459 path: Arc<Path>,
1460 fallback_branch_name: String,
1461 cx: &App,
1462 ) -> Task<Result<()>> {
1463 match &self.state {
1464 GitStoreState::Local { fs, .. } => {
1465 let fs = fs.clone();
1466 cx.background_executor()
1467 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1468 }
1469 GitStoreState::Remote {
1470 upstream_client,
1471 upstream_project_id: project_id,
1472 ..
1473 } => {
1474 let client = upstream_client.clone();
1475 let project_id = *project_id;
1476 cx.background_executor().spawn(async move {
1477 client
1478 .request(proto::GitInit {
                            project_id,
1480 abs_path: path.to_string_lossy().into_owned(),
1481 fallback_branch_name,
1482 })
1483 .await?;
1484 Ok(())
1485 })
1486 }
1487 }
1488 }
1489
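    /// Clones the given remote repository into the given path.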
1490 pub fn git_clone(
1491 &self,
1492 repo: String,
1493 path: impl Into<Arc<std::path::Path>>,
1494 cx: &App,
1495 ) -> Task<Result<()>> {
1496 let path = path.into();
1497 match &self.state {
1498 GitStoreState::Local { fs, .. } => {
1499 let fs = fs.clone();
1500 cx.background_executor()
1501 .spawn(async move { fs.git_clone(&repo, &path).await })
1502 }
1503 GitStoreState::Remote {
1504 upstream_client,
1505 upstream_project_id,
1506 ..
1507 } => {
1508 if upstream_client.is_via_collab() {
1509 return Task::ready(Err(anyhow!(
1510 "Git Clone isn't supported for project guests"
1511 )));
1512 }
1513 let request = upstream_client.request(proto::GitClone {
1514 project_id: *upstream_project_id,
1515 abs_path: path.to_string_lossy().into_owned(),
1516 remote_repo: repo,
1517 });
1518
1519 cx.background_spawn(async move {
1520 let result = request.await?;
1521
1522 match result.success {
1523 true => Ok(()),
1524 false => Err(anyhow!("Git Clone failed")),
1525 }
1526 })
1527 }
1528 }
1529 }
1530
1531 async fn handle_update_repository(
1532 this: Entity<Self>,
1533 envelope: TypedEnvelope<proto::UpdateRepository>,
1534 mut cx: AsyncApp,
1535 ) -> Result<()> {
1536 this.update(&mut cx, |this, cx| {
1537 let path_style = this.worktree_store.read(cx).path_style();
1538 let mut update = envelope.payload;
1539
1540 let id = RepositoryId::from_proto(update.id);
1541 let client = this.upstream_client().context("no upstream client")?;
1542
1543 let mut repo_subscription = None;
1544 let repo = this.repositories.entry(id).or_insert_with(|| {
1545 let git_store = cx.weak_entity();
1546 let repo = cx.new(|cx| {
1547 Repository::remote(
1548 id,
1549 Path::new(&update.abs_path).into(),
1550 path_style,
1551 ProjectId(update.project_id),
1552 client,
1553 git_store,
1554 cx,
1555 )
1556 });
1557 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1558 cx.emit(GitStoreEvent::RepositoryAdded);
1559 repo
1560 });
1561 this._subscriptions.extend(repo_subscription);
1562
1563 repo.update(cx, {
1564 let update = update.clone();
1565 |repo, cx| repo.apply_remote_update(update, cx)
1566 })?;
1567
1568 this.active_repo_id.get_or_insert_with(|| {
1569 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1570 id
1571 });
1572
1573 if let Some((client, project_id)) = this.downstream_client() {
1574 update.project_id = project_id.to_proto();
1575 client.send(update).log_err();
1576 }
1577 Ok(())
1578 })?
1579 }
1580
1581 async fn handle_remove_repository(
1582 this: Entity<Self>,
1583 envelope: TypedEnvelope<proto::RemoveRepository>,
1584 mut cx: AsyncApp,
1585 ) -> Result<()> {
1586 this.update(&mut cx, |this, cx| {
1587 let mut update = envelope.payload;
1588 let id = RepositoryId::from_proto(update.id);
1589 this.repositories.remove(&id);
1590 if let Some((client, project_id)) = this.downstream_client() {
1591 update.project_id = project_id.to_proto();
1592 client.send(update).log_err();
1593 }
1594 if this.active_repo_id == Some(id) {
1595 this.active_repo_id = None;
1596 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1597 }
1598 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1599 })
1600 }
1601
1602 async fn handle_git_init(
1603 this: Entity<Self>,
1604 envelope: TypedEnvelope<proto::GitInit>,
1605 cx: AsyncApp,
1606 ) -> Result<proto::Ack> {
1607 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1608 let name = envelope.payload.fallback_branch_name;
1609 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1610 .await?;
1611
1612 Ok(proto::Ack {})
1613 }
1614
1615 async fn handle_git_clone(
1616 this: Entity<Self>,
1617 envelope: TypedEnvelope<proto::GitClone>,
1618 cx: AsyncApp,
1619 ) -> Result<proto::GitCloneResponse> {
1620 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1621 let repo_name = envelope.payload.remote_repo;
1622 let result = cx
1623 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1624 .await;
1625
1626 Ok(proto::GitCloneResponse {
1627 success: result.is_ok(),
1628 })
1629 }
1630
1631 async fn handle_fetch(
1632 this: Entity<Self>,
1633 envelope: TypedEnvelope<proto::Fetch>,
1634 mut cx: AsyncApp,
1635 ) -> Result<proto::RemoteMessageResponse> {
1636 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1637 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1638 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1639 let askpass_id = envelope.payload.askpass_id;
1640
1641 let askpass = make_remote_delegate(
1642 this,
1643 envelope.payload.project_id,
1644 repository_id,
1645 askpass_id,
1646 &mut cx,
1647 );
1648
1649 let remote_output = repository_handle
1650 .update(&mut cx, |repository_handle, cx| {
1651 repository_handle.fetch(fetch_options, askpass, cx)
1652 })?
1653 .await??;
1654
1655 Ok(proto::RemoteMessageResponse {
1656 stdout: remote_output.stdout,
1657 stderr: remote_output.stderr,
1658 })
1659 }
1660
1661 async fn handle_push(
1662 this: Entity<Self>,
1663 envelope: TypedEnvelope<proto::Push>,
1664 mut cx: AsyncApp,
1665 ) -> Result<proto::RemoteMessageResponse> {
1666 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1667 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1668
1669 let askpass_id = envelope.payload.askpass_id;
1670 let askpass = make_remote_delegate(
1671 this,
1672 envelope.payload.project_id,
1673 repository_id,
1674 askpass_id,
1675 &mut cx,
1676 );
1677
1678 let options = envelope
1679 .payload
1680 .options
1681 .as_ref()
1682 .map(|_| match envelope.payload.options() {
1683 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1684 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1685 });
1686
1687 let branch_name = envelope.payload.branch_name.into();
1688 let remote_name = envelope.payload.remote_name.into();
1689
1690 let remote_output = repository_handle
1691 .update(&mut cx, |repository_handle, cx| {
1692 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1693 })?
1694 .await??;
1695 Ok(proto::RemoteMessageResponse {
1696 stdout: remote_output.stdout,
1697 stderr: remote_output.stderr,
1698 })
1699 }
1700
1701 async fn handle_pull(
1702 this: Entity<Self>,
1703 envelope: TypedEnvelope<proto::Pull>,
1704 mut cx: AsyncApp,
1705 ) -> Result<proto::RemoteMessageResponse> {
1706 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1707 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1708 let askpass_id = envelope.payload.askpass_id;
1709 let askpass = make_remote_delegate(
1710 this,
1711 envelope.payload.project_id,
1712 repository_id,
1713 askpass_id,
1714 &mut cx,
1715 );
1716
1717 let branch_name = envelope.payload.branch_name.into();
1718 let remote_name = envelope.payload.remote_name.into();
1719
1720 let remote_message = repository_handle
1721 .update(&mut cx, |repository_handle, cx| {
1722 repository_handle.pull(branch_name, remote_name, askpass, cx)
1723 })?
1724 .await??;
1725
1726 Ok(proto::RemoteMessageResponse {
1727 stdout: remote_message.stdout,
1728 stderr: remote_message.stderr,
1729 })
1730 }
1731
1732 async fn handle_stage(
1733 this: Entity<Self>,
1734 envelope: TypedEnvelope<proto::Stage>,
1735 mut cx: AsyncApp,
1736 ) -> Result<proto::Ack> {
1737 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1738 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1739
1740 let entries = envelope
1741 .payload
1742 .paths
1743 .into_iter()
1744 .map(|path| RepoPath::new(&path))
1745 .collect::<Result<Vec<_>>>()?;
1746
1747 repository_handle
1748 .update(&mut cx, |repository_handle, cx| {
1749 repository_handle.stage_entries(entries, cx)
1750 })?
1751 .await?;
1752 Ok(proto::Ack {})
1753 }
1754
1755 async fn handle_unstage(
1756 this: Entity<Self>,
1757 envelope: TypedEnvelope<proto::Unstage>,
1758 mut cx: AsyncApp,
1759 ) -> Result<proto::Ack> {
1760 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1761 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1762
1763 let entries = envelope
1764 .payload
1765 .paths
1766 .into_iter()
1767 .map(|path| RepoPath::new(&path))
1768 .collect::<Result<Vec<_>>>()?;
1769
1770 repository_handle
1771 .update(&mut cx, |repository_handle, cx| {
1772 repository_handle.unstage_entries(entries, cx)
1773 })?
1774 .await?;
1775
1776 Ok(proto::Ack {})
1777 }
1778
1779 async fn handle_stash(
1780 this: Entity<Self>,
1781 envelope: TypedEnvelope<proto::Stash>,
1782 mut cx: AsyncApp,
1783 ) -> Result<proto::Ack> {
1784 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1785 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1786
1787 let entries = envelope
1788 .payload
1789 .paths
1790 .into_iter()
1791 .map(|path| RepoPath::new(&path))
1792 .collect::<Result<Vec<_>>>()?;
1793
1794 repository_handle
1795 .update(&mut cx, |repository_handle, cx| {
1796 repository_handle.stash_entries(entries, cx)
1797 })?
1798 .await?;
1799
1800 Ok(proto::Ack {})
1801 }
1802
1803 async fn handle_stash_pop(
1804 this: Entity<Self>,
1805 envelope: TypedEnvelope<proto::StashPop>,
1806 mut cx: AsyncApp,
1807 ) -> Result<proto::Ack> {
1808 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1809 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1810 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1811
1812 repository_handle
1813 .update(&mut cx, |repository_handle, cx| {
1814 repository_handle.stash_pop(stash_index, cx)
1815 })?
1816 .await?;
1817
1818 Ok(proto::Ack {})
1819 }
1820
1821 async fn handle_stash_apply(
1822 this: Entity<Self>,
1823 envelope: TypedEnvelope<proto::StashApply>,
1824 mut cx: AsyncApp,
1825 ) -> Result<proto::Ack> {
1826 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1827 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1828 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1829
1830 repository_handle
1831 .update(&mut cx, |repository_handle, cx| {
1832 repository_handle.stash_apply(stash_index, cx)
1833 })?
1834 .await?;
1835
1836 Ok(proto::Ack {})
1837 }
1838
1839 async fn handle_stash_drop(
1840 this: Entity<Self>,
1841 envelope: TypedEnvelope<proto::StashDrop>,
1842 mut cx: AsyncApp,
1843 ) -> Result<proto::Ack> {
1844 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1845 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1846 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1847
1848 repository_handle
1849 .update(&mut cx, |repository_handle, cx| {
1850 repository_handle.stash_drop(stash_index, cx)
1851 })?
1852 .await??;
1853
1854 Ok(proto::Ack {})
1855 }
1856
1857 async fn handle_set_index_text(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::SetIndexText>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::Ack> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1865
1866 repository_handle
1867 .update(&mut cx, |repository_handle, cx| {
1868 repository_handle.spawn_set_index_text_job(
1869 repo_path,
1870 envelope.payload.text,
1871 None,
1872 cx,
1873 )
1874 })?
1875 .await??;
1876 Ok(proto::Ack {})
1877 }
1878
1879 async fn handle_commit(
1880 this: Entity<Self>,
1881 envelope: TypedEnvelope<proto::Commit>,
1882 mut cx: AsyncApp,
1883 ) -> Result<proto::Ack> {
1884 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1885 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1886
1887 let message = SharedString::from(envelope.payload.message);
1888 let name = envelope.payload.name.map(SharedString::from);
1889 let email = envelope.payload.email.map(SharedString::from);
1890 let options = envelope.payload.options.unwrap_or_default();
1891
1892 repository_handle
1893 .update(&mut cx, |repository_handle, cx| {
1894 repository_handle.commit(
1895 message,
1896 name.zip(email),
1897 CommitOptions {
1898 amend: options.amend,
1899 signoff: options.signoff,
1900 },
1901 cx,
1902 )
1903 })?
1904 .await??;
1905 Ok(proto::Ack {})
1906 }
1907
1908 async fn handle_get_remotes(
1909 this: Entity<Self>,
1910 envelope: TypedEnvelope<proto::GetRemotes>,
1911 mut cx: AsyncApp,
1912 ) -> Result<proto::GetRemotesResponse> {
1913 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1914 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1915
1916 let branch_name = envelope.payload.branch_name;
1917
1918 let remotes = repository_handle
1919 .update(&mut cx, |repository_handle, _| {
1920 repository_handle.get_remotes(branch_name)
1921 })?
1922 .await??;
1923
1924 Ok(proto::GetRemotesResponse {
1925 remotes: remotes
1926 .into_iter()
1927 .map(|remotes| proto::get_remotes_response::Remote {
1928 name: remotes.name.to_string(),
1929 })
1930 .collect::<Vec<_>>(),
1931 })
1932 }
1933
1934 async fn handle_get_branches(
1935 this: Entity<Self>,
1936 envelope: TypedEnvelope<proto::GitGetBranches>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::GitBranchesResponse> {
1939 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1940 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1941
1942 let branches = repository_handle
1943 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1944 .await??;
1945
1946 Ok(proto::GitBranchesResponse {
1947 branches: branches
1948 .into_iter()
1949 .map(|branch| branch_to_proto(&branch))
1950 .collect::<Vec<_>>(),
1951 })
1952 }

1953 async fn handle_get_default_branch(
1954 this: Entity<Self>,
1955 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1956 mut cx: AsyncApp,
1957 ) -> Result<proto::GetDefaultBranchResponse> {
1958 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1959 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1960
1961 let branch = repository_handle
1962 .update(&mut cx, |repository_handle, _| {
1963 repository_handle.default_branch()
1964 })?
1965 .await??
1966 .map(Into::into);
1967
1968 Ok(proto::GetDefaultBranchResponse { branch })
1969 }

1970 async fn handle_create_branch(
1971 this: Entity<Self>,
1972 envelope: TypedEnvelope<proto::GitCreateBranch>,
1973 mut cx: AsyncApp,
1974 ) -> Result<proto::Ack> {
1975 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1976 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1977 let branch_name = envelope.payload.branch_name;
1978
1979 repository_handle
1980 .update(&mut cx, |repository_handle, _| {
1981 repository_handle.create_branch(branch_name)
1982 })?
1983 .await??;
1984
1985 Ok(proto::Ack {})
1986 }
1987
1988 async fn handle_change_branch(
1989 this: Entity<Self>,
1990 envelope: TypedEnvelope<proto::GitChangeBranch>,
1991 mut cx: AsyncApp,
1992 ) -> Result<proto::Ack> {
1993 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1994 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1995 let branch_name = envelope.payload.branch_name;
1996
1997 repository_handle
1998 .update(&mut cx, |repository_handle, _| {
1999 repository_handle.change_branch(branch_name)
2000 })?
2001 .await??;
2002
2003 Ok(proto::Ack {})
2004 }
2005
2006 async fn handle_rename_branch(
2007 this: Entity<Self>,
2008 envelope: TypedEnvelope<proto::GitRenameBranch>,
2009 mut cx: AsyncApp,
2010 ) -> Result<proto::Ack> {
2011 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2012 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2013 let branch = envelope.payload.branch;
2014 let new_name = envelope.payload.new_name;
2015
2016 repository_handle
2017 .update(&mut cx, |repository_handle, _| {
2018 repository_handle.rename_branch(branch, new_name)
2019 })?
2020 .await??;
2021
2022 Ok(proto::Ack {})
2023 }
2024
2025 async fn handle_show(
2026 this: Entity<Self>,
2027 envelope: TypedEnvelope<proto::GitShow>,
2028 mut cx: AsyncApp,
2029 ) -> Result<proto::GitCommitDetails> {
2030 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2031 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2032
2033 let commit = repository_handle
2034 .update(&mut cx, |repository_handle, _| {
2035 repository_handle.show(envelope.payload.commit)
2036 })?
2037 .await??;
2038 Ok(proto::GitCommitDetails {
2039 sha: commit.sha.into(),
2040 message: commit.message.into(),
2041 commit_timestamp: commit.commit_timestamp,
2042 author_email: commit.author_email.into(),
2043 author_name: commit.author_name.into(),
2044 })
2045 }
2046
2047 async fn handle_load_commit_diff(
2048 this: Entity<Self>,
2049 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2050 mut cx: AsyncApp,
2051 ) -> Result<proto::LoadCommitDiffResponse> {
2052 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2053 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2054
2055 let commit_diff = repository_handle
2056 .update(&mut cx, |repository_handle, _| {
2057 repository_handle.load_commit_diff(envelope.payload.commit)
2058 })?
2059 .await??;
2060 Ok(proto::LoadCommitDiffResponse {
2061 files: commit_diff
2062 .files
2063 .into_iter()
2064 .map(|file| proto::CommitFile {
2065 path: file.path.to_proto(),
2066 old_text: file.old_text,
2067 new_text: file.new_text,
2068 })
2069 .collect(),
2070 })
2071 }
2072
2073 async fn handle_reset(
2074 this: Entity<Self>,
2075 envelope: TypedEnvelope<proto::GitReset>,
2076 mut cx: AsyncApp,
2077 ) -> Result<proto::Ack> {
2078 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2079 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2080
2081 let mode = match envelope.payload.mode() {
2082 git_reset::ResetMode::Soft => ResetMode::Soft,
2083 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2084 };
2085
2086 repository_handle
2087 .update(&mut cx, |repository_handle, cx| {
2088 repository_handle.reset(envelope.payload.commit, mode, cx)
2089 })?
2090 .await??;
2091 Ok(proto::Ack {})
2092 }
2093
2094 async fn handle_checkout_files(
2095 this: Entity<Self>,
2096 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2097 mut cx: AsyncApp,
2098 ) -> Result<proto::Ack> {
2099 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2100 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2101 let paths = envelope
2102 .payload
2103 .paths
2104 .iter()
2105 .map(|s| RepoPath::from_proto(s))
2106 .collect::<Result<Vec<_>>>()?;
2107
2108 repository_handle
2109 .update(&mut cx, |repository_handle, cx| {
2110 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2111 })?
2112 .await??;
2113 Ok(proto::Ack {})
2114 }
2115
2116 async fn handle_open_commit_message_buffer(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::OpenBufferResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123 let buffer = repository
2124 .update(&mut cx, |repository, cx| {
2125 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2126 })?
2127 .await?;
2128
2129 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2130 this.update(&mut cx, |this, cx| {
2131 this.buffer_store.update(cx, |buffer_store, cx| {
2132 buffer_store
2133 .create_buffer_for_peer(
2134 &buffer,
2135 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2136 cx,
2137 )
2138 .detach_and_log_err(cx);
2139 })
2140 })?;
2141
2142 Ok(proto::OpenBufferResponse {
2143 buffer_id: buffer_id.to_proto(),
2144 })
2145 }
2146
2147 async fn handle_askpass(
2148 this: Entity<Self>,
2149 envelope: TypedEnvelope<proto::AskPassRequest>,
2150 mut cx: AsyncApp,
2151 ) -> Result<proto::AskPassResponse> {
2152 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2153 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2154
2155 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2156 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2157 debug_panic!("no askpass found");
2158 anyhow::bail!("no askpass found");
2159 };
2160
2161 let response = askpass
2162 .ask_password(envelope.payload.prompt)
2163 .await
2164 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2165
2166 delegates
2167 .lock()
2168 .insert(envelope.payload.askpass_id, askpass);
2169
2170 // Note: the askpass password is decrypted here and sent back over the RPC connection unencrypted;
// the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker below acknowledges that tradeoff.
2171 Ok(proto::AskPassResponse {
2172 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2173 })
2174 }
2175
2176 async fn handle_check_for_pushed_commits(
2177 this: Entity<Self>,
2178 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2179 mut cx: AsyncApp,
2180 ) -> Result<proto::CheckForPushedCommitsResponse> {
2181 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2182 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2183
2184 let branches = repository_handle
2185 .update(&mut cx, |repository_handle, _| {
2186 repository_handle.check_for_pushed_commits()
2187 })?
2188 .await??;
2189 Ok(proto::CheckForPushedCommitsResponse {
2190 pushed_to: branches
2191 .into_iter()
2192 .map(|commit| commit.to_string())
2193 .collect(),
2194 })
2195 }
2196
2197 async fn handle_git_diff(
2198 this: Entity<Self>,
2199 envelope: TypedEnvelope<proto::GitDiff>,
2200 mut cx: AsyncApp,
2201 ) -> Result<proto::GitDiffResponse> {
2202 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2203 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2204 let diff_type = match envelope.payload.diff_type() {
2205 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2206 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2207 };
2208
2209 let mut diff = repository_handle
2210 .update(&mut cx, |repository_handle, cx| {
2211 repository_handle.diff(diff_type, cx)
2212 })?
2213 .await??;
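// Cap the diff payload sent back over RPC. `chars().take(ONE_MB)` bounds the character
// count, so for multi-byte text the byte length may slightly exceed one megabyte.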
2214 const ONE_MB: usize = 1_000_000;
2215 if diff.len() > ONE_MB {
2216 diff = diff.chars().take(ONE_MB).collect()
2217 }
2218
2219 Ok(proto::GitDiffResponse { diff })
2220 }
2221
2222 async fn handle_tree_diff(
2223 this: Entity<Self>,
2224 request: TypedEnvelope<proto::GetTreeDiff>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::GetTreeDiffResponse> {
2227 let repository_id = RepositoryId(request.payload.repository_id);
2228 let diff_type = if request.payload.is_merge {
2229 DiffTreeType::MergeBase {
2230 base: request.payload.base.into(),
2231 head: request.payload.head.into(),
2232 }
2233 } else {
2234 DiffTreeType::Since {
2235 base: request.payload.base.into(),
2236 head: request.payload.head.into(),
2237 }
2238 };
2239
2240 let diff = this
2241 .update(&mut cx, |this, cx| {
2242 let repository = this.repositories().get(&repository_id)?;
2243 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2244 })?
2245 .context("missing repository")?
2246 .await??;
2247
2248 Ok(proto::GetTreeDiffResponse {
2249 entries: diff
2250 .entries
2251 .into_iter()
2252 .map(|(path, status)| proto::TreeDiffStatus {
2253 path: path.0.to_proto(),
2254 status: match status {
2255 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2256 TreeDiffStatus::Modified { .. } => {
2257 proto::tree_diff_status::Status::Modified.into()
2258 }
2259 TreeDiffStatus::Deleted { .. } => {
2260 proto::tree_diff_status::Status::Deleted.into()
2261 }
2262 },
2263 oid: match status {
2264 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2265 Some(old.to_string())
2266 }
2267 TreeDiffStatus::Added => None,
2268 },
2269 })
2270 .collect(),
2271 })
2272 }
2273
2274 async fn handle_get_blob_content(
2275 this: Entity<Self>,
2276 request: TypedEnvelope<proto::GetBlobContent>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::GetBlobContentResponse> {
2279 let oid = git::Oid::from_str(&request.payload.oid)?;
2280 let repository_id = RepositoryId(request.payload.repository_id);
2281 let content = this
2282 .update(&mut cx, |this, cx| {
2283 let repository = this.repositories().get(&repository_id)?;
2284 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2285 })?
2286 .context("missing repository")?
2287 .await?;
2288 Ok(proto::GetBlobContentResponse { content })
2289 }
2290
2291 async fn handle_open_unstaged_diff(
2292 this: Entity<Self>,
2293 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::OpenUnstagedDiffResponse> {
2296 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2297 let diff = this
2298 .update(&mut cx, |this, cx| {
2299 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2300 Some(this.open_unstaged_diff(buffer, cx))
2301 })?
2302 .context("missing buffer")?
2303 .await?;
2304 this.update(&mut cx, |this, _| {
2305 let shared_diffs = this
2306 .shared_diffs
2307 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2308 .or_default();
2309 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2310 })?;
2311 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2312 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2313 }
2314
2315 async fn handle_open_uncommitted_diff(
2316 this: Entity<Self>,
2317 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2318 mut cx: AsyncApp,
2319 ) -> Result<proto::OpenUncommittedDiffResponse> {
2320 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2321 let diff = this
2322 .update(&mut cx, |this, cx| {
2323 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2324 Some(this.open_uncommitted_diff(buffer, cx))
2325 })?
2326 .context("missing buffer")?
2327 .await?;
2328 this.update(&mut cx, |this, _| {
2329 let shared_diffs = this
2330 .shared_diffs
2331 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2332 .or_default();
2333 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2334 })?;
2335 diff.read_with(&cx, |diff, cx| {
2336 use proto::open_uncommitted_diff_response::Mode;
2337
2338 let unstaged_diff = diff.secondary_diff();
2339 let index_snapshot = unstaged_diff.and_then(|diff| {
2340 let diff = diff.read(cx);
2341 diff.base_text_exists().then(|| diff.base_text())
2342 });
2343
2344 let mode;
2345 let staged_text;
2346 let committed_text;
2347 if diff.base_text_exists() {
2348 let committed_snapshot = diff.base_text();
2349 committed_text = Some(committed_snapshot.text());
2350 if let Some(index_text) = index_snapshot {
2351 if index_text.remote_id() == committed_snapshot.remote_id() {
2352 mode = Mode::IndexMatchesHead;
2353 staged_text = None;
2354 } else {
2355 mode = Mode::IndexAndHead;
2356 staged_text = Some(index_text.text());
2357 }
2358 } else {
2359 mode = Mode::IndexAndHead;
2360 staged_text = None;
2361 }
2362 } else {
2363 mode = Mode::IndexAndHead;
2364 committed_text = None;
2365 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2366 }
2367
2368 proto::OpenUncommittedDiffResponse {
2369 committed_text,
2370 staged_text,
2371 mode: mode.into(),
2372 }
2373 })
2374 }
2375
2376 async fn handle_update_diff_bases(
2377 this: Entity<Self>,
2378 request: TypedEnvelope<proto::UpdateDiffBases>,
2379 mut cx: AsyncApp,
2380 ) -> Result<()> {
2381 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2382 this.update(&mut cx, |this, cx| {
2383 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2384 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2385 {
2386 let buffer = buffer.read(cx).text_snapshot();
2387 diff_state.update(cx, |diff_state, cx| {
2388 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2389 })
2390 }
2391 })
2392 }
2393
2394 async fn handle_blame_buffer(
2395 this: Entity<Self>,
2396 envelope: TypedEnvelope<proto::BlameBuffer>,
2397 mut cx: AsyncApp,
2398 ) -> Result<proto::BlameBufferResponse> {
2399 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2400 let version = deserialize_version(&envelope.payload.version);
2401 let buffer = this.read_with(&cx, |this, cx| {
2402 this.buffer_store.read(cx).get_existing(buffer_id)
2403 })??;
2404 buffer
2405 .update(&mut cx, |buffer, _| {
2406 buffer.wait_for_version(version.clone())
2407 })?
2408 .await?;
2409 let blame = this
2410 .update(&mut cx, |this, cx| {
2411 this.blame_buffer(&buffer, Some(version), cx)
2412 })?
2413 .await?;
2414 Ok(serialize_blame_buffer_response(blame))
2415 }
2416
2417 async fn handle_get_permalink_to_line(
2418 this: Entity<Self>,
2419 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2420 mut cx: AsyncApp,
2421 ) -> Result<proto::GetPermalinkToLineResponse> {
2422 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2424 let selection = {
2425 let proto_selection = envelope
2426 .payload
2427 .selection
2428 .context("no selection to get permalink for defined")?;
2429 proto_selection.start as u32..proto_selection.end as u32
2430 };
2431 let buffer = this.read_with(&cx, |this, cx| {
2432 this.buffer_store.read(cx).get_existing(buffer_id)
2433 })??;
2434 let permalink = this
2435 .update(&mut cx, |this, cx| {
2436 this.get_permalink_to_line(&buffer, selection, cx)
2437 })?
2438 .await?;
2439 Ok(proto::GetPermalinkToLineResponse {
2440 permalink: permalink.to_string(),
2441 })
2442 }
2443
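/// Resolves the repository referenced by an incoming RPC request, returning an error if it
/// is no longer tracked by this `GitStore`.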
2444 fn repository_for_request(
2445 this: &Entity<Self>,
2446 id: RepositoryId,
2447 cx: &mut AsyncApp,
2448 ) -> Result<Entity<Repository>> {
2449 this.read_with(cx, |this, _| {
2450 this.repositories
2451 .get(&id)
2452 .context("missing repository handle")
2453 .cloned()
2454 })?
2455 }
2456
2457 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2458 self.repositories
2459 .iter()
2460 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2461 .collect()
2462 }
2463
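/// Maps a batch of updated worktree entries to the repositories that contain them, assigning
/// each path to its innermost repository. The matching runs on the background executor and
/// yields repo-relative paths grouped by repository.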
2464 fn process_updated_entries(
2465 &self,
2466 worktree: &Entity<Worktree>,
2467 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2468 cx: &mut App,
2469 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2470 let path_style = worktree.read(cx).path_style();
2471 let mut repo_paths = self
2472 .repositories
2473 .values()
2474 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2475 .collect::<Vec<_>>();
2476 let mut entries: Vec<_> = updated_entries
2477 .iter()
2478 .map(|(path, _, _)| path.clone())
2479 .collect();
2480 entries.sort();
2481 let worktree = worktree.read(cx);
2482
2483 let entries = entries
2484 .into_iter()
2485 .map(|path| worktree.absolutize(&path))
2486 .collect::<Arc<[_]>>();
2487
2488 let executor = cx.background_executor().clone();
2489 cx.background_executor().spawn(async move {
2490 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2491 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2492 let mut tasks = FuturesOrdered::new();
2493 for (repo_path, repo) in repo_paths.into_iter().rev() {
2494 let entries = entries.clone();
2495 let task = executor.spawn(async move {
2496 // Find all repository paths that belong to this repo
2497 let mut ix = entries.partition_point(|path| path < &*repo_path);
2498 if ix == entries.len() {
2499 return None;
2500 };
2501
2502 let mut paths = Vec::new();
2503 // All paths prefixed by a given repo will constitute a contiguous range.
2504 while let Some(path) = entries.get(ix)
2505 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2506 &repo_path, path, path_style,
2507 )
2508 {
2509 paths.push((repo_path, ix));
2510 ix += 1;
2511 }
2512 if paths.is_empty() {
2513 None
2514 } else {
2515 Some((repo, paths))
2516 }
2517 });
2518 tasks.push_back(task);
2519 }
2520
2521 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2522 let mut path_was_used = vec![false; entries.len()];
2523 let tasks = tasks.collect::<Vec<_>>().await;
2524 // Tasks were spawned in reverse-sorted order, so deeper (more specific) repositories come first.
2525 // We always want to assign a path to its innermost repository.
2526 for t in tasks {
2527 let Some((repo, paths)) = t else {
2528 continue;
2529 };
2530 let entry = paths_by_git_repo.entry(repo).or_default();
2531 for (repo_path, ix) in paths {
2532 if path_was_used[ix] {
2533 continue;
2534 }
2535 path_was_used[ix] = true;
2536 entry.push(repo_path);
2537 }
2538 }
2539
2540 paths_by_git_repo
2541 })
2542 }
2543}
2544
2545impl BufferGitState {
2546 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2547 Self {
2548 unstaged_diff: Default::default(),
2549 uncommitted_diff: Default::default(),
2550 recalculate_diff_task: Default::default(),
2551 language: Default::default(),
2552 language_registry: Default::default(),
2553 recalculating_tx: postage::watch::channel_with(false).0,
2554 hunk_staging_operation_count: 0,
2555 hunk_staging_operation_count_as_of_write: 0,
2556 head_text: Default::default(),
2557 index_text: Default::default(),
2558 head_changed: Default::default(),
2559 index_changed: Default::default(),
2560 language_changed: Default::default(),
2561 conflict_updated_futures: Default::default(),
2562 conflict_set: Default::default(),
2563 reparse_conflict_markers_task: Default::default(),
2564 }
2565 }
2566
2567 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2568 self.language = buffer.read(cx).language().cloned();
2569 self.language_changed = true;
2570 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2571 }
2572
2573 fn reparse_conflict_markers(
2574 &mut self,
2575 buffer: text::BufferSnapshot,
2576 cx: &mut Context<Self>,
2577 ) -> oneshot::Receiver<()> {
2578 let (tx, rx) = oneshot::channel();
2579
2580 let Some(conflict_set) = self
2581 .conflict_set
2582 .as_ref()
2583 .and_then(|conflict_set| conflict_set.upgrade())
2584 else {
2585 return rx;
2586 };
2587
2588 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2589 if conflict_set.has_conflict {
2590 Some(conflict_set.snapshot())
2591 } else {
2592 None
2593 }
2594 });
2595
2596 if let Some(old_snapshot) = old_snapshot {
2597 self.conflict_updated_futures.push(tx);
2598 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2599 let (snapshot, changed_range) = cx
2600 .background_spawn(async move {
2601 let new_snapshot = ConflictSet::parse(&buffer);
2602 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2603 (new_snapshot, changed_range)
2604 })
2605 .await;
2606 this.update(cx, |this, cx| {
2607 if let Some(conflict_set) = &this.conflict_set {
2608 conflict_set
2609 .update(cx, |conflict_set, cx| {
2610 conflict_set.set_snapshot(snapshot, changed_range, cx);
2611 })
2612 .ok();
2613 }
2614 let futures = std::mem::take(&mut this.conflict_updated_futures);
2615 for tx in futures {
2616 tx.send(()).ok();
2617 }
2618 })
2619 }))
2620 }
2621
2622 rx
2623 }
2624
2625 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2626 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2627 }
2628
2629 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2630 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2631 }
2632
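/// Applies an `UpdateDiffBases` message received over RPC: the message mode selects which
/// base texts (index, HEAD, or both) changed, and the diffs are then recalculated.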
2633 fn handle_base_texts_updated(
2634 &mut self,
2635 buffer: text::BufferSnapshot,
2636 message: proto::UpdateDiffBases,
2637 cx: &mut Context<Self>,
2638 ) {
2639 use proto::update_diff_bases::Mode;
2640
2641 let Some(mode) = Mode::from_i32(message.mode) else {
2642 return;
2643 };
2644
2645 let diff_bases_change = match mode {
2646 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2647 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2648 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2649 Mode::IndexAndHead => DiffBasesChange::SetEach {
2650 index: message.staged_text,
2651 head: message.committed_text,
2652 },
2653 };
2654
2655 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2656 }
2657
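/// If a diff recalculation is in progress, returns a future that resolves once it finishes;
/// otherwise returns `None`.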
2658 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2659 if *self.recalculating_tx.borrow() {
2660 let mut rx = self.recalculating_tx.subscribe();
2661 Some(async move {
2662 loop {
2663 let is_recalculating = rx.recv().await;
2664 if is_recalculating != Some(true) {
2665 break;
2666 }
2667 }
2668 })
2669 } else {
2670 None
2671 }
2672 }
2673
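/// Records new index and/or HEAD base texts (normalizing their line endings) according to the
/// given change, then triggers a diff recalculation against the buffer snapshot.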
2674 fn diff_bases_changed(
2675 &mut self,
2676 buffer: text::BufferSnapshot,
2677 diff_bases_change: Option<DiffBasesChange>,
2678 cx: &mut Context<Self>,
2679 ) {
2680 match diff_bases_change {
2681 Some(DiffBasesChange::SetIndex(index)) => {
2682 self.index_text = index.map(|mut index| {
2683 text::LineEnding::normalize(&mut index);
2684 Arc::new(index)
2685 });
2686 self.index_changed = true;
2687 }
2688 Some(DiffBasesChange::SetHead(head)) => {
2689 self.head_text = head.map(|mut head| {
2690 text::LineEnding::normalize(&mut head);
2691 Arc::new(head)
2692 });
2693 self.head_changed = true;
2694 }
2695 Some(DiffBasesChange::SetBoth(text)) => {
2696 let text = text.map(|mut text| {
2697 text::LineEnding::normalize(&mut text);
2698 Arc::new(text)
2699 });
2700 self.head_text = text.clone();
2701 self.index_text = text;
2702 self.head_changed = true;
2703 self.index_changed = true;
2704 }
2705 Some(DiffBasesChange::SetEach { index, head }) => {
2706 self.index_text = index.map(|mut index| {
2707 text::LineEnding::normalize(&mut index);
2708 Arc::new(index)
2709 });
2710 self.index_changed = true;
2711 self.head_text = head.map(|mut head| {
2712 text::LineEnding::normalize(&mut head);
2713 Arc::new(head)
2714 });
2715 self.head_changed = true;
2716 }
2717 None => {}
2718 }
2719
2720 self.recalculate_diffs(buffer, cx)
2721 }
2722
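/// Recomputes the unstaged and uncommitted diffs for the buffer on a background task. When the
/// index matches HEAD, the unstaged diff snapshot is reused for the uncommitted diff. The
/// update is abandoned if hunk staging operations are still settling; a later recalculation
/// picks up the settled index state.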
2723 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2724 *self.recalculating_tx.borrow_mut() = true;
2725
2726 let language = self.language.clone();
2727 let language_registry = self.language_registry.clone();
2728 let unstaged_diff = self.unstaged_diff();
2729 let uncommitted_diff = self.uncommitted_diff();
2730 let head = self.head_text.clone();
2731 let index = self.index_text.clone();
2732 let index_changed = self.index_changed;
2733 let head_changed = self.head_changed;
2734 let language_changed = self.language_changed;
2735 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2736 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2737 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2738 (None, None) => true,
2739 _ => false,
2740 };
2741 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2742 log::debug!(
2743 "start recalculating diffs for buffer {}",
2744 buffer.remote_id()
2745 );
2746
2747 let mut new_unstaged_diff = None;
2748 if let Some(unstaged_diff) = &unstaged_diff {
2749 new_unstaged_diff = Some(
2750 BufferDiff::update_diff(
2751 unstaged_diff.clone(),
2752 buffer.clone(),
2753 index,
2754 index_changed,
2755 language_changed,
2756 language.clone(),
2757 language_registry.clone(),
2758 cx,
2759 )
2760 .await?,
2761 );
2762 }
2763
2764 let mut new_uncommitted_diff = None;
2765 if let Some(uncommitted_diff) = &uncommitted_diff {
2766 new_uncommitted_diff = if index_matches_head {
2767 new_unstaged_diff.clone()
2768 } else {
2769 Some(
2770 BufferDiff::update_diff(
2771 uncommitted_diff.clone(),
2772 buffer.clone(),
2773 head,
2774 head_changed,
2775 language_changed,
2776 language.clone(),
2777 language_registry.clone(),
2778 cx,
2779 )
2780 .await?,
2781 )
2782 }
2783 }
2784
2785 let cancel = this.update(cx, |this, _| {
2786 // This checks whether all pending stage/unstage operations
2787 // have quiesced (i.e. both the corresponding write and the
2788 // read of that write have completed). If not, then we cancel
2789 // this recalculation attempt to avoid invalidating pending
2790 // state too quickly; another recalculation will come along
2791 // later and clear the pending state once the state of the index has settled.
2792 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2793 *this.recalculating_tx.borrow_mut() = false;
2794 true
2795 } else {
2796 false
2797 }
2798 })?;
2799 if cancel {
2800 log::debug!(
2801 concat!(
2802 "aborting recalculating diffs for buffer {}",
2803 "due to subsequent hunk operations",
2804 ),
2805 buffer.remote_id()
2806 );
2807 return Ok(());
2808 }
2809
2810 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2811 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2812 {
2813 unstaged_diff.update(cx, |diff, cx| {
2814 if language_changed {
2815 diff.language_changed(cx);
2816 }
2817 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2818 })?
2819 } else {
2820 None
2821 };
2822
2823 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2824 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2825 {
2826 uncommitted_diff.update(cx, |diff, cx| {
2827 if language_changed {
2828 diff.language_changed(cx);
2829 }
2830 diff.set_snapshot_with_secondary(
2831 new_uncommitted_diff,
2832 &buffer,
2833 unstaged_changed_range,
2834 true,
2835 cx,
2836 );
2837 })?;
2838 }
2839
2840 log::debug!(
2841 "finished recalculating diffs for buffer {}",
2842 buffer.remote_id()
2843 );
2844
2845 if let Some(this) = this.upgrade() {
2846 this.update(cx, |this, _| {
2847 this.index_changed = false;
2848 this.head_changed = false;
2849 this.language_changed = false;
2850 *this.recalculating_tx.borrow_mut() = false;
2851 })?;
2852 }
2853
2854 Ok(())
2855 }));
2856 }
2857}
2858
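/// Creates an `AskPassDelegate` that forwards askpass prompts to the downstream client over
/// RPC and relays the encrypted response back to the waiting git operation.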
2859fn make_remote_delegate(
2860 this: Entity<GitStore>,
2861 project_id: u64,
2862 repository_id: RepositoryId,
2863 askpass_id: u64,
2864 cx: &mut AsyncApp,
2865) -> AskPassDelegate {
2866 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2867 this.update(cx, |this, cx| {
2868 let Some((client, _)) = this.downstream_client() else {
2869 return;
2870 };
2871 let response = client.request(proto::AskPassRequest {
2872 project_id,
2873 repository_id: repository_id.to_proto(),
2874 askpass_id,
2875 prompt,
2876 });
2877 cx.spawn(async move |_, _| {
2878 let mut response = response.await?.response;
2879 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2880 .ok();
2881 response.zeroize();
2882 anyhow::Ok(())
2883 })
2884 .detach_and_log_err(cx);
2885 })
2886 .log_err();
2887 })
2888}
2889
2890impl RepositoryId {
2891 pub fn to_proto(self) -> u64 {
2892 self.0
2893 }
2894
2895 pub fn from_proto(id: u64) -> Self {
2896 RepositoryId(id)
2897 }
2898}
2899
2900impl RepositorySnapshot {
2901 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2902 Self {
2903 id,
2904 statuses_by_path: Default::default(),
2905 work_directory_abs_path,
2906 branch: None,
2907 head_commit: None,
2908 scan_id: 0,
2909 merge: Default::default(),
2910 remote_origin_url: None,
2911 remote_upstream_url: None,
2912 stash_entries: Default::default(),
2913 path_style,
2914 }
2915 }
2916
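/// Builds the `UpdateRepository` message describing this snapshot in full, used for the
/// initial sync with a newly connected peer.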
2917 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2918 proto::UpdateRepository {
2919 branch_summary: self.branch.as_ref().map(branch_to_proto),
2920 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2921 updated_statuses: self
2922 .statuses_by_path
2923 .iter()
2924 .map(|entry| entry.to_proto())
2925 .collect(),
2926 removed_statuses: Default::default(),
2927 current_merge_conflicts: self
2928 .merge
2929 .conflicted_paths
2930 .iter()
2931 .map(|repo_path| repo_path.to_proto())
2932 .collect(),
2933 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2934 project_id,
2935 id: self.id.to_proto(),
2936 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2937 entry_ids: vec![self.id.to_proto()],
2938 scan_id: self.scan_id,
2939 is_last_update: true,
2940 stash_entries: self
2941 .stash_entries
2942 .entries
2943 .iter()
2944 .map(stash_to_proto)
2945 .collect(),
2946 }
2947 }
2948
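/// Builds an incremental `UpdateRepository` message by walking the old and new status trees in
/// lockstep and collecting added, changed, and removed status entries.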
2949 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2950 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2951 let mut removed_statuses: Vec<String> = Vec::new();
2952
2953 let mut new_statuses = self.statuses_by_path.iter().peekable();
2954 let mut old_statuses = old.statuses_by_path.iter().peekable();
2955
2956 let mut current_new_entry = new_statuses.next();
2957 let mut current_old_entry = old_statuses.next();
2958 loop {
2959 match (current_new_entry, current_old_entry) {
2960 (Some(new_entry), Some(old_entry)) => {
2961 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2962 Ordering::Less => {
2963 updated_statuses.push(new_entry.to_proto());
2964 current_new_entry = new_statuses.next();
2965 }
2966 Ordering::Equal => {
2967 if new_entry.status != old_entry.status {
2968 updated_statuses.push(new_entry.to_proto());
2969 }
2970 current_old_entry = old_statuses.next();
2971 current_new_entry = new_statuses.next();
2972 }
2973 Ordering::Greater => {
2974 removed_statuses.push(old_entry.repo_path.to_proto());
2975 current_old_entry = old_statuses.next();
2976 }
2977 }
2978 }
2979 (None, Some(old_entry)) => {
2980 removed_statuses.push(old_entry.repo_path.to_proto());
2981 current_old_entry = old_statuses.next();
2982 }
2983 (Some(new_entry), None) => {
2984 updated_statuses.push(new_entry.to_proto());
2985 current_new_entry = new_statuses.next();
2986 }
2987 (None, None) => break,
2988 }
2989 }
2990
2991 proto::UpdateRepository {
2992 branch_summary: self.branch.as_ref().map(branch_to_proto),
2993 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2994 updated_statuses,
2995 removed_statuses,
2996 current_merge_conflicts: self
2997 .merge
2998 .conflicted_paths
2999 .iter()
3000 .map(|path| path.to_proto())
3001 .collect(),
3002 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3003 project_id,
3004 id: self.id.to_proto(),
3005 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3006 entry_ids: vec![],
3007 scan_id: self.scan_id,
3008 is_last_update: true,
3009 stash_entries: self
3010 .stash_entries
3011 .entries
3012 .iter()
3013 .map(stash_to_proto)
3014 .collect(),
3015 }
3016 }
3017
3018 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3019 self.statuses_by_path.iter().cloned()
3020 }
3021
3022 pub fn status_summary(&self) -> GitSummary {
3023 self.statuses_by_path.summary().item_summary
3024 }
3025
3026 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3027 self.statuses_by_path
3028 .get(&PathKey(path.0.clone()), ())
3029 .cloned()
3030 }
3031
3032 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3033 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3034 }
3035
3036 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3037 self.path_style
3038 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3039 .unwrap()
3040 .into()
3041 }
3042
3043 #[inline]
3044 fn abs_path_to_repo_path_inner(
3045 work_directory_abs_path: &Path,
3046 abs_path: &Path,
3047 path_style: PathStyle,
3048 ) -> Option<RepoPath> {
3049 abs_path
3050 .strip_prefix(&work_directory_abs_path)
3051 .ok()
3052 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3053 }
3054
3055 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3056 self.merge.conflicted_paths.contains(repo_path)
3057 }
3058
3059 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3060 let had_conflict_on_last_merge_head_change =
3061 self.merge.conflicted_paths.contains(repo_path);
3062 let has_conflict_currently = self
3063 .status_for_path(repo_path)
3064 .is_some_and(|entry| entry.status.is_conflicted());
3065 had_conflict_on_last_merge_head_change || has_conflict_currently
3066 }
3067
3068 /// This is the name that will be displayed in the repository selector for this repository.
3069 pub fn display_name(&self) -> SharedString {
3070 self.work_directory_abs_path
3071 .file_name()
3072 .unwrap_or_default()
3073 .to_string_lossy()
3074 .to_string()
3075 .into()
3076 }
3077}
3078
3079pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3080 proto::StashEntry {
3081 oid: entry.oid.as_bytes().to_vec(),
3082 message: entry.message.clone(),
3083 branch: entry.branch.clone(),
3084 index: entry.index as u64,
3085 timestamp: entry.timestamp,
3086 }
3087}
3088
3089pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3090 Ok(StashEntry {
3091 oid: Oid::from_bytes(&entry.oid)?,
3092 message: entry.message.clone(),
3093 index: entry.index as usize,
3094 branch: entry.branch.clone(),
3095 timestamp: entry.timestamp,
3096 })
3097}
3098
3099impl MergeDetails {
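/// Loads the repository's merge state: reads the merge message and the MERGE/CHERRY_PICK/
/// REBASE/REVERT/APPLY head refs, and recomputes the conflicted path set when those heads
/// change. Returns the updated `MergeDetails` and whether the recorded merge heads changed.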
3100 async fn load(
3101 backend: &Arc<dyn GitRepository>,
3102 status: &SumTree<StatusEntry>,
3103 prev_snapshot: &RepositorySnapshot,
3104 ) -> Result<(MergeDetails, bool)> {
3105 log::debug!("load merge details");
3106 let message = backend.merge_message().await;
3107 let heads = backend
3108 .revparse_batch(vec![
3109 "MERGE_HEAD".into(),
3110 "CHERRY_PICK_HEAD".into(),
3111 "REBASE_HEAD".into(),
3112 "REVERT_HEAD".into(),
3113 "APPLY_HEAD".into(),
3114 ])
3115 .await
3116 .log_err()
3117 .unwrap_or_default()
3118 .into_iter()
3119 .map(|opt| opt.map(SharedString::from))
3120 .collect::<Vec<_>>();
3121 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3122 let conflicted_paths = if merge_heads_changed {
3123 let current_conflicted_paths = TreeSet::from_ordered_entries(
3124 status
3125 .iter()
3126 .filter(|entry| entry.status.is_conflicted())
3127 .map(|entry| entry.repo_path.clone()),
3128 );
3129
3130 // It can happen that we run a scan while a lengthy merge is in progress
3131 // that will eventually result in conflicts, but before those conflicts
3132 // are reported by `git status`. Since for the moment we only care about
3133 // the merge heads state for the purposes of tracking conflicts, don't update
3134 // this state until we see some conflicts.
3135 if heads.iter().any(Option::is_some)
3136 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3137 && current_conflicted_paths.is_empty()
3138 {
3139 log::debug!("not updating merge heads because no conflicts found");
3140 return Ok((
3141 MergeDetails {
3142 message: message.map(SharedString::from),
3143 ..prev_snapshot.merge.clone()
3144 },
3145 false,
3146 ));
3147 }
3148
3149 current_conflicted_paths
3150 } else {
3151 prev_snapshot.merge.conflicted_paths.clone()
3152 };
3153 let details = MergeDetails {
3154 conflicted_paths,
3155 message: message.map(SharedString::from),
3156 heads,
3157 };
3158 Ok((details, merge_heads_changed))
3159 }
3160}
3161
3162impl Repository {
3163 pub fn snapshot(&self) -> RepositorySnapshot {
3164 self.snapshot.clone()
3165 }
3166
3167 fn local(
3168 id: RepositoryId,
3169 work_directory_abs_path: Arc<Path>,
3170 dot_git_abs_path: Arc<Path>,
3171 repository_dir_abs_path: Arc<Path>,
3172 common_dir_abs_path: Arc<Path>,
3173 project_environment: WeakEntity<ProjectEnvironment>,
3174 fs: Arc<dyn Fs>,
3175 git_store: WeakEntity<GitStore>,
3176 cx: &mut Context<Self>,
3177 ) -> Self {
3178 let snapshot =
3179 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3180 Repository {
3181 this: cx.weak_entity(),
3182 git_store,
3183 snapshot,
3184 commit_message_buffer: None,
3185 askpass_delegates: Default::default(),
3186 paths_needing_status_update: Default::default(),
3187 latest_askpass_id: 0,
3188 job_sender: Repository::spawn_local_git_worker(
3189 work_directory_abs_path,
3190 dot_git_abs_path,
3191 repository_dir_abs_path,
3192 common_dir_abs_path,
3193 project_environment,
3194 fs,
3195 cx,
3196 ),
3197 job_id: 0,
3198 active_jobs: Default::default(),
3199 }
3200 }
3201
3202 fn remote(
3203 id: RepositoryId,
3204 work_directory_abs_path: Arc<Path>,
3205 path_style: PathStyle,
3206 project_id: ProjectId,
3207 client: AnyProtoClient,
3208 git_store: WeakEntity<GitStore>,
3209 cx: &mut Context<Self>,
3210 ) -> Self {
3211 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3212 Self {
3213 this: cx.weak_entity(),
3214 snapshot,
3215 commit_message_buffer: None,
3216 git_store,
3217 paths_needing_status_update: Default::default(),
3218 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3219 askpass_delegates: Default::default(),
3220 latest_askpass_id: 0,
3221 active_jobs: Default::default(),
3222 job_id: 0,
3223 }
3224 }
3225
3226 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3227 self.git_store.upgrade()
3228 }
3229
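/// For every open buffer in this repository that has an active diff, reloads the index and
/// HEAD texts from the local git backend, forwards any changes to downstream clients, and
/// updates the buffer's diff state.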
3230 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3231 let this = cx.weak_entity();
3232 let git_store = self.git_store.clone();
3233 let _ = self.send_keyed_job(
3234 Some(GitJobKey::ReloadBufferDiffBases),
3235 None,
3236 |state, mut cx| async move {
3237 let RepositoryState::Local { backend, .. } = state else {
3238 log::error!("tried to recompute diffs for a non-local repository");
3239 return Ok(());
3240 };
3241
3242 let Some(this) = this.upgrade() else {
3243 return Ok(());
3244 };
3245
3246 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3247 git_store.update(cx, |git_store, cx| {
3248 git_store
3249 .diffs
3250 .iter()
3251 .filter_map(|(buffer_id, diff_state)| {
3252 let buffer_store = git_store.buffer_store.read(cx);
3253 let buffer = buffer_store.get(*buffer_id)?;
3254 let file = File::from_dyn(buffer.read(cx).file())?;
3255 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3256 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3257 log::debug!(
3258 "start reload diff bases for repo path {}",
3259 repo_path.as_unix_str()
3260 );
3261 diff_state.update(cx, |diff_state, _| {
3262 let has_unstaged_diff = diff_state
3263 .unstaged_diff
3264 .as_ref()
3265 .is_some_and(|diff| diff.is_upgradable());
3266 let has_uncommitted_diff = diff_state
3267 .uncommitted_diff
3268 .as_ref()
3269 .is_some_and(|set| set.is_upgradable());
3270
3271 Some((
3272 buffer,
3273 repo_path,
3274 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3275 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3276 ))
3277 })
3278 })
3279 .collect::<Vec<_>>()
3280 })
3281 })??;
3282
3283 let buffer_diff_base_changes = cx
3284 .background_spawn(async move {
3285 let mut changes = Vec::new();
3286 for (buffer, repo_path, current_index_text, current_head_text) in
3287 &repo_diff_state_updates
3288 {
3289 let index_text = if current_index_text.is_some() {
3290 backend.load_index_text(repo_path.clone()).await
3291 } else {
3292 None
3293 };
3294 let head_text = if current_head_text.is_some() {
3295 backend.load_committed_text(repo_path.clone()).await
3296 } else {
3297 None
3298 };
3299
3300 let change =
3301 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3302 (Some(current_index), Some(current_head)) => {
3303 let index_changed =
3304 index_text.as_ref() != current_index.as_deref();
3305 let head_changed =
3306 head_text.as_ref() != current_head.as_deref();
3307 if index_changed && head_changed {
3308 if index_text == head_text {
3309 Some(DiffBasesChange::SetBoth(head_text))
3310 } else {
3311 Some(DiffBasesChange::SetEach {
3312 index: index_text,
3313 head: head_text,
3314 })
3315 }
3316 } else if index_changed {
3317 Some(DiffBasesChange::SetIndex(index_text))
3318 } else if head_changed {
3319 Some(DiffBasesChange::SetHead(head_text))
3320 } else {
3321 None
3322 }
3323 }
3324 (Some(current_index), None) => {
3325 let index_changed =
3326 index_text.as_ref() != current_index.as_deref();
3327 index_changed
3328 .then_some(DiffBasesChange::SetIndex(index_text))
3329 }
3330 (None, Some(current_head)) => {
3331 let head_changed =
3332 head_text.as_ref() != current_head.as_deref();
3333 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3334 }
3335 (None, None) => None,
3336 };
3337
3338 changes.push((buffer.clone(), change))
3339 }
3340 changes
3341 })
3342 .await;
3343
3344 git_store.update(&mut cx, |git_store, cx| {
3345 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3346 let buffer_snapshot = buffer.read(cx).text_snapshot();
3347 let buffer_id = buffer_snapshot.remote_id();
3348 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3349 continue;
3350 };
3351
3352 let downstream_client = git_store.downstream_client();
3353 diff_state.update(cx, |diff_state, cx| {
3354 use proto::update_diff_bases::Mode;
3355
3356 if let Some((diff_bases_change, (client, project_id))) =
3357 diff_bases_change.clone().zip(downstream_client)
3358 {
3359 let (staged_text, committed_text, mode) = match diff_bases_change {
3360 DiffBasesChange::SetIndex(index) => {
3361 (index, None, Mode::IndexOnly)
3362 }
3363 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3364 DiffBasesChange::SetEach { index, head } => {
3365 (index, head, Mode::IndexAndHead)
3366 }
3367 DiffBasesChange::SetBoth(text) => {
3368 (None, text, Mode::IndexMatchesHead)
3369 }
3370 };
3371 client
3372 .send(proto::UpdateDiffBases {
3373 project_id: project_id.to_proto(),
3374 buffer_id: buffer_id.to_proto(),
3375 staged_text,
3376 committed_text,
3377 mode: mode as i32,
3378 })
3379 .log_err();
3380 }
3381
3382 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3383 });
3384 }
3385 })
3386 },
3387 );
3388 }
3389
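/// Enqueues a job on the repository's git worker without a key; see `send_keyed_job`.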
3390 pub fn send_job<F, Fut, R>(
3391 &mut self,
3392 status: Option<SharedString>,
3393 job: F,
3394 ) -> oneshot::Receiver<R>
3395 where
3396 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3397 Fut: Future<Output = R> + 'static,
3398 R: Send + 'static,
3399 {
3400 self.send_keyed_job(None, status, job)
3401 }
3402
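/// Enqueues a job on the repository's git worker. A `GitJobKey`, if provided, identifies
/// related jobs (for example, repeated index writes to the same path); a status message, if
/// provided, is recorded in `active_jobs` while the job runs.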
3403 fn send_keyed_job<F, Fut, R>(
3404 &mut self,
3405 key: Option<GitJobKey>,
3406 status: Option<SharedString>,
3407 job: F,
3408 ) -> oneshot::Receiver<R>
3409 where
3410 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3411 Fut: Future<Output = R> + 'static,
3412 R: Send + 'static,
3413 {
3414 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3415 let job_id = post_inc(&mut self.job_id);
3416 let this = self.this.clone();
3417 self.job_sender
3418 .unbounded_send(GitJob {
3419 key,
3420 job: Box::new(move |state, cx: &mut AsyncApp| {
3421 let job = job(state, cx.clone());
3422 cx.spawn(async move |cx| {
3423 if let Some(s) = status.clone() {
3424 this.update(cx, |this, cx| {
3425 this.active_jobs.insert(
3426 job_id,
3427 JobInfo {
3428 start: Instant::now(),
3429 message: s.clone(),
3430 },
3431 );
3432
3433 cx.notify();
3434 })
3435 .ok();
3436 }
3437 let result = job.await;
3438
3439 this.update(cx, |this, cx| {
3440 this.active_jobs.remove(&job_id);
3441 cx.notify();
3442 })
3443 .ok();
3444
3445 result_tx.send(result).ok();
3446 })
3447 }),
3448 })
3449 .ok();
3450 result_rx
3451 }
3452
3453 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3454 let Some(git_store) = self.git_store.upgrade() else {
3455 return;
3456 };
3457 let entity = cx.entity();
3458 git_store.update(cx, |git_store, cx| {
3459 let Some((&id, _)) = git_store
3460 .repositories
3461 .iter()
3462 .find(|(_, handle)| *handle == &entity)
3463 else {
3464 return;
3465 };
3466 git_store.active_repo_id = Some(id);
3467 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3468 });
3469 }
3470
3471 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3472 self.snapshot.status()
3473 }
3474
3475 pub fn cached_stash(&self) -> GitStash {
3476 self.snapshot.stash_entries.clone()
3477 }
3478
3479 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3480 let git_store = self.git_store.upgrade()?;
3481 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3482 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3483 let abs_path = SanitizedPath::new(&abs_path);
3484 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3485 Some(ProjectPath {
3486 worktree_id: worktree.read(cx).id(),
3487 path: relative_path,
3488 })
3489 }
3490
3491 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3492 let git_store = self.git_store.upgrade()?;
3493 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3494 let abs_path = worktree_store.absolutize(path, cx)?;
3495 self.snapshot.abs_path_to_repo_path(&abs_path)
3496 }
3497
3498 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3499 other
3500 .read(cx)
3501 .snapshot
3502 .work_directory_abs_path
3503 .starts_with(&self.snapshot.work_directory_abs_path)
3504 }
3505
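/// Returns the buffer used for composing commit messages for this repository, creating it on
/// first use. For remote repositories the buffer is opened on the host and replicated locally.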
3506 pub fn open_commit_buffer(
3507 &mut self,
3508 languages: Option<Arc<LanguageRegistry>>,
3509 buffer_store: Entity<BufferStore>,
3510 cx: &mut Context<Self>,
3511 ) -> Task<Result<Entity<Buffer>>> {
3512 let id = self.id;
3513 if let Some(buffer) = self.commit_message_buffer.clone() {
3514 return Task::ready(Ok(buffer));
3515 }
3516 let this = cx.weak_entity();
3517
3518 let rx = self.send_job(None, move |state, mut cx| async move {
3519 let Some(this) = this.upgrade() else {
3520 bail!("git store was dropped");
3521 };
3522 match state {
3523 RepositoryState::Local { .. } => {
3524 this.update(&mut cx, |_, cx| {
3525 Self::open_local_commit_buffer(languages, buffer_store, cx)
3526 })?
3527 .await
3528 }
3529 RepositoryState::Remote { project_id, client } => {
3530 let request = client.request(proto::OpenCommitMessageBuffer {
3531 project_id: project_id.0,
3532 repository_id: id.to_proto(),
3533 });
3534 let response = request.await.context("requesting to open commit buffer")?;
3535 let buffer_id = BufferId::new(response.buffer_id)?;
3536 let buffer = buffer_store
3537 .update(&mut cx, |buffer_store, cx| {
3538 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3539 })?
3540 .await?;
3541 if let Some(language_registry) = languages {
3542 let git_commit_language =
3543 language_registry.language_for_name("Git Commit").await?;
3544 buffer.update(&mut cx, |buffer, cx| {
3545 buffer.set_language(Some(git_commit_language), cx);
3546 })?;
3547 }
3548 this.update(&mut cx, |this, _| {
3549 this.commit_message_buffer = Some(buffer.clone());
3550 })?;
3551 Ok(buffer)
3552 }
3553 }
3554 });
3555
3556 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3557 }
3558
3559 fn open_local_commit_buffer(
3560 language_registry: Option<Arc<LanguageRegistry>>,
3561 buffer_store: Entity<BufferStore>,
3562 cx: &mut Context<Self>,
3563 ) -> Task<Result<Entity<Buffer>>> {
3564 cx.spawn(async move |repository, cx| {
3565 let buffer = buffer_store
3566 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3567 .await?;
3568
3569 if let Some(language_registry) = language_registry {
3570 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3571 buffer.update(cx, |buffer, cx| {
3572 buffer.set_language(Some(git_commit_language), cx);
3573 })?;
3574 }
3575
3576 repository.update(cx, |repository, _| {
3577 repository.commit_message_buffer = Some(buffer.clone());
3578 })?;
3579 Ok(buffer)
3580 })
3581 }
3582
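/// Restores the given paths to their state at `commit`, either via the local git backend or by
/// forwarding the request to the remote host.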
3583 pub fn checkout_files(
3584 &mut self,
3585 commit: &str,
3586 paths: Vec<RepoPath>,
3587 _cx: &mut App,
3588 ) -> oneshot::Receiver<Result<()>> {
3589 let commit = commit.to_string();
3590 let id = self.id;
3591
3592 self.send_job(
3593 Some(format!("git checkout {}", commit).into()),
3594 move |git_repo, _| async move {
3595 match git_repo {
3596 RepositoryState::Local {
3597 backend,
3598 environment,
3599 ..
3600 } => {
3601 backend
3602 .checkout_files(commit, paths, environment.clone())
3603 .await
3604 }
3605 RepositoryState::Remote { project_id, client } => {
3606 client
3607 .request(proto::GitCheckoutFiles {
3608 project_id: project_id.0,
3609 repository_id: id.to_proto(),
3610 commit,
3611 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3612 })
3613 .await?;
3614
3615 Ok(())
3616 }
3617 }
3618 },
3619 )
3620 }
3621
3622 pub fn reset(
3623 &mut self,
3624 commit: String,
3625 reset_mode: ResetMode,
3626 _cx: &mut App,
3627 ) -> oneshot::Receiver<Result<()>> {
3628 let id = self.id;
3629
3630 self.send_job(None, move |git_repo, _| async move {
3631 match git_repo {
3632 RepositoryState::Local {
3633 backend,
3634 environment,
3635 ..
3636 } => backend.reset(commit, reset_mode, environment).await,
3637 RepositoryState::Remote { project_id, client } => {
3638 client
3639 .request(proto::GitReset {
3640 project_id: project_id.0,
3641 repository_id: id.to_proto(),
3642 commit,
3643 mode: match reset_mode {
3644 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3645 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3646 },
3647 })
3648 .await?;
3649
3650 Ok(())
3651 }
3652 }
3653 })
3654 }
3655
3656 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3657 let id = self.id;
3658 self.send_job(None, move |git_repo, _cx| async move {
3659 match git_repo {
3660 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3661 RepositoryState::Remote { project_id, client } => {
3662 let resp = client
3663 .request(proto::GitShow {
3664 project_id: project_id.0,
3665 repository_id: id.to_proto(),
3666 commit,
3667 })
3668 .await?;
3669
3670 Ok(CommitDetails {
3671 sha: resp.sha.into(),
3672 message: resp.message.into(),
3673 commit_timestamp: resp.commit_timestamp,
3674 author_email: resp.author_email.into(),
3675 author_name: resp.author_name.into(),
3676 })
3677 }
3678 }
3679 })
3680 }
3681
3682 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3683 let id = self.id;
3684 self.send_job(None, move |git_repo, cx| async move {
3685 match git_repo {
3686 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3687 RepositoryState::Remote {
3688 client, project_id, ..
3689 } => {
3690 let response = client
3691 .request(proto::LoadCommitDiff {
3692 project_id: project_id.0,
3693 repository_id: id.to_proto(),
3694 commit,
3695 })
3696 .await?;
3697 Ok(CommitDiff {
3698 files: response
3699 .files
3700 .into_iter()
3701 .map(|file| {
3702 Ok(CommitFile {
3703 path: RepoPath::from_proto(&file.path)?,
3704 old_text: file.old_text,
3705 new_text: file.new_text,
3706 })
3707 })
3708 .collect::<Result<Vec<_>>>()?,
3709 })
3710 }
3711 }
3712 })
3713 }
3714
3715 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3716 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3717 }
3718
3719 fn save_buffers<'a>(
3720 &self,
3721 entries: impl IntoIterator<Item = &'a RepoPath>,
3722 cx: &mut Context<Self>,
3723 ) -> Vec<Task<anyhow::Result<()>>> {
3724 let mut save_futures = Vec::new();
3725 if let Some(buffer_store) = self.buffer_store(cx) {
3726 buffer_store.update(cx, |buffer_store, cx| {
3727 for path in entries {
3728 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3729 continue;
3730 };
3731 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3732 && buffer
3733 .read(cx)
3734 .file()
3735 .is_some_and(|file| file.disk_state().exists())
3736 && buffer.read(cx).has_unsaved_edits()
3737 {
3738 save_futures.push(buffer_store.save_buffer(buffer, cx));
3739 }
3740 }
3741 })
3742 }
3743 save_futures
3744 }
3745
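/// Stages the given paths. Open buffers with unsaved edits for those paths are saved first so
/// the staged content matches what is in the editor.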
3746 pub fn stage_entries(
3747 &self,
3748 entries: Vec<RepoPath>,
3749 cx: &mut Context<Self>,
3750 ) -> Task<anyhow::Result<()>> {
3751 if entries.is_empty() {
3752 return Task::ready(Ok(()));
3753 }
3754
3755 let id = self.id;
3756 let save_tasks = self.save_buffers(&entries, cx);
3757 let job_key = match entries.len() {
3758 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3759 _ => None,
3760 };
3761 let paths: Vec<_> = entries.iter().map(|p| p.as_unix_str()).collect();
3762 let status = format!("git add {}", paths.join(" "));
3763
3764 cx.spawn(async move |this, cx| {
3765 for save_task in save_tasks {
3766 save_task.await?;
3767 }
3768
3769 this.update(cx, |this, _| {
3770 this.send_keyed_job(
3771 job_key,
3772 Some(status.into()),
3773 move |git_repo, _cx| async move {
3774 match git_repo {
3775 RepositoryState::Local {
3776 backend,
3777 environment,
3778 ..
3779 } => backend.stage_paths(entries, environment.clone()).await,
3780 RepositoryState::Remote { project_id, client } => {
3781 client
3782 .request(proto::Stage {
3783 project_id: project_id.0,
3784 repository_id: id.to_proto(),
3785 paths: entries
3786 .into_iter()
3787 .map(|repo_path| repo_path.to_proto())
3788 .collect(),
3789 })
3790 .await
3791 .context("sending stage request")?;
3792
3793 Ok(())
3794 }
3795 }
3796 },
3797 )
3798 })?
3799 .await??;
3800
3801 Ok(())
3802 })
3803 }
3804
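/// Removes the given paths from the index (the equivalent of
/// `git reset -- <paths>`), saving any dirty open buffers for them first.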
3805 pub fn unstage_entries(
3806 &self,
3807 entries: Vec<RepoPath>,
3808 cx: &mut Context<Self>,
3809 ) -> Task<anyhow::Result<()>> {
3810 if entries.is_empty() {
3811 return Task::ready(Ok(()));
3812 }
3813
3814 let id = self.id;
3815 let save_tasks = self.save_buffers(&entries, cx);
3816 let job_key = match entries.len() {
3817 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3818 _ => None,
3819 };
3820 let paths: Vec<_> = entries.iter().map(|p| p.as_unix_str()).collect();
3821 let status = format!("git reset {}", paths.join(" "));
3822
3823 cx.spawn(async move |this, cx| {
3824 for save_task in save_tasks {
3825 save_task.await?;
3826 }
3827
3828 this.update(cx, |this, _| {
3829 this.send_keyed_job(
3830 job_key,
3831 Some(status.into()),
3832 move |git_repo, _cx| async move {
3833 match git_repo {
3834 RepositoryState::Local {
3835 backend,
3836 environment,
3837 ..
3838 } => backend.unstage_paths(entries, environment).await,
3839 RepositoryState::Remote { project_id, client } => {
3840 client
3841 .request(proto::Unstage {
3842 project_id: project_id.0,
3843 repository_id: id.to_proto(),
3844 paths: entries
3845 .into_iter()
3846 .map(|repo_path| repo_path.to_proto())
3847 .collect(),
3848 })
3849 .await
3850 .context("sending unstage request")?;
3851
3852 Ok(())
3853 }
3854 }
3855 },
3856 )
3857 })?
3858 .await??;
3859
3860 Ok(())
3861 })
3862 }
3863
3864 pub fn stage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3865 let to_stage = self
3866 .cached_status()
3867 .filter(|entry| !entry.status.staging().is_fully_staged())
3868 .map(|entry| entry.repo_path)
3869 .collect();
3871 self.stage_entries(to_stage, cx)
3872 }
3873
3874 pub fn unstage_all(&self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3875 let to_unstage = self
3876 .cached_status()
3877 .filter(|entry| entry.status.staging().has_staged())
3878 .map(|entry| entry.repo_path)
3879 .collect();
3881 self.unstage_entries(to_unstage, cx)
3882 }
3883
3884 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3885 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3886
3887 self.stash_entries(to_stash, cx)
3888 }
3889
3890 pub fn stash_entries(
3891 &mut self,
3892 entries: Vec<RepoPath>,
3893 cx: &mut Context<Self>,
3894 ) -> Task<anyhow::Result<()>> {
3895 let id = self.id;
3896
3897 cx.spawn(async move |this, cx| {
3898 this.update(cx, |this, _| {
3899 this.send_job(None, move |git_repo, _cx| async move {
3900 match git_repo {
3901 RepositoryState::Local {
3902 backend,
3903 environment,
3904 ..
3905 } => backend.stash_paths(entries, environment).await,
3906 RepositoryState::Remote { project_id, client } => {
3907 client
3908 .request(proto::Stash {
3909 project_id: project_id.0,
3910 repository_id: id.to_proto(),
3911 paths: entries
3912 .into_iter()
3913 .map(|repo_path| repo_path.to_proto())
3914 .collect(),
3915 })
3916 .await
3917 .context("sending stash request")?;
3918 Ok(())
3919 }
3920 }
3921 })
3922 })?
3923 .await??;
3924 Ok(())
3925 })
3926 }
3927
3928 pub fn stash_pop(
3929 &mut self,
3930 index: Option<usize>,
3931 cx: &mut Context<Self>,
3932 ) -> Task<anyhow::Result<()>> {
3933 let id = self.id;
3934 cx.spawn(async move |this, cx| {
3935 this.update(cx, |this, _| {
3936 this.send_job(None, move |git_repo, _cx| async move {
3937 match git_repo {
3938 RepositoryState::Local {
3939 backend,
3940 environment,
3941 ..
3942 } => backend.stash_pop(index, environment).await,
3943 RepositoryState::Remote { project_id, client } => {
3944 client
3945 .request(proto::StashPop {
3946 project_id: project_id.0,
3947 repository_id: id.to_proto(),
3948 stash_index: index.map(|i| i as u64),
3949 })
3950 .await
3951 .context("sending stash pop request")?;
3952 Ok(())
3953 }
3954 }
3955 })
3956 })?
3957 .await??;
3958 Ok(())
3959 })
3960 }
3961
3962 pub fn stash_apply(
3963 &mut self,
3964 index: Option<usize>,
3965 cx: &mut Context<Self>,
3966 ) -> Task<anyhow::Result<()>> {
3967 let id = self.id;
3968 cx.spawn(async move |this, cx| {
3969 this.update(cx, |this, _| {
3970 this.send_job(None, move |git_repo, _cx| async move {
3971 match git_repo {
3972 RepositoryState::Local {
3973 backend,
3974 environment,
3975 ..
3976 } => backend.stash_apply(index, environment).await,
3977 RepositoryState::Remote { project_id, client } => {
3978 client
3979 .request(proto::StashApply {
3980 project_id: project_id.0,
3981 repository_id: id.to_proto(),
3982 stash_index: index.map(|i| i as u64),
3983 })
3984 .await
3985 .context("sending stash apply request")?;
3986 Ok(())
3987 }
3988 }
3989 })
3990 })?
3991 .await??;
3992 Ok(())
3993 })
3994 }
3995
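/// Drops the stash entry at `index` (or the latest entry when `None`).
/// For local repositories, the cached stash entries are reloaded afterwards
/// and the updated snapshot is forwarded to any downstream collaborators.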
3996 pub fn stash_drop(
3997 &mut self,
3998 index: Option<usize>,
3999 cx: &mut Context<Self>,
4000 ) -> oneshot::Receiver<anyhow::Result<()>> {
4001 let id = self.id;
4002 let updates_tx = self
4003 .git_store()
4004 .and_then(|git_store| match &git_store.read(cx).state {
4005 GitStoreState::Local { downstream, .. } => downstream
4006 .as_ref()
4007 .map(|downstream| downstream.updates_tx.clone()),
4008 _ => None,
4009 });
4010 let this = cx.weak_entity();
4011 self.send_job(None, move |git_repo, mut cx| async move {
4012 match git_repo {
4013 RepositoryState::Local {
4014 backend,
4015 environment,
4016 ..
4017 } => {
4018 // TODO would be nice to not have to do this manually
4019 let result = backend.stash_drop(index, environment).await;
4020 if result.is_ok()
4021 && let Ok(stash_entries) = backend.stash_entries().await
4022 {
4023 let snapshot = this.update(&mut cx, |this, cx| {
4024 this.snapshot.stash_entries = stash_entries;
4025 cx.emit(RepositoryEvent::StashEntriesChanged);
4026 this.snapshot.clone()
4027 })?;
4028 if let Some(updates_tx) = updates_tx {
4029 updates_tx
4030 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4031 .ok();
4032 }
4033 }
4034
4035 result
4036 }
4037 RepositoryState::Remote { project_id, client } => {
4038 client
4039 .request(proto::StashDrop {
4040 project_id: project_id.0,
4041 repository_id: id.to_proto(),
4042 stash_index: index.map(|i| i as u64),
4043 })
4044 .await
4045 .context("sending stash pop request")?;
4046 Ok(())
4047 }
4048 }
4049 })
4050 }
4051
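/// Creates a commit with the given message, optionally overriding the author
/// name and email and applying `CommitOptions` such as amend and signoff.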
4052 pub fn commit(
4053 &mut self,
4054 message: SharedString,
4055 name_and_email: Option<(SharedString, SharedString)>,
4056 options: CommitOptions,
4057 _cx: &mut App,
4058 ) -> oneshot::Receiver<Result<()>> {
4059 let id = self.id;
4060
4061 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4062 match git_repo {
4063 RepositoryState::Local {
4064 backend,
4065 environment,
4066 ..
4067 } => {
4068 backend
4069 .commit(message, name_and_email, options, environment)
4070 .await
4071 }
4072 RepositoryState::Remote { project_id, client } => {
4073 let (name, email) = name_and_email.unzip();
4074 client
4075 .request(proto::Commit {
4076 project_id: project_id.0,
4077 repository_id: id.to_proto(),
4078 message: String::from(message),
4079 name: name.map(String::from),
4080 email: email.map(String::from),
4081 options: Some(proto::commit::CommitOptions {
4082 amend: options.amend,
4083 signoff: options.signoff,
4084 }),
4085 })
4086 .await
4087 .context("sending commit request")?;
4088
4089 Ok(())
4090 }
4091 }
4092 })
4093 }
4094
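/// Runs `git fetch` with the given options. For remote repositories the
/// askpass delegate is registered under a fresh id that is sent along with
/// the request, so credential prompts can be routed back to this client.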
4095 pub fn fetch(
4096 &mut self,
4097 fetch_options: FetchOptions,
4098 askpass: AskPassDelegate,
4099 _cx: &mut App,
4100 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4101 let askpass_delegates = self.askpass_delegates.clone();
4102 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4103 let id = self.id;
4104
4105 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4106 match git_repo {
4107 RepositoryState::Local {
4108 backend,
4109 environment,
4110 ..
4111 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4112 RepositoryState::Remote { project_id, client } => {
4113 askpass_delegates.lock().insert(askpass_id, askpass);
4114 let _defer = util::defer(|| {
4115 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4116 debug_assert!(askpass_delegate.is_some());
4117 });
4118
4119 let response = client
4120 .request(proto::Fetch {
4121 project_id: project_id.0,
4122 repository_id: id.to_proto(),
4123 askpass_id,
4124 remote: fetch_options.to_proto(),
4125 })
4126 .await
4127 .context("sending fetch request")?;
4128
4129 Ok(RemoteCommandOutput {
4130 stdout: response.stdout,
4131 stderr: response.stderr,
4132 })
4133 }
4134 }
4135 })
4136 }
4137
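/// Pushes `branch` to `remote`, optionally with `--set-upstream` or
/// `--force-with-lease`. After a successful local push, the branch list is
/// re-read so the snapshot picks up any new upstream tracking information.
///
/// A minimal usage sketch (the `repository` entity and `askpass` delegate are
/// assumed to exist already; this is not taken from a real call site):
///
/// ```ignore
/// let rx = repository.update(cx, |repo, cx| {
///     repo.push("main".into(), "origin".into(), None, askpass, cx)
/// });
/// let output = rx.await??;
/// ```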
4138 pub fn push(
4139 &mut self,
4140 branch: SharedString,
4141 remote: SharedString,
4142 options: Option<PushOptions>,
4143 askpass: AskPassDelegate,
4144 cx: &mut Context<Self>,
4145 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4146 let askpass_delegates = self.askpass_delegates.clone();
4147 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4148 let id = self.id;
4149
4150 let args = options
4151 .map(|option| match option {
4152 PushOptions::SetUpstream => " --set-upstream",
4153 PushOptions::Force => " --force-with-lease",
4154 })
4155 .unwrap_or("");
4156
4157 let updates_tx = self
4158 .git_store()
4159 .and_then(|git_store| match &git_store.read(cx).state {
4160 GitStoreState::Local { downstream, .. } => downstream
4161 .as_ref()
4162 .map(|downstream| downstream.updates_tx.clone()),
4163 _ => None,
4164 });
4165
4166 let this = cx.weak_entity();
4167 self.send_job(
4168 Some(format!("git push {} {} {}", args, remote, branch).into()),
4169 move |git_repo, mut cx| async move {
4170 match git_repo {
4171 RepositoryState::Local {
4172 backend,
4173 environment,
4174 ..
4175 } => {
4176 let result = backend
4177 .push(
4178 branch.to_string(),
4179 remote.to_string(),
4180 options,
4181 askpass,
4182 environment.clone(),
4183 cx.clone(),
4184 )
4185 .await;
4186 // TODO would be nice to not have to do this manually
4187 if result.is_ok() {
4188 let branches = backend.branches().await?;
4189 let branch = branches.into_iter().find(|branch| branch.is_head);
4190 log::info!("head branch after scan is {branch:?}");
4191 let snapshot = this.update(&mut cx, |this, cx| {
4192 this.snapshot.branch = branch;
4193 cx.emit(RepositoryEvent::BranchChanged);
4194 this.snapshot.clone()
4195 })?;
4196 if let Some(updates_tx) = updates_tx {
4197 updates_tx
4198 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4199 .ok();
4200 }
4201 }
4202 result
4203 }
4204 RepositoryState::Remote { project_id, client } => {
4205 askpass_delegates.lock().insert(askpass_id, askpass);
4206 let _defer = util::defer(|| {
4207 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4208 debug_assert!(askpass_delegate.is_some());
4209 });
4210 let response = client
4211 .request(proto::Push {
4212 project_id: project_id.0,
4213 repository_id: id.to_proto(),
4214 askpass_id,
4215 branch_name: branch.to_string(),
4216 remote_name: remote.to_string(),
4217 options: options.map(|options| match options {
4218 PushOptions::Force => proto::push::PushOptions::Force,
4219 PushOptions::SetUpstream => {
4220 proto::push::PushOptions::SetUpstream
4221 }
4222 }
4223 as i32),
4224 })
4225 .await
4226 .context("sending push request")?;
4227
4228 Ok(RemoteCommandOutput {
4229 stdout: response.stdout,
4230 stderr: response.stderr,
4231 })
4232 }
4233 }
4234 },
4235 )
4236 }
4237
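/// Pulls `branch` from `remote`, routing any credential prompts through the
/// provided askpass delegate.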
4238 pub fn pull(
4239 &mut self,
4240 branch: SharedString,
4241 remote: SharedString,
4242 askpass: AskPassDelegate,
4243 _cx: &mut App,
4244 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4245 let askpass_delegates = self.askpass_delegates.clone();
4246 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4247 let id = self.id;
4248
4249 self.send_job(
4250 Some(format!("git pull {} {}", remote, branch).into()),
4251 move |git_repo, cx| async move {
4252 match git_repo {
4253 RepositoryState::Local {
4254 backend,
4255 environment,
4256 ..
4257 } => {
4258 backend
4259 .pull(
4260 branch.to_string(),
4261 remote.to_string(),
4262 askpass,
4263 environment.clone(),
4264 cx,
4265 )
4266 .await
4267 }
4268 RepositoryState::Remote { project_id, client } => {
4269 askpass_delegates.lock().insert(askpass_id, askpass);
4270 let _defer = util::defer(|| {
4271 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4272 debug_assert!(askpass_delegate.is_some());
4273 });
4274 let response = client
4275 .request(proto::Pull {
4276 project_id: project_id.0,
4277 repository_id: id.to_proto(),
4278 askpass_id,
4279 branch_name: branch.to_string(),
4280 remote_name: remote.to_string(),
4281 })
4282 .await
4283 .context("sending pull request")?;
4284
4285 Ok(RemoteCommandOutput {
4286 stdout: response.stdout,
4287 stderr: response.stderr,
4288 })
4289 }
4290 }
4291 },
4292 )
4293 }
4294
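/// Queues an update of the index entry for `path` with the given content.
/// The job is keyed by the path so that superseded writes are skipped, and
/// when `hunk_staging_operation_count` is provided it is recorded on the
/// buffer's diff state once the write completes.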
4295 fn spawn_set_index_text_job(
4296 &mut self,
4297 path: RepoPath,
4298 content: Option<String>,
4299 hunk_staging_operation_count: Option<usize>,
4300 cx: &mut Context<Self>,
4301 ) -> oneshot::Receiver<anyhow::Result<()>> {
4302 let id = self.id;
4303 let this = cx.weak_entity();
4304 let git_store = self.git_store.clone();
4305 self.send_keyed_job(
4306 Some(GitJobKey::WriteIndex(path.clone())),
4307 None,
4308 move |git_repo, mut cx| async move {
4309 log::debug!(
4310 "start updating index text for buffer {}",
4311 path.as_unix_str()
4312 );
4313 match git_repo {
4314 RepositoryState::Local {
4315 backend,
4316 environment,
4317 ..
4318 } => {
4319 backend
4320 .set_index_text(path.clone(), content, environment.clone())
4321 .await?;
4322 }
4323 RepositoryState::Remote { project_id, client } => {
4324 client
4325 .request(proto::SetIndexText {
4326 project_id: project_id.0,
4327 repository_id: id.to_proto(),
4328 path: path.to_proto(),
4329 text: content,
4330 })
4331 .await?;
4332 }
4333 }
4334 log::debug!(
4335 "finish updating index text for buffer {}",
4336 path.as_unix_str()
4337 );
4338
4339 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4340 let project_path = this
4341 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4342 .ok()
4343 .flatten();
4344 git_store.update(&mut cx, |git_store, cx| {
4345 let buffer_id = git_store
4346 .buffer_store
4347 .read(cx)
4348 .get_by_path(&project_path?)?
4349 .read(cx)
4350 .remote_id();
4351 let diff_state = git_store.diffs.get(&buffer_id)?;
4352 diff_state.update(cx, |diff_state, _| {
4353 diff_state.hunk_staging_operation_count_as_of_write =
4354 hunk_staging_operation_count;
4355 });
4356 Some(())
4357 })?;
4358 }
4359 Ok(())
4360 },
4361 )
4362 }
4363
4364 pub fn get_remotes(
4365 &mut self,
4366 branch_name: Option<String>,
4367 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4368 let id = self.id;
4369 self.send_job(None, move |repo, _cx| async move {
4370 match repo {
4371 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4372 RepositoryState::Remote { project_id, client } => {
4373 let response = client
4374 .request(proto::GetRemotes {
4375 project_id: project_id.0,
4376 repository_id: id.to_proto(),
4377 branch_name,
4378 })
4379 .await?;
4380
4381 let remotes = response
4382 .remotes
4383 .into_iter()
4384 .map(|remote| git::repository::Remote {
4385 name: remote.name.into(),
4386 })
4387 .collect();
4388
4389 Ok(remotes)
4390 }
4391 }
4392 })
4393 }
4394
4395 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4396 let id = self.id;
4397 self.send_job(None, move |repo, _| async move {
4398 match repo {
4399 RepositoryState::Local { backend, .. } => backend.branches().await,
4400 RepositoryState::Remote { project_id, client } => {
4401 let response = client
4402 .request(proto::GitGetBranches {
4403 project_id: project_id.0,
4404 repository_id: id.to_proto(),
4405 })
4406 .await?;
4407
4408 let branches = response
4409 .branches
4410 .into_iter()
4411 .map(|branch| proto_to_branch(&branch))
4412 .collect();
4413
4414 Ok(branches)
4415 }
4416 }
4417 })
4418 }
4419
4420 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4421 let id = self.id;
4422 self.send_job(None, move |repo, _| async move {
4423 match repo {
4424 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4425 RepositoryState::Remote { project_id, client } => {
4426 let response = client
4427 .request(proto::GetDefaultBranch {
4428 project_id: project_id.0,
4429 repository_id: id.to_proto(),
4430 })
4431 .await?;
4432
4433 anyhow::Ok(response.branch.map(SharedString::from))
4434 }
4435 }
4436 })
4437 }
4438
4439 pub fn diff_tree(
4440 &mut self,
4441 diff_type: DiffTreeType,
4442 _cx: &App,
4443 ) -> oneshot::Receiver<Result<TreeDiff>> {
4444 let repository_id = self.snapshot.id;
4445 self.send_job(None, move |repo, _cx| async move {
4446 match repo {
4447 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4448 RepositoryState::Remote { client, project_id } => {
4449 let response = client
4450 .request(proto::GetTreeDiff {
4451 project_id: project_id.0,
4452 repository_id: repository_id.0,
4453 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4454 base: diff_type.base().to_string(),
4455 head: diff_type.head().to_string(),
4456 })
4457 .await?;
4458
4459 let entries = response
4460 .entries
4461 .into_iter()
4462 .filter_map(|entry| {
4463 let status = match entry.status() {
4464 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4465 proto::tree_diff_status::Status::Modified => {
4466 TreeDiffStatus::Modified {
4467 old: git::Oid::from_str(
4468 &entry.oid.context("missing oid").log_err()?,
4469 )
4470 .log_err()?,
4471 }
4472 }
4473 proto::tree_diff_status::Status::Deleted => {
4474 TreeDiffStatus::Deleted {
4475 old: git::Oid::from_str(
4476 &entry.oid.context("missing oid").log_err()?,
4477 )
4478 .log_err()?,
4479 }
4480 }
4481 };
4482 Some((
4483 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4484 status,
4485 ))
4486 })
4487 .collect();
4488
4489 Ok(TreeDiff { entries })
4490 }
4491 }
4492 })
4493 }
4494
4495 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4496 let id = self.id;
4497 self.send_job(None, move |repo, _cx| async move {
4498 match repo {
4499 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4500 RepositoryState::Remote { project_id, client } => {
4501 let response = client
4502 .request(proto::GitDiff {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 diff_type: match diff_type {
4506 DiffType::HeadToIndex => {
4507 proto::git_diff::DiffType::HeadToIndex.into()
4508 }
4509 DiffType::HeadToWorktree => {
4510 proto::git_diff::DiffType::HeadToWorktree.into()
4511 }
4512 },
4513 })
4514 .await?;
4515
4516 Ok(response.diff)
4517 }
4518 }
4519 })
4520 }
4521
4522 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4523 let id = self.id;
4524 self.send_job(
4525 Some(format!("git switch -c {branch_name}").into()),
4526 move |repo, _cx| async move {
4527 match repo {
4528 RepositoryState::Local { backend, .. } => {
4529 backend.create_branch(branch_name).await
4530 }
4531 RepositoryState::Remote { project_id, client } => {
4532 client
4533 .request(proto::GitCreateBranch {
4534 project_id: project_id.0,
4535 repository_id: id.to_proto(),
4536 branch_name,
4537 })
4538 .await?;
4539
4540 Ok(())
4541 }
4542 }
4543 },
4544 )
4545 }
4546
4547 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4548 let id = self.id;
4549 self.send_job(
4550 Some(format!("git switch {branch_name}").into()),
4551 move |repo, _cx| async move {
4552 match repo {
4553 RepositoryState::Local { backend, .. } => {
4554 backend.change_branch(branch_name).await
4555 }
4556 RepositoryState::Remote { project_id, client } => {
4557 client
4558 .request(proto::GitChangeBranch {
4559 project_id: project_id.0,
4560 repository_id: id.to_proto(),
4561 branch_name,
4562 })
4563 .await?;
4564
4565 Ok(())
4566 }
4567 }
4568 },
4569 )
4570 }
4571
4572 pub fn rename_branch(
4573 &mut self,
4574 branch: String,
4575 new_name: String,
4576 ) -> oneshot::Receiver<Result<()>> {
4577 let id = self.id;
4578 self.send_job(
4579 Some(format!("git branch -m {branch} {new_name}").into()),
4580 move |repo, _cx| async move {
4581 match repo {
4582 RepositoryState::Local { backend, .. } => {
4583 backend.rename_branch(branch, new_name).await
4584 }
4585 RepositoryState::Remote { project_id, client } => {
4586 client
4587 .request(proto::GitRenameBranch {
4588 project_id: project_id.0,
4589 repository_id: id.to_proto(),
4590 branch,
4591 new_name,
4592 })
4593 .await?;
4594
4595 Ok(())
4596 }
4597 }
4598 },
4599 )
4600 }
4601
4602 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4603 let id = self.id;
4604 self.send_job(None, move |repo, _cx| async move {
4605 match repo {
4606 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4607 RepositoryState::Remote { project_id, client } => {
4608 let response = client
4609 .request(proto::CheckForPushedCommits {
4610 project_id: project_id.0,
4611 repository_id: id.to_proto(),
4612 })
4613 .await?;
4614
4615 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4616
4617 Ok(branches)
4618 }
4619 }
4620 })
4621 }
4622
4623 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4624 self.send_job(None, |repo, _cx| async move {
4625 match repo {
4626 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4627 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4628 }
4629 })
4630 }
4631
4632 pub fn restore_checkpoint(
4633 &mut self,
4634 checkpoint: GitRepositoryCheckpoint,
4635 ) -> oneshot::Receiver<Result<()>> {
4636 self.send_job(None, move |repo, _cx| async move {
4637 match repo {
4638 RepositoryState::Local { backend, .. } => {
4639 backend.restore_checkpoint(checkpoint).await
4640 }
4641 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4642 }
4643 })
4644 }
4645
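/// Applies a `proto::UpdateRepository` message received from the host,
/// updating the branch, head commit, merge state, stash entries, and file
/// statuses, and emitting the corresponding events for whatever changed.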
4646 pub(crate) fn apply_remote_update(
4647 &mut self,
4648 update: proto::UpdateRepository,
4649 cx: &mut Context<Self>,
4650 ) -> Result<()> {
4651 let conflicted_paths = TreeSet::from_ordered_entries(
4652 update
4653 .current_merge_conflicts
4654 .into_iter()
4655 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4656 );
4657 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4658 let new_head_commit = update
4659 .head_commit_details
4660 .as_ref()
4661 .map(proto_to_commit_details);
4662 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4663 cx.emit(RepositoryEvent::BranchChanged)
4664 }
4665 self.snapshot.branch = new_branch;
4666 self.snapshot.head_commit = new_head_commit;
4667
4668 self.snapshot.merge.conflicted_paths = conflicted_paths;
4669 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4670 let new_stash_entries = GitStash {
4671 entries: update
4672 .stash_entries
4673 .iter()
4674 .filter_map(|entry| proto_to_stash(entry).ok())
4675 .collect(),
4676 };
4677 if self.snapshot.stash_entries != new_stash_entries {
4678 cx.emit(RepositoryEvent::StashEntriesChanged)
4679 }
4680 self.snapshot.stash_entries = new_stash_entries;
4681
4682 let edits = update
4683 .removed_statuses
4684 .into_iter()
4685 .filter_map(|path| {
4686 Some(sum_tree::Edit::Remove(PathKey(
4687 RelPath::from_proto(&path).log_err()?,
4688 )))
4689 })
4690 .chain(
4691 update
4692 .updated_statuses
4693 .into_iter()
4694 .filter_map(|updated_status| {
4695 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4696 }),
4697 )
4698 .collect::<Vec<_>>();
4699 if !edits.is_empty() {
4700 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4701 }
4702 self.snapshot.statuses_by_path.edit(edits, ());
4703 if update.is_last_update {
4704 self.snapshot.scan_id = update.scan_id;
4705 }
4706 Ok(())
4707 }
4708
4709 pub fn compare_checkpoints(
4710 &mut self,
4711 left: GitRepositoryCheckpoint,
4712 right: GitRepositoryCheckpoint,
4713 ) -> oneshot::Receiver<Result<bool>> {
4714 self.send_job(None, move |repo, _cx| async move {
4715 match repo {
4716 RepositoryState::Local { backend, .. } => {
4717 backend.compare_checkpoints(left, right).await
4718 }
4719 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4720 }
4721 })
4722 }
4723
4724 pub fn diff_checkpoints(
4725 &mut self,
4726 base_checkpoint: GitRepositoryCheckpoint,
4727 target_checkpoint: GitRepositoryCheckpoint,
4728 ) -> oneshot::Receiver<Result<String>> {
4729 self.send_job(None, move |repo, _cx| async move {
4730 match repo {
4731 RepositoryState::Local { backend, .. } => {
4732 backend
4733 .diff_checkpoints(base_checkpoint, target_checkpoint)
4734 .await
4735 }
4736 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4737 }
4738 })
4739 }
4740
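/// Schedules a full rescan of the repository's git state. The job is keyed so
/// that only the most recently scheduled scan runs when several are queued;
/// the resulting snapshot is applied locally and, when sharing, forwarded
/// downstream.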
4741 fn schedule_scan(
4742 &mut self,
4743 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4744 cx: &mut Context<Self>,
4745 ) {
4746 let this = cx.weak_entity();
4747 let _ = self.send_keyed_job(
4748 Some(GitJobKey::ReloadGitState),
4749 None,
4750 |state, mut cx| async move {
4751 log::debug!("run scheduled git status scan");
4752
4753 let Some(this) = this.upgrade() else {
4754 return Ok(());
4755 };
4756 let RepositoryState::Local { backend, .. } = state else {
4757 bail!("not a local repository")
4758 };
4759 let (snapshot, events) = this
4760 .update(&mut cx, |this, _| {
4761 this.paths_needing_status_update.clear();
4762 compute_snapshot(
4763 this.id,
4764 this.work_directory_abs_path.clone(),
4765 this.snapshot.clone(),
4766 backend.clone(),
4767 )
4768 })?
4769 .await?;
4770 this.update(&mut cx, |this, cx| {
4771 this.snapshot = snapshot.clone();
4772 for event in events {
4773 cx.emit(event);
4774 }
4775 })?;
4776 if let Some(updates_tx) = updates_tx {
4777 updates_tx
4778 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4779 .ok();
4780 }
4781 Ok(())
4782 },
4783 );
4784 }
4785
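/// Spawns the background worker that owns the local git backend and executes
/// `GitJob`s one at a time. The work directory's shell environment is loaded
/// first (to locate the `git` binary, among other things), and a keyed job is
/// skipped when a newer job with the same key is already queued.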
4786 fn spawn_local_git_worker(
4787 work_directory_abs_path: Arc<Path>,
4788 dot_git_abs_path: Arc<Path>,
4789 _repository_dir_abs_path: Arc<Path>,
4790 _common_dir_abs_path: Arc<Path>,
4791 project_environment: WeakEntity<ProjectEnvironment>,
4792 fs: Arc<dyn Fs>,
4793 cx: &mut Context<Self>,
4794 ) -> mpsc::UnboundedSender<GitJob> {
4795 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4796
4797 cx.spawn(async move |_, cx| {
4798 let environment = project_environment
4799 .upgrade()
4800 .context("missing project environment")?
4801 .update(cx, |project_environment, cx| {
4802 project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4803 })?
4804 .await
4805 .unwrap_or_else(|| {
4806 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4807 HashMap::default()
4808 });
4809 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4810 let backend = cx
4811 .background_spawn(async move {
4812 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4813 .or_else(|| which::which("git").ok());
4814 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4815 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4816 })
4817 .await?;
4818
4819 if let Some(git_hosting_provider_registry) =
4820 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4821 {
4822 git_hosting_providers::register_additional_providers(
4823 git_hosting_provider_registry,
4824 backend.clone(),
4825 );
4826 }
4827
4828 let state = RepositoryState::Local {
4829 backend,
4830 environment: Arc::new(environment),
4831 };
4832 let mut jobs = VecDeque::new();
4833 loop {
4834 while let Ok(Some(next_job)) = job_rx.try_next() {
4835 jobs.push_back(next_job);
4836 }
4837
4838 if let Some(job) = jobs.pop_front() {
4839 if let Some(current_key) = &job.key
4840 && jobs
4841 .iter()
4842 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4843 {
4844 continue;
4845 }
4846 (job.job)(state.clone(), cx).await; // Run the job to completion before starting the next one.
4847 } else if let Some(job) = job_rx.next().await {
4848 jobs.push_back(job);
4849 } else {
4850 break;
4851 }
4852 }
4853 anyhow::Ok(())
4854 })
4855 .detach_and_log_err(cx);
4856
4857 job_tx
4858 }
4859
4860 fn spawn_remote_git_worker(
4861 project_id: ProjectId,
4862 client: AnyProtoClient,
4863 cx: &mut Context<Self>,
4864 ) -> mpsc::UnboundedSender<GitJob> {
4865 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4866
4867 cx.spawn(async move |_, cx| {
4868 let state = RepositoryState::Remote { project_id, client };
4869 let mut jobs = VecDeque::new();
4870 loop {
4871 while let Ok(Some(next_job)) = job_rx.try_next() {
4872 jobs.push_back(next_job);
4873 }
4874
4875 if let Some(job) = jobs.pop_front() {
4876 if let Some(current_key) = &job.key
4877 && jobs
4878 .iter()
4879 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4880 {
4881 continue;
4882 }
4883 (job.job)(state.clone(), cx).await;
4884 } else if let Some(job) = job_rx.next().await {
4885 jobs.push_back(job);
4886 } else {
4887 break;
4888 }
4889 }
4890 anyhow::Ok(())
4891 })
4892 .detach_and_log_err(cx);
4893
4894 job_tx
4895 }
4896
4897 fn load_staged_text(
4898 &mut self,
4899 buffer_id: BufferId,
4900 repo_path: RepoPath,
4901 cx: &App,
4902 ) -> Task<Result<Option<String>>> {
4903 let rx = self.send_job(None, move |state, _| async move {
4904 match state {
4905 RepositoryState::Local { backend, .. } => {
4906 anyhow::Ok(backend.load_index_text(repo_path).await)
4907 }
4908 RepositoryState::Remote { project_id, client } => {
4909 let response = client
4910 .request(proto::OpenUnstagedDiff {
4911 project_id: project_id.to_proto(),
4912 buffer_id: buffer_id.to_proto(),
4913 })
4914 .await?;
4915 Ok(response.staged_text)
4916 }
4917 }
4918 });
4919 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4920 }
4921
4922 fn load_committed_text(
4923 &mut self,
4924 buffer_id: BufferId,
4925 repo_path: RepoPath,
4926 cx: &App,
4927 ) -> Task<Result<DiffBasesChange>> {
4928 let rx = self.send_job(None, move |state, _| async move {
4929 match state {
4930 RepositoryState::Local { backend, .. } => {
4931 let committed_text = backend.load_committed_text(repo_path.clone()).await;
4932 let staged_text = backend.load_index_text(repo_path).await;
4933 let diff_bases_change = if committed_text == staged_text {
4934 DiffBasesChange::SetBoth(committed_text)
4935 } else {
4936 DiffBasesChange::SetEach {
4937 index: staged_text,
4938 head: committed_text,
4939 }
4940 };
4941 anyhow::Ok(diff_bases_change)
4942 }
4943 RepositoryState::Remote { project_id, client } => {
4944 use proto::open_uncommitted_diff_response::Mode;
4945
4946 let response = client
4947 .request(proto::OpenUncommittedDiff {
4948 project_id: project_id.to_proto(),
4949 buffer_id: buffer_id.to_proto(),
4950 })
4951 .await?;
4952 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
4953 let bases = match mode {
4954 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
4955 Mode::IndexAndHead => DiffBasesChange::SetEach {
4956 head: response.committed_text,
4957 index: response.staged_text,
4958 },
4959 };
4960 Ok(bases)
4961 }
4962 }
4963 });
4964
4965 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4966 }
4967 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
4968 let repository_id = self.snapshot.id;
4969 let rx = self.send_job(None, move |state, _| async move {
4970 match state {
4971 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
4972 RepositoryState::Remote { client, project_id } => {
4973 let response = client
4974 .request(proto::GetBlobContent {
4975 project_id: project_id.to_proto(),
4976 repository_id: repository_id.0,
4977 oid: oid.to_string(),
4978 })
4979 .await?;
4980 Ok(response.content)
4981 }
4982 }
4983 });
4984 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4985 }
4986
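/// Records that the given paths may have changed and schedules a keyed job to
/// refresh their statuses, emitting events and forwarding an updated snapshot
/// downstream only when something actually changed.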
4987 fn paths_changed(
4988 &mut self,
4989 paths: Vec<RepoPath>,
4990 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4991 cx: &mut Context<Self>,
4992 ) {
4993 self.paths_needing_status_update.extend(paths);
4994
4995 let this = cx.weak_entity();
4996 let _ = self.send_keyed_job(
4997 Some(GitJobKey::RefreshStatuses),
4998 None,
4999 |state, mut cx| async move {
5000 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5001 (
5002 this.snapshot.clone(),
5003 mem::take(&mut this.paths_needing_status_update),
5004 )
5005 })?;
5006 let RepositoryState::Local { backend, .. } = state else {
5007 bail!("not a local repository")
5008 };
5009
5010 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5011 if paths.is_empty() {
5012 return Ok(());
5013 }
5014 let statuses = backend.status(&paths).await?;
5015 let stash_entries = backend.stash_entries().await?;
5016
5017 let changed_path_statuses = cx
5018 .background_spawn(async move {
5019 let mut changed_path_statuses = Vec::new();
5020 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5021 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5022
5023 for (repo_path, status) in &*statuses.entries {
5024 changed_paths.remove(repo_path);
5025 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5026 && cursor.item().is_some_and(|entry| entry.status == *status)
5027 {
5028 continue;
5029 }
5030
5031 changed_path_statuses.push(Edit::Insert(StatusEntry {
5032 repo_path: repo_path.clone(),
5033 status: *status,
5034 }));
5035 }
5036 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5037 for path in changed_paths.into_iter() {
5038 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5039 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5040 }
5041 }
5042 changed_path_statuses
5043 })
5044 .await;
5045
5046 this.update(&mut cx, |this, cx| {
5047 if this.snapshot.stash_entries != stash_entries {
5048 cx.emit(RepositoryEvent::StashEntriesChanged);
5049 this.snapshot.stash_entries = stash_entries;
5050 }
5051
5052 if !changed_path_statuses.is_empty() {
5053 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5054 this.snapshot
5055 .statuses_by_path
5056 .edit(changed_path_statuses, ());
5057 this.snapshot.scan_id += 1;
5058 }
5059
5060 if let Some(updates_tx) = updates_tx {
5061 updates_tx
5062 .unbounded_send(DownstreamUpdate::UpdateRepository(
5063 this.snapshot.clone(),
5064 ))
5065 .ok();
5066 }
5067 })
5068 },
5069 );
5070 }
5071
5072 /// Returns the currently running git command and when it started, if any.
5073 pub fn current_job(&self) -> Option<JobInfo> {
5074 self.active_jobs.values().next().cloned()
5075 }
5076
5077 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5078 self.send_job(None, |_, _| async {})
5079 }
5080}
5081
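/// Builds a permalink to a file inside a crate checked out from the Cargo
/// registry, recovering the upstream repository and commit from the crate's
/// `.cargo_vcs_info.json` and `Cargo.toml`.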
5082fn get_permalink_in_rust_registry_src(
5083 provider_registry: Arc<GitHostingProviderRegistry>,
5084 path: PathBuf,
5085 selection: Range<u32>,
5086) -> Result<url::Url> {
5087 #[derive(Deserialize)]
5088 struct CargoVcsGit {
5089 sha1: String,
5090 }
5091
5092 #[derive(Deserialize)]
5093 struct CargoVcsInfo {
5094 git: CargoVcsGit,
5095 path_in_vcs: String,
5096 }
5097
5098 #[derive(Deserialize)]
5099 struct CargoPackage {
5100 repository: String,
5101 }
5102
5103 #[derive(Deserialize)]
5104 struct CargoToml {
5105 package: CargoPackage,
5106 }
5107
5108 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5109 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5110 Some((dir, json))
5111 }) else {
5112 bail!("No .cargo_vcs_info.json found in parent directories")
5113 };
5114 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5115 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5116 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5117 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5118 .context("parsing package.repository field of manifest")?;
5119 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5120 let permalink = provider.build_permalink(
5121 remote,
5122 BuildPermalinkParams::new(
5123 &cargo_vcs_info.git.sha1,
5124 &RepoPath(
5125 RelPath::new(&path, PathStyle::local())
5126 .context("invalid path")?
5127 .into_arc(),
5128 ),
5129 Some(selection),
5130 ),
5131 );
5132 Ok(permalink)
5133}
5134
5135fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5136 let Some(blame) = blame else {
5137 return proto::BlameBufferResponse {
5138 blame_response: None,
5139 };
5140 };
5141
5142 let entries = blame
5143 .entries
5144 .into_iter()
5145 .map(|entry| proto::BlameEntry {
5146 sha: entry.sha.as_bytes().into(),
5147 start_line: entry.range.start,
5148 end_line: entry.range.end,
5149 original_line_number: entry.original_line_number,
5150 author: entry.author,
5151 author_mail: entry.author_mail,
5152 author_time: entry.author_time,
5153 author_tz: entry.author_tz,
5154 committer: entry.committer_name,
5155 committer_mail: entry.committer_email,
5156 committer_time: entry.committer_time,
5157 committer_tz: entry.committer_tz,
5158 summary: entry.summary,
5159 previous: entry.previous,
5160 filename: entry.filename,
5161 })
5162 .collect::<Vec<_>>();
5163
5164 let messages = blame
5165 .messages
5166 .into_iter()
5167 .map(|(oid, message)| proto::CommitMessage {
5168 oid: oid.as_bytes().into(),
5169 message,
5170 })
5171 .collect::<Vec<_>>();
5172
5173 proto::BlameBufferResponse {
5174 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5175 entries,
5176 messages,
5177 remote_url: blame.remote_url,
5178 }),
5179 }
5180}
5181
5182fn deserialize_blame_buffer_response(
5183 response: proto::BlameBufferResponse,
5184) -> Option<git::blame::Blame> {
5185 let response = response.blame_response?;
5186 let entries = response
5187 .entries
5188 .into_iter()
5189 .filter_map(|entry| {
5190 Some(git::blame::BlameEntry {
5191 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5192 range: entry.start_line..entry.end_line,
5193 original_line_number: entry.original_line_number,
5194 committer_name: entry.committer,
5195 committer_time: entry.committer_time,
5196 committer_tz: entry.committer_tz,
5197 committer_email: entry.committer_mail,
5198 author: entry.author,
5199 author_mail: entry.author_mail,
5200 author_time: entry.author_time,
5201 author_tz: entry.author_tz,
5202 summary: entry.summary,
5203 previous: entry.previous,
5204 filename: entry.filename,
5205 })
5206 })
5207 .collect::<Vec<_>>();
5208
5209 let messages = response
5210 .messages
5211 .into_iter()
5212 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5213 .collect::<HashMap<_, _>>();
5214
5215 Some(Blame {
5216 entries,
5217 messages,
5218 remote_url: response.remote_url,
5219 })
5220}
5221
5222fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5223 proto::Branch {
5224 is_head: branch.is_head,
5225 ref_name: branch.ref_name.to_string(),
5226 unix_timestamp: branch
5227 .most_recent_commit
5228 .as_ref()
5229 .map(|commit| commit.commit_timestamp as u64),
5230 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5231 ref_name: upstream.ref_name.to_string(),
5232 tracking: upstream
5233 .tracking
5234 .status()
5235 .map(|upstream| proto::UpstreamTracking {
5236 ahead: upstream.ahead as u64,
5237 behind: upstream.behind as u64,
5238 }),
5239 }),
5240 most_recent_commit: branch
5241 .most_recent_commit
5242 .as_ref()
5243 .map(|commit| proto::CommitSummary {
5244 sha: commit.sha.to_string(),
5245 subject: commit.subject.to_string(),
5246 commit_timestamp: commit.commit_timestamp,
5247 author_name: commit.author_name.to_string(),
5248 }),
5249 }
5250}
5251
5252fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5253 git::repository::Branch {
5254 is_head: proto.is_head,
5255 ref_name: proto.ref_name.clone().into(),
5256 upstream: proto
5257 .upstream
5258 .as_ref()
5259 .map(|upstream| git::repository::Upstream {
5260 ref_name: upstream.ref_name.to_string().into(),
5261 tracking: upstream
5262 .tracking
5263 .as_ref()
5264 .map(|tracking| {
5265 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5266 ahead: tracking.ahead as u32,
5267 behind: tracking.behind as u32,
5268 })
5269 })
5270 .unwrap_or(git::repository::UpstreamTracking::Gone),
5271 }),
5272 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5273 git::repository::CommitSummary {
5274 sha: commit.sha.to_string().into(),
5275 subject: commit.subject.to_string().into(),
5276 commit_timestamp: commit.commit_timestamp,
5277 author_name: commit.author_name.to_string().into(),
5278 has_parent: true,
5279 }
5280 }),
5281 }
5282}
5283
5284fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5285 proto::GitCommitDetails {
5286 sha: commit.sha.to_string(),
5287 message: commit.message.to_string(),
5288 commit_timestamp: commit.commit_timestamp,
5289 author_email: commit.author_email.to_string(),
5290 author_name: commit.author_name.to_string(),
5291 }
5292}
5293
5294fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5295 CommitDetails {
5296 sha: proto.sha.clone().into(),
5297 message: proto.message.clone().into(),
5298 commit_timestamp: proto.commit_timestamp,
5299 author_email: proto.author_email.clone().into(),
5300 author_name: proto.author_name.clone().into(),
5301 }
5302}
5303
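/// Builds a fresh `RepositorySnapshot` from the backend (branch, head commit,
/// statuses, merge details, stash entries, and remote URLs) and returns it
/// along with the events describing how it differs from `prev_snapshot`.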
5304async fn compute_snapshot(
5305 id: RepositoryId,
5306 work_directory_abs_path: Arc<Path>,
5307 prev_snapshot: RepositorySnapshot,
5308 backend: Arc<dyn GitRepository>,
5309) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5310 let mut events = Vec::new();
5311 let branches = backend.branches().await?;
5312 let branch = branches.into_iter().find(|branch| branch.is_head);
5313 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5314 let stash_entries = backend.stash_entries().await?;
5315 let statuses_by_path = SumTree::from_iter(
5316 statuses
5317 .entries
5318 .iter()
5319 .map(|(repo_path, status)| StatusEntry {
5320 repo_path: repo_path.clone(),
5321 status: *status,
5322 }),
5323 (),
5324 );
5325 let (merge_details, merge_heads_changed) =
5326 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5327 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5328
5329 if merge_heads_changed {
5330 events.push(RepositoryEvent::MergeHeadsChanged);
5331 }
5332
5333 if statuses_by_path != prev_snapshot.statuses_by_path {
5334 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5335 }
5336
5337 // Load the head commit directly; useful when `branch` is None, e.g. in a detached HEAD state
5338 let head_commit = match backend.head_sha().await {
5339 Some(head_sha) => backend.show(head_sha).await.log_err(),
5340 None => None,
5341 };
5342
5343 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5344 events.push(RepositoryEvent::BranchChanged);
5345 }
5346
5347 // Used by edit prediction data collection
5348 let remote_origin_url = backend.remote_url("origin");
5349 let remote_upstream_url = backend.remote_url("upstream");
5350
5351 let snapshot = RepositorySnapshot {
5352 id,
5353 statuses_by_path,
5354 work_directory_abs_path,
5355 path_style: prev_snapshot.path_style,
5356 scan_id: prev_snapshot.scan_id + 1,
5357 branch,
5358 head_commit,
5359 merge: merge_details,
5360 remote_origin_url,
5361 remote_upstream_url,
5362 stash_entries,
5363 };
5364
5365 Ok((snapshot, events))
5366}
5367
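/// Converts a protobuf file status into a `FileStatus`, falling back to the
/// legacy `simple_status` code when no structured variant is present.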
5368fn status_from_proto(
5369 simple_status: i32,
5370 status: Option<proto::GitFileStatus>,
5371) -> anyhow::Result<FileStatus> {
5372 use proto::git_file_status::Variant;
5373
5374 let Some(variant) = status.and_then(|status| status.variant) else {
5375 let code = proto::GitStatus::from_i32(simple_status)
5376 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5377 let result = match code {
5378 proto::GitStatus::Added => TrackedStatus {
5379 worktree_status: StatusCode::Added,
5380 index_status: StatusCode::Unmodified,
5381 }
5382 .into(),
5383 proto::GitStatus::Modified => TrackedStatus {
5384 worktree_status: StatusCode::Modified,
5385 index_status: StatusCode::Unmodified,
5386 }
5387 .into(),
5388 proto::GitStatus::Conflict => UnmergedStatus {
5389 first_head: UnmergedStatusCode::Updated,
5390 second_head: UnmergedStatusCode::Updated,
5391 }
5392 .into(),
5393 proto::GitStatus::Deleted => TrackedStatus {
5394 worktree_status: StatusCode::Deleted,
5395 index_status: StatusCode::Unmodified,
5396 }
5397 .into(),
5398 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5399 };
5400 return Ok(result);
5401 };
5402
5403 let result = match variant {
5404 Variant::Untracked(_) => FileStatus::Untracked,
5405 Variant::Ignored(_) => FileStatus::Ignored,
5406 Variant::Unmerged(unmerged) => {
5407 let [first_head, second_head] =
5408 [unmerged.first_head, unmerged.second_head].map(|head| {
5409 let code = proto::GitStatus::from_i32(head)
5410 .with_context(|| format!("Invalid git status code: {head}"))?;
5411 let result = match code {
5412 proto::GitStatus::Added => UnmergedStatusCode::Added,
5413 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5414 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5415 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5416 };
5417 Ok(result)
5418 });
5419 let [first_head, second_head] = [first_head?, second_head?];
5420 UnmergedStatus {
5421 first_head,
5422 second_head,
5423 }
5424 .into()
5425 }
5426 Variant::Tracked(tracked) => {
5427 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5428 .map(|status| {
5429 let code = proto::GitStatus::from_i32(status)
5430 .with_context(|| format!("Invalid git status code: {status}"))?;
5431 let result = match code {
5432 proto::GitStatus::Modified => StatusCode::Modified,
5433 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5434 proto::GitStatus::Added => StatusCode::Added,
5435 proto::GitStatus::Deleted => StatusCode::Deleted,
5436 proto::GitStatus::Renamed => StatusCode::Renamed,
5437 proto::GitStatus::Copied => StatusCode::Copied,
5438 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5439 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5440 };
5441 Ok(result)
5442 });
5443 let [index_status, worktree_status] = [index_status?, worktree_status?];
5444 TrackedStatus {
5445 index_status,
5446 worktree_status,
5447 }
5448 .into()
5449 }
5450 };
5451 Ok(result)
5452}
5453
5454fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5455 use proto::git_file_status::{Tracked, Unmerged, Variant};
5456
5457 let variant = match status {
5458 FileStatus::Untracked => Variant::Untracked(Default::default()),
5459 FileStatus::Ignored => Variant::Ignored(Default::default()),
5460 FileStatus::Unmerged(UnmergedStatus {
5461 first_head,
5462 second_head,
5463 }) => Variant::Unmerged(Unmerged {
5464 first_head: unmerged_status_to_proto(first_head),
5465 second_head: unmerged_status_to_proto(second_head),
5466 }),
5467 FileStatus::Tracked(TrackedStatus {
5468 index_status,
5469 worktree_status,
5470 }) => Variant::Tracked(Tracked {
5471 index_status: tracked_status_to_proto(index_status),
5472 worktree_status: tracked_status_to_proto(worktree_status),
5473 }),
5474 };
5475 proto::GitFileStatus {
5476 variant: Some(variant),
5477 }
5478}
5479
5480fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5481 match code {
5482 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5483 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5484 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5485 }
5486}
5487
5488fn tracked_status_to_proto(code: StatusCode) -> i32 {
5489 match code {
5490 StatusCode::Added => proto::GitStatus::Added as _,
5491 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5492 StatusCode::Modified => proto::GitStatus::Modified as _,
5493 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5494 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5495 StatusCode::Copied => proto::GitStatus::Copied as _,
5496 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5497 }
5498}