1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{mpsc, oneshot},
21 future::{self, Shared},
22 stream::FuturesOrdered,
23};
24use git::{
25 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
26 blame::Blame,
27 parse_git_remote_url,
28 repository::{
29 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
30 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
31 ResetMode, UpstreamTrackingStatus,
32 },
33 stash::{GitStash, StashEntry},
34 status::{
35 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
36 UnmergedStatus, UnmergedStatusCode,
37 },
38};
39use gpui::{
40 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
41 WeakEntity,
42};
43use language::{
44 Buffer, BufferEvent, Language, LanguageRegistry,
45 proto::{deserialize_version, serialize_version},
46};
47use parking_lot::Mutex;
48use pending_op::{PendingOp, PendingOpId, PendingOpStatus};
49use postage::stream::Stream as _;
50use rpc::{
51 AnyProtoClient, TypedEnvelope,
52 proto::{self, git_reset, split_repository_update},
53};
54use serde::Deserialize;
55use std::{
56 cmp::Ordering,
57 collections::{BTreeSet, VecDeque},
58 future::Future,
59 mem,
60 ops::Range,
61 path::{Path, PathBuf},
62 str::FromStr,
63 sync::{
64 Arc,
65 atomic::{self, AtomicU64},
66 },
67 time::Instant,
68};
69use sum_tree::{Edit, SumTree, TreeSet};
70use task::Shell;
71use text::{Bias, BufferId};
72use util::{
73 ResultExt, debug_panic,
74 paths::{PathStyle, SanitizedPath},
75 post_inc,
76 rel_path::RelPath,
77};
78use worktree::{
79 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
80 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
81};
82use zeroize::Zeroize;
83
84pub struct GitStore {
85 state: GitStoreState,
86 buffer_store: Entity<BufferStore>,
87 worktree_store: Entity<WorktreeStore>,
88 repositories: HashMap<RepositoryId, Entity<Repository>>,
89 active_repo_id: Option<RepositoryId>,
90 #[allow(clippy::type_complexity)]
91 loading_diffs:
92 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
93 diffs: HashMap<BufferId, Entity<BufferGitState>>,
94 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
95 _subscriptions: Vec<Subscription>,
96}
97
98#[derive(Default)]
99struct SharedDiffs {
100 unstaged: Option<Entity<BufferDiff>>,
101 uncommitted: Option<Entity<BufferDiff>>,
102}
103
104struct BufferGitState {
105 unstaged_diff: Option<WeakEntity<BufferDiff>>,
106 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
107 conflict_set: Option<WeakEntity<ConflictSet>>,
108 recalculate_diff_task: Option<Task<Result<()>>>,
109 reparse_conflict_markers_task: Option<Task<Result<()>>>,
110 language: Option<Arc<Language>>,
111 language_registry: Option<Arc<LanguageRegistry>>,
112 conflict_updated_futures: Vec<oneshot::Sender<()>>,
113 recalculating_tx: postage::watch::Sender<bool>,
114
    /// These operation counts ensure that the head and index text values
    /// read from the git repository are up to date with any hunk staging
    /// operations that have been performed on the `BufferDiff`.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
125
126 head_text: Option<Arc<String>>,
127 index_text: Option<Arc<String>>,
128 head_changed: bool,
129 index_changed: bool,
130 language_changed: bool,
131}
132
133#[derive(Clone, Debug)]
134enum DiffBasesChange {
135 SetIndex(Option<String>),
136 SetHead(Option<String>),
137 SetEach {
138 index: Option<String>,
139 head: Option<String>,
140 },
141 SetBoth(Option<String>),
142}
143
144#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
145enum DiffKind {
146 Unstaged,
147 Uncommitted,
148}
149
150enum GitStoreState {
151 Local {
152 next_repository_id: Arc<AtomicU64>,
153 downstream: Option<LocalDownstreamState>,
154 project_environment: Entity<ProjectEnvironment>,
155 fs: Arc<dyn Fs>,
156 },
157 Remote {
158 upstream_client: AnyProtoClient,
159 upstream_project_id: u64,
160 downstream: Option<(AnyProtoClient, ProjectId)>,
161 },
162}
163
164enum DownstreamUpdate {
165 UpdateRepository(RepositorySnapshot),
166 RemoveRepository(RepositoryId),
167}
168
169struct LocalDownstreamState {
170 client: AnyProtoClient,
171 project_id: ProjectId,
172 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
173 _task: Task<Result<()>>,
174}
175
176#[derive(Clone, Debug)]
177pub struct GitStoreCheckpoint {
178 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
179}
180
181#[derive(Clone, Debug, PartialEq, Eq)]
182pub struct StatusEntry {
183 pub repo_path: RepoPath,
184 pub status: FileStatus,
185}
186
187impl StatusEntry {
188 fn to_proto(&self) -> proto::StatusEntry {
189 let simple_status = match self.status {
190 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
191 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
192 FileStatus::Tracked(TrackedStatus {
193 index_status,
194 worktree_status,
195 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
196 worktree_status
197 } else {
198 index_status
199 }),
200 };
201
202 proto::StatusEntry {
203 repo_path: self.repo_path.to_proto(),
204 simple_status,
205 status: Some(status_to_proto(self.status)),
206 }
207 }
208}
209
210impl TryFrom<proto::StatusEntry> for StatusEntry {
211 type Error = anyhow::Error;
212
213 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
214 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
215 let status = status_from_proto(value.simple_status, value.status)?;
216 Ok(Self { repo_path, status })
217 }
218}
219
220impl sum_tree::Item for StatusEntry {
221 type Summary = PathSummary<GitSummary>;
222
223 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
224 PathSummary {
225 max_path: self.repo_path.0.clone(),
226 item_summary: self.status.summary(),
227 }
228 }
229}
230
231impl sum_tree::KeyedItem for StatusEntry {
232 type Key = PathKey;
233
234 fn key(&self) -> Self::Key {
235 PathKey(self.repo_path.0.clone())
236 }
237}
238
239#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
240pub struct RepositoryId(pub u64);
241
242#[derive(Clone, Debug, Default, PartialEq, Eq)]
243pub struct MergeDetails {
244 pub conflicted_paths: TreeSet<RepoPath>,
245 pub message: Option<SharedString>,
246 pub heads: Vec<Option<SharedString>>,
247}
248
249#[derive(Clone, Debug, PartialEq, Eq)]
250pub struct RepositorySnapshot {
251 pub id: RepositoryId,
252 pub statuses_by_path: SumTree<StatusEntry>,
253 pub pending_ops: SumTree<PendingOp>,
254 pub work_directory_abs_path: Arc<Path>,
255 pub path_style: PathStyle,
256 pub branch: Option<Branch>,
257 pub head_commit: Option<CommitDetails>,
258 pub scan_id: u64,
259 pub merge: MergeDetails,
260 pub remote_origin_url: Option<String>,
261 pub remote_upstream_url: Option<String>,
262 pub stash_entries: GitStash,
263}
264
265type JobId = u64;
266
267#[derive(Clone, Debug, PartialEq, Eq)]
268pub struct JobInfo {
269 pub start: Instant,
270 pub message: SharedString,
271}
272
273pub struct Repository {
274 this: WeakEntity<Self>,
275 snapshot: RepositorySnapshot,
276 commit_message_buffer: Option<Entity<Buffer>>,
277 git_store: WeakEntity<GitStore>,
278 // For a local repository, holds paths that have had worktree events since the last status scan completed,
279 // and that should be examined during the next status scan.
280 paths_needing_status_update: BTreeSet<RepoPath>,
281 job_sender: mpsc::UnboundedSender<GitJob>,
282 active_jobs: HashMap<JobId, JobInfo>,
283 job_id: JobId,
284 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
285 latest_askpass_id: u64,
286}
287
288impl std::ops::Deref for Repository {
289 type Target = RepositorySnapshot;
290
291 fn deref(&self) -> &Self::Target {
292 &self.snapshot
293 }
294}
295
296#[derive(Clone)]
297pub enum RepositoryState {
298 Local {
299 backend: Arc<dyn GitRepository>,
300 environment: Arc<HashMap<String, String>>,
301 },
302 Remote {
303 project_id: ProjectId,
304 client: AnyProtoClient,
305 },
306}
307
308#[derive(Clone, Debug, PartialEq, Eq)]
309pub enum RepositoryEvent {
310 StatusesChanged {
311 // TODO could report which statuses changed here
312 full_scan: bool,
313 },
314 MergeHeadsChanged,
315 BranchChanged,
316 StashEntriesChanged,
317}
318
319#[derive(Clone, Debug)]
320pub struct JobsUpdated;
321
322#[derive(Debug)]
323pub enum GitStoreEvent {
324 ActiveRepositoryChanged(Option<RepositoryId>),
325 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
326 RepositoryAdded,
327 RepositoryRemoved(RepositoryId),
328 IndexWriteError(anyhow::Error),
329 JobsUpdated,
330 ConflictsUpdated,
331}
332
333impl EventEmitter<RepositoryEvent> for Repository {}
334impl EventEmitter<JobsUpdated> for Repository {}
335impl EventEmitter<GitStoreEvent> for GitStore {}
336
337pub struct GitJob {
338 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
339 key: Option<GitJobKey>,
340}
341
342#[derive(PartialEq, Eq)]
343enum GitJobKey {
344 WriteIndex(RepoPath),
345 ReloadBufferDiffBases,
346 RefreshStatuses,
347 ReloadGitState,
348}
349
350impl GitStore {
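    /// Builds the local flavor of the store, which talks to Git on disk via
    /// `fs` and tracks repositories discovered by the worktrees.
    ///
    /// A rough construction sketch (not a doctest); `worktree_store`,
    /// `buffer_store`, `environment`, and `fs` are assumed to already exist,
    /// and `cx` is an `&mut App`:
    ///
    /// ```ignore
    /// let git_store = cx.new(|cx| {
    ///     GitStore::local(
    ///         &worktree_store,
    ///         buffer_store.clone(),
    ///         environment.clone(),
    ///         fs.clone(),
    ///         cx,
    ///     )
    /// });
    /// ```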
351 pub fn local(
352 worktree_store: &Entity<WorktreeStore>,
353 buffer_store: Entity<BufferStore>,
354 environment: Entity<ProjectEnvironment>,
355 fs: Arc<dyn Fs>,
356 cx: &mut Context<Self>,
357 ) -> Self {
358 Self::new(
359 worktree_store.clone(),
360 buffer_store,
361 GitStoreState::Local {
362 next_repository_id: Arc::new(AtomicU64::new(1)),
363 downstream: None,
364 project_environment: environment,
365 fs,
366 },
367 cx,
368 )
369 }
370
371 pub fn remote(
372 worktree_store: &Entity<WorktreeStore>,
373 buffer_store: Entity<BufferStore>,
374 upstream_client: AnyProtoClient,
375 project_id: u64,
376 cx: &mut Context<Self>,
377 ) -> Self {
378 Self::new(
379 worktree_store.clone(),
380 buffer_store,
381 GitStoreState::Remote {
382 upstream_client,
383 upstream_project_id: project_id,
384 downstream: None,
385 },
386 cx,
387 )
388 }
389
390 fn new(
391 worktree_store: Entity<WorktreeStore>,
392 buffer_store: Entity<BufferStore>,
393 state: GitStoreState,
394 cx: &mut Context<Self>,
395 ) -> Self {
396 let _subscriptions = vec![
397 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
398 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
399 ];
400
401 GitStore {
402 state,
403 buffer_store,
404 worktree_store,
405 repositories: HashMap::default(),
406 active_repo_id: None,
407 _subscriptions,
408 loading_diffs: HashMap::default(),
409 shared_diffs: HashMap::default(),
410 diffs: HashMap::default(),
411 }
412 }
413
414 pub fn init(client: &AnyProtoClient) {
415 client.add_entity_request_handler(Self::handle_get_remotes);
416 client.add_entity_request_handler(Self::handle_get_branches);
417 client.add_entity_request_handler(Self::handle_get_default_branch);
418 client.add_entity_request_handler(Self::handle_change_branch);
419 client.add_entity_request_handler(Self::handle_create_branch);
420 client.add_entity_request_handler(Self::handle_rename_branch);
421 client.add_entity_request_handler(Self::handle_git_init);
422 client.add_entity_request_handler(Self::handle_push);
423 client.add_entity_request_handler(Self::handle_pull);
424 client.add_entity_request_handler(Self::handle_fetch);
425 client.add_entity_request_handler(Self::handle_stage);
426 client.add_entity_request_handler(Self::handle_unstage);
427 client.add_entity_request_handler(Self::handle_stash);
428 client.add_entity_request_handler(Self::handle_stash_pop);
429 client.add_entity_request_handler(Self::handle_stash_apply);
430 client.add_entity_request_handler(Self::handle_stash_drop);
431 client.add_entity_request_handler(Self::handle_commit);
432 client.add_entity_request_handler(Self::handle_reset);
433 client.add_entity_request_handler(Self::handle_show);
434 client.add_entity_request_handler(Self::handle_load_commit_diff);
435 client.add_entity_request_handler(Self::handle_checkout_files);
436 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
437 client.add_entity_request_handler(Self::handle_set_index_text);
438 client.add_entity_request_handler(Self::handle_askpass);
439 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
440 client.add_entity_request_handler(Self::handle_git_diff);
441 client.add_entity_request_handler(Self::handle_tree_diff);
442 client.add_entity_request_handler(Self::handle_get_blob_content);
443 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
444 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
445 client.add_entity_message_handler(Self::handle_update_diff_bases);
446 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
447 client.add_entity_request_handler(Self::handle_blame_buffer);
448 client.add_entity_message_handler(Self::handle_update_repository);
449 client.add_entity_message_handler(Self::handle_remove_repository);
450 client.add_entity_request_handler(Self::handle_git_clone);
451 }
452
453 pub fn is_local(&self) -> bool {
454 matches!(self.state, GitStoreState::Local { .. })
    }

456 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
457 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
458 let id = repo.read(cx).id;
459 if self.active_repo_id != Some(id) {
460 self.active_repo_id = Some(id);
461 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
462 }
463 }
464 }
465
466 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
467 match &mut self.state {
468 GitStoreState::Remote {
469 downstream: downstream_client,
470 ..
471 } => {
472 for repo in self.repositories.values() {
473 let update = repo.read(cx).snapshot.initial_update(project_id);
474 for update in split_repository_update(update) {
475 client.send(update).log_err();
476 }
477 }
478 *downstream_client = Some((client, ProjectId(project_id)));
479 }
480 GitStoreState::Local {
481 downstream: downstream_client,
482 ..
483 } => {
484 let mut snapshots = HashMap::default();
485 let (updates_tx, mut updates_rx) = mpsc::unbounded();
486 for repo in self.repositories.values() {
487 updates_tx
488 .unbounded_send(DownstreamUpdate::UpdateRepository(
489 repo.read(cx).snapshot.clone(),
490 ))
491 .ok();
492 }
493 *downstream_client = Some(LocalDownstreamState {
494 client: client.clone(),
495 project_id: ProjectId(project_id),
496 updates_tx,
497 _task: cx.spawn(async move |this, cx| {
498 cx.background_spawn(async move {
499 while let Some(update) = updates_rx.next().await {
500 match update {
501 DownstreamUpdate::UpdateRepository(snapshot) => {
502 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
503 {
504 let update =
505 snapshot.build_update(old_snapshot, project_id);
506 *old_snapshot = snapshot;
507 for update in split_repository_update(update) {
508 client.send(update)?;
509 }
510 } else {
511 let update = snapshot.initial_update(project_id);
512 for update in split_repository_update(update) {
513 client.send(update)?;
514 }
515 snapshots.insert(snapshot.id, snapshot);
516 }
517 }
518 DownstreamUpdate::RemoveRepository(id) => {
519 client.send(proto::RemoveRepository {
520 project_id,
521 id: id.to_proto(),
522 })?;
523 }
524 }
525 }
526 anyhow::Ok(())
527 })
528 .await
529 .ok();
530 this.update(cx, |this, _| {
531 if let GitStoreState::Local {
532 downstream: downstream_client,
533 ..
534 } = &mut this.state
535 {
536 downstream_client.take();
537 } else {
538 unreachable!("unshared called on remote store");
539 }
540 })
541 }),
542 });
543 }
544 }
545 }
546
547 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
548 match &mut self.state {
549 GitStoreState::Local {
550 downstream: downstream_client,
551 ..
552 } => {
553 downstream_client.take();
554 }
555 GitStoreState::Remote {
556 downstream: downstream_client,
557 ..
558 } => {
559 downstream_client.take();
560 }
561 }
562 self.shared_diffs.clear();
563 }
564
565 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
566 self.shared_diffs.remove(peer_id);
567 }
568
569 pub fn active_repository(&self) -> Option<Entity<Repository>> {
570 self.active_repo_id
571 .as_ref()
572 .map(|id| self.repositories[id].clone())
573 }
574
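    /// Opens (or reuses) a [`BufferDiff`] comparing the buffer's current text
    /// against its content in the Git index.
    ///
    /// A hypothetical usage sketch, not a doctest; `git_store` and `buffer`
    /// are assumed to already exist:
    ///
    /// ```ignore
    /// let diff_task = git_store.update(cx, |store, cx| {
    ///     store.open_unstaged_diff(buffer.clone(), cx)
    /// });
    /// // Later, in an async context:
    /// let unstaged_diff = diff_task.await?;
    /// ```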
575 pub fn open_unstaged_diff(
576 &mut self,
577 buffer: Entity<Buffer>,
578 cx: &mut Context<Self>,
579 ) -> Task<Result<Entity<BufferDiff>>> {
580 let buffer_id = buffer.read(cx).remote_id();
581 if let Some(diff_state) = self.diffs.get(&buffer_id)
582 && let Some(unstaged_diff) = diff_state
583 .read(cx)
584 .unstaged_diff
585 .as_ref()
586 .and_then(|weak| weak.upgrade())
587 {
588 if let Some(task) =
589 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
590 {
591 return cx.background_executor().spawn(async move {
592 task.await;
593 Ok(unstaged_diff)
594 });
595 }
596 return Task::ready(Ok(unstaged_diff));
597 }
598
599 let Some((repo, repo_path)) =
600 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
601 else {
602 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
603 };
604
605 let task = self
606 .loading_diffs
607 .entry((buffer_id, DiffKind::Unstaged))
608 .or_insert_with(|| {
609 let staged_text = repo.update(cx, |repo, cx| {
610 repo.load_staged_text(buffer_id, repo_path, cx)
611 });
612 cx.spawn(async move |this, cx| {
613 Self::open_diff_internal(
614 this,
615 DiffKind::Unstaged,
616 staged_text.await.map(DiffBasesChange::SetIndex),
617 buffer,
618 cx,
619 )
620 .await
621 .map_err(Arc::new)
622 })
623 .shared()
624 })
625 .clone();
626
627 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
628 }
629
630 pub fn open_diff_since(
631 &mut self,
632 oid: Option<git::Oid>,
633 buffer: Entity<Buffer>,
634 repo: Entity<Repository>,
635 languages: Arc<LanguageRegistry>,
636 cx: &mut Context<Self>,
637 ) -> Task<Result<Entity<BufferDiff>>> {
638 cx.spawn(async move |this, cx| {
639 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
640 let content = match oid {
641 None => None,
642 Some(oid) => Some(
643 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
644 .await?,
645 ),
646 };
647 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
648
649 buffer_diff
650 .update(cx, |buffer_diff, cx| {
651 buffer_diff.set_base_text(
652 content.map(Arc::new),
653 buffer_snapshot.language().cloned(),
654 Some(languages.clone()),
655 buffer_snapshot.text,
656 cx,
657 )
658 })?
659 .await?;
660 let unstaged_diff = this
661 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
662 .await?;
663 buffer_diff.update(cx, |buffer_diff, _| {
664 buffer_diff.set_secondary_diff(unstaged_diff);
665 })?;
666
667 this.update(cx, |_, cx| {
668 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
669 .detach();
670 })?;
671
672 Ok(buffer_diff)
673 })
674 }
675
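    /// Opens (or reuses) a [`BufferDiff`] comparing the buffer's current text
    /// against its content at HEAD, with the unstaged diff attached as the
    /// secondary diff.
    ///
    /// A hypothetical usage sketch, not a doctest; `git_store` and `buffer`
    /// are assumed to already exist:
    ///
    /// ```ignore
    /// let diff_task = git_store.update(cx, |store, cx| {
    ///     store.open_uncommitted_diff(buffer.clone(), cx)
    /// });
    /// // Later, in an async context:
    /// let uncommitted_diff = diff_task.await?;
    /// ```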
676 pub fn open_uncommitted_diff(
677 &mut self,
678 buffer: Entity<Buffer>,
679 cx: &mut Context<Self>,
680 ) -> Task<Result<Entity<BufferDiff>>> {
681 let buffer_id = buffer.read(cx).remote_id();
682
683 if let Some(diff_state) = self.diffs.get(&buffer_id)
684 && let Some(uncommitted_diff) = diff_state
685 .read(cx)
686 .uncommitted_diff
687 .as_ref()
688 .and_then(|weak| weak.upgrade())
689 {
690 if let Some(task) =
691 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
692 {
693 return cx.background_executor().spawn(async move {
694 task.await;
695 Ok(uncommitted_diff)
696 });
697 }
698 return Task::ready(Ok(uncommitted_diff));
699 }
700
701 let Some((repo, repo_path)) =
702 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
703 else {
704 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
705 };
706
707 let task = self
708 .loading_diffs
709 .entry((buffer_id, DiffKind::Uncommitted))
710 .or_insert_with(|| {
711 let changes = repo.update(cx, |repo, cx| {
712 repo.load_committed_text(buffer_id, repo_path, cx)
713 });
714
715 // todo(lw): hot foreground spawn
716 cx.spawn(async move |this, cx| {
717 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
718 .await
719 .map_err(Arc::new)
720 })
721 .shared()
722 })
723 .clone();
724
725 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
726 }
727
728 async fn open_diff_internal(
729 this: WeakEntity<Self>,
730 kind: DiffKind,
731 texts: Result<DiffBasesChange>,
732 buffer_entity: Entity<Buffer>,
733 cx: &mut AsyncApp,
734 ) -> Result<Entity<BufferDiff>> {
735 let diff_bases_change = match texts {
736 Err(e) => {
737 this.update(cx, |this, cx| {
738 let buffer = buffer_entity.read(cx);
739 let buffer_id = buffer.remote_id();
740 this.loading_diffs.remove(&(buffer_id, kind));
741 })?;
742 return Err(e);
743 }
744 Ok(change) => change,
745 };
746
747 this.update(cx, |this, cx| {
748 let buffer = buffer_entity.read(cx);
749 let buffer_id = buffer.remote_id();
750 let language = buffer.language().cloned();
751 let language_registry = buffer.language_registry();
752 let text_snapshot = buffer.text_snapshot();
753 this.loading_diffs.remove(&(buffer_id, kind));
754
755 let git_store = cx.weak_entity();
756 let diff_state = this
757 .diffs
758 .entry(buffer_id)
759 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
760
761 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
762
763 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
764 diff_state.update(cx, |diff_state, cx| {
765 diff_state.language = language;
766 diff_state.language_registry = language_registry;
767
768 match kind {
769 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
770 DiffKind::Uncommitted => {
771 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
772 diff
773 } else {
774 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
775 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
776 unstaged_diff
777 };
778
779 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
780 diff_state.uncommitted_diff = Some(diff.downgrade())
781 }
782 }
783
784 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
785 let rx = diff_state.wait_for_recalculation();
786
787 anyhow::Ok(async move {
788 if let Some(rx) = rx {
789 rx.await;
790 }
791 Ok(diff)
792 })
793 })
794 })??
795 .await
796 }
797
798 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
799 let diff_state = self.diffs.get(&buffer_id)?;
800 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
801 }
802
803 pub fn get_uncommitted_diff(
804 &self,
805 buffer_id: BufferId,
806 cx: &App,
807 ) -> Option<Entity<BufferDiff>> {
808 let diff_state = self.diffs.get(&buffer_id)?;
809 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
810 }
811
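    /// Returns a [`ConflictSet`] tracking the merge conflict regions of the
    /// given buffer, creating one and scheduling a conflict-marker parse if it
    /// does not exist yet.
    ///
    /// A hypothetical usage sketch, not a doctest; `git_store` and `buffer`
    /// are assumed to already exist:
    ///
    /// ```ignore
    /// let conflict_set = git_store.update(cx, |store, cx| {
    ///     store.open_conflict_set(buffer.clone(), cx)
    /// });
    /// // `conflict_set` is updated as the buffer and repository state change.
    /// ```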
812 pub fn open_conflict_set(
813 &mut self,
814 buffer: Entity<Buffer>,
815 cx: &mut Context<Self>,
816 ) -> Entity<ConflictSet> {
817 log::debug!("open conflict set");
818 let buffer_id = buffer.read(cx).remote_id();
819
820 if let Some(git_state) = self.diffs.get(&buffer_id)
821 && let Some(conflict_set) = git_state
822 .read(cx)
823 .conflict_set
824 .as_ref()
825 .and_then(|weak| weak.upgrade())
826 {
827 let conflict_set = conflict_set;
828 let buffer_snapshot = buffer.read(cx).text_snapshot();
829
830 git_state.update(cx, |state, cx| {
831 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
832 });
833
834 return conflict_set;
835 }
836
837 let is_unmerged = self
838 .repository_and_path_for_buffer_id(buffer_id, cx)
839 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
840 let git_store = cx.weak_entity();
841 let buffer_git_state = self
842 .diffs
843 .entry(buffer_id)
844 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
845 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
846
847 self._subscriptions
848 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
849 cx.emit(GitStoreEvent::ConflictsUpdated);
850 }));
851
852 buffer_git_state.update(cx, |state, cx| {
853 state.conflict_set = Some(conflict_set.downgrade());
854 let buffer_snapshot = buffer.read(cx).text_snapshot();
855 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
856 });
857
858 conflict_set
859 }
860
861 pub fn project_path_git_status(
862 &self,
863 project_path: &ProjectPath,
864 cx: &App,
865 ) -> Option<FileStatus> {
866 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
867 Some(repo.read(cx).status_for_path(&repo_path)?.status)
868 }
869
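    /// Captures a [`GitStoreCheckpoint`] covering every repository in the
    /// store, keyed by working-directory path.
    ///
    /// A hypothetical sketch of pairing this with [`GitStore::restore_checkpoint`]
    /// (not a doctest); `git_store` is assumed to already exist:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ... mutate the working tree ...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```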
870 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
871 let mut work_directory_abs_paths = Vec::new();
872 let mut checkpoints = Vec::new();
873 for repository in self.repositories.values() {
874 repository.update(cx, |repository, _| {
875 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
876 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
877 });
878 }
879
880 cx.background_executor().spawn(async move {
881 let checkpoints = future::try_join_all(checkpoints).await?;
882 Ok(GitStoreCheckpoint {
883 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
884 .into_iter()
885 .zip(checkpoints)
886 .collect(),
887 })
888 })
889 }
890
891 pub fn restore_checkpoint(
892 &self,
893 checkpoint: GitStoreCheckpoint,
894 cx: &mut App,
895 ) -> Task<Result<()>> {
896 let repositories_by_work_dir_abs_path = self
897 .repositories
898 .values()
899 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
900 .collect::<HashMap<_, _>>();
901
902 let mut tasks = Vec::new();
903 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
904 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
905 let restore = repository.update(cx, |repository, _| {
906 repository.restore_checkpoint(checkpoint)
907 });
908 tasks.push(async move { restore.await? });
909 }
910 }
911 cx.background_spawn(async move {
912 future::try_join_all(tasks).await?;
913 Ok(())
914 })
915 }
916
917 /// Compares two checkpoints, returning true if they are equal.
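    ///
    /// A hypothetical sketch (not a doctest), assuming `before` and `after`
    /// were previously captured with [`GitStore::checkpoint`]:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// let unchanged = git_store
    ///     .update(cx, |store, cx| store.compare_checkpoints(before, after, cx))?
    ///     .await?;
    /// if !unchanged {
    ///     // At least one repository differs between the two checkpoints.
    /// }
    /// ```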
918 pub fn compare_checkpoints(
919 &self,
920 left: GitStoreCheckpoint,
921 mut right: GitStoreCheckpoint,
922 cx: &mut App,
923 ) -> Task<Result<bool>> {
924 let repositories_by_work_dir_abs_path = self
925 .repositories
926 .values()
927 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
928 .collect::<HashMap<_, _>>();
929
930 let mut tasks = Vec::new();
931 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
932 if let Some(right_checkpoint) = right
933 .checkpoints_by_work_dir_abs_path
934 .remove(&work_dir_abs_path)
935 {
936 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
937 {
938 let compare = repository.update(cx, |repository, _| {
939 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
940 });
941
942 tasks.push(async move { compare.await? });
943 }
944 } else {
945 return Task::ready(Ok(false));
946 }
947 }
948 cx.background_spawn(async move {
949 Ok(future::try_join_all(tasks)
950 .await?
951 .into_iter()
952 .all(|result| result))
953 })
954 }
955
956 /// Blames a buffer.
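    ///
    /// A hypothetical sketch (not a doctest); passing `None` for the version
    /// blames the buffer's current contents:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// let blame = git_store
    ///     .update(cx, |store, cx| store.blame_buffer(&buffer, None, cx))?
    ///     .await?;
    /// if let Some(blame) = blame {
    ///     // Inspect the returned `Blame` to map buffer rows to commits.
    /// }
    /// ```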
957 pub fn blame_buffer(
958 &self,
959 buffer: &Entity<Buffer>,
960 version: Option<clock::Global>,
961 cx: &mut App,
962 ) -> Task<Result<Option<Blame>>> {
963 let buffer = buffer.read(cx);
964 let Some((repo, repo_path)) =
965 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
966 else {
967 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
968 };
969 let content = match &version {
970 Some(version) => buffer.rope_for_version(version),
971 None => buffer.as_rope().clone(),
972 };
973 let version = version.unwrap_or(buffer.version());
974 let buffer_id = buffer.remote_id();
975
976 let rx = repo.update(cx, |repo, _| {
977 repo.send_job(None, move |state, _| async move {
978 match state {
979 RepositoryState::Local { backend, .. } => backend
980 .blame(repo_path.clone(), content)
981 .await
982 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
983 .map(Some),
984 RepositoryState::Remote { project_id, client } => {
985 let response = client
986 .request(proto::BlameBuffer {
987 project_id: project_id.to_proto(),
988 buffer_id: buffer_id.into(),
989 version: serialize_version(&version),
990 })
991 .await?;
992 Ok(deserialize_blame_buffer_response(response))
993 }
994 }
995 })
996 });
997
998 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
999 }
1000
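    /// Builds a permalink URL to the given line range of the buffer's file on
    /// its Git hosting provider (falling back to Cargo registry sources for
    /// Rust files outside a repository).
    ///
    /// A hypothetical sketch (not a doctest); the `10..12` selection is an
    /// illustrative row range:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// let url = git_store
    ///     .update(cx, |store, cx| store.get_permalink_to_line(&buffer, 10..12, cx))?
    ///     .await?;
    /// // e.g. open `url` in the browser or copy it to the clipboard.
    /// ```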
1001 pub fn get_permalink_to_line(
1002 &self,
1003 buffer: &Entity<Buffer>,
1004 selection: Range<u32>,
1005 cx: &mut App,
1006 ) -> Task<Result<url::Url>> {
1007 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1008 return Task::ready(Err(anyhow!("buffer has no file")));
1009 };
1010
1011 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1012 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1013 cx,
1014 ) else {
1015 // If we're not in a Git repo, check whether this is a Rust source
1016 // file in the Cargo registry (presumably opened with go-to-definition
1017 // from a normal Rust file). If so, we can put together a permalink
1018 // using crate metadata.
1019 if buffer
1020 .read(cx)
1021 .language()
1022 .is_none_or(|lang| lang.name() != "Rust".into())
1023 {
1024 return Task::ready(Err(anyhow!("no permalink available")));
1025 }
1026 let file_path = file.worktree.read(cx).absolutize(&file.path);
1027 return cx.spawn(async move |cx| {
1028 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1029 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1030 .context("no permalink available")
1031 });
1032 };
1033
1034 let buffer_id = buffer.read(cx).remote_id();
1035 let branch = repo.read(cx).branch.clone();
1036 let remote = branch
1037 .as_ref()
1038 .and_then(|b| b.upstream.as_ref())
1039 .and_then(|b| b.remote_name())
1040 .unwrap_or("origin")
1041 .to_string();
1042
1043 let rx = repo.update(cx, |repo, _| {
1044 repo.send_job(None, move |state, cx| async move {
1045 match state {
1046 RepositoryState::Local { backend, .. } => {
1047 let origin_url = backend
1048 .remote_url(&remote)
1049 .with_context(|| format!("remote \"{remote}\" not found"))?;
1050
1051 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1052
1053 let provider_registry =
1054 cx.update(GitHostingProviderRegistry::default_global)?;
1055
1056 let (provider, remote) =
1057 parse_git_remote_url(provider_registry, &origin_url)
1058 .context("parsing Git remote URL")?;
1059
1060 Ok(provider.build_permalink(
1061 remote,
1062 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1063 ))
1064 }
1065 RepositoryState::Remote { project_id, client } => {
1066 let response = client
1067 .request(proto::GetPermalinkToLine {
1068 project_id: project_id.to_proto(),
1069 buffer_id: buffer_id.into(),
1070 selection: Some(proto::Range {
1071 start: selection.start as u64,
1072 end: selection.end as u64,
1073 }),
1074 })
1075 .await?;
1076
1077 url::Url::parse(&response.permalink).context("failed to parse permalink")
1078 }
1079 }
1080 })
1081 });
1082 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1083 }
1084
1085 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1086 match &self.state {
1087 GitStoreState::Local {
1088 downstream: downstream_client,
1089 ..
1090 } => downstream_client
1091 .as_ref()
1092 .map(|state| (state.client.clone(), state.project_id)),
1093 GitStoreState::Remote {
1094 downstream: downstream_client,
1095 ..
1096 } => downstream_client.clone(),
1097 }
1098 }
1099
1100 fn upstream_client(&self) -> Option<AnyProtoClient> {
1101 match &self.state {
1102 GitStoreState::Local { .. } => None,
1103 GitStoreState::Remote {
1104 upstream_client, ..
1105 } => Some(upstream_client.clone()),
1106 }
1107 }
1108
1109 fn on_worktree_store_event(
1110 &mut self,
1111 worktree_store: Entity<WorktreeStore>,
1112 event: &WorktreeStoreEvent,
1113 cx: &mut Context<Self>,
1114 ) {
1115 let GitStoreState::Local {
1116 project_environment,
1117 downstream,
1118 next_repository_id,
1119 fs,
1120 } = &self.state
1121 else {
1122 return;
1123 };
1124
1125 match event {
1126 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1127 if let Some(worktree) = self
1128 .worktree_store
1129 .read(cx)
1130 .worktree_for_id(*worktree_id, cx)
1131 {
1132 let paths_by_git_repo =
1133 self.process_updated_entries(&worktree, updated_entries, cx);
1134 let downstream = downstream
1135 .as_ref()
1136 .map(|downstream| downstream.updates_tx.clone());
1137 cx.spawn(async move |_, cx| {
1138 let paths_by_git_repo = paths_by_git_repo.await;
1139 for (repo, paths) in paths_by_git_repo {
1140 repo.update(cx, |repo, cx| {
1141 repo.paths_changed(paths, downstream.clone(), cx);
1142 })
1143 .ok();
1144 }
1145 })
1146 .detach();
1147 }
1148 }
1149 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1150 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1151 else {
1152 return;
1153 };
1154 if !worktree.read(cx).is_visible() {
1155 log::debug!(
1156 "not adding repositories for local worktree {:?} because it's not visible",
1157 worktree.read(cx).abs_path()
1158 );
1159 return;
1160 }
1161 self.update_repositories_from_worktree(
1162 project_environment.clone(),
1163 next_repository_id.clone(),
1164 downstream
1165 .as_ref()
1166 .map(|downstream| downstream.updates_tx.clone()),
1167 changed_repos.clone(),
1168 fs.clone(),
1169 cx,
1170 );
1171 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1172 }
1173 _ => {}
1174 }
    }

1176 fn on_repository_event(
1177 &mut self,
1178 repo: Entity<Repository>,
1179 event: &RepositoryEvent,
1180 cx: &mut Context<Self>,
1181 ) {
1182 let id = repo.read(cx).id;
1183 let repo_snapshot = repo.read(cx).snapshot.clone();
1184 for (buffer_id, diff) in self.diffs.iter() {
1185 if let Some((buffer_repo, repo_path)) =
1186 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1187 && buffer_repo == repo
1188 {
1189 diff.update(cx, |diff, cx| {
1190 if let Some(conflict_set) = &diff.conflict_set {
1191 let conflict_status_changed =
1192 conflict_set.update(cx, |conflict_set, cx| {
1193 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1194 conflict_set.set_has_conflict(has_conflict, cx)
1195 })?;
1196 if conflict_status_changed {
1197 let buffer_store = self.buffer_store.read(cx);
1198 if let Some(buffer) = buffer_store.get(*buffer_id) {
1199 let _ = diff
1200 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1201 }
1202 }
1203 }
1204 anyhow::Ok(())
1205 })
1206 .ok();
1207 }
1208 }
1209 cx.emit(GitStoreEvent::RepositoryUpdated(
1210 id,
1211 event.clone(),
1212 self.active_repo_id == Some(id),
1213 ))
1214 }
1215
1216 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1217 cx.emit(GitStoreEvent::JobsUpdated)
1218 }
1219
    /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1221 fn update_repositories_from_worktree(
1222 &mut self,
1223 project_environment: Entity<ProjectEnvironment>,
1224 next_repository_id: Arc<AtomicU64>,
1225 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1226 updated_git_repositories: UpdatedGitRepositoriesSet,
1227 fs: Arc<dyn Fs>,
1228 cx: &mut Context<Self>,
1229 ) {
1230 let mut removed_ids = Vec::new();
1231 for update in updated_git_repositories.iter() {
1232 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1233 let existing_work_directory_abs_path =
1234 repo.read(cx).work_directory_abs_path.clone();
1235 Some(&existing_work_directory_abs_path)
1236 == update.old_work_directory_abs_path.as_ref()
1237 || Some(&existing_work_directory_abs_path)
1238 == update.new_work_directory_abs_path.as_ref()
1239 }) {
1240 if let Some(new_work_directory_abs_path) =
1241 update.new_work_directory_abs_path.clone()
1242 {
1243 existing.update(cx, |existing, cx| {
1244 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1245 existing.schedule_scan(updates_tx.clone(), cx);
1246 });
1247 } else {
1248 removed_ids.push(*id);
1249 }
1250 } else if let UpdatedGitRepository {
1251 new_work_directory_abs_path: Some(work_directory_abs_path),
1252 dot_git_abs_path: Some(dot_git_abs_path),
1253 repository_dir_abs_path: Some(repository_dir_abs_path),
1254 common_dir_abs_path: Some(common_dir_abs_path),
1255 ..
1256 } = update
1257 {
1258 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1259 let git_store = cx.weak_entity();
1260 let repo = cx.new(|cx| {
1261 let mut repo = Repository::local(
1262 id,
1263 work_directory_abs_path.clone(),
1264 dot_git_abs_path.clone(),
1265 repository_dir_abs_path.clone(),
1266 common_dir_abs_path.clone(),
1267 project_environment.downgrade(),
1268 fs.clone(),
1269 git_store,
1270 cx,
1271 );
1272 repo.schedule_scan(updates_tx.clone(), cx);
1273 repo
1274 });
1275 self._subscriptions
1276 .push(cx.subscribe(&repo, Self::on_repository_event));
1277 self._subscriptions
1278 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1279 self.repositories.insert(id, repo);
1280 cx.emit(GitStoreEvent::RepositoryAdded);
1281 self.active_repo_id.get_or_insert_with(|| {
1282 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1283 id
1284 });
1285 }
1286 }
1287
1288 for id in removed_ids {
1289 if self.active_repo_id == Some(id) {
1290 self.active_repo_id = None;
1291 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1292 }
1293 self.repositories.remove(&id);
1294 if let Some(updates_tx) = updates_tx.as_ref() {
1295 updates_tx
1296 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1297 .ok();
1298 }
1299 }
1300 }
1301
1302 fn on_buffer_store_event(
1303 &mut self,
1304 _: Entity<BufferStore>,
1305 event: &BufferStoreEvent,
1306 cx: &mut Context<Self>,
1307 ) {
1308 match event {
1309 BufferStoreEvent::BufferAdded(buffer) => {
1310 cx.subscribe(buffer, |this, buffer, event, cx| {
1311 if let BufferEvent::LanguageChanged = event {
1312 let buffer_id = buffer.read(cx).remote_id();
1313 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1314 diff_state.update(cx, |diff_state, cx| {
1315 diff_state.buffer_language_changed(buffer, cx);
1316 });
1317 }
1318 }
1319 })
1320 .detach();
1321 }
1322 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1323 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1324 diffs.remove(buffer_id);
1325 }
1326 }
1327 BufferStoreEvent::BufferDropped(buffer_id) => {
1328 self.diffs.remove(buffer_id);
1329 for diffs in self.shared_diffs.values_mut() {
1330 diffs.remove(buffer_id);
1331 }
1332 }
1333
1334 _ => {}
1335 }
1336 }
1337
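    /// Recalculates the unstaged/uncommitted diffs (and conflict markers) for
    /// the given buffers, returning a future that resolves once every
    /// recalculation has completed.
    ///
    /// A hypothetical sketch (not a doctest); `buffers` is assumed to be a
    /// `Vec<Entity<Buffer>>` gathered elsewhere:
    ///
    /// ```ignore
    /// let done = git_store.update(cx, |store, cx| {
    ///     store.recalculate_buffer_diffs(buffers, cx)
    /// });
    /// // Await `done` from an async context; it resolves once all diffs are up to date.
    /// done.await;
    /// ```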
1338 pub fn recalculate_buffer_diffs(
1339 &mut self,
1340 buffers: Vec<Entity<Buffer>>,
1341 cx: &mut Context<Self>,
1342 ) -> impl Future<Output = ()> + use<> {
1343 let mut futures = Vec::new();
1344 for buffer in buffers {
1345 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1346 let buffer = buffer.read(cx).text_snapshot();
1347 diff_state.update(cx, |diff_state, cx| {
1348 diff_state.recalculate_diffs(buffer.clone(), cx);
1349 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1350 });
1351 futures.push(diff_state.update(cx, |diff_state, cx| {
1352 diff_state
1353 .reparse_conflict_markers(buffer, cx)
1354 .map(|_| {})
1355 .boxed()
1356 }));
1357 }
1358 }
1359 async move {
1360 futures::future::join_all(futures).await;
1361 }
1362 }
1363
1364 fn on_buffer_diff_event(
1365 &mut self,
1366 diff: Entity<buffer_diff::BufferDiff>,
1367 event: &BufferDiffEvent,
1368 cx: &mut Context<Self>,
1369 ) {
1370 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1371 let buffer_id = diff.read(cx).buffer_id;
1372 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1373 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1374 diff_state.hunk_staging_operation_count += 1;
1375 diff_state.hunk_staging_operation_count
1376 });
1377 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1378 let recv = repo.update(cx, |repo, cx| {
1379 log::debug!("hunks changed for {}", path.as_unix_str());
1380 repo.spawn_set_index_text_job(
1381 path,
1382 new_index_text.as_ref().map(|rope| rope.to_string()),
1383 Some(hunk_staging_operation_count),
1384 cx,
1385 )
1386 });
1387 let diff = diff.downgrade();
1388 cx.spawn(async move |this, cx| {
1389 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1390 diff.update(cx, |diff, cx| {
1391 diff.clear_pending_hunks(cx);
1392 })
1393 .ok();
1394 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1395 .ok();
1396 }
1397 })
1398 .detach();
1399 }
1400 }
1401 }
1402 }
1403
1404 fn local_worktree_git_repos_changed(
1405 &mut self,
1406 worktree: Entity<Worktree>,
1407 changed_repos: &UpdatedGitRepositoriesSet,
1408 cx: &mut Context<Self>,
1409 ) {
1410 log::debug!("local worktree repos changed");
1411 debug_assert!(worktree.read(cx).is_local());
1412
1413 for repository in self.repositories.values() {
1414 repository.update(cx, |repository, cx| {
1415 let repo_abs_path = &repository.work_directory_abs_path;
1416 if changed_repos.iter().any(|update| {
1417 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1418 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1419 }) {
1420 repository.reload_buffer_diff_bases(cx);
1421 }
1422 });
1423 }
1424 }
1425
1426 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1427 &self.repositories
1428 }
1429
1430 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1431 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1432 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1433 Some(status.status)
1434 }
1435
1436 pub fn repository_and_path_for_buffer_id(
1437 &self,
1438 buffer_id: BufferId,
1439 cx: &App,
1440 ) -> Option<(Entity<Repository>, RepoPath)> {
1441 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1442 let project_path = buffer.read(cx).project_path(cx)?;
1443 self.repository_and_path_for_project_path(&project_path, cx)
1444 }
1445
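    /// Resolves a [`ProjectPath`] to the repository containing it, along with
    /// the corresponding path relative to that repository's work directory.
    /// When repositories are nested, the innermost work directory wins.
    ///
    /// A hypothetical sketch (not a doctest); `project_path` is assumed to be
    /// a valid `ProjectPath` within the project:
    ///
    /// ```ignore
    /// if let Some((repo, repo_path)) = git_store
    ///     .read(cx)
    ///     .repository_and_path_for_project_path(&project_path, cx)
    /// {
    ///     let status = repo.read(cx).status_for_path(&repo_path);
    /// }
    /// ```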
1446 pub fn repository_and_path_for_project_path(
1447 &self,
1448 path: &ProjectPath,
1449 cx: &App,
1450 ) -> Option<(Entity<Repository>, RepoPath)> {
1451 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1452 self.repositories
1453 .values()
1454 .filter_map(|repo| {
1455 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1456 Some((repo.clone(), repo_path))
1457 })
1458 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1459 }
1460
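    /// Initializes a new Git repository at `path`, with `fallback_branch_name`
    /// used as the fallback for the initial branch name.
    ///
    /// A hypothetical sketch (not a doctest), mirroring how the RPC handler
    /// below invokes it; `abs_path` is an assumed absolute directory path:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// let path: Arc<Path> = PathBuf::from(abs_path).into();
    /// cx.update(|cx| git_store.read(cx).git_init(path, "main".to_string(), cx))?
    ///     .await?;
    /// ```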
1461 pub fn git_init(
1462 &self,
1463 path: Arc<Path>,
1464 fallback_branch_name: String,
1465 cx: &App,
1466 ) -> Task<Result<()>> {
1467 match &self.state {
1468 GitStoreState::Local { fs, .. } => {
1469 let fs = fs.clone();
1470 cx.background_executor()
1471 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1472 }
1473 GitStoreState::Remote {
1474 upstream_client,
1475 upstream_project_id: project_id,
1476 ..
1477 } => {
1478 let client = upstream_client.clone();
1479 let project_id = *project_id;
1480 cx.background_executor().spawn(async move {
1481 client
1482 .request(proto::GitInit {
1483 project_id: project_id,
1484 abs_path: path.to_string_lossy().into_owned(),
1485 fallback_branch_name,
1486 })
1487 .await?;
1488 Ok(())
1489 })
1490 }
1491 }
1492 }
1493
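    /// Clones `repo` into `path`, either directly via the filesystem for local
    /// projects or by forwarding the request upstream for remote projects
    /// (collab guests are rejected).
    ///
    /// A hypothetical sketch (not a doctest); the URL and destination are
    /// illustrative:
    ///
    /// ```ignore
    /// // In an async context with `cx: &mut AsyncApp`:
    /// cx.update(|cx| {
    ///     git_store.read(cx).git_clone(
    ///         "https://example.com/user/repo.git".to_string(),
    ///         PathBuf::from(destination_dir),
    ///         cx,
    ///     )
    /// })?
    /// .await?;
    /// ```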
1494 pub fn git_clone(
1495 &self,
1496 repo: String,
1497 path: impl Into<Arc<std::path::Path>>,
1498 cx: &App,
1499 ) -> Task<Result<()>> {
1500 let path = path.into();
1501 match &self.state {
1502 GitStoreState::Local { fs, .. } => {
1503 let fs = fs.clone();
1504 cx.background_executor()
1505 .spawn(async move { fs.git_clone(&repo, &path).await })
1506 }
1507 GitStoreState::Remote {
1508 upstream_client,
1509 upstream_project_id,
1510 ..
1511 } => {
1512 if upstream_client.is_via_collab() {
1513 return Task::ready(Err(anyhow!(
1514 "Git Clone isn't supported for project guests"
1515 )));
1516 }
1517 let request = upstream_client.request(proto::GitClone {
1518 project_id: *upstream_project_id,
1519 abs_path: path.to_string_lossy().into_owned(),
1520 remote_repo: repo,
1521 });
1522
1523 cx.background_spawn(async move {
1524 let result = request.await?;
1525
1526 match result.success {
1527 true => Ok(()),
1528 false => Err(anyhow!("Git Clone failed")),
1529 }
1530 })
1531 }
1532 }
1533 }
1534
1535 async fn handle_update_repository(
1536 this: Entity<Self>,
1537 envelope: TypedEnvelope<proto::UpdateRepository>,
1538 mut cx: AsyncApp,
1539 ) -> Result<()> {
1540 this.update(&mut cx, |this, cx| {
1541 let path_style = this.worktree_store.read(cx).path_style();
1542 let mut update = envelope.payload;
1543
1544 let id = RepositoryId::from_proto(update.id);
1545 let client = this.upstream_client().context("no upstream client")?;
1546
1547 let mut repo_subscription = None;
1548 let repo = this.repositories.entry(id).or_insert_with(|| {
1549 let git_store = cx.weak_entity();
1550 let repo = cx.new(|cx| {
1551 Repository::remote(
1552 id,
1553 Path::new(&update.abs_path).into(),
1554 path_style,
1555 ProjectId(update.project_id),
1556 client,
1557 git_store,
1558 cx,
1559 )
1560 });
1561 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1562 cx.emit(GitStoreEvent::RepositoryAdded);
1563 repo
1564 });
1565 this._subscriptions.extend(repo_subscription);
1566
1567 repo.update(cx, {
1568 let update = update.clone();
1569 |repo, cx| repo.apply_remote_update(update, cx)
1570 })?;
1571
1572 this.active_repo_id.get_or_insert_with(|| {
1573 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1574 id
1575 });
1576
1577 if let Some((client, project_id)) = this.downstream_client() {
1578 update.project_id = project_id.to_proto();
1579 client.send(update).log_err();
1580 }
1581 Ok(())
1582 })?
1583 }
1584
1585 async fn handle_remove_repository(
1586 this: Entity<Self>,
1587 envelope: TypedEnvelope<proto::RemoveRepository>,
1588 mut cx: AsyncApp,
1589 ) -> Result<()> {
1590 this.update(&mut cx, |this, cx| {
1591 let mut update = envelope.payload;
1592 let id = RepositoryId::from_proto(update.id);
1593 this.repositories.remove(&id);
1594 if let Some((client, project_id)) = this.downstream_client() {
1595 update.project_id = project_id.to_proto();
1596 client.send(update).log_err();
1597 }
1598 if this.active_repo_id == Some(id) {
1599 this.active_repo_id = None;
1600 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1601 }
1602 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1603 })
1604 }
1605
1606 async fn handle_git_init(
1607 this: Entity<Self>,
1608 envelope: TypedEnvelope<proto::GitInit>,
1609 cx: AsyncApp,
1610 ) -> Result<proto::Ack> {
1611 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1612 let name = envelope.payload.fallback_branch_name;
1613 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1614 .await?;
1615
1616 Ok(proto::Ack {})
1617 }
1618
1619 async fn handle_git_clone(
1620 this: Entity<Self>,
1621 envelope: TypedEnvelope<proto::GitClone>,
1622 cx: AsyncApp,
1623 ) -> Result<proto::GitCloneResponse> {
1624 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1625 let repo_name = envelope.payload.remote_repo;
1626 let result = cx
1627 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1628 .await;
1629
1630 Ok(proto::GitCloneResponse {
1631 success: result.is_ok(),
1632 })
1633 }
1634
1635 async fn handle_fetch(
1636 this: Entity<Self>,
1637 envelope: TypedEnvelope<proto::Fetch>,
1638 mut cx: AsyncApp,
1639 ) -> Result<proto::RemoteMessageResponse> {
1640 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1641 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1642 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1643 let askpass_id = envelope.payload.askpass_id;
1644
1645 let askpass = make_remote_delegate(
1646 this,
1647 envelope.payload.project_id,
1648 repository_id,
1649 askpass_id,
1650 &mut cx,
1651 );
1652
1653 let remote_output = repository_handle
1654 .update(&mut cx, |repository_handle, cx| {
1655 repository_handle.fetch(fetch_options, askpass, cx)
1656 })?
1657 .await??;
1658
1659 Ok(proto::RemoteMessageResponse {
1660 stdout: remote_output.stdout,
1661 stderr: remote_output.stderr,
1662 })
1663 }
1664
1665 async fn handle_push(
1666 this: Entity<Self>,
1667 envelope: TypedEnvelope<proto::Push>,
1668 mut cx: AsyncApp,
1669 ) -> Result<proto::RemoteMessageResponse> {
1670 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1671 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1672
1673 let askpass_id = envelope.payload.askpass_id;
1674 let askpass = make_remote_delegate(
1675 this,
1676 envelope.payload.project_id,
1677 repository_id,
1678 askpass_id,
1679 &mut cx,
1680 );
1681
1682 let options = envelope
1683 .payload
1684 .options
1685 .as_ref()
1686 .map(|_| match envelope.payload.options() {
1687 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1688 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1689 });
1690
1691 let branch_name = envelope.payload.branch_name.into();
1692 let remote_name = envelope.payload.remote_name.into();
1693
1694 let remote_output = repository_handle
1695 .update(&mut cx, |repository_handle, cx| {
1696 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1697 })?
1698 .await??;
1699 Ok(proto::RemoteMessageResponse {
1700 stdout: remote_output.stdout,
1701 stderr: remote_output.stderr,
1702 })
1703 }
1704
1705 async fn handle_pull(
1706 this: Entity<Self>,
1707 envelope: TypedEnvelope<proto::Pull>,
1708 mut cx: AsyncApp,
1709 ) -> Result<proto::RemoteMessageResponse> {
1710 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1711 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1712 let askpass_id = envelope.payload.askpass_id;
1713 let askpass = make_remote_delegate(
1714 this,
1715 envelope.payload.project_id,
1716 repository_id,
1717 askpass_id,
1718 &mut cx,
1719 );
1720
1721 let branch_name = envelope.payload.branch_name.into();
1722 let remote_name = envelope.payload.remote_name.into();
1723
1724 let remote_message = repository_handle
1725 .update(&mut cx, |repository_handle, cx| {
1726 repository_handle.pull(branch_name, remote_name, askpass, cx)
1727 })?
1728 .await??;
1729
1730 Ok(proto::RemoteMessageResponse {
1731 stdout: remote_message.stdout,
1732 stderr: remote_message.stderr,
1733 })
1734 }
1735
1736 async fn handle_stage(
1737 this: Entity<Self>,
1738 envelope: TypedEnvelope<proto::Stage>,
1739 mut cx: AsyncApp,
1740 ) -> Result<proto::Ack> {
1741 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1742 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1743
1744 let entries = envelope
1745 .payload
1746 .paths
1747 .into_iter()
1748 .map(|path| RepoPath::new(&path))
1749 .collect::<Result<Vec<_>>>()?;
1750
1751 repository_handle
1752 .update(&mut cx, |repository_handle, cx| {
1753 repository_handle.stage_entries(entries, cx)
1754 })?
1755 .await?;
1756 Ok(proto::Ack {})
1757 }
1758
1759 async fn handle_unstage(
1760 this: Entity<Self>,
1761 envelope: TypedEnvelope<proto::Unstage>,
1762 mut cx: AsyncApp,
1763 ) -> Result<proto::Ack> {
1764 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1765 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1766
1767 let entries = envelope
1768 .payload
1769 .paths
1770 .into_iter()
1771 .map(|path| RepoPath::new(&path))
1772 .collect::<Result<Vec<_>>>()?;
1773
1774 repository_handle
1775 .update(&mut cx, |repository_handle, cx| {
1776 repository_handle.unstage_entries(entries, cx)
1777 })?
1778 .await?;
1779
1780 Ok(proto::Ack {})
1781 }
1782
1783 async fn handle_stash(
1784 this: Entity<Self>,
1785 envelope: TypedEnvelope<proto::Stash>,
1786 mut cx: AsyncApp,
1787 ) -> Result<proto::Ack> {
1788 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1789 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1790
1791 let entries = envelope
1792 .payload
1793 .paths
1794 .into_iter()
1795 .map(|path| RepoPath::new(&path))
1796 .collect::<Result<Vec<_>>>()?;
1797
1798 repository_handle
1799 .update(&mut cx, |repository_handle, cx| {
1800 repository_handle.stash_entries(entries, cx)
1801 })?
1802 .await?;
1803
1804 Ok(proto::Ack {})
1805 }
1806
1807 async fn handle_stash_pop(
1808 this: Entity<Self>,
1809 envelope: TypedEnvelope<proto::StashPop>,
1810 mut cx: AsyncApp,
1811 ) -> Result<proto::Ack> {
1812 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1813 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1814 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1815
1816 repository_handle
1817 .update(&mut cx, |repository_handle, cx| {
1818 repository_handle.stash_pop(stash_index, cx)
1819 })?
1820 .await?;
1821
1822 Ok(proto::Ack {})
1823 }
1824
1825 async fn handle_stash_apply(
1826 this: Entity<Self>,
1827 envelope: TypedEnvelope<proto::StashApply>,
1828 mut cx: AsyncApp,
1829 ) -> Result<proto::Ack> {
1830 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1831 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1832 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1833
1834 repository_handle
1835 .update(&mut cx, |repository_handle, cx| {
1836 repository_handle.stash_apply(stash_index, cx)
1837 })?
1838 .await?;
1839
1840 Ok(proto::Ack {})
1841 }
1842
1843 async fn handle_stash_drop(
1844 this: Entity<Self>,
1845 envelope: TypedEnvelope<proto::StashDrop>,
1846 mut cx: AsyncApp,
1847 ) -> Result<proto::Ack> {
1848 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1849 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1850 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1851
1852 repository_handle
1853 .update(&mut cx, |repository_handle, cx| {
1854 repository_handle.stash_drop(stash_index, cx)
1855 })?
1856 .await??;
1857
1858 Ok(proto::Ack {})
1859 }
1860
1861 async fn handle_set_index_text(
1862 this: Entity<Self>,
1863 envelope: TypedEnvelope<proto::SetIndexText>,
1864 mut cx: AsyncApp,
1865 ) -> Result<proto::Ack> {
1866 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1867 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1868 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1869
1870 repository_handle
1871 .update(&mut cx, |repository_handle, cx| {
1872 repository_handle.spawn_set_index_text_job(
1873 repo_path,
1874 envelope.payload.text,
1875 None,
1876 cx,
1877 )
1878 })?
1879 .await??;
1880 Ok(proto::Ack {})
1881 }
1882
1883 async fn handle_commit(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::Commit>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::Ack> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890
1891 let message = SharedString::from(envelope.payload.message);
1892 let name = envelope.payload.name.map(SharedString::from);
1893 let email = envelope.payload.email.map(SharedString::from);
1894 let options = envelope.payload.options.unwrap_or_default();
1895
1896 repository_handle
1897 .update(&mut cx, |repository_handle, cx| {
1898 repository_handle.commit(
1899 message,
1900 name.zip(email),
1901 CommitOptions {
1902 amend: options.amend,
1903 signoff: options.signoff,
1904 },
1905 cx,
1906 )
1907 })?
1908 .await??;
1909 Ok(proto::Ack {})
1910 }
1911
1912 async fn handle_get_remotes(
1913 this: Entity<Self>,
1914 envelope: TypedEnvelope<proto::GetRemotes>,
1915 mut cx: AsyncApp,
1916 ) -> Result<proto::GetRemotesResponse> {
1917 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1918 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1919
1920 let branch_name = envelope.payload.branch_name;
1921
1922 let remotes = repository_handle
1923 .update(&mut cx, |repository_handle, _| {
1924 repository_handle.get_remotes(branch_name)
1925 })?
1926 .await??;
1927
1928 Ok(proto::GetRemotesResponse {
1929 remotes: remotes
1930 .into_iter()
1931 .map(|remotes| proto::get_remotes_response::Remote {
1932 name: remotes.name.to_string(),
1933 })
1934 .collect::<Vec<_>>(),
1935 })
1936 }
1937
1938 async fn handle_get_branches(
1939 this: Entity<Self>,
1940 envelope: TypedEnvelope<proto::GitGetBranches>,
1941 mut cx: AsyncApp,
1942 ) -> Result<proto::GitBranchesResponse> {
1943 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1944 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1945
1946 let branches = repository_handle
1947 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
1948 .await??;
1949
1950 Ok(proto::GitBranchesResponse {
1951 branches: branches
1952 .into_iter()
1953 .map(|branch| branch_to_proto(&branch))
1954 .collect::<Vec<_>>(),
1955 })
    }

    async fn handle_get_default_branch(
1958 this: Entity<Self>,
1959 envelope: TypedEnvelope<proto::GetDefaultBranch>,
1960 mut cx: AsyncApp,
1961 ) -> Result<proto::GetDefaultBranchResponse> {
1962 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1963 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1964
1965 let branch = repository_handle
1966 .update(&mut cx, |repository_handle, _| {
1967 repository_handle.default_branch()
1968 })?
1969 .await??
1970 .map(Into::into);
1971
1972 Ok(proto::GetDefaultBranchResponse { branch })
    }

    async fn handle_create_branch(
1975 this: Entity<Self>,
1976 envelope: TypedEnvelope<proto::GitCreateBranch>,
1977 mut cx: AsyncApp,
1978 ) -> Result<proto::Ack> {
1979 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1980 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1981 let branch_name = envelope.payload.branch_name;
1982
1983 repository_handle
1984 .update(&mut cx, |repository_handle, _| {
1985 repository_handle.create_branch(branch_name)
1986 })?
1987 .await??;
1988
1989 Ok(proto::Ack {})
1990 }
1991
1992 async fn handle_change_branch(
1993 this: Entity<Self>,
1994 envelope: TypedEnvelope<proto::GitChangeBranch>,
1995 mut cx: AsyncApp,
1996 ) -> Result<proto::Ack> {
1997 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1998 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1999 let branch_name = envelope.payload.branch_name;
2000
2001 repository_handle
2002 .update(&mut cx, |repository_handle, _| {
2003 repository_handle.change_branch(branch_name)
2004 })?
2005 .await??;
2006
2007 Ok(proto::Ack {})
2008 }
2009
2010 async fn handle_rename_branch(
2011 this: Entity<Self>,
2012 envelope: TypedEnvelope<proto::GitRenameBranch>,
2013 mut cx: AsyncApp,
2014 ) -> Result<proto::Ack> {
2015 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2016 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2017 let branch = envelope.payload.branch;
2018 let new_name = envelope.payload.new_name;
2019
2020 repository_handle
2021 .update(&mut cx, |repository_handle, _| {
2022 repository_handle.rename_branch(branch, new_name)
2023 })?
2024 .await??;
2025
2026 Ok(proto::Ack {})
2027 }
2028
2029 async fn handle_show(
2030 this: Entity<Self>,
2031 envelope: TypedEnvelope<proto::GitShow>,
2032 mut cx: AsyncApp,
2033 ) -> Result<proto::GitCommitDetails> {
2034 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2035 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2036
2037 let commit = repository_handle
2038 .update(&mut cx, |repository_handle, _| {
2039 repository_handle.show(envelope.payload.commit)
2040 })?
2041 .await??;
2042 Ok(proto::GitCommitDetails {
2043 sha: commit.sha.into(),
2044 message: commit.message.into(),
2045 commit_timestamp: commit.commit_timestamp,
2046 author_email: commit.author_email.into(),
2047 author_name: commit.author_name.into(),
2048 })
2049 }
2050
2051 async fn handle_load_commit_diff(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::LoadCommitDiffResponse> {
2056 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2057 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2058
2059 let commit_diff = repository_handle
2060 .update(&mut cx, |repository_handle, _| {
2061 repository_handle.load_commit_diff(envelope.payload.commit)
2062 })?
2063 .await??;
2064 Ok(proto::LoadCommitDiffResponse {
2065 files: commit_diff
2066 .files
2067 .into_iter()
2068 .map(|file| proto::CommitFile {
2069 path: file.path.to_proto(),
2070 old_text: file.old_text,
2071 new_text: file.new_text,
2072 })
2073 .collect(),
2074 })
2075 }
2076
2077 async fn handle_reset(
2078 this: Entity<Self>,
2079 envelope: TypedEnvelope<proto::GitReset>,
2080 mut cx: AsyncApp,
2081 ) -> Result<proto::Ack> {
2082 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2083 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2084
2085 let mode = match envelope.payload.mode() {
2086 git_reset::ResetMode::Soft => ResetMode::Soft,
2087 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2088 };
2089
2090 repository_handle
2091 .update(&mut cx, |repository_handle, cx| {
2092 repository_handle.reset(envelope.payload.commit, mode, cx)
2093 })?
2094 .await??;
2095 Ok(proto::Ack {})
2096 }
2097
2098 async fn handle_checkout_files(
2099 this: Entity<Self>,
2100 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2101 mut cx: AsyncApp,
2102 ) -> Result<proto::Ack> {
2103 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2104 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2105 let paths = envelope
2106 .payload
2107 .paths
2108 .iter()
2109 .map(|s| RepoPath::from_proto(s))
2110 .collect::<Result<Vec<_>>>()?;
2111
2112 repository_handle
2113 .update(&mut cx, |repository_handle, cx| {
2114 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2115 })?
2116 .await??;
2117 Ok(proto::Ack {})
2118 }
2119
2120 async fn handle_open_commit_message_buffer(
2121 this: Entity<Self>,
2122 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2123 mut cx: AsyncApp,
2124 ) -> Result<proto::OpenBufferResponse> {
2125 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2126 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2127 let buffer = repository
2128 .update(&mut cx, |repository, cx| {
2129 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2130 })?
2131 .await?;
2132
2133 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2134 this.update(&mut cx, |this, cx| {
2135 this.buffer_store.update(cx, |buffer_store, cx| {
2136 buffer_store
2137 .create_buffer_for_peer(
2138 &buffer,
2139 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2140 cx,
2141 )
2142 .detach_and_log_err(cx);
2143 })
2144 })?;
2145
2146 Ok(proto::OpenBufferResponse {
2147 buffer_id: buffer_id.to_proto(),
2148 })
2149 }
2150
2151 async fn handle_askpass(
2152 this: Entity<Self>,
2153 envelope: TypedEnvelope<proto::AskPassRequest>,
2154 mut cx: AsyncApp,
2155 ) -> Result<proto::AskPassResponse> {
2156 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2157 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2158
2159 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2160 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2161 debug_panic!("no askpass found");
2162 anyhow::bail!("no askpass found");
2163 };
2164
2165 let response = askpass
2166 .ask_password(envelope.payload.prompt)
2167 .await
2168 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2169
2170 delegates
2171 .lock()
2172 .insert(envelope.payload.askpass_id, askpass);
2173
        // Note: the askpass password is decrypted here and sent back in the RPC response unencrypted.
2175 Ok(proto::AskPassResponse {
2176 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2177 })
2178 }
2179
2180 async fn handle_check_for_pushed_commits(
2181 this: Entity<Self>,
2182 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2183 mut cx: AsyncApp,
2184 ) -> Result<proto::CheckForPushedCommitsResponse> {
2185 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2186 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2187
2188 let branches = repository_handle
2189 .update(&mut cx, |repository_handle, _| {
2190 repository_handle.check_for_pushed_commits()
2191 })?
2192 .await??;
2193 Ok(proto::CheckForPushedCommitsResponse {
2194 pushed_to: branches
2195 .into_iter()
                .map(|branch| branch.to_string())
2197 .collect(),
2198 })
2199 }
2200
2201 async fn handle_git_diff(
2202 this: Entity<Self>,
2203 envelope: TypedEnvelope<proto::GitDiff>,
2204 mut cx: AsyncApp,
2205 ) -> Result<proto::GitDiffResponse> {
2206 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2207 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2208 let diff_type = match envelope.payload.diff_type() {
2209 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2210 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2211 };
2212
2213 let mut diff = repository_handle
2214 .update(&mut cx, |repository_handle, cx| {
2215 repository_handle.diff(diff_type, cx)
2216 })?
2217 .await??;
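        // Cap the diff payload at one million characters so an enormous diff can't
        // blow up the RPC response.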
2218 const ONE_MB: usize = 1_000_000;
2219 if diff.len() > ONE_MB {
2220 diff = diff.chars().take(ONE_MB).collect()
2221 }
2222
2223 Ok(proto::GitDiffResponse { diff })
2224 }
2225
2226 async fn handle_tree_diff(
2227 this: Entity<Self>,
2228 request: TypedEnvelope<proto::GetTreeDiff>,
2229 mut cx: AsyncApp,
2230 ) -> Result<proto::GetTreeDiffResponse> {
2231 let repository_id = RepositoryId(request.payload.repository_id);
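        // Pick between diffing `base` and `head` against their merge base, or diffing
        // directly from `base` to `head`, depending on what the request asked for.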
2232 let diff_type = if request.payload.is_merge {
2233 DiffTreeType::MergeBase {
2234 base: request.payload.base.into(),
2235 head: request.payload.head.into(),
2236 }
2237 } else {
2238 DiffTreeType::Since {
2239 base: request.payload.base.into(),
2240 head: request.payload.head.into(),
2241 }
2242 };
2243
2244 let diff = this
2245 .update(&mut cx, |this, cx| {
2246 let repository = this.repositories().get(&repository_id)?;
2247 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2248 })?
2249 .context("missing repository")?
2250 .await??;
2251
2252 Ok(proto::GetTreeDiffResponse {
2253 entries: diff
2254 .entries
2255 .into_iter()
2256 .map(|(path, status)| proto::TreeDiffStatus {
2257 path: path.0.to_proto(),
2258 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2260 TreeDiffStatus::Modified { .. } => {
2261 proto::tree_diff_status::Status::Modified.into()
2262 }
2263 TreeDiffStatus::Deleted { .. } => {
2264 proto::tree_diff_status::Status::Deleted.into()
2265 }
2266 },
2267 oid: match status {
2268 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2269 Some(old.to_string())
2270 }
2271 TreeDiffStatus::Added => None,
2272 },
2273 })
2274 .collect(),
2275 })
2276 }
2277
2278 async fn handle_get_blob_content(
2279 this: Entity<Self>,
2280 request: TypedEnvelope<proto::GetBlobContent>,
2281 mut cx: AsyncApp,
2282 ) -> Result<proto::GetBlobContentResponse> {
2283 let oid = git::Oid::from_str(&request.payload.oid)?;
2284 let repository_id = RepositoryId(request.payload.repository_id);
2285 let content = this
2286 .update(&mut cx, |this, cx| {
2287 let repository = this.repositories().get(&repository_id)?;
2288 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2289 })?
2290 .context("missing repository")?
2291 .await?;
2292 Ok(proto::GetBlobContentResponse { content })
2293 }
2294
2295 async fn handle_open_unstaged_diff(
2296 this: Entity<Self>,
2297 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2298 mut cx: AsyncApp,
2299 ) -> Result<proto::OpenUnstagedDiffResponse> {
2300 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2301 let diff = this
2302 .update(&mut cx, |this, cx| {
2303 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2304 Some(this.open_unstaged_diff(buffer, cx))
2305 })?
2306 .context("missing buffer")?
2307 .await?;
2308 this.update(&mut cx, |this, _| {
2309 let shared_diffs = this
2310 .shared_diffs
2311 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2312 .or_default();
2313 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2314 })?;
2315 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2316 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2317 }
2318
2319 async fn handle_open_uncommitted_diff(
2320 this: Entity<Self>,
2321 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2322 mut cx: AsyncApp,
2323 ) -> Result<proto::OpenUncommittedDiffResponse> {
2324 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2325 let diff = this
2326 .update(&mut cx, |this, cx| {
2327 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2328 Some(this.open_uncommitted_diff(buffer, cx))
2329 })?
2330 .context("missing buffer")?
2331 .await?;
2332 this.update(&mut cx, |this, _| {
2333 let shared_diffs = this
2334 .shared_diffs
2335 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2336 .or_default();
2337 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2338 })?;
2339 diff.read_with(&cx, |diff, cx| {
2340 use proto::open_uncommitted_diff_response::Mode;
2341
2342 let unstaged_diff = diff.secondary_diff();
2343 let index_snapshot = unstaged_diff.and_then(|diff| {
2344 let diff = diff.read(cx);
2345 diff.base_text_exists().then(|| diff.base_text())
2346 });
2347
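            // Decide how much base text to send back: when the index snapshot is the
            // same text buffer as HEAD's, the client can reuse the committed text for
            // the index instead of receiving it twice.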
2348 let mode;
2349 let staged_text;
2350 let committed_text;
2351 if diff.base_text_exists() {
2352 let committed_snapshot = diff.base_text();
2353 committed_text = Some(committed_snapshot.text());
2354 if let Some(index_text) = index_snapshot {
2355 if index_text.remote_id() == committed_snapshot.remote_id() {
2356 mode = Mode::IndexMatchesHead;
2357 staged_text = None;
2358 } else {
2359 mode = Mode::IndexAndHead;
2360 staged_text = Some(index_text.text());
2361 }
2362 } else {
2363 mode = Mode::IndexAndHead;
2364 staged_text = None;
2365 }
2366 } else {
2367 mode = Mode::IndexAndHead;
2368 committed_text = None;
2369 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2370 }
2371
2372 proto::OpenUncommittedDiffResponse {
2373 committed_text,
2374 staged_text,
2375 mode: mode.into(),
2376 }
2377 })
2378 }
2379
2380 async fn handle_update_diff_bases(
2381 this: Entity<Self>,
2382 request: TypedEnvelope<proto::UpdateDiffBases>,
2383 mut cx: AsyncApp,
2384 ) -> Result<()> {
2385 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2386 this.update(&mut cx, |this, cx| {
2387 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2388 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2389 {
2390 let buffer = buffer.read(cx).text_snapshot();
2391 diff_state.update(cx, |diff_state, cx| {
2392 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2393 })
2394 }
2395 })
2396 }
2397
2398 async fn handle_blame_buffer(
2399 this: Entity<Self>,
2400 envelope: TypedEnvelope<proto::BlameBuffer>,
2401 mut cx: AsyncApp,
2402 ) -> Result<proto::BlameBufferResponse> {
2403 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2404 let version = deserialize_version(&envelope.payload.version);
2405 let buffer = this.read_with(&cx, |this, cx| {
2406 this.buffer_store.read(cx).get_existing(buffer_id)
2407 })??;
2408 buffer
2409 .update(&mut cx, |buffer, _| {
2410 buffer.wait_for_version(version.clone())
2411 })?
2412 .await?;
2413 let blame = this
2414 .update(&mut cx, |this, cx| {
2415 this.blame_buffer(&buffer, Some(version), cx)
2416 })?
2417 .await?;
2418 Ok(serialize_blame_buffer_response(blame))
2419 }
2420
2421 async fn handle_get_permalink_to_line(
2422 this: Entity<Self>,
2423 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2424 mut cx: AsyncApp,
2425 ) -> Result<proto::GetPermalinkToLineResponse> {
2426 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2427 // let version = deserialize_version(&envelope.payload.version);
2428 let selection = {
2429 let proto_selection = envelope
2430 .payload
2431 .selection
                .context("no selection provided to get permalink for")?;
2433 proto_selection.start as u32..proto_selection.end as u32
2434 };
2435 let buffer = this.read_with(&cx, |this, cx| {
2436 this.buffer_store.read(cx).get_existing(buffer_id)
2437 })??;
2438 let permalink = this
2439 .update(&mut cx, |this, cx| {
2440 this.get_permalink_to_line(&buffer, selection, cx)
2441 })?
2442 .await?;
2443 Ok(proto::GetPermalinkToLineResponse {
2444 permalink: permalink.to_string(),
2445 })
2446 }
2447
2448 fn repository_for_request(
2449 this: &Entity<Self>,
2450 id: RepositoryId,
2451 cx: &mut AsyncApp,
2452 ) -> Result<Entity<Repository>> {
2453 this.read_with(cx, |this, _| {
2454 this.repositories
2455 .get(&id)
2456 .context("missing repository handle")
2457 .cloned()
2458 })?
2459 }
2460
2461 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2462 self.repositories
2463 .iter()
2464 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2465 .collect()
2466 }
2467
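    /// Groups the updated worktree entries by the repository whose work directory
    /// contains them, assigning each path to its innermost enclosing repository.
    /// The grouping is computed on the background executor.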
2468 fn process_updated_entries(
2469 &self,
2470 worktree: &Entity<Worktree>,
2471 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2472 cx: &mut App,
2473 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2474 let path_style = worktree.read(cx).path_style();
2475 let mut repo_paths = self
2476 .repositories
2477 .values()
2478 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2479 .collect::<Vec<_>>();
2480 let mut entries: Vec<_> = updated_entries
2481 .iter()
2482 .map(|(path, _, _)| path.clone())
2483 .collect();
2484 entries.sort();
2485 let worktree = worktree.read(cx);
2486
2487 let entries = entries
2488 .into_iter()
2489 .map(|path| worktree.absolutize(&path))
2490 .collect::<Arc<[_]>>();
2491
2492 let executor = cx.background_executor().clone();
2493 cx.background_executor().spawn(async move {
2494 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2495 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2496 let mut tasks = FuturesOrdered::new();
            for (work_dir_abs_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                let task = executor.spawn(async move {
                    // Find the first updated path that could fall under this
                    // repository's work directory.
                    let mut ix = entries.partition_point(|path| path < &*work_dir_abs_path);
                    if ix == entries.len() {
                        return None;
                    }

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form a contiguous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &work_dir_abs_path,
                            path,
                            path_style,
                        )
                    {
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
2516 if paths.is_empty() {
2517 None
2518 } else {
2519 Some((repo, paths))
2520 }
2521 });
2522 tasks.push_back(task);
2523 }
2524
2525 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2526 let mut path_was_used = vec![false; entries.len()];
2527 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned from the reverse-sorted repository list, so deeper
            // (more specific) repositories come first; the first repository to claim a
            // path wins, which assigns each path to its innermost repository.
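            // For example, if `/work` and `/work/vendor` are both repositories and the
            // updated entries are `/work/a.rs` and `/work/vendor/b.rs`, the task for
            // `/work/vendor` claims `b.rs` first, leaving only `a.rs` for `/work`.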
2530 for t in tasks {
2531 let Some((repo, paths)) = t else {
2532 continue;
2533 };
2534 let entry = paths_by_git_repo.entry(repo).or_default();
2535 for (repo_path, ix) in paths {
2536 if path_was_used[ix] {
2537 continue;
2538 }
2539 path_was_used[ix] = true;
2540 entry.push(repo_path);
2541 }
2542 }
2543
2544 paths_by_git_repo
2545 })
2546 }
2547}
2548
2549impl BufferGitState {
2550 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2551 Self {
2552 unstaged_diff: Default::default(),
2553 uncommitted_diff: Default::default(),
2554 recalculate_diff_task: Default::default(),
2555 language: Default::default(),
2556 language_registry: Default::default(),
2557 recalculating_tx: postage::watch::channel_with(false).0,
2558 hunk_staging_operation_count: 0,
2559 hunk_staging_operation_count_as_of_write: 0,
2560 head_text: Default::default(),
2561 index_text: Default::default(),
2562 head_changed: Default::default(),
2563 index_changed: Default::default(),
2564 language_changed: Default::default(),
2565 conflict_updated_futures: Default::default(),
2566 conflict_set: Default::default(),
2567 reparse_conflict_markers_task: Default::default(),
2568 }
2569 }
2570
2571 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2572 self.language = buffer.read(cx).language().cloned();
2573 self.language_changed = true;
        self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2575 }
2576
2577 fn reparse_conflict_markers(
2578 &mut self,
2579 buffer: text::BufferSnapshot,
2580 cx: &mut Context<Self>,
2581 ) -> oneshot::Receiver<()> {
2582 let (tx, rx) = oneshot::channel();
2583
2584 let Some(conflict_set) = self
2585 .conflict_set
2586 .as_ref()
2587 .and_then(|conflict_set| conflict_set.upgrade())
2588 else {
2589 return rx;
2590 };
2591
2592 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2593 if conflict_set.has_conflict {
2594 Some(conflict_set.snapshot())
2595 } else {
2596 None
2597 }
2598 });
2599
2600 if let Some(old_snapshot) = old_snapshot {
2601 self.conflict_updated_futures.push(tx);
2602 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2603 let (snapshot, changed_range) = cx
2604 .background_spawn(async move {
2605 let new_snapshot = ConflictSet::parse(&buffer);
2606 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2607 (new_snapshot, changed_range)
2608 })
2609 .await;
2610 this.update(cx, |this, cx| {
2611 if let Some(conflict_set) = &this.conflict_set {
2612 conflict_set
2613 .update(cx, |conflict_set, cx| {
2614 conflict_set.set_snapshot(snapshot, changed_range, cx);
2615 })
2616 .ok();
2617 }
2618 let futures = std::mem::take(&mut this.conflict_updated_futures);
2619 for tx in futures {
2620 tx.send(()).ok();
2621 }
2622 })
2623 }))
2624 }
2625
2626 rx
2627 }
2628
2629 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2630 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2631 }
2632
2633 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2634 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2635 }
2636
2637 fn handle_base_texts_updated(
2638 &mut self,
2639 buffer: text::BufferSnapshot,
2640 message: proto::UpdateDiffBases,
2641 cx: &mut Context<Self>,
2642 ) {
2643 use proto::update_diff_bases::Mode;
2644
2645 let Some(mode) = Mode::from_i32(message.mode) else {
2646 return;
2647 };
2648
2649 let diff_bases_change = match mode {
2650 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2651 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2652 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2653 Mode::IndexAndHead => DiffBasesChange::SetEach {
2654 index: message.staged_text,
2655 head: message.committed_text,
2656 },
2657 };
2658
2659 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2660 }
2661
2662 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2663 if *self.recalculating_tx.borrow() {
2664 let mut rx = self.recalculating_tx.subscribe();
2665 Some(async move {
2666 loop {
2667 let is_recalculating = rx.recv().await;
2668 if is_recalculating != Some(true) {
2669 break;
2670 }
2671 }
2672 })
2673 } else {
2674 None
2675 }
2676 }
2677
2678 fn diff_bases_changed(
2679 &mut self,
2680 buffer: text::BufferSnapshot,
2681 diff_bases_change: Option<DiffBasesChange>,
2682 cx: &mut Context<Self>,
2683 ) {
2684 match diff_bases_change {
2685 Some(DiffBasesChange::SetIndex(index)) => {
2686 self.index_text = index.map(|mut index| {
2687 text::LineEnding::normalize(&mut index);
2688 Arc::new(index)
2689 });
2690 self.index_changed = true;
2691 }
2692 Some(DiffBasesChange::SetHead(head)) => {
2693 self.head_text = head.map(|mut head| {
2694 text::LineEnding::normalize(&mut head);
2695 Arc::new(head)
2696 });
2697 self.head_changed = true;
2698 }
2699 Some(DiffBasesChange::SetBoth(text)) => {
2700 let text = text.map(|mut text| {
2701 text::LineEnding::normalize(&mut text);
2702 Arc::new(text)
2703 });
2704 self.head_text = text.clone();
2705 self.index_text = text;
2706 self.head_changed = true;
2707 self.index_changed = true;
2708 }
2709 Some(DiffBasesChange::SetEach { index, head }) => {
2710 self.index_text = index.map(|mut index| {
2711 text::LineEnding::normalize(&mut index);
2712 Arc::new(index)
2713 });
2714 self.index_changed = true;
2715 self.head_text = head.map(|mut head| {
2716 text::LineEnding::normalize(&mut head);
2717 Arc::new(head)
2718 });
2719 self.head_changed = true;
2720 }
2721 None => {}
2722 }
2723
2724 self.recalculate_diffs(buffer, cx)
2725 }
2726
2727 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2728 *self.recalculating_tx.borrow_mut() = true;
2729
2730 let language = self.language.clone();
2731 let language_registry = self.language_registry.clone();
2732 let unstaged_diff = self.unstaged_diff();
2733 let uncommitted_diff = self.uncommitted_diff();
2734 let head = self.head_text.clone();
2735 let index = self.index_text.clone();
2736 let index_changed = self.index_changed;
2737 let head_changed = self.head_changed;
2738 let language_changed = self.language_changed;
2739 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
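        // If the index and HEAD share the same text (they were set together via
        // `SetBoth`), the uncommitted diff can simply reuse the freshly computed
        // unstaged diff below.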
2740 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2741 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2742 (None, None) => true,
2743 _ => false,
2744 };
2745 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2746 log::debug!(
2747 "start recalculating diffs for buffer {}",
2748 buffer.remote_id()
2749 );
2750
2751 let mut new_unstaged_diff = None;
2752 if let Some(unstaged_diff) = &unstaged_diff {
2753 new_unstaged_diff = Some(
2754 BufferDiff::update_diff(
2755 unstaged_diff.clone(),
2756 buffer.clone(),
2757 index,
2758 index_changed,
2759 language_changed,
2760 language.clone(),
2761 language_registry.clone(),
2762 cx,
2763 )
2764 .await?,
2765 );
2766 }
2767
2768 let mut new_uncommitted_diff = None;
2769 if let Some(uncommitted_diff) = &uncommitted_diff {
2770 new_uncommitted_diff = if index_matches_head {
2771 new_unstaged_diff.clone()
2772 } else {
2773 Some(
2774 BufferDiff::update_diff(
2775 uncommitted_diff.clone(),
2776 buffer.clone(),
2777 head,
2778 head_changed,
2779 language_changed,
2780 language.clone(),
2781 language_registry.clone(),
2782 cx,
2783 )
2784 .await?,
2785 )
2786 }
2787 }
2788
2789 let cancel = this.update(cx, |this, _| {
2790 // This checks whether all pending stage/unstage operations
2791 // have quiesced (i.e. both the corresponding write and the
2792 // read of that write have completed). If not, then we cancel
2793 // this recalculation attempt to avoid invalidating pending
2794 // state too quickly; another recalculation will come along
2795 // later and clear the pending state once the state of the index has settled.
2796 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2797 *this.recalculating_tx.borrow_mut() = false;
2798 true
2799 } else {
2800 false
2801 }
2802 })?;
2803 if cancel {
2804 log::debug!(
2805 concat!(
                    "aborting recalculating diffs for buffer {} ",
                    "due to subsequent hunk operations",
2808 ),
2809 buffer.remote_id()
2810 );
2811 return Ok(());
2812 }
2813
2814 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2815 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2816 {
2817 unstaged_diff.update(cx, |diff, cx| {
2818 if language_changed {
2819 diff.language_changed(cx);
2820 }
2821 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2822 })?
2823 } else {
2824 None
2825 };
2826
2827 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2828 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2829 {
2830 uncommitted_diff.update(cx, |diff, cx| {
2831 if language_changed {
2832 diff.language_changed(cx);
2833 }
2834 diff.set_snapshot_with_secondary(
2835 new_uncommitted_diff,
2836 &buffer,
2837 unstaged_changed_range,
2838 true,
2839 cx,
2840 );
2841 })?;
2842 }
2843
2844 log::debug!(
2845 "finished recalculating diffs for buffer {}",
2846 buffer.remote_id()
2847 );
2848
2849 if let Some(this) = this.upgrade() {
2850 this.update(cx, |this, _| {
2851 this.index_changed = false;
2852 this.head_changed = false;
2853 this.language_changed = false;
2854 *this.recalculating_tx.borrow_mut() = false;
2855 })?;
2856 }
2857
2858 Ok(())
2859 }));
2860 }
2861}
2862
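/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream
/// client over RPC and hands the reply back to the waiting git job, zeroizing
/// the plaintext response afterwards.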
2863fn make_remote_delegate(
2864 this: Entity<GitStore>,
2865 project_id: u64,
2866 repository_id: RepositoryId,
2867 askpass_id: u64,
2868 cx: &mut AsyncApp,
2869) -> AskPassDelegate {
2870 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2871 this.update(cx, |this, cx| {
2872 let Some((client, _)) = this.downstream_client() else {
2873 return;
2874 };
2875 let response = client.request(proto::AskPassRequest {
2876 project_id,
2877 repository_id: repository_id.to_proto(),
2878 askpass_id,
2879 prompt,
2880 });
2881 cx.spawn(async move |_, _| {
2882 let mut response = response.await?.response;
2883 tx.send(EncryptedPassword::try_from(response.as_ref())?)
2884 .ok();
2885 response.zeroize();
2886 anyhow::Ok(())
2887 })
2888 .detach_and_log_err(cx);
2889 })
2890 .log_err();
2891 })
2892}
2893
2894impl RepositoryId {
2895 pub fn to_proto(self) -> u64 {
2896 self.0
2897 }
2898
2899 pub fn from_proto(id: u64) -> Self {
2900 RepositoryId(id)
2901 }
2902}
2903
2904impl RepositorySnapshot {
2905 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
2906 Self {
2907 id,
2908 statuses_by_path: Default::default(),
2909 pending_ops: Default::default(),
2910 work_directory_abs_path,
2911 branch: None,
2912 head_commit: None,
2913 scan_id: 0,
2914 merge: Default::default(),
2915 remote_origin_url: None,
2916 remote_upstream_url: None,
2917 stash_entries: Default::default(),
2918 path_style,
2919 }
2920 }
2921
2922 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
2923 proto::UpdateRepository {
2924 branch_summary: self.branch.as_ref().map(branch_to_proto),
2925 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2926 updated_statuses: self
2927 .statuses_by_path
2928 .iter()
2929 .map(|entry| entry.to_proto())
2930 .collect(),
2931 removed_statuses: Default::default(),
2932 current_merge_conflicts: self
2933 .merge
2934 .conflicted_paths
2935 .iter()
2936 .map(|repo_path| repo_path.to_proto())
2937 .collect(),
2938 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
2939 project_id,
2940 id: self.id.to_proto(),
2941 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
2942 entry_ids: vec![self.id.to_proto()],
2943 scan_id: self.scan_id,
2944 is_last_update: true,
2945 stash_entries: self
2946 .stash_entries
2947 .entries
2948 .iter()
2949 .map(stash_to_proto)
2950 .collect(),
2951 }
2952 }
2953
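    /// Builds an incremental `UpdateRepository` message by walking the old and new
    /// status trees in lockstep (both are ordered by repo path): entries only in
    /// `self` are reported as updated, entries only in `old` as removed, and entries
    /// present in both are reported only if their status changed.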
2954 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
2955 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
2956 let mut removed_statuses: Vec<String> = Vec::new();
2957
2958 let mut new_statuses = self.statuses_by_path.iter().peekable();
2959 let mut old_statuses = old.statuses_by_path.iter().peekable();
2960
2961 let mut current_new_entry = new_statuses.next();
2962 let mut current_old_entry = old_statuses.next();
2963 loop {
2964 match (current_new_entry, current_old_entry) {
2965 (Some(new_entry), Some(old_entry)) => {
2966 match new_entry.repo_path.cmp(&old_entry.repo_path) {
2967 Ordering::Less => {
2968 updated_statuses.push(new_entry.to_proto());
2969 current_new_entry = new_statuses.next();
2970 }
2971 Ordering::Equal => {
2972 if new_entry.status != old_entry.status {
2973 updated_statuses.push(new_entry.to_proto());
2974 }
2975 current_old_entry = old_statuses.next();
2976 current_new_entry = new_statuses.next();
2977 }
2978 Ordering::Greater => {
2979 removed_statuses.push(old_entry.repo_path.to_proto());
2980 current_old_entry = old_statuses.next();
2981 }
2982 }
2983 }
2984 (None, Some(old_entry)) => {
2985 removed_statuses.push(old_entry.repo_path.to_proto());
2986 current_old_entry = old_statuses.next();
2987 }
2988 (Some(new_entry), None) => {
2989 updated_statuses.push(new_entry.to_proto());
2990 current_new_entry = new_statuses.next();
2991 }
2992 (None, None) => break,
2993 }
2994 }
2995
2996 proto::UpdateRepository {
2997 branch_summary: self.branch.as_ref().map(branch_to_proto),
2998 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
2999 updated_statuses,
3000 removed_statuses,
3001 current_merge_conflicts: self
3002 .merge
3003 .conflicted_paths
3004 .iter()
3005 .map(|path| path.to_proto())
3006 .collect(),
3007 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3008 project_id,
3009 id: self.id.to_proto(),
3010 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3011 entry_ids: vec![],
3012 scan_id: self.scan_id,
3013 is_last_update: true,
3014 stash_entries: self
3015 .stash_entries
3016 .entries
3017 .iter()
3018 .map(stash_to_proto)
3019 .collect(),
3020 }
3021 }
3022
3023 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3024 self.statuses_by_path.iter().cloned()
3025 }
3026
3027 pub fn status_summary(&self) -> GitSummary {
3028 self.statuses_by_path.summary().item_summary
3029 }
3030
3031 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3032 self.statuses_by_path
3033 .get(&PathKey(path.0.clone()), ())
3034 .cloned()
3035 }
3036
3037 pub fn pending_op_by_id(&self, id: PendingOpId) -> Option<PendingOp> {
3038 self.pending_ops.get(&id, ()).cloned()
3039 }
3040
3041 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Vec<PendingOp> {
3042 self.pending_ops
3043 .filter::<_, PathKey>((), |sum| sum.max_path == path.0)
3044 .into_iter()
3045 .map(Clone::clone)
3046 .collect()
3047 }
3048
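    /// Converts an absolute path into a path relative to this repository's work
    /// directory, returning `None` if the path lies outside of it. For example
    /// (illustrative paths), `/repo` plus `/repo/src/lib.rs` yields `src/lib.rs`.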
3049 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3050 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3051 }
3052
3053 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3054 self.path_style
3055 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3056 .unwrap()
3057 .into()
3058 }
3059
3060 #[inline]
3061 fn abs_path_to_repo_path_inner(
3062 work_directory_abs_path: &Path,
3063 abs_path: &Path,
3064 path_style: PathStyle,
3065 ) -> Option<RepoPath> {
3066 abs_path
            .strip_prefix(work_directory_abs_path)
3068 .ok()
3069 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3070 }
3071
3072 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3073 self.merge.conflicted_paths.contains(repo_path)
3074 }
3075
3076 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3077 let had_conflict_on_last_merge_head_change =
3078 self.merge.conflicted_paths.contains(repo_path);
3079 let has_conflict_currently = self
3080 .status_for_path(repo_path)
3081 .is_some_and(|entry| entry.status.is_conflicted());
3082 had_conflict_on_last_merge_head_change || has_conflict_currently
3083 }
3084
3085 /// This is the name that will be displayed in the repository selector for this repository.
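    /// For example, a repository whose work directory is `/home/me/projects/zed`
    /// (an illustrative path) is displayed as `zed`.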
3086 pub fn display_name(&self) -> SharedString {
3087 self.work_directory_abs_path
3088 .file_name()
3089 .unwrap_or_default()
3090 .to_string_lossy()
3091 .to_string()
3092 .into()
3093 }
3094}
3095
3096pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3097 proto::StashEntry {
3098 oid: entry.oid.as_bytes().to_vec(),
3099 message: entry.message.clone(),
3100 branch: entry.branch.clone(),
3101 index: entry.index as u64,
3102 timestamp: entry.timestamp,
3103 }
3104}
3105
3106pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3107 Ok(StashEntry {
3108 oid: Oid::from_bytes(&entry.oid)?,
3109 message: entry.message.clone(),
3110 index: entry.index as usize,
3111 branch: entry.branch.clone(),
3112 timestamp: entry.timestamp,
3113 })
3114}
3115
3116impl MergeDetails {
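    /// Reloads the merge-related state (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD,
    /// REVERT_HEAD, APPLY_HEAD) and, when the set of merge heads has changed,
    /// recomputes the conflicted paths from the current status entries. Returns the
    /// new details along with whether the merge heads changed.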
3117 async fn load(
3118 backend: &Arc<dyn GitRepository>,
3119 status: &SumTree<StatusEntry>,
3120 prev_snapshot: &RepositorySnapshot,
3121 ) -> Result<(MergeDetails, bool)> {
3122 log::debug!("load merge details");
3123 let message = backend.merge_message().await;
3124 let heads = backend
3125 .revparse_batch(vec![
3126 "MERGE_HEAD".into(),
3127 "CHERRY_PICK_HEAD".into(),
3128 "REBASE_HEAD".into(),
3129 "REVERT_HEAD".into(),
3130 "APPLY_HEAD".into(),
3131 ])
3132 .await
3133 .log_err()
3134 .unwrap_or_default()
3135 .into_iter()
3136 .map(|opt| opt.map(SharedString::from))
3137 .collect::<Vec<_>>();
3138 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3139 let conflicted_paths = if merge_heads_changed {
3140 let current_conflicted_paths = TreeSet::from_ordered_entries(
3141 status
3142 .iter()
3143 .filter(|entry| entry.status.is_conflicted())
3144 .map(|entry| entry.repo_path.clone()),
3145 );
3146
            // A scan can run while a lengthy merge is in progress that will eventually
            // produce conflicts, but before `git status` reports them. Since, for now, we
            // only track merge-head state in order to detect conflicts, don't update that
            // state until some conflicts actually show up.
3152 if heads.iter().any(Option::is_some)
3153 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3154 && current_conflicted_paths.is_empty()
3155 {
3156 log::debug!("not updating merge heads because no conflicts found");
3157 return Ok((
3158 MergeDetails {
3159 message: message.map(SharedString::from),
3160 ..prev_snapshot.merge.clone()
3161 },
3162 false,
3163 ));
3164 }
3165
3166 current_conflicted_paths
3167 } else {
3168 prev_snapshot.merge.conflicted_paths.clone()
3169 };
3170 let details = MergeDetails {
3171 conflicted_paths,
3172 message: message.map(SharedString::from),
3173 heads,
3174 };
3175 Ok((details, merge_heads_changed))
3176 }
3177}
3178
3179impl Repository {
3180 pub fn snapshot(&self) -> RepositorySnapshot {
3181 self.snapshot.clone()
3182 }
3183
3184 fn local(
3185 id: RepositoryId,
3186 work_directory_abs_path: Arc<Path>,
3187 dot_git_abs_path: Arc<Path>,
3188 repository_dir_abs_path: Arc<Path>,
3189 common_dir_abs_path: Arc<Path>,
3190 project_environment: WeakEntity<ProjectEnvironment>,
3191 fs: Arc<dyn Fs>,
3192 git_store: WeakEntity<GitStore>,
3193 cx: &mut Context<Self>,
3194 ) -> Self {
3195 let snapshot =
3196 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3197 Repository {
3198 this: cx.weak_entity(),
3199 git_store,
3200 snapshot,
3201 commit_message_buffer: None,
3202 askpass_delegates: Default::default(),
3203 paths_needing_status_update: Default::default(),
3204 latest_askpass_id: 0,
3205 job_sender: Repository::spawn_local_git_worker(
3206 work_directory_abs_path,
3207 dot_git_abs_path,
3208 repository_dir_abs_path,
3209 common_dir_abs_path,
3210 project_environment,
3211 fs,
3212 cx,
3213 ),
3214 job_id: 0,
3215 active_jobs: Default::default(),
3216 }
3217 }
3218
3219 fn remote(
3220 id: RepositoryId,
3221 work_directory_abs_path: Arc<Path>,
3222 path_style: PathStyle,
3223 project_id: ProjectId,
3224 client: AnyProtoClient,
3225 git_store: WeakEntity<GitStore>,
3226 cx: &mut Context<Self>,
3227 ) -> Self {
3228 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3229 Self {
3230 this: cx.weak_entity(),
3231 snapshot,
3232 commit_message_buffer: None,
3233 git_store,
3234 paths_needing_status_update: Default::default(),
3235 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3236 askpass_delegates: Default::default(),
3237 latest_askpass_id: 0,
3238 active_jobs: Default::default(),
3239 job_id: 0,
3240 }
3241 }
3242
3243 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3244 self.git_store.upgrade()
3245 }
3246
3247 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3248 let this = cx.weak_entity();
3249 let git_store = self.git_store.clone();
3250 let _ = self.send_keyed_job(
3251 Some(GitJobKey::ReloadBufferDiffBases),
3252 None,
3253 |state, mut cx| async move {
3254 let RepositoryState::Local { backend, .. } = state else {
3255 log::error!("tried to recompute diffs for a non-local repository");
3256 return Ok(());
3257 };
3258
3259 let Some(this) = this.upgrade() else {
3260 return Ok(());
3261 };
3262
3263 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3264 git_store.update(cx, |git_store, cx| {
3265 git_store
3266 .diffs
3267 .iter()
3268 .filter_map(|(buffer_id, diff_state)| {
3269 let buffer_store = git_store.buffer_store.read(cx);
3270 let buffer = buffer_store.get(*buffer_id)?;
3271 let file = File::from_dyn(buffer.read(cx).file())?;
3272 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3273 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3274 log::debug!(
3275 "start reload diff bases for repo path {}",
3276 repo_path.as_unix_str()
3277 );
3278 diff_state.update(cx, |diff_state, _| {
3279 let has_unstaged_diff = diff_state
3280 .unstaged_diff
3281 .as_ref()
3282 .is_some_and(|diff| diff.is_upgradable());
3283 let has_uncommitted_diff = diff_state
3284 .uncommitted_diff
3285 .as_ref()
3286 .is_some_and(|set| set.is_upgradable());
3287
3288 Some((
3289 buffer,
3290 repo_path,
3291 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3292 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3293 ))
3294 })
3295 })
3296 .collect::<Vec<_>>()
3297 })
3298 })??;
3299
3300 let buffer_diff_base_changes = cx
3301 .background_spawn(async move {
3302 let mut changes = Vec::new();
3303 for (buffer, repo_path, current_index_text, current_head_text) in
3304 &repo_diff_state_updates
3305 {
3306 let index_text = if current_index_text.is_some() {
3307 backend.load_index_text(repo_path.clone()).await
3308 } else {
3309 None
3310 };
3311 let head_text = if current_head_text.is_some() {
3312 backend.load_committed_text(repo_path.clone()).await
3313 } else {
3314 None
3315 };
3316
3317 let change =
3318 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3319 (Some(current_index), Some(current_head)) => {
3320 let index_changed =
3321 index_text.as_ref() != current_index.as_deref();
3322 let head_changed =
3323 head_text.as_ref() != current_head.as_deref();
3324 if index_changed && head_changed {
3325 if index_text == head_text {
3326 Some(DiffBasesChange::SetBoth(head_text))
3327 } else {
3328 Some(DiffBasesChange::SetEach {
3329 index: index_text,
3330 head: head_text,
3331 })
3332 }
3333 } else if index_changed {
3334 Some(DiffBasesChange::SetIndex(index_text))
3335 } else if head_changed {
3336 Some(DiffBasesChange::SetHead(head_text))
3337 } else {
3338 None
3339 }
3340 }
3341 (Some(current_index), None) => {
3342 let index_changed =
3343 index_text.as_ref() != current_index.as_deref();
3344 index_changed
3345 .then_some(DiffBasesChange::SetIndex(index_text))
3346 }
3347 (None, Some(current_head)) => {
3348 let head_changed =
3349 head_text.as_ref() != current_head.as_deref();
3350 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3351 }
3352 (None, None) => None,
3353 };
3354
3355 changes.push((buffer.clone(), change))
3356 }
3357 changes
3358 })
3359 .await;
3360
3361 git_store.update(&mut cx, |git_store, cx| {
3362 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3363 let buffer_snapshot = buffer.read(cx).text_snapshot();
3364 let buffer_id = buffer_snapshot.remote_id();
3365 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3366 continue;
3367 };
3368
3369 let downstream_client = git_store.downstream_client();
3370 diff_state.update(cx, |diff_state, cx| {
3371 use proto::update_diff_bases::Mode;
3372
3373 if let Some((diff_bases_change, (client, project_id))) =
3374 diff_bases_change.clone().zip(downstream_client)
3375 {
3376 let (staged_text, committed_text, mode) = match diff_bases_change {
3377 DiffBasesChange::SetIndex(index) => {
3378 (index, None, Mode::IndexOnly)
3379 }
3380 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3381 DiffBasesChange::SetEach { index, head } => {
3382 (index, head, Mode::IndexAndHead)
3383 }
3384 DiffBasesChange::SetBoth(text) => {
3385 (None, text, Mode::IndexMatchesHead)
3386 }
3387 };
3388 client
3389 .send(proto::UpdateDiffBases {
3390 project_id: project_id.to_proto(),
3391 buffer_id: buffer_id.to_proto(),
3392 staged_text,
3393 committed_text,
3394 mode: mode as i32,
3395 })
3396 .log_err();
3397 }
3398
3399 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3400 });
3401 }
3402 })
3403 },
3404 );
3405 }
3406
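    /// Enqueues a job on this repository's serialized git worker and returns a
    /// receiver for its result, registering `status` as an active job while it runs.
    /// A rough usage sketch (illustrative only, not doc-tested; error handling and the
    /// surrounding entity plumbing are assumed):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git show HEAD".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.show("HEAD".to_string()).await,
    ///         RepositoryState::Remote { .. } => anyhow::bail!("handled via RPC elsewhere"),
    ///     }
    /// });
    /// let commit_details = rx.await??;
    /// ```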
3407 pub fn send_job<F, Fut, R>(
3408 &mut self,
3409 status: Option<SharedString>,
3410 job: F,
3411 ) -> oneshot::Receiver<R>
3412 where
3413 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3414 Fut: Future<Output = R> + 'static,
3415 R: Send + 'static,
3416 {
3417 self.send_keyed_job(None, status, job)
3418 }
3419
3420 fn send_keyed_job<F, Fut, R>(
3421 &mut self,
3422 key: Option<GitJobKey>,
3423 status: Option<SharedString>,
3424 job: F,
3425 ) -> oneshot::Receiver<R>
3426 where
3427 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3428 Fut: Future<Output = R> + 'static,
3429 R: Send + 'static,
3430 {
3431 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3432 let job_id = post_inc(&mut self.job_id);
3433 let this = self.this.clone();
3434 self.job_sender
3435 .unbounded_send(GitJob {
3436 key,
3437 job: Box::new(move |state, cx: &mut AsyncApp| {
3438 let job = job(state, cx.clone());
3439 cx.spawn(async move |cx| {
3440 if let Some(s) = status.clone() {
3441 this.update(cx, |this, cx| {
3442 this.active_jobs.insert(
3443 job_id,
3444 JobInfo {
3445 start: Instant::now(),
3446 message: s.clone(),
3447 },
3448 );
3449
3450 cx.notify();
3451 })
3452 .ok();
3453 }
3454 let result = job.await;
3455
3456 this.update(cx, |this, cx| {
3457 this.active_jobs.remove(&job_id);
3458 cx.notify();
3459 })
3460 .ok();
3461
3462 result_tx.send(result).ok();
3463 })
3464 }),
3465 })
3466 .ok();
3467 result_rx
3468 }
3469
3470 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3471 let Some(git_store) = self.git_store.upgrade() else {
3472 return;
3473 };
3474 let entity = cx.entity();
3475 git_store.update(cx, |git_store, cx| {
3476 let Some((&id, _)) = git_store
3477 .repositories
3478 .iter()
3479 .find(|(_, handle)| *handle == &entity)
3480 else {
3481 return;
3482 };
3483 git_store.active_repo_id = Some(id);
3484 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3485 });
3486 }
3487
3488 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3489 self.snapshot.status()
3490 }
3491
3492 pub fn cached_stash(&self) -> GitStash {
3493 self.snapshot.stash_entries.clone()
3494 }
3495
3496 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3497 let git_store = self.git_store.upgrade()?;
3498 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3499 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3500 let abs_path = SanitizedPath::new(&abs_path);
3501 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3502 Some(ProjectPath {
3503 worktree_id: worktree.read(cx).id(),
3504 path: relative_path,
3505 })
3506 }
3507
3508 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3509 let git_store = self.git_store.upgrade()?;
3510 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3511 let abs_path = worktree_store.absolutize(path, cx)?;
3512 self.snapshot.abs_path_to_repo_path(&abs_path)
3513 }
3514
3515 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3516 other
3517 .read(cx)
3518 .snapshot
3519 .work_directory_abs_path
3520 .starts_with(&self.snapshot.work_directory_abs_path)
3521 }
3522
3523 pub fn open_commit_buffer(
3524 &mut self,
3525 languages: Option<Arc<LanguageRegistry>>,
3526 buffer_store: Entity<BufferStore>,
3527 cx: &mut Context<Self>,
3528 ) -> Task<Result<Entity<Buffer>>> {
3529 let id = self.id;
3530 if let Some(buffer) = self.commit_message_buffer.clone() {
3531 return Task::ready(Ok(buffer));
3532 }
3533 let this = cx.weak_entity();
3534
3535 let rx = self.send_job(None, move |state, mut cx| async move {
3536 let Some(this) = this.upgrade() else {
3537 bail!("git store was dropped");
3538 };
3539 match state {
3540 RepositoryState::Local { .. } => {
3541 this.update(&mut cx, |_, cx| {
3542 Self::open_local_commit_buffer(languages, buffer_store, cx)
3543 })?
3544 .await
3545 }
3546 RepositoryState::Remote { project_id, client } => {
3547 let request = client.request(proto::OpenCommitMessageBuffer {
3548 project_id: project_id.0,
3549 repository_id: id.to_proto(),
3550 });
3551 let response = request.await.context("requesting to open commit buffer")?;
3552 let buffer_id = BufferId::new(response.buffer_id)?;
3553 let buffer = buffer_store
3554 .update(&mut cx, |buffer_store, cx| {
3555 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3556 })?
3557 .await?;
3558 if let Some(language_registry) = languages {
3559 let git_commit_language =
3560 language_registry.language_for_name("Git Commit").await?;
3561 buffer.update(&mut cx, |buffer, cx| {
3562 buffer.set_language(Some(git_commit_language), cx);
3563 })?;
3564 }
3565 this.update(&mut cx, |this, _| {
3566 this.commit_message_buffer = Some(buffer.clone());
3567 })?;
3568 Ok(buffer)
3569 }
3570 }
3571 });
3572
3573 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3574 }
3575
3576 fn open_local_commit_buffer(
3577 language_registry: Option<Arc<LanguageRegistry>>,
3578 buffer_store: Entity<BufferStore>,
3579 cx: &mut Context<Self>,
3580 ) -> Task<Result<Entity<Buffer>>> {
3581 cx.spawn(async move |repository, cx| {
3582 let buffer = buffer_store
3583 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3584 .await?;
3585
3586 if let Some(language_registry) = language_registry {
3587 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3588 buffer.update(cx, |buffer, cx| {
3589 buffer.set_language(Some(git_commit_language), cx);
3590 })?;
3591 }
3592
3593 repository.update(cx, |repository, _| {
3594 repository.commit_message_buffer = Some(buffer.clone());
3595 })?;
3596 Ok(buffer)
3597 })
3598 }
3599
3600 pub fn checkout_files(
3601 &mut self,
3602 commit: &str,
3603 paths: Vec<RepoPath>,
3604 _cx: &mut App,
3605 ) -> oneshot::Receiver<Result<()>> {
3606 let commit = commit.to_string();
3607 let id = self.id;
3608
3609 self.send_job(
            Some(format!("git checkout {commit}").into()),
3611 move |git_repo, _| async move {
3612 match git_repo {
3613 RepositoryState::Local {
3614 backend,
3615 environment,
3616 ..
3617 } => {
3618 backend
3619 .checkout_files(commit, paths, environment.clone())
3620 .await
3621 }
3622 RepositoryState::Remote { project_id, client } => {
3623 client
3624 .request(proto::GitCheckoutFiles {
3625 project_id: project_id.0,
3626 repository_id: id.to_proto(),
3627 commit,
3628 paths: paths.into_iter().map(|p| p.to_proto()).collect(),
3629 })
3630 .await?;
3631
3632 Ok(())
3633 }
3634 }
3635 },
3636 )
3637 }
3638
3639 pub fn reset(
3640 &mut self,
3641 commit: String,
3642 reset_mode: ResetMode,
3643 _cx: &mut App,
3644 ) -> oneshot::Receiver<Result<()>> {
3645 let id = self.id;
3646
3647 self.send_job(None, move |git_repo, _| async move {
3648 match git_repo {
3649 RepositoryState::Local {
3650 backend,
3651 environment,
3652 ..
3653 } => backend.reset(commit, reset_mode, environment).await,
3654 RepositoryState::Remote { project_id, client } => {
3655 client
3656 .request(proto::GitReset {
3657 project_id: project_id.0,
3658 repository_id: id.to_proto(),
3659 commit,
3660 mode: match reset_mode {
3661 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3662 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3663 },
3664 })
3665 .await?;
3666
3667 Ok(())
3668 }
3669 }
3670 })
3671 }
3672
3673 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3674 let id = self.id;
3675 self.send_job(None, move |git_repo, _cx| async move {
3676 match git_repo {
3677 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3678 RepositoryState::Remote { project_id, client } => {
3679 let resp = client
3680 .request(proto::GitShow {
3681 project_id: project_id.0,
3682 repository_id: id.to_proto(),
3683 commit,
3684 })
3685 .await?;
3686
3687 Ok(CommitDetails {
3688 sha: resp.sha.into(),
3689 message: resp.message.into(),
3690 commit_timestamp: resp.commit_timestamp,
3691 author_email: resp.author_email.into(),
3692 author_name: resp.author_name.into(),
3693 })
3694 }
3695 }
3696 })
3697 }
3698
3699 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3700 let id = self.id;
3701 self.send_job(None, move |git_repo, cx| async move {
3702 match git_repo {
3703 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3704 RepositoryState::Remote {
3705 client, project_id, ..
3706 } => {
3707 let response = client
3708 .request(proto::LoadCommitDiff {
3709 project_id: project_id.0,
3710 repository_id: id.to_proto(),
3711 commit,
3712 })
3713 .await?;
3714 Ok(CommitDiff {
3715 files: response
3716 .files
3717 .into_iter()
3718 .map(|file| {
3719 Ok(CommitFile {
3720 path: RepoPath::from_proto(&file.path)?,
3721 old_text: file.old_text,
3722 new_text: file.new_text,
3723 })
3724 })
3725 .collect::<Result<Vec<_>>>()?,
3726 })
3727 }
3728 }
3729 })
3730 }
3731
3732 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3733 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3734 }
3735
3736 fn save_buffers<'a>(
3737 &self,
3738 entries: impl IntoIterator<Item = &'a RepoPath>,
3739 cx: &mut Context<Self>,
3740 ) -> Vec<Task<anyhow::Result<()>>> {
3741 let mut save_futures = Vec::new();
3742 if let Some(buffer_store) = self.buffer_store(cx) {
3743 buffer_store.update(cx, |buffer_store, cx| {
3744 for path in entries {
3745 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3746 continue;
3747 };
3748 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3749 && buffer
3750 .read(cx)
3751 .file()
3752 .is_some_and(|file| file.disk_state().exists())
3753 && buffer.read(cx).has_unsaved_edits()
3754 {
3755 save_futures.push(buffer_store.save_buffer(buffer, cx));
3756 }
3757 }
3758 })
3759 }
3760 save_futures
3761 }
3762
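    /// Saves any dirty buffers for the given paths, records pending "staged"
    /// operations for them, and then stages the paths (locally or via RPC).
    /// A rough usage sketch (illustrative only, not doc-tested; `repo_path` is
    /// assumed to be a valid `RepoPath` in this repository):
    ///
    /// ```ignore
    /// let task = repository.update(cx, |repo, cx| repo.stage_entries(vec![repo_path], cx));
    /// task.detach_and_log_err(cx);
    /// ```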
3763 pub fn stage_entries(
3764 &mut self,
3765 entries: Vec<RepoPath>,
3766 cx: &mut Context<Self>,
3767 ) -> Task<anyhow::Result<()>> {
3768 if entries.is_empty() {
3769 return Task::ready(Ok(()));
3770 }
3771 let id = self.id;
3772 let save_tasks = self.save_buffers(&entries, cx);
3773 let paths = entries
3774 .iter()
3775 .map(|p| p.as_unix_str())
3776 .collect::<Vec<_>>()
3777 .join(" ");
3778 let status = format!("git add {paths}");
3779 let job_key = match entries.len() {
3780 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3781 _ => None,
3782 };
3783
3784 let mut ids = Vec::with_capacity(entries.len());
3785
3786 for entry in &entries {
3787 let id = self.snapshot.pending_ops.summary().item_summary.max_id + 1;
3788 self.snapshot.pending_ops.push(
3789 PendingOp {
3790 repo_path: entry.clone(),
3791 id,
3792 status: PendingOpStatus::Staged,
3793 finished: false,
3794 },
3795 (),
3796 );
3797 ids.push(id);
3798 }
3799
3800 cx.spawn(async move |this, cx| {
3801 for save_task in save_tasks {
3802 save_task.await?;
3803 }
3804
3805 this.update(cx, |this, _| {
3806 this.send_keyed_job(
3807 job_key,
3808 Some(status.into()),
3809 move |git_repo, _cx| async move {
3810 match git_repo {
3811 RepositoryState::Local {
3812 backend,
3813 environment,
3814 ..
3815 } => backend.stage_paths(entries, environment.clone()).await,
3816 RepositoryState::Remote { project_id, client } => {
3817 client
3818 .request(proto::Stage {
3819 project_id: project_id.0,
3820 repository_id: id.to_proto(),
3821 paths: entries
3822 .into_iter()
3823 .map(|repo_path| repo_path.to_proto())
3824 .collect(),
3825 })
3826 .await
3827 .context("sending stage request")?;
3828
3829 Ok(())
3830 }
3831 }
3832 },
3833 )
3834 })?
3835 .await??;
3836
3837 this.update(cx, |this, _| {
3838 for id in ids {
3839 if let Some(mut op) = this.snapshot.pending_op_by_id(id) {
3840 op.finished = true;
3841 this.snapshot
3842 .pending_ops
3843 .edit(vec![sum_tree::Edit::Insert(op)], ());
3844 }
3845 }
3846 })?;
3847
3848 Ok(())
3849 })
3850 }
3851
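    /// Unstages the given paths, saving any dirty open buffers first.
    ///
    /// Mirrors [`Self::stage_entries`], recording a pending unstage operation
    /// per path until the underlying `git reset` (or remote `Unstage` request)
    /// completes.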
3852 pub fn unstage_entries(
3853 &mut self,
3854 entries: Vec<RepoPath>,
3855 cx: &mut Context<Self>,
3856 ) -> Task<anyhow::Result<()>> {
3857 if entries.is_empty() {
3858 return Task::ready(Ok(()));
3859 }
3860 let id = self.id;
3861 let save_tasks = self.save_buffers(&entries, cx);
3862 let paths = entries
3863 .iter()
3864 .map(|p| p.as_unix_str())
3865 .collect::<Vec<_>>()
3866 .join(" ");
3867 let status = format!("git reset {paths}");
3868 let job_key = match entries.len() {
3869 1 => Some(GitJobKey::WriteIndex(entries[0].clone())),
3870 _ => None,
3871 };
3872
3873 let mut ids = Vec::with_capacity(entries.len());
3874
3875 for entry in &entries {
3876 let id = self.snapshot.pending_ops.summary().item_summary.max_id + 1;
3877 self.snapshot.pending_ops.push(
3878 PendingOp {
3879 repo_path: entry.clone(),
3880 id,
3881 status: PendingOpStatus::Unstaged,
3882 finished: false,
3883 },
3884 (),
3885 );
3886 ids.push(id);
3887 }
3888
3889 cx.spawn(async move |this, cx| {
3890 for save_task in save_tasks {
3891 save_task.await?;
3892 }
3893
3894 this.update(cx, |this, _| {
3895 this.send_keyed_job(
3896 job_key,
3897 Some(status.into()),
3898 move |git_repo, _cx| async move {
3899 match git_repo {
3900 RepositoryState::Local {
3901 backend,
3902 environment,
3903 ..
3904 } => backend.unstage_paths(entries, environment).await,
3905 RepositoryState::Remote { project_id, client } => {
3906 client
3907 .request(proto::Unstage {
3908 project_id: project_id.0,
3909 repository_id: id.to_proto(),
3910 paths: entries
3911 .into_iter()
3912 .map(|repo_path| repo_path.to_proto())
3913 .collect(),
3914 })
3915 .await
3916 .context("sending unstage request")?;
3917
3918 Ok(())
3919 }
3920 }
3921 },
3922 )
3923 })?
3924 .await??;
3925
3926 this.update(cx, |this, _| {
3927 for id in ids {
3928 if let Some(mut op) = this.snapshot.pending_op_by_id(id) {
3929 op.finished = true;
3930 this.snapshot
3931 .pending_ops
3932 .edit(vec![sum_tree::Edit::Insert(op)], ());
3933 }
3934 }
3935 })?;
3936
3937 Ok(())
3938 })
3939 }
3940
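    /// Stages every entry in the cached status that is not already fully staged.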
3941 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3942 let to_stage = self
3943 .cached_status()
3944 .filter(|entry| !entry.status.staging().is_fully_staged())
3945 .map(|entry| entry.repo_path)
3946 .collect();
3947 self.stage_entries(to_stage, cx)
3948 }
3949
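    /// Unstages every entry in the cached status that has staged changes.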
3950 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3951 let to_unstage = self
3952 .cached_status()
3953 .filter(|entry| entry.status.staging().has_staged())
3954 .map(|entry| entry.repo_path)
3955 .collect();
3956 self.unstage_entries(to_unstage, cx)
3957 }
3958
3959 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
3960 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
3961
3962 self.stash_entries(to_stash, cx)
3963 }
3964
3965 pub fn stash_entries(
3966 &mut self,
3967 entries: Vec<RepoPath>,
3968 cx: &mut Context<Self>,
3969 ) -> Task<anyhow::Result<()>> {
3970 let id = self.id;
3971
3972 cx.spawn(async move |this, cx| {
3973 this.update(cx, |this, _| {
3974 this.send_job(None, move |git_repo, _cx| async move {
3975 match git_repo {
3976 RepositoryState::Local {
3977 backend,
3978 environment,
3979 ..
3980 } => backend.stash_paths(entries, environment).await,
3981 RepositoryState::Remote { project_id, client } => {
3982 client
3983 .request(proto::Stash {
3984 project_id: project_id.0,
3985 repository_id: id.to_proto(),
3986 paths: entries
3987 .into_iter()
3988 .map(|repo_path| repo_path.to_proto())
3989 .collect(),
3990 })
3991 .await
3992 .context("sending stash request")?;
3993 Ok(())
3994 }
3995 }
3996 })
3997 })?
3998 .await??;
3999 Ok(())
4000 })
4001 }
4002
4003 pub fn stash_pop(
4004 &mut self,
4005 index: Option<usize>,
4006 cx: &mut Context<Self>,
4007 ) -> Task<anyhow::Result<()>> {
4008 let id = self.id;
4009 cx.spawn(async move |this, cx| {
4010 this.update(cx, |this, _| {
4011 this.send_job(None, move |git_repo, _cx| async move {
4012 match git_repo {
4013 RepositoryState::Local {
4014 backend,
4015 environment,
4016 ..
4017 } => backend.stash_pop(index, environment).await,
4018 RepositoryState::Remote { project_id, client } => {
4019 client
4020 .request(proto::StashPop {
4021 project_id: project_id.0,
4022 repository_id: id.to_proto(),
4023 stash_index: index.map(|i| i as u64),
4024 })
4025 .await
4026 .context("sending stash pop request")?;
4027 Ok(())
4028 }
4029 }
4030 })
4031 })?
4032 .await??;
4033 Ok(())
4034 })
4035 }
4036
4037 pub fn stash_apply(
4038 &mut self,
4039 index: Option<usize>,
4040 cx: &mut Context<Self>,
4041 ) -> Task<anyhow::Result<()>> {
4042 let id = self.id;
4043 cx.spawn(async move |this, cx| {
4044 this.update(cx, |this, _| {
4045 this.send_job(None, move |git_repo, _cx| async move {
4046 match git_repo {
4047 RepositoryState::Local {
4048 backend,
4049 environment,
4050 ..
4051 } => backend.stash_apply(index, environment).await,
4052 RepositoryState::Remote { project_id, client } => {
4053 client
4054 .request(proto::StashApply {
4055 project_id: project_id.0,
4056 repository_id: id.to_proto(),
4057 stash_index: index.map(|i| i as u64),
4058 })
4059 .await
4060 .context("sending stash apply request")?;
4061 Ok(())
4062 }
4063 }
4064 })
4065 })?
4066 .await??;
4067 Ok(())
4068 })
4069 }
4070
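    /// Drops the stash entry at the given index (or the most recent entry when
    /// `None`).
    ///
    /// For local repositories the stash list is reloaded afterwards and the
    /// updated snapshot is forwarded to any downstream clients.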
4071 pub fn stash_drop(
4072 &mut self,
4073 index: Option<usize>,
4074 cx: &mut Context<Self>,
4075 ) -> oneshot::Receiver<anyhow::Result<()>> {
4076 let id = self.id;
4077 let updates_tx = self
4078 .git_store()
4079 .and_then(|git_store| match &git_store.read(cx).state {
4080 GitStoreState::Local { downstream, .. } => downstream
4081 .as_ref()
4082 .map(|downstream| downstream.updates_tx.clone()),
4083 _ => None,
4084 });
4085 let this = cx.weak_entity();
4086 self.send_job(None, move |git_repo, mut cx| async move {
4087 match git_repo {
4088 RepositoryState::Local {
4089 backend,
4090 environment,
4091 ..
4092 } => {
4093 // TODO would be nice to not have to do this manually
4094 let result = backend.stash_drop(index, environment).await;
4095 if result.is_ok()
4096 && let Ok(stash_entries) = backend.stash_entries().await
4097 {
4098 let snapshot = this.update(&mut cx, |this, cx| {
4099 this.snapshot.stash_entries = stash_entries;
4100 cx.emit(RepositoryEvent::StashEntriesChanged);
4101 this.snapshot.clone()
4102 })?;
4103 if let Some(updates_tx) = updates_tx {
4104 updates_tx
4105 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4106 .ok();
4107 }
4108 }
4109
4110 result
4111 }
4112 RepositoryState::Remote { project_id, client } => {
4113 client
4114 .request(proto::StashDrop {
4115 project_id: project_id.0,
4116 repository_id: id.to_proto(),
4117 stash_index: index.map(|i| i as u64),
4118 })
4119 .await
4120 .context("sending stash pop request")?;
4121 Ok(())
4122 }
4123 }
4124 })
4125 }
4126
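    /// Commits the currently staged changes with the given message, optionally
    /// overriding the author name and email.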
4127 pub fn commit(
4128 &mut self,
4129 message: SharedString,
4130 name_and_email: Option<(SharedString, SharedString)>,
4131 options: CommitOptions,
4132 _cx: &mut App,
4133 ) -> oneshot::Receiver<Result<()>> {
4134 let id = self.id;
4135
4136 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4137 match git_repo {
4138 RepositoryState::Local {
4139 backend,
4140 environment,
4141 ..
4142 } => {
4143 backend
4144 .commit(message, name_and_email, options, environment)
4145 .await
4146 }
4147 RepositoryState::Remote { project_id, client } => {
4148 let (name, email) = name_and_email.unzip();
4149 client
4150 .request(proto::Commit {
4151 project_id: project_id.0,
4152 repository_id: id.to_proto(),
4153 message: String::from(message),
4154 name: name.map(String::from),
4155 email: email.map(String::from),
4156 options: Some(proto::commit::CommitOptions {
4157 amend: options.amend,
4158 signoff: options.signoff,
4159 }),
4160 })
4161 .await
4162 .context("sending commit request")?;
4163
4164 Ok(())
4165 }
4166 }
4167 })
4168 }
4169
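    /// Fetches from the remote described by `fetch_options`, using the askpass
    /// delegate for any credential prompts.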
4170 pub fn fetch(
4171 &mut self,
4172 fetch_options: FetchOptions,
4173 askpass: AskPassDelegate,
4174 _cx: &mut App,
4175 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4176 let askpass_delegates = self.askpass_delegates.clone();
4177 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4178 let id = self.id;
4179
4180 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4181 match git_repo {
4182 RepositoryState::Local {
4183 backend,
4184 environment,
4185 ..
4186 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4187 RepositoryState::Remote { project_id, client } => {
4188 askpass_delegates.lock().insert(askpass_id, askpass);
4189 let _defer = util::defer(|| {
4190 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4191 debug_assert!(askpass_delegate.is_some());
4192 });
4193
4194 let response = client
4195 .request(proto::Fetch {
4196 project_id: project_id.0,
4197 repository_id: id.to_proto(),
4198 askpass_id,
4199 remote: fetch_options.to_proto(),
4200 })
4201 .await
4202 .context("sending fetch request")?;
4203
4204 Ok(RemoteCommandOutput {
4205 stdout: response.stdout,
4206 stderr: response.stderr,
4207 })
4208 }
4209 }
4210 })
4211 }
4212
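    /// Pushes the given branch to the named remote.
    ///
    /// On success for local repositories, the head branch is re-read so that
    /// upstream tracking information is refreshed and forwarded to downstream
    /// clients.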
4213 pub fn push(
4214 &mut self,
4215 branch: SharedString,
4216 remote: SharedString,
4217 options: Option<PushOptions>,
4218 askpass: AskPassDelegate,
4219 cx: &mut Context<Self>,
4220 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4221 let askpass_delegates = self.askpass_delegates.clone();
4222 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4223 let id = self.id;
4224
4225 let args = options
4226 .map(|option| match option {
4227 PushOptions::SetUpstream => " --set-upstream",
4228 PushOptions::Force => " --force-with-lease",
4229 })
4230 .unwrap_or("");
4231
4232 let updates_tx = self
4233 .git_store()
4234 .and_then(|git_store| match &git_store.read(cx).state {
4235 GitStoreState::Local { downstream, .. } => downstream
4236 .as_ref()
4237 .map(|downstream| downstream.updates_tx.clone()),
4238 _ => None,
4239 });
4240
4241 let this = cx.weak_entity();
4242 self.send_job(
4243 Some(format!("git push {} {} {}", args, remote, branch).into()),
4244 move |git_repo, mut cx| async move {
4245 match git_repo {
4246 RepositoryState::Local {
4247 backend,
4248 environment,
4249 ..
4250 } => {
4251 let result = backend
4252 .push(
4253 branch.to_string(),
4254 remote.to_string(),
4255 options,
4256 askpass,
4257 environment.clone(),
4258 cx.clone(),
4259 )
4260 .await;
4261 // TODO would be nice to not have to do this manually
4262 if result.is_ok() {
4263 let branches = backend.branches().await?;
4264 let branch = branches.into_iter().find(|branch| branch.is_head);
4265 log::info!("head branch after scan is {branch:?}");
4266 let snapshot = this.update(&mut cx, |this, cx| {
4267 this.snapshot.branch = branch;
4268 cx.emit(RepositoryEvent::BranchChanged);
4269 this.snapshot.clone()
4270 })?;
4271 if let Some(updates_tx) = updates_tx {
4272 updates_tx
4273 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4274 .ok();
4275 }
4276 }
4277 result
4278 }
4279 RepositoryState::Remote { project_id, client } => {
4280 askpass_delegates.lock().insert(askpass_id, askpass);
4281 let _defer = util::defer(|| {
4282 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4283 debug_assert!(askpass_delegate.is_some());
4284 });
4285 let response = client
4286 .request(proto::Push {
4287 project_id: project_id.0,
4288 repository_id: id.to_proto(),
4289 askpass_id,
4290 branch_name: branch.to_string(),
4291 remote_name: remote.to_string(),
4292 options: options.map(|options| match options {
4293 PushOptions::Force => proto::push::PushOptions::Force,
4294 PushOptions::SetUpstream => {
4295 proto::push::PushOptions::SetUpstream
4296 }
4297 }
4298 as i32),
4299 })
4300 .await
4301 .context("sending push request")?;
4302
4303 Ok(RemoteCommandOutput {
4304 stdout: response.stdout,
4305 stderr: response.stderr,
4306 })
4307 }
4308 }
4309 },
4310 )
4311 }
4312
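    /// Pulls the given branch from the named remote, using the askpass delegate
    /// for any credential prompts.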
4313 pub fn pull(
4314 &mut self,
4315 branch: SharedString,
4316 remote: SharedString,
4317 askpass: AskPassDelegate,
4318 _cx: &mut App,
4319 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4320 let askpass_delegates = self.askpass_delegates.clone();
4321 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4322 let id = self.id;
4323
4324 self.send_job(
4325 Some(format!("git pull {} {}", remote, branch).into()),
4326 move |git_repo, cx| async move {
4327 match git_repo {
4328 RepositoryState::Local {
4329 backend,
4330 environment,
4331 ..
4332 } => {
4333 backend
4334 .pull(
4335 branch.to_string(),
4336 remote.to_string(),
4337 askpass,
4338 environment.clone(),
4339 cx,
4340 )
4341 .await
4342 }
4343 RepositoryState::Remote { project_id, client } => {
4344 askpass_delegates.lock().insert(askpass_id, askpass);
4345 let _defer = util::defer(|| {
4346 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4347 debug_assert!(askpass_delegate.is_some());
4348 });
4349 let response = client
4350 .request(proto::Pull {
4351 project_id: project_id.0,
4352 repository_id: id.to_proto(),
4353 askpass_id,
4354 branch_name: branch.to_string(),
4355 remote_name: remote.to_string(),
4356 })
4357 .await
4358 .context("sending pull request")?;
4359
4360 Ok(RemoteCommandOutput {
4361 stdout: response.stdout,
4362 stderr: response.stderr,
4363 })
4364 }
4365 }
4366 },
4367 )
4368 }
4369
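    /// Writes the given text into the index entry for `path`.
    ///
    /// The job is keyed by `GitJobKey::WriteIndex(path)`, so a newer queued
    /// write for the same path supersedes older ones that have not started yet.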
4370 fn spawn_set_index_text_job(
4371 &mut self,
4372 path: RepoPath,
4373 content: Option<String>,
4374 hunk_staging_operation_count: Option<usize>,
4375 cx: &mut Context<Self>,
4376 ) -> oneshot::Receiver<anyhow::Result<()>> {
4377 let id = self.id;
4378 let this = cx.weak_entity();
4379 let git_store = self.git_store.clone();
4380 self.send_keyed_job(
4381 Some(GitJobKey::WriteIndex(path.clone())),
4382 None,
4383 move |git_repo, mut cx| async move {
4384 log::debug!(
4385 "start updating index text for buffer {}",
4386 path.as_unix_str()
4387 );
4388 match git_repo {
4389 RepositoryState::Local {
4390 backend,
4391 environment,
4392 ..
4393 } => {
4394 backend
4395 .set_index_text(path.clone(), content, environment.clone())
4396 .await?;
4397 }
4398 RepositoryState::Remote { project_id, client } => {
4399 client
4400 .request(proto::SetIndexText {
4401 project_id: project_id.0,
4402 repository_id: id.to_proto(),
4403 path: path.to_proto(),
4404 text: content,
4405 })
4406 .await?;
4407 }
4408 }
4409 log::debug!(
4410 "finish updating index text for buffer {}",
4411 path.as_unix_str()
4412 );
4413
4414 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4415 let project_path = this
4416 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4417 .ok()
4418 .flatten();
4419 git_store.update(&mut cx, |git_store, cx| {
4420 let buffer_id = git_store
4421 .buffer_store
4422 .read(cx)
4423 .get_by_path(&project_path?)?
4424 .read(cx)
4425 .remote_id();
4426 let diff_state = git_store.diffs.get(&buffer_id)?;
4427 diff_state.update(cx, |diff_state, _| {
4428 diff_state.hunk_staging_operation_count_as_of_write =
4429 hunk_staging_operation_count;
4430 });
4431 Some(())
4432 })?;
4433 }
4434 Ok(())
4435 },
4436 )
4437 }
4438
4439 pub fn get_remotes(
4440 &mut self,
4441 branch_name: Option<String>,
4442 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4443 let id = self.id;
4444 self.send_job(None, move |repo, _cx| async move {
4445 match repo {
4446 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4447 RepositoryState::Remote { project_id, client } => {
4448 let response = client
4449 .request(proto::GetRemotes {
4450 project_id: project_id.0,
4451 repository_id: id.to_proto(),
4452 branch_name,
4453 })
4454 .await?;
4455
4456 let remotes = response
4457 .remotes
4458 .into_iter()
4459 .map(|remotes| git::repository::Remote {
4460 name: remotes.name.into(),
4461 })
4462 .collect();
4463
4464 Ok(remotes)
4465 }
4466 }
4467 })
4468 }
4469
4470 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4471 let id = self.id;
4472 self.send_job(None, move |repo, _| async move {
4473 match repo {
4474 RepositoryState::Local { backend, .. } => backend.branches().await,
4475 RepositoryState::Remote { project_id, client } => {
4476 let response = client
4477 .request(proto::GitGetBranches {
4478 project_id: project_id.0,
4479 repository_id: id.to_proto(),
4480 })
4481 .await?;
4482
4483 let branches = response
4484 .branches
4485 .into_iter()
4486 .map(|branch| proto_to_branch(&branch))
4487 .collect();
4488
4489 Ok(branches)
4490 }
4491 }
4492 })
4493 }
4494
4495 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4496 let id = self.id;
4497 self.send_job(None, move |repo, _| async move {
4498 match repo {
4499 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4500 RepositoryState::Remote { project_id, client } => {
4501 let response = client
4502 .request(proto::GetDefaultBranch {
4503 project_id: project_id.0,
4504 repository_id: id.to_proto(),
4505 })
4506 .await?;
4507
4508 anyhow::Ok(response.branch.map(SharedString::from))
4509 }
4510 }
4511 })
4512 }
4513
4514 pub fn diff_tree(
4515 &mut self,
4516 diff_type: DiffTreeType,
4517 _cx: &App,
4518 ) -> oneshot::Receiver<Result<TreeDiff>> {
4519 let repository_id = self.snapshot.id;
4520 self.send_job(None, move |repo, _cx| async move {
4521 match repo {
4522 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4523 RepositoryState::Remote { client, project_id } => {
4524 let response = client
4525 .request(proto::GetTreeDiff {
4526 project_id: project_id.0,
4527 repository_id: repository_id.0,
4528 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4529 base: diff_type.base().to_string(),
4530 head: diff_type.head().to_string(),
4531 })
4532 .await?;
4533
4534 let entries = response
4535 .entries
4536 .into_iter()
4537 .filter_map(|entry| {
4538 let status = match entry.status() {
4539 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4540 proto::tree_diff_status::Status::Modified => {
4541 TreeDiffStatus::Modified {
4542 old: git::Oid::from_str(
4543 &entry.oid.context("missing oid").log_err()?,
4544 )
4545 .log_err()?,
4546 }
4547 }
4548 proto::tree_diff_status::Status::Deleted => {
4549 TreeDiffStatus::Deleted {
4550 old: git::Oid::from_str(
4551 &entry.oid.context("missing oid").log_err()?,
4552 )
4553 .log_err()?,
4554 }
4555 }
4556 };
4557 Some((
4558 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4559 status,
4560 ))
4561 })
4562 .collect();
4563
4564 Ok(TreeDiff { entries })
4565 }
4566 }
4567 })
4568 }
4569
4570 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4571 let id = self.id;
4572 self.send_job(None, move |repo, _cx| async move {
4573 match repo {
4574 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4575 RepositoryState::Remote { project_id, client } => {
4576 let response = client
4577 .request(proto::GitDiff {
4578 project_id: project_id.0,
4579 repository_id: id.to_proto(),
4580 diff_type: match diff_type {
4581 DiffType::HeadToIndex => {
4582 proto::git_diff::DiffType::HeadToIndex.into()
4583 }
4584 DiffType::HeadToWorktree => {
4585 proto::git_diff::DiffType::HeadToWorktree.into()
4586 }
4587 },
4588 })
4589 .await?;
4590
4591 Ok(response.diff)
4592 }
4593 }
4594 })
4595 }
4596
4597 pub fn create_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4598 let id = self.id;
4599 self.send_job(
4600 Some(format!("git switch -c {branch_name}").into()),
4601 move |repo, _cx| async move {
4602 match repo {
4603 RepositoryState::Local { backend, .. } => {
4604 backend.create_branch(branch_name).await
4605 }
4606 RepositoryState::Remote { project_id, client } => {
4607 client
4608 .request(proto::GitCreateBranch {
4609 project_id: project_id.0,
4610 repository_id: id.to_proto(),
4611 branch_name,
4612 })
4613 .await?;
4614
4615 Ok(())
4616 }
4617 }
4618 },
4619 )
4620 }
4621
4622 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4623 let id = self.id;
4624 self.send_job(
4625 Some(format!("git switch {branch_name}").into()),
4626 move |repo, _cx| async move {
4627 match repo {
4628 RepositoryState::Local { backend, .. } => {
4629 backend.change_branch(branch_name).await
4630 }
4631 RepositoryState::Remote { project_id, client } => {
4632 client
4633 .request(proto::GitChangeBranch {
4634 project_id: project_id.0,
4635 repository_id: id.to_proto(),
4636 branch_name,
4637 })
4638 .await?;
4639
4640 Ok(())
4641 }
4642 }
4643 },
4644 )
4645 }
4646
4647 pub fn rename_branch(
4648 &mut self,
4649 branch: String,
4650 new_name: String,
4651 ) -> oneshot::Receiver<Result<()>> {
4652 let id = self.id;
4653 self.send_job(
4654 Some(format!("git branch -m {branch} {new_name}").into()),
4655 move |repo, _cx| async move {
4656 match repo {
4657 RepositoryState::Local { backend, .. } => {
4658 backend.rename_branch(branch, new_name).await
4659 }
4660 RepositoryState::Remote { project_id, client } => {
4661 client
4662 .request(proto::GitRenameBranch {
4663 project_id: project_id.0,
4664 repository_id: id.to_proto(),
4665 branch,
4666 new_name,
4667 })
4668 .await?;
4669
4670 Ok(())
4671 }
4672 }
4673 },
4674 )
4675 }
4676
4677 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4678 let id = self.id;
4679 self.send_job(None, move |repo, _cx| async move {
4680 match repo {
4681 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4682 RepositoryState::Remote { project_id, client } => {
4683 let response = client
4684 .request(proto::CheckForPushedCommits {
4685 project_id: project_id.0,
4686 repository_id: id.to_proto(),
4687 })
4688 .await?;
4689
4690 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4691
4692 Ok(branches)
4693 }
4694 }
4695 })
4696 }
4697
4698 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4699 self.send_job(None, |repo, _cx| async move {
4700 match repo {
4701 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4702 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4703 }
4704 })
4705 }
4706
4707 pub fn restore_checkpoint(
4708 &mut self,
4709 checkpoint: GitRepositoryCheckpoint,
4710 ) -> oneshot::Receiver<Result<()>> {
4711 self.send_job(None, move |repo, _cx| async move {
4712 match repo {
4713 RepositoryState::Local { backend, .. } => {
4714 backend.restore_checkpoint(checkpoint).await
4715 }
4716 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4717 }
4718 })
4719 }
4720
4721 pub(crate) fn apply_remote_update(
4722 &mut self,
4723 update: proto::UpdateRepository,
4724 cx: &mut Context<Self>,
4725 ) -> Result<()> {
4726 let conflicted_paths = TreeSet::from_ordered_entries(
4727 update
4728 .current_merge_conflicts
4729 .into_iter()
4730 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4731 );
4732 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4733 let new_head_commit = update
4734 .head_commit_details
4735 .as_ref()
4736 .map(proto_to_commit_details);
4737 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4738 cx.emit(RepositoryEvent::BranchChanged)
4739 }
4740 self.snapshot.branch = new_branch;
4741 self.snapshot.head_commit = new_head_commit;
4742
4743 self.snapshot.merge.conflicted_paths = conflicted_paths;
4744 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4745 let new_stash_entries = GitStash {
4746 entries: update
4747 .stash_entries
4748 .iter()
4749 .filter_map(|entry| proto_to_stash(entry).ok())
4750 .collect(),
4751 };
4752 if self.snapshot.stash_entries != new_stash_entries {
4753 cx.emit(RepositoryEvent::StashEntriesChanged)
4754 }
4755 self.snapshot.stash_entries = new_stash_entries;
4756
4757 let edits = update
4758 .removed_statuses
4759 .into_iter()
4760 .filter_map(|path| {
4761 Some(sum_tree::Edit::Remove(PathKey(
4762 RelPath::from_proto(&path).log_err()?,
4763 )))
4764 })
4765 .chain(
4766 update
4767 .updated_statuses
4768 .into_iter()
4769 .filter_map(|updated_status| {
4770 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4771 }),
4772 )
4773 .collect::<Vec<_>>();
4774 if !edits.is_empty() {
4775 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4776 }
4777 self.snapshot.statuses_by_path.edit(edits, ());
4778 if update.is_last_update {
4779 self.snapshot.scan_id = update.scan_id;
4780 }
4781 Ok(())
4782 }
4783
4784 pub fn compare_checkpoints(
4785 &mut self,
4786 left: GitRepositoryCheckpoint,
4787 right: GitRepositoryCheckpoint,
4788 ) -> oneshot::Receiver<Result<bool>> {
4789 self.send_job(None, move |repo, _cx| async move {
4790 match repo {
4791 RepositoryState::Local { backend, .. } => {
4792 backend.compare_checkpoints(left, right).await
4793 }
4794 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4795 }
4796 })
4797 }
4798
4799 pub fn diff_checkpoints(
4800 &mut self,
4801 base_checkpoint: GitRepositoryCheckpoint,
4802 target_checkpoint: GitRepositoryCheckpoint,
4803 ) -> oneshot::Receiver<Result<String>> {
4804 self.send_job(None, move |repo, _cx| async move {
4805 match repo {
4806 RepositoryState::Local { backend, .. } => {
4807 backend
4808 .diff_checkpoints(base_checkpoint, target_checkpoint)
4809 .await
4810 }
4811 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4812 }
4813 })
4814 }
4815
4816 fn schedule_scan(
4817 &mut self,
4818 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4819 cx: &mut Context<Self>,
4820 ) {
4821 let this = cx.weak_entity();
4822 let _ = self.send_keyed_job(
4823 Some(GitJobKey::ReloadGitState),
4824 None,
4825 |state, mut cx| async move {
4826 log::debug!("run scheduled git status scan");
4827
4828 let Some(this) = this.upgrade() else {
4829 return Ok(());
4830 };
4831 let RepositoryState::Local { backend, .. } = state else {
4832 bail!("not a local repository")
4833 };
4834 let (snapshot, events) = this
4835 .update(&mut cx, |this, _| {
4836 this.paths_needing_status_update.clear();
4837 compute_snapshot(
4838 this.id,
4839 this.work_directory_abs_path.clone(),
4840 this.snapshot.clone(),
4841 backend.clone(),
4842 )
4843 })?
4844 .await?;
4845 this.update(&mut cx, |this, cx| {
4846 this.snapshot = snapshot.clone();
4847 for event in events {
4848 cx.emit(event);
4849 }
4850 })?;
4851 if let Some(updates_tx) = updates_tx {
4852 updates_tx
4853 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4854 .ok();
4855 }
4856 Ok(())
4857 },
4858 );
4859 }
4860
4861 fn spawn_local_git_worker(
4862 work_directory_abs_path: Arc<Path>,
4863 dot_git_abs_path: Arc<Path>,
4864 _repository_dir_abs_path: Arc<Path>,
4865 _common_dir_abs_path: Arc<Path>,
4866 project_environment: WeakEntity<ProjectEnvironment>,
4867 fs: Arc<dyn Fs>,
4868 cx: &mut Context<Self>,
4869 ) -> mpsc::UnboundedSender<GitJob> {
4870 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4871
4872 cx.spawn(async move |_, cx| {
4873 let environment = project_environment
4874 .upgrade()
4875 .context("missing project environment")?
4876 .update(cx, |project_environment, cx| {
4877 project_environment.get_local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
4878 })?
4879 .await
4880 .unwrap_or_else(|| {
4881 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
4882 HashMap::default()
4883 });
4884 let search_paths = environment.get("PATH").map(|val| val.to_owned());
4885 let backend = cx
4886 .background_spawn(async move {
4887 let system_git_binary_path = search_paths.and_then(|search_paths| which::which_in("git", Some(search_paths), &work_directory_abs_path).ok())
4888 .or_else(|| which::which("git").ok());
4889 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
4890 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
4891 })
4892 .await?;
4893
4894 if let Some(git_hosting_provider_registry) =
4895 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
4896 {
4897 git_hosting_providers::register_additional_providers(
4898 git_hosting_provider_registry,
4899 backend.clone(),
4900 );
4901 }
4902
4903 let state = RepositoryState::Local {
4904 backend,
4905 environment: Arc::new(environment),
4906 };
4907 let mut jobs = VecDeque::new();
4908 loop {
4909 while let Ok(Some(next_job)) = job_rx.try_next() {
4910 jobs.push_back(next_job);
4911 }
4912
4913 if let Some(job) = jobs.pop_front() {
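                    // If a newer job with the same key is already queued, skip
                    // this one so that only the most recent keyed job runs.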
4914 if let Some(current_key) = &job.key
4915 && jobs
4916 .iter()
4917 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4918 {
4919 continue;
4920 }
4921 (job.job)(state.clone(), cx).await;
4922 } else if let Some(job) = job_rx.next().await {
4923 jobs.push_back(job);
4924 } else {
4925 break;
4926 }
4927 }
4928 anyhow::Ok(())
4929 })
4930 .detach_and_log_err(cx);
4931
4932 job_tx
4933 }
4934
4935 fn spawn_remote_git_worker(
4936 project_id: ProjectId,
4937 client: AnyProtoClient,
4938 cx: &mut Context<Self>,
4939 ) -> mpsc::UnboundedSender<GitJob> {
4940 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
4941
4942 cx.spawn(async move |_, cx| {
4943 let state = RepositoryState::Remote { project_id, client };
4944 let mut jobs = VecDeque::new();
4945 loop {
4946 while let Ok(Some(next_job)) = job_rx.try_next() {
4947 jobs.push_back(next_job);
4948 }
4949
4950 if let Some(job) = jobs.pop_front() {
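                    // Same de-duplication as in the local worker: skip this job
                    // when a newer job with the same key is already queued.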
4951 if let Some(current_key) = &job.key
4952 && jobs
4953 .iter()
4954 .any(|other_job| other_job.key.as_ref() == Some(current_key))
4955 {
4956 continue;
4957 }
4958 (job.job)(state.clone(), cx).await;
4959 } else if let Some(job) = job_rx.next().await {
4960 jobs.push_back(job);
4961 } else {
4962 break;
4963 }
4964 }
4965 anyhow::Ok(())
4966 })
4967 .detach_and_log_err(cx);
4968
4969 job_tx
4970 }
4971
4972 fn load_staged_text(
4973 &mut self,
4974 buffer_id: BufferId,
4975 repo_path: RepoPath,
4976 cx: &App,
4977 ) -> Task<Result<Option<String>>> {
4978 let rx = self.send_job(None, move |state, _| async move {
4979 match state {
4980 RepositoryState::Local { backend, .. } => {
4981 anyhow::Ok(backend.load_index_text(repo_path).await)
4982 }
4983 RepositoryState::Remote { project_id, client } => {
4984 let response = client
4985 .request(proto::OpenUnstagedDiff {
4986 project_id: project_id.to_proto(),
4987 buffer_id: buffer_id.to_proto(),
4988 })
4989 .await?;
4990 Ok(response.staged_text)
4991 }
4992 }
4993 });
4994 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
4995 }
4996
4997 fn load_committed_text(
4998 &mut self,
4999 buffer_id: BufferId,
5000 repo_path: RepoPath,
5001 cx: &App,
5002 ) -> Task<Result<DiffBasesChange>> {
5003 let rx = self.send_job(None, move |state, _| async move {
5004 match state {
5005 RepositoryState::Local { backend, .. } => {
5006 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5007 let staged_text = backend.load_index_text(repo_path).await;
5008 let diff_bases_change = if committed_text == staged_text {
5009 DiffBasesChange::SetBoth(committed_text)
5010 } else {
5011 DiffBasesChange::SetEach {
5012 index: staged_text,
5013 head: committed_text,
5014 }
5015 };
5016 anyhow::Ok(diff_bases_change)
5017 }
5018 RepositoryState::Remote { project_id, client } => {
5019 use proto::open_uncommitted_diff_response::Mode;
5020
5021 let response = client
5022 .request(proto::OpenUncommittedDiff {
5023 project_id: project_id.to_proto(),
5024 buffer_id: buffer_id.to_proto(),
5025 })
5026 .await?;
5027 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5028 let bases = match mode {
5029 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5030 Mode::IndexAndHead => DiffBasesChange::SetEach {
5031 head: response.committed_text,
5032 index: response.staged_text,
5033 },
5034 };
5035 Ok(bases)
5036 }
5037 }
5038 });
5039
5040 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5043 let repository_id = self.snapshot.id;
5044 let rx = self.send_job(None, move |state, _| async move {
5045 match state {
5046 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5047 RepositoryState::Remote { client, project_id } => {
5048 let response = client
5049 .request(proto::GetBlobContent {
5050 project_id: project_id.to_proto(),
5051 repository_id: repository_id.0,
5052 oid: oid.to_string(),
5053 })
5054 .await?;
5055 Ok(response.content)
5056 }
5057 }
5058 });
5059 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5060 }
5061
5062 fn paths_changed(
5063 &mut self,
5064 paths: Vec<RepoPath>,
5065 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5066 cx: &mut Context<Self>,
5067 ) {
5068 self.paths_needing_status_update.extend(paths);
5069
5070 let this = cx.weak_entity();
5071 let _ = self.send_keyed_job(
5072 Some(GitJobKey::RefreshStatuses),
5073 None,
5074 |state, mut cx| async move {
5075 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5076 (
5077 this.snapshot.clone(),
5078 mem::take(&mut this.paths_needing_status_update),
5079 )
5080 })?;
5081 let RepositoryState::Local { backend, .. } = state else {
5082 bail!("not a local repository")
5083 };
5084
5085 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5086 if paths.is_empty() {
5087 return Ok(());
5088 }
5089 let statuses = backend.status(&paths).await?;
5090 let stash_entries = backend.stash_entries().await?;
5091
5092 let changed_path_statuses = cx
5093 .background_spawn(async move {
5094 let mut changed_path_statuses = Vec::new();
5095 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5096 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5097
5098 for (repo_path, status) in &*statuses.entries {
5099 changed_paths.remove(repo_path);
5100 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5101 && cursor.item().is_some_and(|entry| entry.status == *status)
5102 {
5103 continue;
5104 }
5105
5106 changed_path_statuses.push(Edit::Insert(StatusEntry {
5107 repo_path: repo_path.clone(),
5108 status: *status,
5109 }));
5110 }
5111 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5112 for path in changed_paths.into_iter() {
5113 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5114 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5115 }
5116 }
5117 changed_path_statuses
5118 })
5119 .await;
5120
5121 this.update(&mut cx, |this, cx| {
5122 if this.snapshot.stash_entries != stash_entries {
5123 cx.emit(RepositoryEvent::StashEntriesChanged);
5124 this.snapshot.stash_entries = stash_entries;
5125 }
5126
5127 if !changed_path_statuses.is_empty() {
5128 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5129 this.snapshot
5130 .statuses_by_path
5131 .edit(changed_path_statuses, ());
5132 this.snapshot.scan_id += 1;
5133 }
5134
5135 if let Some(updates_tx) = updates_tx {
5136 updates_tx
5137 .unbounded_send(DownstreamUpdate::UpdateRepository(
5138 this.snapshot.clone(),
5139 ))
5140 .ok();
5141 }
5142 })
5143 },
5144 );
5145 }
5146
    /// Returns the currently running git command, if any, along with the time it started.
5148 pub fn current_job(&self) -> Option<JobInfo> {
5149 self.active_jobs.values().next().cloned()
5150 }
5151
5152 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5153 self.send_job(None, |_, _| async {})
5154 }
5155}
5156
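/// Builds a permalink for a file that belongs to a crate extracted from the
/// Rust package registry, using the crate's `.cargo_vcs_info.json` and
/// `Cargo.toml` to recover the upstream repository URL and the commit sha.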
5157fn get_permalink_in_rust_registry_src(
5158 provider_registry: Arc<GitHostingProviderRegistry>,
5159 path: PathBuf,
5160 selection: Range<u32>,
5161) -> Result<url::Url> {
5162 #[derive(Deserialize)]
5163 struct CargoVcsGit {
5164 sha1: String,
5165 }
5166
5167 #[derive(Deserialize)]
5168 struct CargoVcsInfo {
5169 git: CargoVcsGit,
5170 path_in_vcs: String,
5171 }
5172
5173 #[derive(Deserialize)]
5174 struct CargoPackage {
5175 repository: String,
5176 }
5177
5178 #[derive(Deserialize)]
5179 struct CargoToml {
5180 package: CargoPackage,
5181 }
5182
5183 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5184 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5185 Some((dir, json))
5186 }) else {
5187 bail!("No .cargo_vcs_info.json found in parent directories")
5188 };
5189 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5190 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5191 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5192 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5193 .context("parsing package.repository field of manifest")?;
5194 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5195 let permalink = provider.build_permalink(
5196 remote,
5197 BuildPermalinkParams::new(
5198 &cargo_vcs_info.git.sha1,
5199 &RepoPath(
5200 RelPath::new(&path, PathStyle::local())
5201 .context("invalid path")?
5202 .into_arc(),
5203 ),
5204 Some(selection),
5205 ),
5206 );
5207 Ok(permalink)
5208}
5209
5210fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5211 let Some(blame) = blame else {
5212 return proto::BlameBufferResponse {
5213 blame_response: None,
5214 };
5215 };
5216
5217 let entries = blame
5218 .entries
5219 .into_iter()
5220 .map(|entry| proto::BlameEntry {
5221 sha: entry.sha.as_bytes().into(),
5222 start_line: entry.range.start,
5223 end_line: entry.range.end,
5224 original_line_number: entry.original_line_number,
5225 author: entry.author,
5226 author_mail: entry.author_mail,
5227 author_time: entry.author_time,
5228 author_tz: entry.author_tz,
5229 committer: entry.committer_name,
5230 committer_mail: entry.committer_email,
5231 committer_time: entry.committer_time,
5232 committer_tz: entry.committer_tz,
5233 summary: entry.summary,
5234 previous: entry.previous,
5235 filename: entry.filename,
5236 })
5237 .collect::<Vec<_>>();
5238
5239 let messages = blame
5240 .messages
5241 .into_iter()
5242 .map(|(oid, message)| proto::CommitMessage {
5243 oid: oid.as_bytes().into(),
5244 message,
5245 })
5246 .collect::<Vec<_>>();
5247
5248 proto::BlameBufferResponse {
5249 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5250 entries,
5251 messages,
5252 remote_url: blame.remote_url,
5253 }),
5254 }
5255}
5256
5257fn deserialize_blame_buffer_response(
5258 response: proto::BlameBufferResponse,
5259) -> Option<git::blame::Blame> {
5260 let response = response.blame_response?;
5261 let entries = response
5262 .entries
5263 .into_iter()
5264 .filter_map(|entry| {
5265 Some(git::blame::BlameEntry {
5266 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5267 range: entry.start_line..entry.end_line,
5268 original_line_number: entry.original_line_number,
5269 committer_name: entry.committer,
5270 committer_time: entry.committer_time,
5271 committer_tz: entry.committer_tz,
5272 committer_email: entry.committer_mail,
5273 author: entry.author,
5274 author_mail: entry.author_mail,
5275 author_time: entry.author_time,
5276 author_tz: entry.author_tz,
5277 summary: entry.summary,
5278 previous: entry.previous,
5279 filename: entry.filename,
5280 })
5281 })
5282 .collect::<Vec<_>>();
5283
5284 let messages = response
5285 .messages
5286 .into_iter()
5287 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5288 .collect::<HashMap<_, _>>();
5289
5290 Some(Blame {
5291 entries,
5292 messages,
5293 remote_url: response.remote_url,
5294 })
5295}
5296
5297fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5298 proto::Branch {
5299 is_head: branch.is_head,
5300 ref_name: branch.ref_name.to_string(),
5301 unix_timestamp: branch
5302 .most_recent_commit
5303 .as_ref()
5304 .map(|commit| commit.commit_timestamp as u64),
5305 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5306 ref_name: upstream.ref_name.to_string(),
5307 tracking: upstream
5308 .tracking
5309 .status()
5310 .map(|upstream| proto::UpstreamTracking {
5311 ahead: upstream.ahead as u64,
5312 behind: upstream.behind as u64,
5313 }),
5314 }),
5315 most_recent_commit: branch
5316 .most_recent_commit
5317 .as_ref()
5318 .map(|commit| proto::CommitSummary {
5319 sha: commit.sha.to_string(),
5320 subject: commit.subject.to_string(),
5321 commit_timestamp: commit.commit_timestamp,
5322 author_name: commit.author_name.to_string(),
5323 }),
5324 }
5325}
5326
5327fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5328 git::repository::Branch {
5329 is_head: proto.is_head,
5330 ref_name: proto.ref_name.clone().into(),
5331 upstream: proto
5332 .upstream
5333 .as_ref()
5334 .map(|upstream| git::repository::Upstream {
5335 ref_name: upstream.ref_name.to_string().into(),
5336 tracking: upstream
5337 .tracking
5338 .as_ref()
5339 .map(|tracking| {
5340 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5341 ahead: tracking.ahead as u32,
5342 behind: tracking.behind as u32,
5343 })
5344 })
5345 .unwrap_or(git::repository::UpstreamTracking::Gone),
5346 }),
5347 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5348 git::repository::CommitSummary {
5349 sha: commit.sha.to_string().into(),
5350 subject: commit.subject.to_string().into(),
5351 commit_timestamp: commit.commit_timestamp,
5352 author_name: commit.author_name.to_string().into(),
5353 has_parent: true,
5354 }
5355 }),
5356 }
5357}
5358
5359fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5360 proto::GitCommitDetails {
5361 sha: commit.sha.to_string(),
5362 message: commit.message.to_string(),
5363 commit_timestamp: commit.commit_timestamp,
5364 author_email: commit.author_email.to_string(),
5365 author_name: commit.author_name.to_string(),
5366 }
5367}
5368
5369fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5370 CommitDetails {
5371 sha: proto.sha.clone().into(),
5372 message: proto.message.clone().into(),
5373 commit_timestamp: proto.commit_timestamp,
5374 author_email: proto.author_email.clone().into(),
5375 author_name: proto.author_name.clone().into(),
5376 }
5377}
5378
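/// Recomputes a full repository snapshot (branch, head commit, statuses, merge
/// details, and stash entries) from the backend, returning the new snapshot
/// together with the `RepositoryEvent`s that should be emitted for it.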
5379async fn compute_snapshot(
5380 id: RepositoryId,
5381 work_directory_abs_path: Arc<Path>,
5382 prev_snapshot: RepositorySnapshot,
5383 backend: Arc<dyn GitRepository>,
5384) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5385 let mut events = Vec::new();
5386 let branches = backend.branches().await?;
5387 let branch = branches.into_iter().find(|branch| branch.is_head);
5388 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5389 let stash_entries = backend.stash_entries().await?;
5390 let statuses_by_path = SumTree::from_iter(
5391 statuses
5392 .entries
5393 .iter()
5394 .map(|(repo_path, status)| StatusEntry {
5395 repo_path: repo_path.clone(),
5396 status: *status,
5397 }),
5398 (),
5399 );
5400 let (merge_details, merge_heads_changed) =
5401 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5402 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5403
5404 let pending_ops = prev_snapshot.pending_ops.clone();
5405
5406 if merge_heads_changed {
5407 events.push(RepositoryEvent::MergeHeadsChanged);
5408 }
5409
5410 if statuses_by_path != prev_snapshot.statuses_by_path {
5411 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5412 }
5413
    // Load the head commit directly; useful when `branch` is `None`, e.g. in a
    // detached HEAD state.
5415 let head_commit = match backend.head_sha().await {
5416 Some(head_sha) => backend.show(head_sha).await.log_err(),
5417 None => None,
5418 };
5419
5420 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5421 events.push(RepositoryEvent::BranchChanged);
5422 }
5423
5424 // Used by edit prediction data collection
5425 let remote_origin_url = backend.remote_url("origin");
5426 let remote_upstream_url = backend.remote_url("upstream");
5427
5428 let snapshot = RepositorySnapshot {
5429 id,
5430 statuses_by_path,
5431 pending_ops,
5432 work_directory_abs_path,
5433 path_style: prev_snapshot.path_style,
5434 scan_id: prev_snapshot.scan_id + 1,
5435 branch,
5436 head_commit,
5437 merge: merge_details,
5438 remote_origin_url,
5439 remote_upstream_url,
5440 stash_entries,
5441 };
5442
5443 Ok((snapshot, events))
5444}
5445
5446fn status_from_proto(
5447 simple_status: i32,
5448 status: Option<proto::GitFileStatus>,
5449) -> anyhow::Result<FileStatus> {
5450 use proto::git_file_status::Variant;
5451
5452 let Some(variant) = status.and_then(|status| status.variant) else {
5453 let code = proto::GitStatus::from_i32(simple_status)
5454 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5455 let result = match code {
5456 proto::GitStatus::Added => TrackedStatus {
5457 worktree_status: StatusCode::Added,
5458 index_status: StatusCode::Unmodified,
5459 }
5460 .into(),
5461 proto::GitStatus::Modified => TrackedStatus {
5462 worktree_status: StatusCode::Modified,
5463 index_status: StatusCode::Unmodified,
5464 }
5465 .into(),
5466 proto::GitStatus::Conflict => UnmergedStatus {
5467 first_head: UnmergedStatusCode::Updated,
5468 second_head: UnmergedStatusCode::Updated,
5469 }
5470 .into(),
5471 proto::GitStatus::Deleted => TrackedStatus {
5472 worktree_status: StatusCode::Deleted,
5473 index_status: StatusCode::Unmodified,
5474 }
5475 .into(),
5476 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5477 };
5478 return Ok(result);
5479 };
5480
5481 let result = match variant {
5482 Variant::Untracked(_) => FileStatus::Untracked,
5483 Variant::Ignored(_) => FileStatus::Ignored,
5484 Variant::Unmerged(unmerged) => {
5485 let [first_head, second_head] =
5486 [unmerged.first_head, unmerged.second_head].map(|head| {
5487 let code = proto::GitStatus::from_i32(head)
5488 .with_context(|| format!("Invalid git status code: {head}"))?;
5489 let result = match code {
5490 proto::GitStatus::Added => UnmergedStatusCode::Added,
5491 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5492 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5493 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5494 };
5495 Ok(result)
5496 });
5497 let [first_head, second_head] = [first_head?, second_head?];
5498 UnmergedStatus {
5499 first_head,
5500 second_head,
5501 }
5502 .into()
5503 }
5504 Variant::Tracked(tracked) => {
5505 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5506 .map(|status| {
5507 let code = proto::GitStatus::from_i32(status)
5508 .with_context(|| format!("Invalid git status code: {status}"))?;
5509 let result = match code {
5510 proto::GitStatus::Modified => StatusCode::Modified,
5511 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5512 proto::GitStatus::Added => StatusCode::Added,
5513 proto::GitStatus::Deleted => StatusCode::Deleted,
5514 proto::GitStatus::Renamed => StatusCode::Renamed,
5515 proto::GitStatus::Copied => StatusCode::Copied,
5516 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5517 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5518 };
5519 Ok(result)
5520 });
5521 let [index_status, worktree_status] = [index_status?, worktree_status?];
5522 TrackedStatus {
5523 index_status,
5524 worktree_status,
5525 }
5526 .into()
5527 }
5528 };
5529 Ok(result)
5530}
5531
5532fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5533 use proto::git_file_status::{Tracked, Unmerged, Variant};
5534
5535 let variant = match status {
5536 FileStatus::Untracked => Variant::Untracked(Default::default()),
5537 FileStatus::Ignored => Variant::Ignored(Default::default()),
5538 FileStatus::Unmerged(UnmergedStatus {
5539 first_head,
5540 second_head,
5541 }) => Variant::Unmerged(Unmerged {
5542 first_head: unmerged_status_to_proto(first_head),
5543 second_head: unmerged_status_to_proto(second_head),
5544 }),
5545 FileStatus::Tracked(TrackedStatus {
5546 index_status,
5547 worktree_status,
5548 }) => Variant::Tracked(Tracked {
5549 index_status: tracked_status_to_proto(index_status),
5550 worktree_status: tracked_status_to_proto(worktree_status),
5551 }),
5552 };
5553 proto::GitFileStatus {
5554 variant: Some(variant),
5555 }
5556}
5557
5558fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5559 match code {
5560 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5561 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5562 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5563 }
5564}
5565
5566fn tracked_status_to_proto(code: StatusCode) -> i32 {
5567 match code {
5568 StatusCode::Added => proto::GitStatus::Added as _,
5569 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5570 StatusCode::Modified => proto::GitStatus::Modified as _,
5571 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5572 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5573 StatusCode::Copied => proto::GitStatus::Copied as _,
5574 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5575 }
5576}