1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
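/// Tracks the state of the git repositories across all of a project's worktrees,
/// along with the per-buffer diffs, conflict sets, and downstream-sharing state
/// derived from them.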
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
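/// The unstaged and uncommitted diffs for a buffer that have been shared with a
/// remote peer.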
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
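/// Git-related state tracked for a single open buffer: its unstaged and
/// uncommitted diffs, its conflict markers, and the head/index base texts they
/// are computed from.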
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
120 /// These operation counts are used to ensure that head and index text
121 /// values read from the git repository are up-to-date with any hunk staging
122 /// operations that have been performed on the BufferDiff.
123 ///
124 /// The operation count is incremented immediately when the user initiates a
125 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
127 /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
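/// A change to the base texts (index and/or HEAD) that a buffer's diffs are
/// computed against.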
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
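/// Which base a buffer diff is computed against: the index (unstaged) or HEAD
/// (uncommitted).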
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
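/// Whether this store operates on local repositories directly or proxies git
/// operations to an upstream project.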
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
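/// A checkpoint of every repository in the project, keyed by working-directory
/// path.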
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
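/// A path within a repository together with its git status.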
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.0.clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.0.clone())
241 }
242}
243
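/// A unique identifier for a git repository within a project.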
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
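/// Details of an in-progress merge: the conflicted paths, the merge message,
/// and the merge heads.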
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
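/// An immutable snapshot of a repository's state: path statuses, pending
/// operations, branch and head commit, merge details, remote URLs, and stash
/// entries.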
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub pending_ops_by_path: SumTree<PendingOps>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
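/// Metadata about a git job that is currently running, used for progress
/// reporting.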
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
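/// A live handle to a single git repository: its latest snapshot plus the job
/// queue and askpass state needed to run git operations against it.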
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
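/// How git commands are executed for a repository: directly against a local
/// backend, or forwarded to a remote project over RPC.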
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
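/// An event emitted by a [`Repository`] when part of its snapshot changes.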
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged {
316 // TODO could report which statuses changed here
317 full_scan: bool,
318 },
319 MergeHeadsChanged,
320 BranchChanged,
321 StashEntriesChanged,
322 PendingOpsChanged {
323 pending_ops: SumTree<pending_op::PendingOps>,
324 },
325}
326
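/// An event emitted by a [`Repository`] when its set of active jobs changes.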
327#[derive(Clone, Debug)]
328pub struct JobsUpdated;
329
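/// An event emitted by the [`GitStore`] as repositories are added, removed, or
/// updated, and as diffs, jobs, and conflicts change.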
330#[derive(Debug)]
331pub enum GitStoreEvent {
332 ActiveRepositoryChanged(Option<RepositoryId>),
333 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
334 RepositoryAdded,
335 RepositoryRemoved(RepositoryId),
336 IndexWriteError(anyhow::Error),
337 JobsUpdated,
338 ConflictsUpdated,
339}
340
341impl EventEmitter<RepositoryEvent> for Repository {}
342impl EventEmitter<JobsUpdated> for Repository {}
343impl EventEmitter<GitStoreEvent> for GitStore {}
344
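/// A queued unit of git work, optionally tagged with a [`GitJobKey`]
/// identifying the kind of work it performs.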
345pub struct GitJob {
346 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
347 key: Option<GitJobKey>,
348}
349
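/// Identifies the kind of work a [`GitJob`] performs.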
350#[derive(PartialEq, Eq)]
351enum GitJobKey {
352 WriteIndex(Vec<RepoPath>),
353 ReloadBufferDiffBases,
354 RefreshStatuses,
355 ReloadGitState,
356}
357
358impl GitStore {
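    /// Creates a git store for a project whose repositories live on the local
    /// filesystem.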
359 pub fn local(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 environment: Entity<ProjectEnvironment>,
363 fs: Arc<dyn Fs>,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Local {
370 next_repository_id: Arc::new(AtomicU64::new(1)),
371 downstream: None,
372 project_environment: environment,
373 fs,
374 },
375 cx,
376 )
377 }
378
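    /// Creates a git store that forwards git operations to an upstream project
    /// over the given client.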
379 pub fn remote(
380 worktree_store: &Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 upstream_client: AnyProtoClient,
383 project_id: u64,
384 cx: &mut Context<Self>,
385 ) -> Self {
386 Self::new(
387 worktree_store.clone(),
388 buffer_store,
389 GitStoreState::Remote {
390 upstream_client,
391 upstream_project_id: project_id,
392 downstream: None,
393 },
394 cx,
395 )
396 }
397
398 fn new(
399 worktree_store: Entity<WorktreeStore>,
400 buffer_store: Entity<BufferStore>,
401 state: GitStoreState,
402 cx: &mut Context<Self>,
403 ) -> Self {
404 let _subscriptions = vec![
405 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
406 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
407 ];
408
409 GitStore {
410 state,
411 buffer_store,
412 worktree_store,
413 repositories: HashMap::default(),
414 worktree_ids: HashMap::default(),
415 active_repo_id: None,
416 _subscriptions,
417 loading_diffs: HashMap::default(),
418 shared_diffs: HashMap::default(),
419 diffs: HashMap::default(),
420 }
421 }
422
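    /// Registers the RPC handlers used to serve git requests from remote peers.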
423 pub fn init(client: &AnyProtoClient) {
424 client.add_entity_request_handler(Self::handle_get_remotes);
425 client.add_entity_request_handler(Self::handle_get_branches);
426 client.add_entity_request_handler(Self::handle_get_default_branch);
427 client.add_entity_request_handler(Self::handle_change_branch);
428 client.add_entity_request_handler(Self::handle_create_branch);
429 client.add_entity_request_handler(Self::handle_rename_branch);
430 client.add_entity_request_handler(Self::handle_git_init);
431 client.add_entity_request_handler(Self::handle_push);
432 client.add_entity_request_handler(Self::handle_pull);
433 client.add_entity_request_handler(Self::handle_fetch);
434 client.add_entity_request_handler(Self::handle_stage);
435 client.add_entity_request_handler(Self::handle_unstage);
436 client.add_entity_request_handler(Self::handle_stash);
437 client.add_entity_request_handler(Self::handle_stash_pop);
438 client.add_entity_request_handler(Self::handle_stash_apply);
439 client.add_entity_request_handler(Self::handle_stash_drop);
440 client.add_entity_request_handler(Self::handle_commit);
441 client.add_entity_request_handler(Self::handle_reset);
442 client.add_entity_request_handler(Self::handle_show);
443 client.add_entity_request_handler(Self::handle_load_commit_diff);
444 client.add_entity_request_handler(Self::handle_checkout_files);
445 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
446 client.add_entity_request_handler(Self::handle_set_index_text);
447 client.add_entity_request_handler(Self::handle_askpass);
448 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
449 client.add_entity_request_handler(Self::handle_git_diff);
450 client.add_entity_request_handler(Self::handle_tree_diff);
451 client.add_entity_request_handler(Self::handle_get_blob_content);
452 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
453 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
454 client.add_entity_message_handler(Self::handle_update_diff_bases);
455 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
456 client.add_entity_request_handler(Self::handle_blame_buffer);
457 client.add_entity_message_handler(Self::handle_update_repository);
458 client.add_entity_message_handler(Self::handle_remove_repository);
459 client.add_entity_request_handler(Self::handle_git_clone);
460 client.add_entity_request_handler(Self::handle_get_worktrees);
461 client.add_entity_request_handler(Self::handle_create_worktree);
462 }
463
464 pub fn is_local(&self) -> bool {
465 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
468 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
469 let id = repo.read(cx).id;
470 if self.active_repo_id != Some(id) {
471 self.active_repo_id = Some(id);
472 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
473 }
474 }
475 }
476
477 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
478 match &mut self.state {
479 GitStoreState::Remote {
480 downstream: downstream_client,
481 ..
482 } => {
483 for repo in self.repositories.values() {
484 let update = repo.read(cx).snapshot.initial_update(project_id);
485 for update in split_repository_update(update) {
486 client.send(update).log_err();
487 }
488 }
489 *downstream_client = Some((client, ProjectId(project_id)));
490 }
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 let mut snapshots = HashMap::default();
496 let (updates_tx, mut updates_rx) = mpsc::unbounded();
497 for repo in self.repositories.values() {
498 updates_tx
499 .unbounded_send(DownstreamUpdate::UpdateRepository(
500 repo.read(cx).snapshot.clone(),
501 ))
502 .ok();
503 }
504 *downstream_client = Some(LocalDownstreamState {
505 client: client.clone(),
506 project_id: ProjectId(project_id),
507 updates_tx,
508 _task: cx.spawn(async move |this, cx| {
509 cx.background_spawn(async move {
510 while let Some(update) = updates_rx.next().await {
511 match update {
512 DownstreamUpdate::UpdateRepository(snapshot) => {
513 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
514 {
515 let update =
516 snapshot.build_update(old_snapshot, project_id);
517 *old_snapshot = snapshot;
518 for update in split_repository_update(update) {
519 client.send(update)?;
520 }
521 } else {
522 let update = snapshot.initial_update(project_id);
523 for update in split_repository_update(update) {
524 client.send(update)?;
525 }
526 snapshots.insert(snapshot.id, snapshot);
527 }
528 }
529 DownstreamUpdate::RemoveRepository(id) => {
530 client.send(proto::RemoveRepository {
531 project_id,
532 id: id.to_proto(),
533 })?;
534 }
535 }
536 }
537 anyhow::Ok(())
538 })
539 .await
540 .ok();
541 this.update(cx, |this, _| {
542 if let GitStoreState::Local {
543 downstream: downstream_client,
544 ..
545 } = &mut this.state
546 {
547 downstream_client.take();
548 } else {
549 unreachable!("unshared called on remote store");
550 }
551 })
552 }),
553 });
554 }
555 }
556 }
557
558 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
559 match &mut self.state {
560 GitStoreState::Local {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 GitStoreState::Remote {
567 downstream: downstream_client,
568 ..
569 } => {
570 downstream_client.take();
571 }
572 }
573 self.shared_diffs.clear();
574 }
575
576 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
577 self.shared_diffs.remove(peer_id);
578 }
579
580 pub fn active_repository(&self) -> Option<Entity<Repository>> {
581 self.active_repo_id
582 .as_ref()
583 .map(|id| self.repositories[id].clone())
584 }
585
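    /// Returns the diff of the buffer's contents against its index text,
    /// creating and loading it if it is not already open.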
586 pub fn open_unstaged_diff(
587 &mut self,
588 buffer: Entity<Buffer>,
589 cx: &mut Context<Self>,
590 ) -> Task<Result<Entity<BufferDiff>>> {
591 let buffer_id = buffer.read(cx).remote_id();
592 if let Some(diff_state) = self.diffs.get(&buffer_id)
593 && let Some(unstaged_diff) = diff_state
594 .read(cx)
595 .unstaged_diff
596 .as_ref()
597 .and_then(|weak| weak.upgrade())
598 {
599 if let Some(task) =
600 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
601 {
602 return cx.background_executor().spawn(async move {
603 task.await;
604 Ok(unstaged_diff)
605 });
606 }
607 return Task::ready(Ok(unstaged_diff));
608 }
609
610 let Some((repo, repo_path)) =
611 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
612 else {
613 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
614 };
615
616 let task = self
617 .loading_diffs
618 .entry((buffer_id, DiffKind::Unstaged))
619 .or_insert_with(|| {
620 let staged_text = repo.update(cx, |repo, cx| {
621 repo.load_staged_text(buffer_id, repo_path, cx)
622 });
623 cx.spawn(async move |this, cx| {
624 Self::open_diff_internal(
625 this,
626 DiffKind::Unstaged,
627 staged_text.await.map(DiffBasesChange::SetIndex),
628 buffer,
629 cx,
630 )
631 .await
632 .map_err(Arc::new)
633 })
634 .shared()
635 })
636 .clone();
637
638 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
639 }
640
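    /// Returns a diff of the buffer's contents against the git blob with the
    /// given OID, or against no base text if `oid` is `None`.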
641 pub fn open_diff_since(
642 &mut self,
643 oid: Option<git::Oid>,
644 buffer: Entity<Buffer>,
645 repo: Entity<Repository>,
646 languages: Arc<LanguageRegistry>,
647 cx: &mut Context<Self>,
648 ) -> Task<Result<Entity<BufferDiff>>> {
649 cx.spawn(async move |this, cx| {
650 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
651 let content = match oid {
652 None => None,
653 Some(oid) => Some(
654 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
655 .await?,
656 ),
657 };
658 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
659
660 buffer_diff
661 .update(cx, |buffer_diff, cx| {
662 buffer_diff.set_base_text(
663 content.map(Arc::new),
664 buffer_snapshot.language().cloned(),
665 Some(languages.clone()),
666 buffer_snapshot.text,
667 cx,
668 )
669 })?
670 .await?;
671 let unstaged_diff = this
672 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
673 .await?;
674 buffer_diff.update(cx, |buffer_diff, _| {
675 buffer_diff.set_secondary_diff(unstaged_diff);
676 })?;
677
678 this.update(cx, |_, cx| {
679 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
680 .detach();
681 })?;
682
683 Ok(buffer_diff)
684 })
685 }
686
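    /// Returns the diff of the buffer's contents against its HEAD text,
    /// creating and loading it if it is not already open.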
687 pub fn open_uncommitted_diff(
688 &mut self,
689 buffer: Entity<Buffer>,
690 cx: &mut Context<Self>,
691 ) -> Task<Result<Entity<BufferDiff>>> {
692 let buffer_id = buffer.read(cx).remote_id();
693
694 if let Some(diff_state) = self.diffs.get(&buffer_id)
695 && let Some(uncommitted_diff) = diff_state
696 .read(cx)
697 .uncommitted_diff
698 .as_ref()
699 .and_then(|weak| weak.upgrade())
700 {
701 if let Some(task) =
702 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
703 {
704 return cx.background_executor().spawn(async move {
705 task.await;
706 Ok(uncommitted_diff)
707 });
708 }
709 return Task::ready(Ok(uncommitted_diff));
710 }
711
712 let Some((repo, repo_path)) =
713 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
714 else {
715 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
716 };
717
718 let task = self
719 .loading_diffs
720 .entry((buffer_id, DiffKind::Uncommitted))
721 .or_insert_with(|| {
722 let changes = repo.update(cx, |repo, cx| {
723 repo.load_committed_text(buffer_id, repo_path, cx)
724 });
725
726 // todo(lw): hot foreground spawn
727 cx.spawn(async move |this, cx| {
728 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
729 .await
730 .map_err(Arc::new)
731 })
732 .shared()
733 })
734 .clone();
735
736 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
737 }
738
739 async fn open_diff_internal(
740 this: WeakEntity<Self>,
741 kind: DiffKind,
742 texts: Result<DiffBasesChange>,
743 buffer_entity: Entity<Buffer>,
744 cx: &mut AsyncApp,
745 ) -> Result<Entity<BufferDiff>> {
746 let diff_bases_change = match texts {
747 Err(e) => {
748 this.update(cx, |this, cx| {
749 let buffer = buffer_entity.read(cx);
750 let buffer_id = buffer.remote_id();
751 this.loading_diffs.remove(&(buffer_id, kind));
752 })?;
753 return Err(e);
754 }
755 Ok(change) => change,
756 };
757
758 this.update(cx, |this, cx| {
759 let buffer = buffer_entity.read(cx);
760 let buffer_id = buffer.remote_id();
761 let language = buffer.language().cloned();
762 let language_registry = buffer.language_registry();
763 let text_snapshot = buffer.text_snapshot();
764 this.loading_diffs.remove(&(buffer_id, kind));
765
766 let git_store = cx.weak_entity();
767 let diff_state = this
768 .diffs
769 .entry(buffer_id)
770 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
771
772 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
773
774 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
775 diff_state.update(cx, |diff_state, cx| {
776 diff_state.language = language;
777 diff_state.language_registry = language_registry;
778
779 match kind {
780 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
781 DiffKind::Uncommitted => {
782 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
783 diff
784 } else {
785 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
786 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
787 unstaged_diff
788 };
789
790 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
791 diff_state.uncommitted_diff = Some(diff.downgrade())
792 }
793 }
794
795 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
796 let rx = diff_state.wait_for_recalculation();
797
798 anyhow::Ok(async move {
799 if let Some(rx) = rx {
800 rx.await;
801 }
802 Ok(diff)
803 })
804 })
805 })??
806 .await
807 }
808
809 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
810 let diff_state = self.diffs.get(&buffer_id)?;
811 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
812 }
813
814 pub fn get_uncommitted_diff(
815 &self,
816 buffer_id: BufferId,
817 cx: &App,
818 ) -> Option<Entity<BufferDiff>> {
819 let diff_state = self.diffs.get(&buffer_id)?;
820 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
821 }
822
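    /// Returns the conflict set for the buffer, creating it if necessary, and
    /// schedules a reparse of the buffer's conflict markers.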
823 pub fn open_conflict_set(
824 &mut self,
825 buffer: Entity<Buffer>,
826 cx: &mut Context<Self>,
827 ) -> Entity<ConflictSet> {
828 log::debug!("open conflict set");
829 let buffer_id = buffer.read(cx).remote_id();
830
831 if let Some(git_state) = self.diffs.get(&buffer_id)
832 && let Some(conflict_set) = git_state
833 .read(cx)
834 .conflict_set
835 .as_ref()
836 .and_then(|weak| weak.upgrade())
837 {
838 let conflict_set = conflict_set;
839 let buffer_snapshot = buffer.read(cx).text_snapshot();
840
841 git_state.update(cx, |state, cx| {
842 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
843 });
844
845 return conflict_set;
846 }
847
848 let is_unmerged = self
849 .repository_and_path_for_buffer_id(buffer_id, cx)
850 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
851 let git_store = cx.weak_entity();
852 let buffer_git_state = self
853 .diffs
854 .entry(buffer_id)
855 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
856 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
857
858 self._subscriptions
859 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
860 cx.emit(GitStoreEvent::ConflictsUpdated);
861 }));
862
863 buffer_git_state.update(cx, |state, cx| {
864 state.conflict_set = Some(conflict_set.downgrade());
865 let buffer_snapshot = buffer.read(cx).text_snapshot();
866 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
867 });
868
869 conflict_set
870 }
871
872 pub fn project_path_git_status(
873 &self,
874 project_path: &ProjectPath,
875 cx: &App,
876 ) -> Option<FileStatus> {
877 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
878 Some(repo.read(cx).status_for_path(&repo_path)?.status)
879 }
880
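    /// Captures a checkpoint of every repository in the project, which can later
    /// be compared or restored via [`Self::compare_checkpoints`] and
    /// [`Self::restore_checkpoint`].
    ///
    /// Illustrative sketch (not a doctest; assumes an `Entity<GitStore>` named
    /// `git_store` and an async context `cx`):
    ///
    /// ```ignore
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...modify the working tree...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```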
881 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
882 let mut work_directory_abs_paths = Vec::new();
883 let mut checkpoints = Vec::new();
884 for repository in self.repositories.values() {
885 repository.update(cx, |repository, _| {
886 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
887 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
888 });
889 }
890
891 cx.background_executor().spawn(async move {
892 let checkpoints = future::try_join_all(checkpoints).await?;
893 Ok(GitStoreCheckpoint {
894 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
895 .into_iter()
896 .zip(checkpoints)
897 .collect(),
898 })
899 })
900 }
901
902 pub fn restore_checkpoint(
903 &self,
904 checkpoint: GitStoreCheckpoint,
905 cx: &mut App,
906 ) -> Task<Result<()>> {
907 let repositories_by_work_dir_abs_path = self
908 .repositories
909 .values()
910 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
911 .collect::<HashMap<_, _>>();
912
913 let mut tasks = Vec::new();
914 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
915 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
916 let restore = repository.update(cx, |repository, _| {
917 repository.restore_checkpoint(checkpoint)
918 });
919 tasks.push(async move { restore.await? });
920 }
921 }
922 cx.background_spawn(async move {
923 future::try_join_all(tasks).await?;
924 Ok(())
925 })
926 }
927
928 /// Compares two checkpoints, returning true if they are equal.
929 pub fn compare_checkpoints(
930 &self,
931 left: GitStoreCheckpoint,
932 mut right: GitStoreCheckpoint,
933 cx: &mut App,
934 ) -> Task<Result<bool>> {
935 let repositories_by_work_dir_abs_path = self
936 .repositories
937 .values()
938 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
939 .collect::<HashMap<_, _>>();
940
941 let mut tasks = Vec::new();
942 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
943 if let Some(right_checkpoint) = right
944 .checkpoints_by_work_dir_abs_path
945 .remove(&work_dir_abs_path)
946 {
947 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
948 {
949 let compare = repository.update(cx, |repository, _| {
950 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
951 });
952
953 tasks.push(async move { compare.await? });
954 }
955 } else {
956 return Task::ready(Ok(false));
957 }
958 }
959 cx.background_spawn(async move {
960 Ok(future::try_join_all(tasks)
961 .await?
962 .into_iter()
963 .all(|result| result))
964 })
965 }
966
967 /// Blames a buffer.
968 pub fn blame_buffer(
969 &self,
970 buffer: &Entity<Buffer>,
971 version: Option<clock::Global>,
972 cx: &mut App,
973 ) -> Task<Result<Option<Blame>>> {
974 let buffer = buffer.read(cx);
975 let Some((repo, repo_path)) =
976 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
977 else {
978 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
979 };
980 let content = match &version {
981 Some(version) => buffer.rope_for_version(version),
982 None => buffer.as_rope().clone(),
983 };
984 let version = version.unwrap_or(buffer.version());
985 let buffer_id = buffer.remote_id();
986
987 let rx = repo.update(cx, |repo, _| {
988 repo.send_job(None, move |state, _| async move {
989 match state {
990 RepositoryState::Local { backend, .. } => backend
991 .blame(repo_path.clone(), content)
992 .await
993 .with_context(|| format!("Failed to blame {:?}", repo_path.0))
994 .map(Some),
995 RepositoryState::Remote { project_id, client } => {
996 let response = client
997 .request(proto::BlameBuffer {
998 project_id: project_id.to_proto(),
999 buffer_id: buffer_id.into(),
1000 version: serialize_version(&version),
1001 })
1002 .await?;
1003 Ok(deserialize_blame_buffer_response(response))
1004 }
1005 }
1006 })
1007 });
1008
1009 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1010 }
1011
1012 pub fn get_permalink_to_line(
1013 &self,
1014 buffer: &Entity<Buffer>,
1015 selection: Range<u32>,
1016 cx: &mut App,
1017 ) -> Task<Result<url::Url>> {
1018 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1019 return Task::ready(Err(anyhow!("buffer has no file")));
1020 };
1021
1022 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1023 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1024 cx,
1025 ) else {
1026 // If we're not in a Git repo, check whether this is a Rust source
1027 // file in the Cargo registry (presumably opened with go-to-definition
1028 // from a normal Rust file). If so, we can put together a permalink
1029 // using crate metadata.
1030 if buffer
1031 .read(cx)
1032 .language()
1033 .is_none_or(|lang| lang.name() != "Rust".into())
1034 {
1035 return Task::ready(Err(anyhow!("no permalink available")));
1036 }
1037 let file_path = file.worktree.read(cx).absolutize(&file.path);
1038 return cx.spawn(async move |cx| {
1039 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1040 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1041 .context("no permalink available")
1042 });
1043 };
1044
1045 let buffer_id = buffer.read(cx).remote_id();
1046 let branch = repo.read(cx).branch.clone();
1047 let remote = branch
1048 .as_ref()
1049 .and_then(|b| b.upstream.as_ref())
1050 .and_then(|b| b.remote_name())
1051 .unwrap_or("origin")
1052 .to_string();
1053
1054 let rx = repo.update(cx, |repo, _| {
1055 repo.send_job(None, move |state, cx| async move {
1056 match state {
1057 RepositoryState::Local { backend, .. } => {
1058 let origin_url = backend
1059 .remote_url(&remote)
1060 .with_context(|| format!("remote \"{remote}\" not found"))?;
1061
1062 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1063
1064 let provider_registry =
1065 cx.update(GitHostingProviderRegistry::default_global)?;
1066
1067 let (provider, remote) =
1068 parse_git_remote_url(provider_registry, &origin_url)
1069 .context("parsing Git remote URL")?;
1070
1071 Ok(provider.build_permalink(
1072 remote,
1073 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1074 ))
1075 }
1076 RepositoryState::Remote { project_id, client } => {
1077 let response = client
1078 .request(proto::GetPermalinkToLine {
1079 project_id: project_id.to_proto(),
1080 buffer_id: buffer_id.into(),
1081 selection: Some(proto::Range {
1082 start: selection.start as u64,
1083 end: selection.end as u64,
1084 }),
1085 })
1086 .await?;
1087
1088 url::Url::parse(&response.permalink).context("failed to parse permalink")
1089 }
1090 }
1091 })
1092 });
1093 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1094 }
1095
1096 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1097 match &self.state {
1098 GitStoreState::Local {
1099 downstream: downstream_client,
1100 ..
1101 } => downstream_client
1102 .as_ref()
1103 .map(|state| (state.client.clone(), state.project_id)),
1104 GitStoreState::Remote {
1105 downstream: downstream_client,
1106 ..
1107 } => downstream_client.clone(),
1108 }
1109 }
1110
1111 fn upstream_client(&self) -> Option<AnyProtoClient> {
1112 match &self.state {
1113 GitStoreState::Local { .. } => None,
1114 GitStoreState::Remote {
1115 upstream_client, ..
1116 } => Some(upstream_client.clone()),
1117 }
1118 }
1119
1120 fn on_worktree_store_event(
1121 &mut self,
1122 worktree_store: Entity<WorktreeStore>,
1123 event: &WorktreeStoreEvent,
1124 cx: &mut Context<Self>,
1125 ) {
1126 let GitStoreState::Local {
1127 project_environment,
1128 downstream,
1129 next_repository_id,
1130 fs,
1131 } = &self.state
1132 else {
1133 return;
1134 };
1135
1136 match event {
1137 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1138 if let Some(worktree) = self
1139 .worktree_store
1140 .read(cx)
1141 .worktree_for_id(*worktree_id, cx)
1142 {
1143 let paths_by_git_repo =
1144 self.process_updated_entries(&worktree, updated_entries, cx);
1145 let downstream = downstream
1146 .as_ref()
1147 .map(|downstream| downstream.updates_tx.clone());
1148 cx.spawn(async move |_, cx| {
1149 let paths_by_git_repo = paths_by_git_repo.await;
1150 for (repo, paths) in paths_by_git_repo {
1151 repo.update(cx, |repo, cx| {
1152 repo.paths_changed(paths, downstream.clone(), cx);
1153 })
1154 .ok();
1155 }
1156 })
1157 .detach();
1158 }
1159 }
1160 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1161 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1162 else {
1163 return;
1164 };
1165 if !worktree.read(cx).is_visible() {
1166 log::debug!(
1167 "not adding repositories for local worktree {:?} because it's not visible",
1168 worktree.read(cx).abs_path()
1169 );
1170 return;
1171 }
1172 self.update_repositories_from_worktree(
1173 *worktree_id,
1174 project_environment.clone(),
1175 next_repository_id.clone(),
1176 downstream
1177 .as_ref()
1178 .map(|downstream| downstream.updates_tx.clone()),
1179 changed_repos.clone(),
1180 fs.clone(),
1181 cx,
1182 );
1183 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1184 }
1185 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1186 let repos_without_worktree: Vec<RepositoryId> = self
1187 .worktree_ids
1188 .iter_mut()
1189 .filter_map(|(repo_id, worktree_ids)| {
1190 worktree_ids.remove(worktree_id);
1191 if worktree_ids.is_empty() {
1192 Some(*repo_id)
1193 } else {
1194 None
1195 }
1196 })
1197 .collect();
1198 let is_active_repo_removed = repos_without_worktree
1199 .iter()
1200 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1201
1202 for repo_id in repos_without_worktree {
1203 self.repositories.remove(&repo_id);
1204 self.worktree_ids.remove(&repo_id);
1205 if let Some(updates_tx) =
1206 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1207 {
1208 updates_tx
1209 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1210 .ok();
1211 }
1212 }
1213
1214 if is_active_repo_removed {
1215 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1216 self.active_repo_id = Some(repo_id);
1217 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1218 } else {
1219 self.active_repo_id = None;
1220 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1221 }
1222 }
1223 }
1224 _ => {}
1225 }
    }

    fn on_repository_event(
1228 &mut self,
1229 repo: Entity<Repository>,
1230 event: &RepositoryEvent,
1231 cx: &mut Context<Self>,
1232 ) {
1233 let id = repo.read(cx).id;
1234 let repo_snapshot = repo.read(cx).snapshot.clone();
1235 for (buffer_id, diff) in self.diffs.iter() {
1236 if let Some((buffer_repo, repo_path)) =
1237 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1238 && buffer_repo == repo
1239 {
1240 diff.update(cx, |diff, cx| {
1241 if let Some(conflict_set) = &diff.conflict_set {
1242 let conflict_status_changed =
1243 conflict_set.update(cx, |conflict_set, cx| {
1244 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1245 conflict_set.set_has_conflict(has_conflict, cx)
1246 })?;
1247 if conflict_status_changed {
1248 let buffer_store = self.buffer_store.read(cx);
1249 if let Some(buffer) = buffer_store.get(*buffer_id) {
1250 let _ = diff
1251 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1252 }
1253 }
1254 }
1255 anyhow::Ok(())
1256 })
1257 .ok();
1258 }
1259 }
1260 cx.emit(GitStoreEvent::RepositoryUpdated(
1261 id,
1262 event.clone(),
1263 self.active_repo_id == Some(id),
1264 ))
1265 }
1266
1267 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1268 cx.emit(GitStoreEvent::JobsUpdated)
1269 }
1270
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1272 fn update_repositories_from_worktree(
1273 &mut self,
1274 worktree_id: WorktreeId,
1275 project_environment: Entity<ProjectEnvironment>,
1276 next_repository_id: Arc<AtomicU64>,
1277 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1278 updated_git_repositories: UpdatedGitRepositoriesSet,
1279 fs: Arc<dyn Fs>,
1280 cx: &mut Context<Self>,
1281 ) {
1282 let mut removed_ids = Vec::new();
1283 for update in updated_git_repositories.iter() {
1284 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1285 let existing_work_directory_abs_path =
1286 repo.read(cx).work_directory_abs_path.clone();
1287 Some(&existing_work_directory_abs_path)
1288 == update.old_work_directory_abs_path.as_ref()
1289 || Some(&existing_work_directory_abs_path)
1290 == update.new_work_directory_abs_path.as_ref()
1291 }) {
1292 let repo_id = *id;
1293 if let Some(new_work_directory_abs_path) =
1294 update.new_work_directory_abs_path.clone()
1295 {
1296 self.worktree_ids
1297 .entry(repo_id)
1298 .or_insert_with(HashSet::new)
1299 .insert(worktree_id);
1300 existing.update(cx, |existing, cx| {
1301 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1302 existing.schedule_scan(updates_tx.clone(), cx);
1303 });
1304 } else {
1305 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1306 worktree_ids.remove(&worktree_id);
1307 if worktree_ids.is_empty() {
1308 removed_ids.push(repo_id);
1309 }
1310 }
1311 }
1312 } else if let UpdatedGitRepository {
1313 new_work_directory_abs_path: Some(work_directory_abs_path),
1314 dot_git_abs_path: Some(dot_git_abs_path),
1315 repository_dir_abs_path: Some(repository_dir_abs_path),
1316 common_dir_abs_path: Some(common_dir_abs_path),
1317 ..
1318 } = update
1319 {
1320 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1321 let git_store = cx.weak_entity();
1322 let repo = cx.new(|cx| {
1323 let mut repo = Repository::local(
1324 id,
1325 work_directory_abs_path.clone(),
1326 dot_git_abs_path.clone(),
1327 repository_dir_abs_path.clone(),
1328 common_dir_abs_path.clone(),
1329 project_environment.downgrade(),
1330 fs.clone(),
1331 git_store,
1332 cx,
1333 );
1334 if let Some(updates_tx) = updates_tx.as_ref() {
1335 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1336 updates_tx
1337 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1338 .ok();
1339 }
1340 repo.schedule_scan(updates_tx.clone(), cx);
1341 repo
1342 });
1343 self._subscriptions
1344 .push(cx.subscribe(&repo, Self::on_repository_event));
1345 self._subscriptions
1346 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1347 self.repositories.insert(id, repo);
1348 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1349 cx.emit(GitStoreEvent::RepositoryAdded);
1350 self.active_repo_id.get_or_insert_with(|| {
1351 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1352 id
1353 });
1354 }
1355 }
1356
1357 for id in removed_ids {
1358 if self.active_repo_id == Some(id) {
1359 self.active_repo_id = None;
1360 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1361 }
1362 self.repositories.remove(&id);
1363 if let Some(updates_tx) = updates_tx.as_ref() {
1364 updates_tx
1365 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1366 .ok();
1367 }
1368 }
1369 }
1370
1371 fn on_buffer_store_event(
1372 &mut self,
1373 _: Entity<BufferStore>,
1374 event: &BufferStoreEvent,
1375 cx: &mut Context<Self>,
1376 ) {
1377 match event {
1378 BufferStoreEvent::BufferAdded(buffer) => {
1379 cx.subscribe(buffer, |this, buffer, event, cx| {
1380 if let BufferEvent::LanguageChanged = event {
1381 let buffer_id = buffer.read(cx).remote_id();
1382 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1383 diff_state.update(cx, |diff_state, cx| {
1384 diff_state.buffer_language_changed(buffer, cx);
1385 });
1386 }
1387 }
1388 })
1389 .detach();
1390 }
1391 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1392 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1393 diffs.remove(buffer_id);
1394 }
1395 }
1396 BufferStoreEvent::BufferDropped(buffer_id) => {
1397 self.diffs.remove(buffer_id);
1398 for diffs in self.shared_diffs.values_mut() {
1399 diffs.remove(buffer_id);
1400 }
1401 }
1402
1403 _ => {}
1404 }
1405 }
1406
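    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all updates have completed.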
1407 pub fn recalculate_buffer_diffs(
1408 &mut self,
1409 buffers: Vec<Entity<Buffer>>,
1410 cx: &mut Context<Self>,
1411 ) -> impl Future<Output = ()> + use<> {
1412 let mut futures = Vec::new();
1413 for buffer in buffers {
1414 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1415 let buffer = buffer.read(cx).text_snapshot();
1416 diff_state.update(cx, |diff_state, cx| {
1417 diff_state.recalculate_diffs(buffer.clone(), cx);
1418 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1419 });
1420 futures.push(diff_state.update(cx, |diff_state, cx| {
1421 diff_state
1422 .reparse_conflict_markers(buffer, cx)
1423 .map(|_| {})
1424 .boxed()
1425 }));
1426 }
1427 }
1428 async move {
1429 futures::future::join_all(futures).await;
1430 }
1431 }
1432
1433 fn on_buffer_diff_event(
1434 &mut self,
1435 diff: Entity<buffer_diff::BufferDiff>,
1436 event: &BufferDiffEvent,
1437 cx: &mut Context<Self>,
1438 ) {
1439 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1440 let buffer_id = diff.read(cx).buffer_id;
1441 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1442 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1443 diff_state.hunk_staging_operation_count += 1;
1444 diff_state.hunk_staging_operation_count
1445 });
1446 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1447 let recv = repo.update(cx, |repo, cx| {
1448 log::debug!("hunks changed for {}", path.as_unix_str());
1449 repo.spawn_set_index_text_job(
1450 path,
1451 new_index_text.as_ref().map(|rope| rope.to_string()),
1452 Some(hunk_staging_operation_count),
1453 cx,
1454 )
1455 });
1456 let diff = diff.downgrade();
1457 cx.spawn(async move |this, cx| {
1458 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1459 diff.update(cx, |diff, cx| {
1460 diff.clear_pending_hunks(cx);
1461 })
1462 .ok();
1463 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1464 .ok();
1465 }
1466 })
1467 .detach();
1468 }
1469 }
1470 }
1471 }
1472
1473 fn local_worktree_git_repos_changed(
1474 &mut self,
1475 worktree: Entity<Worktree>,
1476 changed_repos: &UpdatedGitRepositoriesSet,
1477 cx: &mut Context<Self>,
1478 ) {
1479 log::debug!("local worktree repos changed");
1480 debug_assert!(worktree.read(cx).is_local());
1481
1482 for repository in self.repositories.values() {
1483 repository.update(cx, |repository, cx| {
1484 let repo_abs_path = &repository.work_directory_abs_path;
1485 if changed_repos.iter().any(|update| {
1486 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1487 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1488 }) {
1489 repository.reload_buffer_diff_bases(cx);
1490 }
1491 });
1492 }
1493 }
1494
1495 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1496 &self.repositories
1497 }
1498
1499 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1500 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1501 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1502 Some(status.status)
1503 }
1504
1505 pub fn repository_and_path_for_buffer_id(
1506 &self,
1507 buffer_id: BufferId,
1508 cx: &App,
1509 ) -> Option<(Entity<Repository>, RepoPath)> {
1510 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1511 let project_path = buffer.read(cx).project_path(cx)?;
1512 self.repository_and_path_for_project_path(&project_path, cx)
1513 }
1514
1515 pub fn repository_and_path_for_project_path(
1516 &self,
1517 path: &ProjectPath,
1518 cx: &App,
1519 ) -> Option<(Entity<Repository>, RepoPath)> {
1520 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1521 self.repositories
1522 .values()
1523 .filter_map(|repo| {
1524 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1525 Some((repo.clone(), repo_path))
1526 })
1527 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1528 }
1529
1530 pub fn git_init(
1531 &self,
1532 path: Arc<Path>,
1533 fallback_branch_name: String,
1534 cx: &App,
1535 ) -> Task<Result<()>> {
1536 match &self.state {
1537 GitStoreState::Local { fs, .. } => {
1538 let fs = fs.clone();
1539 cx.background_executor()
1540 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1541 }
1542 GitStoreState::Remote {
1543 upstream_client,
1544 upstream_project_id: project_id,
1545 ..
1546 } => {
1547 let client = upstream_client.clone();
1548 let project_id = *project_id;
1549 cx.background_executor().spawn(async move {
1550 client
1551 .request(proto::GitInit {
1552 project_id: project_id,
1553 abs_path: path.to_string_lossy().into_owned(),
1554 fallback_branch_name,
1555 })
1556 .await?;
1557 Ok(())
1558 })
1559 }
1560 }
1561 }
1562
1563 pub fn git_clone(
1564 &self,
1565 repo: String,
1566 path: impl Into<Arc<std::path::Path>>,
1567 cx: &App,
1568 ) -> Task<Result<()>> {
1569 let path = path.into();
1570 match &self.state {
1571 GitStoreState::Local { fs, .. } => {
1572 let fs = fs.clone();
1573 cx.background_executor()
1574 .spawn(async move { fs.git_clone(&repo, &path).await })
1575 }
1576 GitStoreState::Remote {
1577 upstream_client,
1578 upstream_project_id,
1579 ..
1580 } => {
1581 if upstream_client.is_via_collab() {
1582 return Task::ready(Err(anyhow!(
1583 "Git Clone isn't supported for project guests"
1584 )));
1585 }
1586 let request = upstream_client.request(proto::GitClone {
1587 project_id: *upstream_project_id,
1588 abs_path: path.to_string_lossy().into_owned(),
1589 remote_repo: repo,
1590 });
1591
1592 cx.background_spawn(async move {
1593 let result = request.await?;
1594
1595 match result.success {
1596 true => Ok(()),
1597 false => Err(anyhow!("Git Clone failed")),
1598 }
1599 })
1600 }
1601 }
1602 }
1603
1604 async fn handle_update_repository(
1605 this: Entity<Self>,
1606 envelope: TypedEnvelope<proto::UpdateRepository>,
1607 mut cx: AsyncApp,
1608 ) -> Result<()> {
1609 this.update(&mut cx, |this, cx| {
1610 let path_style = this.worktree_store.read(cx).path_style();
1611 let mut update = envelope.payload;
1612
1613 let id = RepositoryId::from_proto(update.id);
1614 let client = this.upstream_client().context("no upstream client")?;
1615
1616 let mut repo_subscription = None;
1617 let repo = this.repositories.entry(id).or_insert_with(|| {
1618 let git_store = cx.weak_entity();
1619 let repo = cx.new(|cx| {
1620 Repository::remote(
1621 id,
1622 Path::new(&update.abs_path).into(),
1623 path_style,
1624 ProjectId(update.project_id),
1625 client,
1626 git_store,
1627 cx,
1628 )
1629 });
1630 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1631 cx.emit(GitStoreEvent::RepositoryAdded);
1632 repo
1633 });
1634 this._subscriptions.extend(repo_subscription);
1635
1636 repo.update(cx, {
1637 let update = update.clone();
1638 |repo, cx| repo.apply_remote_update(update, cx)
1639 })?;
1640
1641 this.active_repo_id.get_or_insert_with(|| {
1642 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1643 id
1644 });
1645
1646 if let Some((client, project_id)) = this.downstream_client() {
1647 update.project_id = project_id.to_proto();
1648 client.send(update).log_err();
1649 }
1650 Ok(())
1651 })?
1652 }
1653
1654 async fn handle_remove_repository(
1655 this: Entity<Self>,
1656 envelope: TypedEnvelope<proto::RemoveRepository>,
1657 mut cx: AsyncApp,
1658 ) -> Result<()> {
1659 this.update(&mut cx, |this, cx| {
1660 let mut update = envelope.payload;
1661 let id = RepositoryId::from_proto(update.id);
1662 this.repositories.remove(&id);
1663 if let Some((client, project_id)) = this.downstream_client() {
1664 update.project_id = project_id.to_proto();
1665 client.send(update).log_err();
1666 }
1667 if this.active_repo_id == Some(id) {
1668 this.active_repo_id = None;
1669 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1670 }
1671 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1672 })
1673 }
1674
1675 async fn handle_git_init(
1676 this: Entity<Self>,
1677 envelope: TypedEnvelope<proto::GitInit>,
1678 cx: AsyncApp,
1679 ) -> Result<proto::Ack> {
1680 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1681 let name = envelope.payload.fallback_branch_name;
1682 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1683 .await?;
1684
1685 Ok(proto::Ack {})
1686 }
1687
1688 async fn handle_git_clone(
1689 this: Entity<Self>,
1690 envelope: TypedEnvelope<proto::GitClone>,
1691 cx: AsyncApp,
1692 ) -> Result<proto::GitCloneResponse> {
1693 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1694 let repo_name = envelope.payload.remote_repo;
1695 let result = cx
1696 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1697 .await;
1698
1699 Ok(proto::GitCloneResponse {
1700 success: result.is_ok(),
1701 })
1702 }
1703
1704 async fn handle_fetch(
1705 this: Entity<Self>,
1706 envelope: TypedEnvelope<proto::Fetch>,
1707 mut cx: AsyncApp,
1708 ) -> Result<proto::RemoteMessageResponse> {
1709 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1710 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1711 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1712 let askpass_id = envelope.payload.askpass_id;
1713
1714 let askpass = make_remote_delegate(
1715 this,
1716 envelope.payload.project_id,
1717 repository_id,
1718 askpass_id,
1719 &mut cx,
1720 );
1721
1722 let remote_output = repository_handle
1723 .update(&mut cx, |repository_handle, cx| {
1724 repository_handle.fetch(fetch_options, askpass, cx)
1725 })?
1726 .await??;
1727
1728 Ok(proto::RemoteMessageResponse {
1729 stdout: remote_output.stdout,
1730 stderr: remote_output.stderr,
1731 })
1732 }
1733
1734 async fn handle_push(
1735 this: Entity<Self>,
1736 envelope: TypedEnvelope<proto::Push>,
1737 mut cx: AsyncApp,
1738 ) -> Result<proto::RemoteMessageResponse> {
1739 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1740 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1741
1742 let askpass_id = envelope.payload.askpass_id;
1743 let askpass = make_remote_delegate(
1744 this,
1745 envelope.payload.project_id,
1746 repository_id,
1747 askpass_id,
1748 &mut cx,
1749 );
1750
1751 let options = envelope
1752 .payload
1753 .options
1754 .as_ref()
1755 .map(|_| match envelope.payload.options() {
1756 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1757 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1758 });
1759
1760 let branch_name = envelope.payload.branch_name.into();
1761 let remote_name = envelope.payload.remote_name.into();
1762
1763 let remote_output = repository_handle
1764 .update(&mut cx, |repository_handle, cx| {
1765 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1766 })?
1767 .await??;
1768 Ok(proto::RemoteMessageResponse {
1769 stdout: remote_output.stdout,
1770 stderr: remote_output.stderr,
1771 })
1772 }
1773
1774 async fn handle_pull(
1775 this: Entity<Self>,
1776 envelope: TypedEnvelope<proto::Pull>,
1777 mut cx: AsyncApp,
1778 ) -> Result<proto::RemoteMessageResponse> {
1779 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1780 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1781 let askpass_id = envelope.payload.askpass_id;
1782 let askpass = make_remote_delegate(
1783 this,
1784 envelope.payload.project_id,
1785 repository_id,
1786 askpass_id,
1787 &mut cx,
1788 );
1789
1790 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1791 let remote_name = envelope.payload.remote_name.into();
1792 let rebase = envelope.payload.rebase;
1793
1794 let remote_message = repository_handle
1795 .update(&mut cx, |repository_handle, cx| {
1796 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1797 })?
1798 .await??;
1799
1800 Ok(proto::RemoteMessageResponse {
1801 stdout: remote_message.stdout,
1802 stderr: remote_message.stderr,
1803 })
1804 }
1805
1806 async fn handle_stage(
1807 this: Entity<Self>,
1808 envelope: TypedEnvelope<proto::Stage>,
1809 mut cx: AsyncApp,
1810 ) -> Result<proto::Ack> {
1811 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1812 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1813
1814 let entries = envelope
1815 .payload
1816 .paths
1817 .into_iter()
1818 .map(|path| RepoPath::new(&path))
1819 .collect::<Result<Vec<_>>>()?;
1820
1821 repository_handle
1822 .update(&mut cx, |repository_handle, cx| {
1823 repository_handle.stage_entries(entries, cx)
1824 })?
1825 .await?;
1826 Ok(proto::Ack {})
1827 }
1828
1829 async fn handle_unstage(
1830 this: Entity<Self>,
1831 envelope: TypedEnvelope<proto::Unstage>,
1832 mut cx: AsyncApp,
1833 ) -> Result<proto::Ack> {
1834 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1835 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1836
1837 let entries = envelope
1838 .payload
1839 .paths
1840 .into_iter()
1841 .map(|path| RepoPath::new(&path))
1842 .collect::<Result<Vec<_>>>()?;
1843
1844 repository_handle
1845 .update(&mut cx, |repository_handle, cx| {
1846 repository_handle.unstage_entries(entries, cx)
1847 })?
1848 .await?;
1849
1850 Ok(proto::Ack {})
1851 }
1852
1853 async fn handle_stash(
1854 this: Entity<Self>,
1855 envelope: TypedEnvelope<proto::Stash>,
1856 mut cx: AsyncApp,
1857 ) -> Result<proto::Ack> {
1858 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1859 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1860
1861 let entries = envelope
1862 .payload
1863 .paths
1864 .into_iter()
1865 .map(|path| RepoPath::new(&path))
1866 .collect::<Result<Vec<_>>>()?;
1867
1868 repository_handle
1869 .update(&mut cx, |repository_handle, cx| {
1870 repository_handle.stash_entries(entries, cx)
1871 })?
1872 .await?;
1873
1874 Ok(proto::Ack {})
1875 }
1876
1877 async fn handle_stash_pop(
1878 this: Entity<Self>,
1879 envelope: TypedEnvelope<proto::StashPop>,
1880 mut cx: AsyncApp,
1881 ) -> Result<proto::Ack> {
1882 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1883 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1884 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1885
1886 repository_handle
1887 .update(&mut cx, |repository_handle, cx| {
1888 repository_handle.stash_pop(stash_index, cx)
1889 })?
1890 .await?;
1891
1892 Ok(proto::Ack {})
1893 }
1894
1895 async fn handle_stash_apply(
1896 this: Entity<Self>,
1897 envelope: TypedEnvelope<proto::StashApply>,
1898 mut cx: AsyncApp,
1899 ) -> Result<proto::Ack> {
1900 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1901 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1902 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1903
1904 repository_handle
1905 .update(&mut cx, |repository_handle, cx| {
1906 repository_handle.stash_apply(stash_index, cx)
1907 })?
1908 .await?;
1909
1910 Ok(proto::Ack {})
1911 }
1912
1913 async fn handle_stash_drop(
1914 this: Entity<Self>,
1915 envelope: TypedEnvelope<proto::StashDrop>,
1916 mut cx: AsyncApp,
1917 ) -> Result<proto::Ack> {
1918 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1919 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1920 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1921
1922 repository_handle
1923 .update(&mut cx, |repository_handle, cx| {
1924 repository_handle.stash_drop(stash_index, cx)
1925 })?
1926 .await??;
1927
1928 Ok(proto::Ack {})
1929 }
1930
1931 async fn handle_set_index_text(
1932 this: Entity<Self>,
1933 envelope: TypedEnvelope<proto::SetIndexText>,
1934 mut cx: AsyncApp,
1935 ) -> Result<proto::Ack> {
1936 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1937 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1938 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1939
1940 repository_handle
1941 .update(&mut cx, |repository_handle, cx| {
1942 repository_handle.spawn_set_index_text_job(
1943 repo_path,
1944 envelope.payload.text,
1945 None,
1946 cx,
1947 )
1948 })?
1949 .await??;
1950 Ok(proto::Ack {})
1951 }
1952
1953 async fn handle_commit(
1954 this: Entity<Self>,
1955 envelope: TypedEnvelope<proto::Commit>,
1956 mut cx: AsyncApp,
1957 ) -> Result<proto::Ack> {
1958 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1959 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1960 let askpass_id = envelope.payload.askpass_id;
1961
1962 let askpass = make_remote_delegate(
1963 this,
1964 envelope.payload.project_id,
1965 repository_id,
1966 askpass_id,
1967 &mut cx,
1968 );
1969
1970 let message = SharedString::from(envelope.payload.message);
1971 let name = envelope.payload.name.map(SharedString::from);
1972 let email = envelope.payload.email.map(SharedString::from);
1973 let options = envelope.payload.options.unwrap_or_default();
1974
1975 repository_handle
1976 .update(&mut cx, |repository_handle, cx| {
1977 repository_handle.commit(
1978 message,
1979 name.zip(email),
1980 CommitOptions {
1981 amend: options.amend,
1982 signoff: options.signoff,
1983 },
1984 askpass,
1985 cx,
1986 )
1987 })?
1988 .await??;
1989 Ok(proto::Ack {})
1990 }
1991
1992 async fn handle_get_remotes(
1993 this: Entity<Self>,
1994 envelope: TypedEnvelope<proto::GetRemotes>,
1995 mut cx: AsyncApp,
1996 ) -> Result<proto::GetRemotesResponse> {
1997 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1998 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1999
2000 let branch_name = envelope.payload.branch_name;
2001
2002 let remotes = repository_handle
2003 .update(&mut cx, |repository_handle, _| {
2004 repository_handle.get_remotes(branch_name)
2005 })?
2006 .await??;
2007
2008 Ok(proto::GetRemotesResponse {
2009 remotes: remotes
2010 .into_iter()
2011 .map(|remotes| proto::get_remotes_response::Remote {
2012 name: remotes.name.to_string(),
2013 })
2014 .collect::<Vec<_>>(),
2015 })
2016 }
2017
2018 async fn handle_get_worktrees(
2019 this: Entity<Self>,
2020 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2021 mut cx: AsyncApp,
2022 ) -> Result<proto::GitWorktreesResponse> {
2023 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2024 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2025
2026 let worktrees = repository_handle
2027 .update(&mut cx, |repository_handle, _| {
2028 repository_handle.worktrees()
2029 })?
2030 .await??;
2031
2032 Ok(proto::GitWorktreesResponse {
2033 worktrees: worktrees
2034 .into_iter()
2035 .map(|worktree| worktree_to_proto(&worktree))
2036 .collect::<Vec<_>>(),
2037 })
2038 }
2039
2040 async fn handle_create_worktree(
2041 this: Entity<Self>,
2042 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2043 mut cx: AsyncApp,
2044 ) -> Result<proto::Ack> {
2045 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2046 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2047 let directory = PathBuf::from(envelope.payload.directory);
2048 let name = envelope.payload.name;
2049 let commit = envelope.payload.commit;
2050
2051 repository_handle
2052 .update(&mut cx, |repository_handle, _| {
2053 repository_handle.create_worktree(name, directory, commit)
2054 })?
2055 .await??;
2056
2057 Ok(proto::Ack {})
2058 }
2059
2060 async fn handle_get_branches(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::GitGetBranches>,
2063 mut cx: AsyncApp,
2064 ) -> Result<proto::GitBranchesResponse> {
2065 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2066 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2067
2068 let branches = repository_handle
2069 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2070 .await??;
2071
2072 Ok(proto::GitBranchesResponse {
2073 branches: branches
2074 .into_iter()
2075 .map(|branch| branch_to_proto(&branch))
2076 .collect::<Vec<_>>(),
2077 })
2078 }
2079 async fn handle_get_default_branch(
2080 this: Entity<Self>,
2081 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2082 mut cx: AsyncApp,
2083 ) -> Result<proto::GetDefaultBranchResponse> {
2084 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2085 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2086
2087 let branch = repository_handle
2088 .update(&mut cx, |repository_handle, _| {
2089 repository_handle.default_branch()
2090 })?
2091 .await??
2092 .map(Into::into);
2093
2094 Ok(proto::GetDefaultBranchResponse { branch })
2095 }
2096 async fn handle_create_branch(
2097 this: Entity<Self>,
2098 envelope: TypedEnvelope<proto::GitCreateBranch>,
2099 mut cx: AsyncApp,
2100 ) -> Result<proto::Ack> {
2101 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2102 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2103 let branch_name = envelope.payload.branch_name;
2104
2105 repository_handle
2106 .update(&mut cx, |repository_handle, _| {
2107 repository_handle.create_branch(branch_name, None)
2108 })?
2109 .await??;
2110
2111 Ok(proto::Ack {})
2112 }
2113
2114 async fn handle_change_branch(
2115 this: Entity<Self>,
2116 envelope: TypedEnvelope<proto::GitChangeBranch>,
2117 mut cx: AsyncApp,
2118 ) -> Result<proto::Ack> {
2119 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2120 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2121 let branch_name = envelope.payload.branch_name;
2122
2123 repository_handle
2124 .update(&mut cx, |repository_handle, _| {
2125 repository_handle.change_branch(branch_name)
2126 })?
2127 .await??;
2128
2129 Ok(proto::Ack {})
2130 }
2131
2132 async fn handle_rename_branch(
2133 this: Entity<Self>,
2134 envelope: TypedEnvelope<proto::GitRenameBranch>,
2135 mut cx: AsyncApp,
2136 ) -> Result<proto::Ack> {
2137 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2138 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2139 let branch = envelope.payload.branch;
2140 let new_name = envelope.payload.new_name;
2141
2142 repository_handle
2143 .update(&mut cx, |repository_handle, _| {
2144 repository_handle.rename_branch(branch, new_name)
2145 })?
2146 .await??;
2147
2148 Ok(proto::Ack {})
2149 }
2150
2151 async fn handle_show(
2152 this: Entity<Self>,
2153 envelope: TypedEnvelope<proto::GitShow>,
2154 mut cx: AsyncApp,
2155 ) -> Result<proto::GitCommitDetails> {
2156 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2157 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2158
2159 let commit = repository_handle
2160 .update(&mut cx, |repository_handle, _| {
2161 repository_handle.show(envelope.payload.commit)
2162 })?
2163 .await??;
2164 Ok(proto::GitCommitDetails {
2165 sha: commit.sha.into(),
2166 message: commit.message.into(),
2167 commit_timestamp: commit.commit_timestamp,
2168 author_email: commit.author_email.into(),
2169 author_name: commit.author_name.into(),
2170 })
2171 }
2172
2173 async fn handle_load_commit_diff(
2174 this: Entity<Self>,
2175 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2176 mut cx: AsyncApp,
2177 ) -> Result<proto::LoadCommitDiffResponse> {
2178 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2179 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2180
2181 let commit_diff = repository_handle
2182 .update(&mut cx, |repository_handle, _| {
2183 repository_handle.load_commit_diff(envelope.payload.commit)
2184 })?
2185 .await??;
2186 Ok(proto::LoadCommitDiffResponse {
2187 files: commit_diff
2188 .files
2189 .into_iter()
2190 .map(|file| proto::CommitFile {
2191 path: file.path.to_proto(),
2192 old_text: file.old_text,
2193 new_text: file.new_text,
2194 })
2195 .collect(),
2196 })
2197 }
2198
2199 async fn handle_reset(
2200 this: Entity<Self>,
2201 envelope: TypedEnvelope<proto::GitReset>,
2202 mut cx: AsyncApp,
2203 ) -> Result<proto::Ack> {
2204 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2205 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2206
2207 let mode = match envelope.payload.mode() {
2208 git_reset::ResetMode::Soft => ResetMode::Soft,
2209 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2210 };
2211
2212 repository_handle
2213 .update(&mut cx, |repository_handle, cx| {
2214 repository_handle.reset(envelope.payload.commit, mode, cx)
2215 })?
2216 .await??;
2217 Ok(proto::Ack {})
2218 }
2219
2220 async fn handle_checkout_files(
2221 this: Entity<Self>,
2222 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2223 mut cx: AsyncApp,
2224 ) -> Result<proto::Ack> {
2225 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2226 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2227 let paths = envelope
2228 .payload
2229 .paths
2230 .iter()
2231 .map(|s| RepoPath::from_proto(s))
2232 .collect::<Result<Vec<_>>>()?;
2233
2234 repository_handle
2235 .update(&mut cx, |repository_handle, cx| {
2236 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2237 })?
2238 .await?;
2239 Ok(proto::Ack {})
2240 }
2241
2242 async fn handle_open_commit_message_buffer(
2243 this: Entity<Self>,
2244 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2245 mut cx: AsyncApp,
2246 ) -> Result<proto::OpenBufferResponse> {
2247 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2248 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2249 let buffer = repository
2250 .update(&mut cx, |repository, cx| {
2251 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2252 })?
2253 .await?;
2254
2255 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2256 this.update(&mut cx, |this, cx| {
2257 this.buffer_store.update(cx, |buffer_store, cx| {
2258 buffer_store
2259 .create_buffer_for_peer(
2260 &buffer,
2261 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2262 cx,
2263 )
2264 .detach_and_log_err(cx);
2265 })
2266 })?;
2267
2268 Ok(proto::OpenBufferResponse {
2269 buffer_id: buffer_id.to_proto(),
2270 })
2271 }
2272
2273 async fn handle_askpass(
2274 this: Entity<Self>,
2275 envelope: TypedEnvelope<proto::AskPassRequest>,
2276 mut cx: AsyncApp,
2277 ) -> Result<proto::AskPassResponse> {
2278 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2279 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2280
2281 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2282 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2283 debug_panic!("no askpass found");
2284 anyhow::bail!("no askpass found");
2285 };
2286
2287 let response = askpass
2288 .ask_password(envelope.payload.prompt)
2289 .await
2290 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2291
2292 delegates
2293 .lock()
2294 .insert(envelope.payload.askpass_id, askpass);
2295
        // Despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` token below, this is not
        // ideal: the askpass password itself is sent back unencrypted here.
2297 Ok(proto::AskPassResponse {
2298 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2299 })
2300 }
2301
2302 async fn handle_check_for_pushed_commits(
2303 this: Entity<Self>,
2304 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2305 mut cx: AsyncApp,
2306 ) -> Result<proto::CheckForPushedCommitsResponse> {
2307 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2308 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2309
2310 let branches = repository_handle
2311 .update(&mut cx, |repository_handle, _| {
2312 repository_handle.check_for_pushed_commits()
2313 })?
2314 .await??;
2315 Ok(proto::CheckForPushedCommitsResponse {
2316 pushed_to: branches
2317 .into_iter()
2318 .map(|commit| commit.to_string())
2319 .collect(),
2320 })
2321 }
2322
2323 async fn handle_git_diff(
2324 this: Entity<Self>,
2325 envelope: TypedEnvelope<proto::GitDiff>,
2326 mut cx: AsyncApp,
2327 ) -> Result<proto::GitDiffResponse> {
2328 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2329 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2330 let diff_type = match envelope.payload.diff_type() {
2331 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2332 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2333 };
2334
2335 let mut diff = repository_handle
2336 .update(&mut cx, |repository_handle, cx| {
2337 repository_handle.diff(diff_type, cx)
2338 })?
2339 .await??;
2340 const ONE_MB: usize = 1_000_000;
2341 if diff.len() > ONE_MB {
2342 diff = diff.chars().take(ONE_MB).collect()
2343 }
2344
2345 Ok(proto::GitDiffResponse { diff })
2346 }
2347
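    /// Handles a `GetTreeDiff` RPC by diffing two trees, either directly (`Since`) or
    /// from their merge base (`MergeBase`), and returning each changed path with its
    /// status and, for modified or deleted entries, the old blob OID.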
2348 async fn handle_tree_diff(
2349 this: Entity<Self>,
2350 request: TypedEnvelope<proto::GetTreeDiff>,
2351 mut cx: AsyncApp,
2352 ) -> Result<proto::GetTreeDiffResponse> {
2353 let repository_id = RepositoryId(request.payload.repository_id);
2354 let diff_type = if request.payload.is_merge {
2355 DiffTreeType::MergeBase {
2356 base: request.payload.base.into(),
2357 head: request.payload.head.into(),
2358 }
2359 } else {
2360 DiffTreeType::Since {
2361 base: request.payload.base.into(),
2362 head: request.payload.head.into(),
2363 }
2364 };
2365
2366 let diff = this
2367 .update(&mut cx, |this, cx| {
2368 let repository = this.repositories().get(&repository_id)?;
2369 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2370 })?
2371 .context("missing repository")?
2372 .await??;
2373
2374 Ok(proto::GetTreeDiffResponse {
2375 entries: diff
2376 .entries
2377 .into_iter()
2378 .map(|(path, status)| proto::TreeDiffStatus {
2379 path: path.0.to_proto(),
2380 status: match status {
2381 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2382 TreeDiffStatus::Modified { .. } => {
2383 proto::tree_diff_status::Status::Modified.into()
2384 }
2385 TreeDiffStatus::Deleted { .. } => {
2386 proto::tree_diff_status::Status::Deleted.into()
2387 }
2388 },
2389 oid: match status {
2390 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2391 Some(old.to_string())
2392 }
2393 TreeDiffStatus::Added => None,
2394 },
2395 })
2396 .collect(),
2397 })
2398 }
2399
2400 async fn handle_get_blob_content(
2401 this: Entity<Self>,
2402 request: TypedEnvelope<proto::GetBlobContent>,
2403 mut cx: AsyncApp,
2404 ) -> Result<proto::GetBlobContentResponse> {
2405 let oid = git::Oid::from_str(&request.payload.oid)?;
2406 let repository_id = RepositoryId(request.payload.repository_id);
2407 let content = this
2408 .update(&mut cx, |this, cx| {
2409 let repository = this.repositories().get(&repository_id)?;
2410 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2411 })?
2412 .context("missing repository")?
2413 .await?;
2414 Ok(proto::GetBlobContentResponse { content })
2415 }
2416
2417 async fn handle_open_unstaged_diff(
2418 this: Entity<Self>,
2419 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2420 mut cx: AsyncApp,
2421 ) -> Result<proto::OpenUnstagedDiffResponse> {
2422 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2423 let diff = this
2424 .update(&mut cx, |this, cx| {
2425 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2426 Some(this.open_unstaged_diff(buffer, cx))
2427 })?
2428 .context("missing buffer")?
2429 .await?;
2430 this.update(&mut cx, |this, _| {
2431 let shared_diffs = this
2432 .shared_diffs
2433 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2434 .or_default();
2435 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2436 })?;
2437 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2438 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2439 }
2440
2441 async fn handle_open_uncommitted_diff(
2442 this: Entity<Self>,
2443 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2444 mut cx: AsyncApp,
2445 ) -> Result<proto::OpenUncommittedDiffResponse> {
2446 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2447 let diff = this
2448 .update(&mut cx, |this, cx| {
2449 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2450 Some(this.open_uncommitted_diff(buffer, cx))
2451 })?
2452 .context("missing buffer")?
2453 .await?;
2454 this.update(&mut cx, |this, _| {
2455 let shared_diffs = this
2456 .shared_diffs
2457 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2458 .or_default();
2459 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2460 })?;
2461 diff.read_with(&cx, |diff, cx| {
2462 use proto::open_uncommitted_diff_response::Mode;
2463
2464 let unstaged_diff = diff.secondary_diff();
2465 let index_snapshot = unstaged_diff.and_then(|diff| {
2466 let diff = diff.read(cx);
2467 diff.base_text_exists().then(|| diff.base_text())
2468 });
2469
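            // Pick the response mode: when the index base text is the same snapshot as
            // HEAD's, only the committed text is sent (`IndexMatchesHead`); otherwise
            // both texts are sent, with `None` standing in for a missing base.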
2470 let mode;
2471 let staged_text;
2472 let committed_text;
2473 if diff.base_text_exists() {
2474 let committed_snapshot = diff.base_text();
2475 committed_text = Some(committed_snapshot.text());
2476 if let Some(index_text) = index_snapshot {
2477 if index_text.remote_id() == committed_snapshot.remote_id() {
2478 mode = Mode::IndexMatchesHead;
2479 staged_text = None;
2480 } else {
2481 mode = Mode::IndexAndHead;
2482 staged_text = Some(index_text.text());
2483 }
2484 } else {
2485 mode = Mode::IndexAndHead;
2486 staged_text = None;
2487 }
2488 } else {
2489 mode = Mode::IndexAndHead;
2490 committed_text = None;
2491 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2492 }
2493
2494 proto::OpenUncommittedDiffResponse {
2495 committed_text,
2496 staged_text,
2497 mode: mode.into(),
2498 }
2499 })
2500 }
2501
2502 async fn handle_update_diff_bases(
2503 this: Entity<Self>,
2504 request: TypedEnvelope<proto::UpdateDiffBases>,
2505 mut cx: AsyncApp,
2506 ) -> Result<()> {
2507 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2508 this.update(&mut cx, |this, cx| {
2509 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2510 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2511 {
2512 let buffer = buffer.read(cx).text_snapshot();
2513 diff_state.update(cx, |diff_state, cx| {
2514 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2515 })
2516 }
2517 })
2518 }
2519
2520 async fn handle_blame_buffer(
2521 this: Entity<Self>,
2522 envelope: TypedEnvelope<proto::BlameBuffer>,
2523 mut cx: AsyncApp,
2524 ) -> Result<proto::BlameBufferResponse> {
2525 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2526 let version = deserialize_version(&envelope.payload.version);
2527 let buffer = this.read_with(&cx, |this, cx| {
2528 this.buffer_store.read(cx).get_existing(buffer_id)
2529 })??;
2530 buffer
2531 .update(&mut cx, |buffer, _| {
2532 buffer.wait_for_version(version.clone())
2533 })?
2534 .await?;
2535 let blame = this
2536 .update(&mut cx, |this, cx| {
2537 this.blame_buffer(&buffer, Some(version), cx)
2538 })?
2539 .await?;
2540 Ok(serialize_blame_buffer_response(blame))
2541 }
2542
2543 async fn handle_get_permalink_to_line(
2544 this: Entity<Self>,
2545 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2546 mut cx: AsyncApp,
2547 ) -> Result<proto::GetPermalinkToLineResponse> {
2548 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2549 // let version = deserialize_version(&envelope.payload.version);
2550 let selection = {
2551 let proto_selection = envelope
2552 .payload
2553 .selection
2554 .context("no selection to get permalink for defined")?;
2555 proto_selection.start as u32..proto_selection.end as u32
2556 };
2557 let buffer = this.read_with(&cx, |this, cx| {
2558 this.buffer_store.read(cx).get_existing(buffer_id)
2559 })??;
2560 let permalink = this
2561 .update(&mut cx, |this, cx| {
2562 this.get_permalink_to_line(&buffer, selection, cx)
2563 })?
2564 .await?;
2565 Ok(proto::GetPermalinkToLineResponse {
2566 permalink: permalink.to_string(),
2567 })
2568 }
2569
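    /// Looks up the repository entity referenced by an incoming RPC request, returning
    /// an error if the id is unknown.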
2570 fn repository_for_request(
2571 this: &Entity<Self>,
2572 id: RepositoryId,
2573 cx: &mut AsyncApp,
2574 ) -> Result<Entity<Repository>> {
2575 this.read_with(cx, |this, _| {
2576 this.repositories
2577 .get(&id)
2578 .context("missing repository handle")
2579 .cloned()
2580 })?
2581 }
2582
2583 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2584 self.repositories
2585 .iter()
2586 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2587 .collect()
2588 }
2589
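    /// Maps a batch of updated worktree entries to the repositories that contain them.
    /// The entry paths are absolutized and sorted, and each repository then claims the
    /// contiguous range of paths under its work directory on the background executor,
    /// with nested repositories taking precedence over their parents.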
2590 fn process_updated_entries(
2591 &self,
2592 worktree: &Entity<Worktree>,
2593 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2594 cx: &mut App,
2595 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2596 let path_style = worktree.read(cx).path_style();
2597 let mut repo_paths = self
2598 .repositories
2599 .values()
2600 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2601 .collect::<Vec<_>>();
2602 let mut entries: Vec<_> = updated_entries
2603 .iter()
2604 .map(|(path, _, _)| path.clone())
2605 .collect();
2606 entries.sort();
2607 let worktree = worktree.read(cx);
2608
2609 let entries = entries
2610 .into_iter()
2611 .map(|path| worktree.absolutize(&path))
2612 .collect::<Arc<[_]>>();
2613
2614 let executor = cx.background_executor().clone();
2615 cx.background_executor().spawn(async move {
2616 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2617 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2618 let mut tasks = FuturesOrdered::new();
2619 for (repo_path, repo) in repo_paths.into_iter().rev() {
2620 let entries = entries.clone();
2621 let task = executor.spawn(async move {
2622 // Find all repository paths that belong to this repo
2623 let mut ix = entries.partition_point(|path| path < &*repo_path);
2624 if ix == entries.len() {
2625 return None;
2626 };
2627
2628 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's work directory form a contiguous range in the sorted list.
2630 while let Some(path) = entries.get(ix)
2631 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2632 &repo_path, path, path_style,
2633 )
2634 {
2635 paths.push((repo_path, ix));
2636 ix += 1;
2637 }
2638 if paths.is_empty() {
2639 None
2640 } else {
2641 Some((repo, paths))
2642 }
2643 });
2644 tasks.push_back(task);
2645 }
2646
2647 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2648 let mut path_was_used = vec![false; entries.len()];
2649 let tasks = tasks.collect::<Vec<_>>().await;
            // The per-repository tasks were created from the reverse-sorted list, so deeper
            // (more specific) work directories come first. We always want to assign a path
            // to its innermost repository.
2652 for t in tasks {
2653 let Some((repo, paths)) = t else {
2654 continue;
2655 };
2656 let entry = paths_by_git_repo.entry(repo).or_default();
2657 for (repo_path, ix) in paths {
2658 if path_was_used[ix] {
2659 continue;
2660 }
2661 path_was_used[ix] = true;
2662 entry.push(repo_path);
2663 }
2664 }
2665
2666 paths_by_git_repo
2667 })
2668 }
2669}
2670
2671impl BufferGitState {
2672 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2673 Self {
2674 unstaged_diff: Default::default(),
2675 uncommitted_diff: Default::default(),
2676 recalculate_diff_task: Default::default(),
2677 language: Default::default(),
2678 language_registry: Default::default(),
2679 recalculating_tx: postage::watch::channel_with(false).0,
2680 hunk_staging_operation_count: 0,
2681 hunk_staging_operation_count_as_of_write: 0,
2682 head_text: Default::default(),
2683 index_text: Default::default(),
2684 head_changed: Default::default(),
2685 index_changed: Default::default(),
2686 language_changed: Default::default(),
2687 conflict_updated_futures: Default::default(),
2688 conflict_set: Default::default(),
2689 reparse_conflict_markers_task: Default::default(),
2690 }
2691 }
2692
2693 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2694 self.language = buffer.read(cx).language().cloned();
2695 self.language_changed = true;
2696 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2697 }
2698
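    /// Re-parses conflict markers for the given buffer snapshot on a background task and
    /// applies the result to the `ConflictSet`. The returned receiver completes once the
    /// update has been applied; if there is no conflict set, or it previously had no
    /// conflicts, no reparse is scheduled and awaiting the receiver yields `Canceled`.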
2699 fn reparse_conflict_markers(
2700 &mut self,
2701 buffer: text::BufferSnapshot,
2702 cx: &mut Context<Self>,
2703 ) -> oneshot::Receiver<()> {
2704 let (tx, rx) = oneshot::channel();
2705
2706 let Some(conflict_set) = self
2707 .conflict_set
2708 .as_ref()
2709 .and_then(|conflict_set| conflict_set.upgrade())
2710 else {
2711 return rx;
2712 };
2713
2714 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2715 if conflict_set.has_conflict {
2716 Some(conflict_set.snapshot())
2717 } else {
2718 None
2719 }
2720 });
2721
2722 if let Some(old_snapshot) = old_snapshot {
2723 self.conflict_updated_futures.push(tx);
2724 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2725 let (snapshot, changed_range) = cx
2726 .background_spawn(async move {
2727 let new_snapshot = ConflictSet::parse(&buffer);
2728 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2729 (new_snapshot, changed_range)
2730 })
2731 .await;
2732 this.update(cx, |this, cx| {
2733 if let Some(conflict_set) = &this.conflict_set {
2734 conflict_set
2735 .update(cx, |conflict_set, cx| {
2736 conflict_set.set_snapshot(snapshot, changed_range, cx);
2737 })
2738 .ok();
2739 }
2740 let futures = std::mem::take(&mut this.conflict_updated_futures);
2741 for tx in futures {
2742 tx.send(()).ok();
2743 }
2744 })
2745 }))
2746 }
2747
2748 rx
2749 }
2750
2751 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2752 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2753 }
2754
2755 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2756 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2757 }
2758
2759 fn handle_base_texts_updated(
2760 &mut self,
2761 buffer: text::BufferSnapshot,
2762 message: proto::UpdateDiffBases,
2763 cx: &mut Context<Self>,
2764 ) {
2765 use proto::update_diff_bases::Mode;
2766
2767 let Some(mode) = Mode::from_i32(message.mode) else {
2768 return;
2769 };
2770
2771 let diff_bases_change = match mode {
2772 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2773 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2774 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2775 Mode::IndexAndHead => DiffBasesChange::SetEach {
2776 index: message.staged_text,
2777 head: message.committed_text,
2778 },
2779 };
2780
2781 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2782 }
2783
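    /// Returns a future that resolves once the in-flight diff recalculation (if any)
    /// finishes, or `None` if no recalculation is currently running.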
2784 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2785 if *self.recalculating_tx.borrow() {
2786 let mut rx = self.recalculating_tx.subscribe();
2787 Some(async move {
2788 loop {
2789 let is_recalculating = rx.recv().await;
2790 if is_recalculating != Some(true) {
2791 break;
2792 }
2793 }
2794 })
2795 } else {
2796 None
2797 }
2798 }
2799
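    /// Applies a `DiffBasesChange` to the cached index and HEAD texts, normalizing line
    /// endings, and then kicks off a diff recalculation for the buffer.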
2800 fn diff_bases_changed(
2801 &mut self,
2802 buffer: text::BufferSnapshot,
2803 diff_bases_change: Option<DiffBasesChange>,
2804 cx: &mut Context<Self>,
2805 ) {
2806 match diff_bases_change {
2807 Some(DiffBasesChange::SetIndex(index)) => {
2808 self.index_text = index.map(|mut index| {
2809 text::LineEnding::normalize(&mut index);
2810 Arc::new(index)
2811 });
2812 self.index_changed = true;
2813 }
2814 Some(DiffBasesChange::SetHead(head)) => {
2815 self.head_text = head.map(|mut head| {
2816 text::LineEnding::normalize(&mut head);
2817 Arc::new(head)
2818 });
2819 self.head_changed = true;
2820 }
2821 Some(DiffBasesChange::SetBoth(text)) => {
2822 let text = text.map(|mut text| {
2823 text::LineEnding::normalize(&mut text);
2824 Arc::new(text)
2825 });
2826 self.head_text = text.clone();
2827 self.index_text = text;
2828 self.head_changed = true;
2829 self.index_changed = true;
2830 }
2831 Some(DiffBasesChange::SetEach { index, head }) => {
2832 self.index_text = index.map(|mut index| {
2833 text::LineEnding::normalize(&mut index);
2834 Arc::new(index)
2835 });
2836 self.index_changed = true;
2837 self.head_text = head.map(|mut head| {
2838 text::LineEnding::normalize(&mut head);
2839 Arc::new(head)
2840 });
2841 self.head_changed = true;
2842 }
2843 None => {}
2844 }
2845
2846 self.recalculate_diffs(buffer, cx)
2847 }
2848
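    /// Recomputes the unstaged and uncommitted diffs for the buffer on a background task.
    /// When the index text matches HEAD, the unstaged snapshot is reused as the
    /// uncommitted diff. The recalculation is abandoned while hunk staging operations are
    /// still in flight, so that pending state isn't invalidated prematurely; a later
    /// recalculation will pick up the settled index state.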
2849 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2850 *self.recalculating_tx.borrow_mut() = true;
2851
2852 let language = self.language.clone();
2853 let language_registry = self.language_registry.clone();
2854 let unstaged_diff = self.unstaged_diff();
2855 let uncommitted_diff = self.uncommitted_diff();
2856 let head = self.head_text.clone();
2857 let index = self.index_text.clone();
2858 let index_changed = self.index_changed;
2859 let head_changed = self.head_changed;
2860 let language_changed = self.language_changed;
2861 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2862 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2863 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2864 (None, None) => true,
2865 _ => false,
2866 };
2867 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2868 log::debug!(
2869 "start recalculating diffs for buffer {}",
2870 buffer.remote_id()
2871 );
2872
2873 let mut new_unstaged_diff = None;
2874 if let Some(unstaged_diff) = &unstaged_diff {
2875 new_unstaged_diff = Some(
2876 BufferDiff::update_diff(
2877 unstaged_diff.clone(),
2878 buffer.clone(),
2879 index,
2880 index_changed,
2881 language_changed,
2882 language.clone(),
2883 language_registry.clone(),
2884 cx,
2885 )
2886 .await?,
2887 );
2888 }
2889
2890 let mut new_uncommitted_diff = None;
2891 if let Some(uncommitted_diff) = &uncommitted_diff {
2892 new_uncommitted_diff = if index_matches_head {
2893 new_unstaged_diff.clone()
2894 } else {
2895 Some(
2896 BufferDiff::update_diff(
2897 uncommitted_diff.clone(),
2898 buffer.clone(),
2899 head,
2900 head_changed,
2901 language_changed,
2902 language.clone(),
2903 language_registry.clone(),
2904 cx,
2905 )
2906 .await?,
2907 )
2908 }
2909 }
2910
2911 let cancel = this.update(cx, |this, _| {
2912 // This checks whether all pending stage/unstage operations
2913 // have quiesced (i.e. both the corresponding write and the
2914 // read of that write have completed). If not, then we cancel
2915 // this recalculation attempt to avoid invalidating pending
2916 // state too quickly; another recalculation will come along
2917 // later and clear the pending state once the state of the index has settled.
2918 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2919 *this.recalculating_tx.borrow_mut() = false;
2920 true
2921 } else {
2922 false
2923 }
2924 })?;
2925 if cancel {
2926 log::debug!(
2927 concat!(
                        "aborting recalculating diffs for buffer {} ",
2929 "due to subsequent hunk operations",
2930 ),
2931 buffer.remote_id()
2932 );
2933 return Ok(());
2934 }
2935
2936 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2937 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2938 {
2939 unstaged_diff.update(cx, |diff, cx| {
2940 if language_changed {
2941 diff.language_changed(cx);
2942 }
2943 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2944 })?
2945 } else {
2946 None
2947 };
2948
2949 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2950 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2951 {
2952 uncommitted_diff.update(cx, |diff, cx| {
2953 if language_changed {
2954 diff.language_changed(cx);
2955 }
2956 diff.set_snapshot_with_secondary(
2957 new_uncommitted_diff,
2958 &buffer,
2959 unstaged_changed_range,
2960 true,
2961 cx,
2962 );
2963 })?;
2964 }
2965
2966 log::debug!(
2967 "finished recalculating diffs for buffer {}",
2968 buffer.remote_id()
2969 );
2970
2971 if let Some(this) = this.upgrade() {
2972 this.update(cx, |this, _| {
2973 this.index_changed = false;
2974 this.head_changed = false;
2975 this.language_changed = false;
2976 *this.recalculating_tx.borrow_mut() = false;
2977 })?;
2978 }
2979
2980 Ok(())
2981 }));
2982 }
2983}
2984
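/// Builds an `AskPassDelegate` that forwards askpass prompts to the downstream client as
/// `proto::AskPassRequest` messages, hands the response back to the waiting job as an
/// `EncryptedPassword`, and zeroizes the received string afterwards.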
2985fn make_remote_delegate(
2986 this: Entity<GitStore>,
2987 project_id: u64,
2988 repository_id: RepositoryId,
2989 askpass_id: u64,
2990 cx: &mut AsyncApp,
2991) -> AskPassDelegate {
2992 AskPassDelegate::new(cx, move |prompt, tx, cx| {
2993 this.update(cx, |this, cx| {
2994 let Some((client, _)) = this.downstream_client() else {
2995 return;
2996 };
2997 let response = client.request(proto::AskPassRequest {
2998 project_id,
2999 repository_id: repository_id.to_proto(),
3000 askpass_id,
3001 prompt,
3002 });
3003 cx.spawn(async move |_, _| {
3004 let mut response = response.await?.response;
3005 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3006 .ok();
3007 response.zeroize();
3008 anyhow::Ok(())
3009 })
3010 .detach_and_log_err(cx);
3011 })
3012 .log_err();
3013 })
3014}
3015
3016impl RepositoryId {
3017 pub fn to_proto(self) -> u64 {
3018 self.0
3019 }
3020
3021 pub fn from_proto(id: u64) -> Self {
3022 RepositoryId(id)
3023 }
3024}
3025
3026impl RepositorySnapshot {
3027 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3028 Self {
3029 id,
3030 statuses_by_path: Default::default(),
3031 pending_ops_by_path: Default::default(),
3032 work_directory_abs_path,
3033 branch: None,
3034 head_commit: None,
3035 scan_id: 0,
3036 merge: Default::default(),
3037 remote_origin_url: None,
3038 remote_upstream_url: None,
3039 stash_entries: Default::default(),
3040 path_style,
3041 }
3042 }
3043
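    /// Builds the first `UpdateRepository` message sent for this repository, carrying the
    /// full set of statuses along with the current merge conflicts, merge message, and
    /// stash entries.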
3044 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3045 proto::UpdateRepository {
3046 branch_summary: self.branch.as_ref().map(branch_to_proto),
3047 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3048 updated_statuses: self
3049 .statuses_by_path
3050 .iter()
3051 .map(|entry| entry.to_proto())
3052 .collect(),
3053 removed_statuses: Default::default(),
3054 current_merge_conflicts: self
3055 .merge
3056 .conflicted_paths
3057 .iter()
3058 .map(|repo_path| repo_path.to_proto())
3059 .collect(),
3060 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3061 project_id,
3062 id: self.id.to_proto(),
3063 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3064 entry_ids: vec![self.id.to_proto()],
3065 scan_id: self.scan_id,
3066 is_last_update: true,
3067 stash_entries: self
3068 .stash_entries
3069 .entries
3070 .iter()
3071 .map(stash_to_proto)
3072 .collect(),
3073 }
3074 }
3075
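    /// Builds an incremental `UpdateRepository` message by walking the old and new status
    /// trees in lockstep (both are ordered by repo path), collecting new or changed
    /// entries as `updated_statuses` and vanished entries as `removed_statuses`.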
3076 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3077 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3078 let mut removed_statuses: Vec<String> = Vec::new();
3079
3080 let mut new_statuses = self.statuses_by_path.iter().peekable();
3081 let mut old_statuses = old.statuses_by_path.iter().peekable();
3082
3083 let mut current_new_entry = new_statuses.next();
3084 let mut current_old_entry = old_statuses.next();
3085 loop {
3086 match (current_new_entry, current_old_entry) {
3087 (Some(new_entry), Some(old_entry)) => {
3088 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3089 Ordering::Less => {
3090 updated_statuses.push(new_entry.to_proto());
3091 current_new_entry = new_statuses.next();
3092 }
3093 Ordering::Equal => {
3094 if new_entry.status != old_entry.status {
3095 updated_statuses.push(new_entry.to_proto());
3096 }
3097 current_old_entry = old_statuses.next();
3098 current_new_entry = new_statuses.next();
3099 }
3100 Ordering::Greater => {
3101 removed_statuses.push(old_entry.repo_path.to_proto());
3102 current_old_entry = old_statuses.next();
3103 }
3104 }
3105 }
3106 (None, Some(old_entry)) => {
3107 removed_statuses.push(old_entry.repo_path.to_proto());
3108 current_old_entry = old_statuses.next();
3109 }
3110 (Some(new_entry), None) => {
3111 updated_statuses.push(new_entry.to_proto());
3112 current_new_entry = new_statuses.next();
3113 }
3114 (None, None) => break,
3115 }
3116 }
3117
3118 proto::UpdateRepository {
3119 branch_summary: self.branch.as_ref().map(branch_to_proto),
3120 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3121 updated_statuses,
3122 removed_statuses,
3123 current_merge_conflicts: self
3124 .merge
3125 .conflicted_paths
3126 .iter()
3127 .map(|path| path.to_proto())
3128 .collect(),
3129 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3130 project_id,
3131 id: self.id.to_proto(),
3132 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3133 entry_ids: vec![],
3134 scan_id: self.scan_id,
3135 is_last_update: true,
3136 stash_entries: self
3137 .stash_entries
3138 .entries
3139 .iter()
3140 .map(stash_to_proto)
3141 .collect(),
3142 }
3143 }
3144
3145 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3146 self.statuses_by_path.iter().cloned()
3147 }
3148
3149 pub fn status_summary(&self) -> GitSummary {
3150 self.statuses_by_path.summary().item_summary
3151 }
3152
3153 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3154 self.statuses_by_path
3155 .get(&PathKey(path.0.clone()), ())
3156 .cloned()
3157 }
3158
3159 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3160 self.pending_ops_by_path
3161 .get(&PathKey(path.0.clone()), ())
3162 .cloned()
3163 }
3164
3165 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3166 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3167 }
3168
3169 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3170 self.path_style
3171 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3172 .unwrap()
3173 .into()
3174 }
3175
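    /// Converts an absolute path into a path relative to the repository's work directory,
    /// returning `None` if the path lies outside of it.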
3176 #[inline]
3177 fn abs_path_to_repo_path_inner(
3178 work_directory_abs_path: &Path,
3179 abs_path: &Path,
3180 path_style: PathStyle,
3181 ) -> Option<RepoPath> {
3182 abs_path
3183 .strip_prefix(&work_directory_abs_path)
3184 .ok()
3185 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3186 }
3187
3188 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3189 self.merge.conflicted_paths.contains(repo_path)
3190 }
3191
3192 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3193 let had_conflict_on_last_merge_head_change =
3194 self.merge.conflicted_paths.contains(repo_path);
3195 let has_conflict_currently = self
3196 .status_for_path(repo_path)
3197 .is_some_and(|entry| entry.status.is_conflicted());
3198 had_conflict_on_last_merge_head_change || has_conflict_currently
3199 }
3200
3201 /// This is the name that will be displayed in the repository selector for this repository.
3202 pub fn display_name(&self) -> SharedString {
3203 self.work_directory_abs_path
3204 .file_name()
3205 .unwrap_or_default()
3206 .to_string_lossy()
3207 .to_string()
3208 .into()
3209 }
3210}
3211
3212pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3213 proto::StashEntry {
3214 oid: entry.oid.as_bytes().to_vec(),
3215 message: entry.message.clone(),
3216 branch: entry.branch.clone(),
3217 index: entry.index as u64,
3218 timestamp: entry.timestamp,
3219 }
3220}
3221
3222pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3223 Ok(StashEntry {
3224 oid: Oid::from_bytes(&entry.oid)?,
3225 message: entry.message.clone(),
3226 index: entry.index as usize,
3227 branch: entry.branch.clone(),
3228 timestamp: entry.timestamp,
3229 })
3230}
3231
3232impl MergeDetails {
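    /// Loads the current merge state from the repository: the merge message plus the
    /// in-progress heads (MERGE_HEAD, CHERRY_PICK_HEAD, REBASE_HEAD, REVERT_HEAD, and
    /// APPLY_HEAD). When the heads have changed, the conflicted path set is recomputed
    /// from the status entries; otherwise the previous set is reused. Returns the new
    /// details along with whether the merge heads changed.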
3233 async fn load(
3234 backend: &Arc<dyn GitRepository>,
3235 status: &SumTree<StatusEntry>,
3236 prev_snapshot: &RepositorySnapshot,
3237 ) -> Result<(MergeDetails, bool)> {
3238 log::debug!("load merge details");
3239 let message = backend.merge_message().await;
3240 let heads = backend
3241 .revparse_batch(vec![
3242 "MERGE_HEAD".into(),
3243 "CHERRY_PICK_HEAD".into(),
3244 "REBASE_HEAD".into(),
3245 "REVERT_HEAD".into(),
3246 "APPLY_HEAD".into(),
3247 ])
3248 .await
3249 .log_err()
3250 .unwrap_or_default()
3251 .into_iter()
3252 .map(|opt| opt.map(SharedString::from))
3253 .collect::<Vec<_>>();
3254 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3255 let conflicted_paths = if merge_heads_changed {
3256 let current_conflicted_paths = TreeSet::from_ordered_entries(
3257 status
3258 .iter()
3259 .filter(|entry| entry.status.is_conflicted())
3260 .map(|entry| entry.repo_path.clone()),
3261 );
3262
            // A scan can run while a lengthy merge is still in progress: the merge will
            // eventually produce conflicts, but `git status` doesn't report them yet.
            // Since, for now, we only track the merge heads state in order to track
            // conflicts, don't update it until some conflicts actually appear.
3268 if heads.iter().any(Option::is_some)
3269 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3270 && current_conflicted_paths.is_empty()
3271 {
3272 log::debug!("not updating merge heads because no conflicts found");
3273 return Ok((
3274 MergeDetails {
3275 message: message.map(SharedString::from),
3276 ..prev_snapshot.merge.clone()
3277 },
3278 false,
3279 ));
3280 }
3281
3282 current_conflicted_paths
3283 } else {
3284 prev_snapshot.merge.conflicted_paths.clone()
3285 };
3286 let details = MergeDetails {
3287 conflicted_paths,
3288 message: message.map(SharedString::from),
3289 heads,
3290 };
3291 Ok((details, merge_heads_changed))
3292 }
3293}
3294
3295impl Repository {
3296 pub fn snapshot(&self) -> RepositorySnapshot {
3297 self.snapshot.clone()
3298 }
3299
3300 fn local(
3301 id: RepositoryId,
3302 work_directory_abs_path: Arc<Path>,
3303 dot_git_abs_path: Arc<Path>,
3304 repository_dir_abs_path: Arc<Path>,
3305 common_dir_abs_path: Arc<Path>,
3306 project_environment: WeakEntity<ProjectEnvironment>,
3307 fs: Arc<dyn Fs>,
3308 git_store: WeakEntity<GitStore>,
3309 cx: &mut Context<Self>,
3310 ) -> Self {
3311 let snapshot =
3312 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3313 Repository {
3314 this: cx.weak_entity(),
3315 git_store,
3316 snapshot,
3317 commit_message_buffer: None,
3318 askpass_delegates: Default::default(),
3319 paths_needing_status_update: Default::default(),
3320 latest_askpass_id: 0,
3321 job_sender: Repository::spawn_local_git_worker(
3322 work_directory_abs_path,
3323 dot_git_abs_path,
3324 repository_dir_abs_path,
3325 common_dir_abs_path,
3326 project_environment,
3327 fs,
3328 cx,
3329 ),
3330 job_id: 0,
3331 active_jobs: Default::default(),
3332 }
3333 }
3334
3335 fn remote(
3336 id: RepositoryId,
3337 work_directory_abs_path: Arc<Path>,
3338 path_style: PathStyle,
3339 project_id: ProjectId,
3340 client: AnyProtoClient,
3341 git_store: WeakEntity<GitStore>,
3342 cx: &mut Context<Self>,
3343 ) -> Self {
3344 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3345 Self {
3346 this: cx.weak_entity(),
3347 snapshot,
3348 commit_message_buffer: None,
3349 git_store,
3350 paths_needing_status_update: Default::default(),
3351 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3352 askpass_delegates: Default::default(),
3353 latest_askpass_id: 0,
3354 active_jobs: Default::default(),
3355 job_id: 0,
3356 }
3357 }
3358
3359 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3360 self.git_store.upgrade()
3361 }
3362
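    /// Reloads the index and HEAD base texts from the local backend for every open buffer
    /// in this repository that has a diff, forwards any changed bases to downstream
    /// clients, and triggers diff recalculation. Local repositories only; for remote ones
    /// the job logs an error and returns.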
3363 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3364 let this = cx.weak_entity();
3365 let git_store = self.git_store.clone();
3366 let _ = self.send_keyed_job(
3367 Some(GitJobKey::ReloadBufferDiffBases),
3368 None,
3369 |state, mut cx| async move {
3370 let RepositoryState::Local { backend, .. } = state else {
3371 log::error!("tried to recompute diffs for a non-local repository");
3372 return Ok(());
3373 };
3374
3375 let Some(this) = this.upgrade() else {
3376 return Ok(());
3377 };
3378
3379 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3380 git_store.update(cx, |git_store, cx| {
3381 git_store
3382 .diffs
3383 .iter()
3384 .filter_map(|(buffer_id, diff_state)| {
3385 let buffer_store = git_store.buffer_store.read(cx);
3386 let buffer = buffer_store.get(*buffer_id)?;
3387 let file = File::from_dyn(buffer.read(cx).file())?;
3388 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3389 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3390 log::debug!(
3391 "start reload diff bases for repo path {}",
3392 repo_path.as_unix_str()
3393 );
3394 diff_state.update(cx, |diff_state, _| {
3395 let has_unstaged_diff = diff_state
3396 .unstaged_diff
3397 .as_ref()
3398 .is_some_and(|diff| diff.is_upgradable());
3399 let has_uncommitted_diff = diff_state
3400 .uncommitted_diff
3401 .as_ref()
3402 .is_some_and(|set| set.is_upgradable());
3403
3404 Some((
3405 buffer,
3406 repo_path,
3407 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3408 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3409 ))
3410 })
3411 })
3412 .collect::<Vec<_>>()
3413 })
3414 })??;
3415
3416 let buffer_diff_base_changes = cx
3417 .background_spawn(async move {
3418 let mut changes = Vec::new();
3419 for (buffer, repo_path, current_index_text, current_head_text) in
3420 &repo_diff_state_updates
3421 {
3422 let index_text = if current_index_text.is_some() {
3423 backend.load_index_text(repo_path.clone()).await
3424 } else {
3425 None
3426 };
3427 let head_text = if current_head_text.is_some() {
3428 backend.load_committed_text(repo_path.clone()).await
3429 } else {
3430 None
3431 };
3432
3433 let change =
3434 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3435 (Some(current_index), Some(current_head)) => {
3436 let index_changed =
3437 index_text.as_ref() != current_index.as_deref();
3438 let head_changed =
3439 head_text.as_ref() != current_head.as_deref();
3440 if index_changed && head_changed {
3441 if index_text == head_text {
3442 Some(DiffBasesChange::SetBoth(head_text))
3443 } else {
3444 Some(DiffBasesChange::SetEach {
3445 index: index_text,
3446 head: head_text,
3447 })
3448 }
3449 } else if index_changed {
3450 Some(DiffBasesChange::SetIndex(index_text))
3451 } else if head_changed {
3452 Some(DiffBasesChange::SetHead(head_text))
3453 } else {
3454 None
3455 }
3456 }
3457 (Some(current_index), None) => {
3458 let index_changed =
3459 index_text.as_ref() != current_index.as_deref();
3460 index_changed
3461 .then_some(DiffBasesChange::SetIndex(index_text))
3462 }
3463 (None, Some(current_head)) => {
3464 let head_changed =
3465 head_text.as_ref() != current_head.as_deref();
3466 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3467 }
3468 (None, None) => None,
3469 };
3470
3471 changes.push((buffer.clone(), change))
3472 }
3473 changes
3474 })
3475 .await;
3476
3477 git_store.update(&mut cx, |git_store, cx| {
3478 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3479 let buffer_snapshot = buffer.read(cx).text_snapshot();
3480 let buffer_id = buffer_snapshot.remote_id();
3481 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3482 continue;
3483 };
3484
3485 let downstream_client = git_store.downstream_client();
3486 diff_state.update(cx, |diff_state, cx| {
3487 use proto::update_diff_bases::Mode;
3488
3489 if let Some((diff_bases_change, (client, project_id))) =
3490 diff_bases_change.clone().zip(downstream_client)
3491 {
3492 let (staged_text, committed_text, mode) = match diff_bases_change {
3493 DiffBasesChange::SetIndex(index) => {
3494 (index, None, Mode::IndexOnly)
3495 }
3496 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3497 DiffBasesChange::SetEach { index, head } => {
3498 (index, head, Mode::IndexAndHead)
3499 }
3500 DiffBasesChange::SetBoth(text) => {
3501 (None, text, Mode::IndexMatchesHead)
3502 }
3503 };
3504 client
3505 .send(proto::UpdateDiffBases {
3506 project_id: project_id.to_proto(),
3507 buffer_id: buffer_id.to_proto(),
3508 staged_text,
3509 committed_text,
3510 mode: mode as i32,
3511 })
3512 .log_err();
3513 }
3514
3515 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3516 });
3517 }
3518 })
3519 },
3520 );
3521 }
3522
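    /// Enqueues an un-keyed job on the repository's worker; see [`Self::send_keyed_job`].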
3523 pub fn send_job<F, Fut, R>(
3524 &mut self,
3525 status: Option<SharedString>,
3526 job: F,
3527 ) -> oneshot::Receiver<R>
3528 where
3529 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3530 Fut: Future<Output = R> + 'static,
3531 R: Send + 'static,
3532 {
3533 self.send_keyed_job(None, status, job)
3534 }
3535
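    /// Enqueues a job on the repository's worker, optionally tagged with a `GitJobKey` and
    /// a status message. While the job runs, the status is recorded in `active_jobs` and
    /// observers are notified; the returned receiver yields the job's result.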
3536 fn send_keyed_job<F, Fut, R>(
3537 &mut self,
3538 key: Option<GitJobKey>,
3539 status: Option<SharedString>,
3540 job: F,
3541 ) -> oneshot::Receiver<R>
3542 where
3543 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3544 Fut: Future<Output = R> + 'static,
3545 R: Send + 'static,
3546 {
3547 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3548 let job_id = post_inc(&mut self.job_id);
3549 let this = self.this.clone();
3550 self.job_sender
3551 .unbounded_send(GitJob {
3552 key,
3553 job: Box::new(move |state, cx: &mut AsyncApp| {
3554 let job = job(state, cx.clone());
3555 cx.spawn(async move |cx| {
3556 if let Some(s) = status.clone() {
3557 this.update(cx, |this, cx| {
3558 this.active_jobs.insert(
3559 job_id,
3560 JobInfo {
3561 start: Instant::now(),
3562 message: s.clone(),
3563 },
3564 );
3565
3566 cx.notify();
3567 })
3568 .ok();
3569 }
3570 let result = job.await;
3571
3572 this.update(cx, |this, cx| {
3573 this.active_jobs.remove(&job_id);
3574 cx.notify();
3575 })
3576 .ok();
3577
3578 result_tx.send(result).ok();
3579 })
3580 }),
3581 })
3582 .ok();
3583 result_rx
3584 }
3585
3586 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3587 let Some(git_store) = self.git_store.upgrade() else {
3588 return;
3589 };
3590 let entity = cx.entity();
3591 git_store.update(cx, |git_store, cx| {
3592 let Some((&id, _)) = git_store
3593 .repositories
3594 .iter()
3595 .find(|(_, handle)| *handle == &entity)
3596 else {
3597 return;
3598 };
3599 git_store.active_repo_id = Some(id);
3600 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3601 });
3602 }
3603
3604 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3605 self.snapshot.status()
3606 }
3607
3608 pub fn cached_stash(&self) -> GitStash {
3609 self.snapshot.stash_entries.clone()
3610 }
3611
3612 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3613 let git_store = self.git_store.upgrade()?;
3614 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3615 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3616 let abs_path = SanitizedPath::new(&abs_path);
3617 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3618 Some(ProjectPath {
3619 worktree_id: worktree.read(cx).id(),
3620 path: relative_path,
3621 })
3622 }
3623
3624 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3625 let git_store = self.git_store.upgrade()?;
3626 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3627 let abs_path = worktree_store.absolutize(path, cx)?;
3628 self.snapshot.abs_path_to_repo_path(&abs_path)
3629 }
3630
3631 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3632 other
3633 .read(cx)
3634 .snapshot
3635 .work_directory_abs_path
3636 .starts_with(&self.snapshot.work_directory_abs_path)
3637 }
3638
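    /// Returns the shared commit message buffer for this repository, creating it on first
    /// use. Locally a new buffer is created; for remote repositories the buffer is
    /// requested over RPC and awaited. In both cases the "Git Commit" language is applied
    /// when a language registry is provided.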
3639 pub fn open_commit_buffer(
3640 &mut self,
3641 languages: Option<Arc<LanguageRegistry>>,
3642 buffer_store: Entity<BufferStore>,
3643 cx: &mut Context<Self>,
3644 ) -> Task<Result<Entity<Buffer>>> {
3645 let id = self.id;
3646 if let Some(buffer) = self.commit_message_buffer.clone() {
3647 return Task::ready(Ok(buffer));
3648 }
3649 let this = cx.weak_entity();
3650
3651 let rx = self.send_job(None, move |state, mut cx| async move {
3652 let Some(this) = this.upgrade() else {
3653 bail!("git store was dropped");
3654 };
3655 match state {
3656 RepositoryState::Local { .. } => {
3657 this.update(&mut cx, |_, cx| {
3658 Self::open_local_commit_buffer(languages, buffer_store, cx)
3659 })?
3660 .await
3661 }
3662 RepositoryState::Remote { project_id, client } => {
3663 let request = client.request(proto::OpenCommitMessageBuffer {
3664 project_id: project_id.0,
3665 repository_id: id.to_proto(),
3666 });
3667 let response = request.await.context("requesting to open commit buffer")?;
3668 let buffer_id = BufferId::new(response.buffer_id)?;
3669 let buffer = buffer_store
3670 .update(&mut cx, |buffer_store, cx| {
3671 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3672 })?
3673 .await?;
3674 if let Some(language_registry) = languages {
3675 let git_commit_language =
3676 language_registry.language_for_name("Git Commit").await?;
3677 buffer.update(&mut cx, |buffer, cx| {
3678 buffer.set_language(Some(git_commit_language), cx);
3679 })?;
3680 }
3681 this.update(&mut cx, |this, _| {
3682 this.commit_message_buffer = Some(buffer.clone());
3683 })?;
3684 Ok(buffer)
3685 }
3686 }
3687 });
3688
3689 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3690 }
3691
3692 fn open_local_commit_buffer(
3693 language_registry: Option<Arc<LanguageRegistry>>,
3694 buffer_store: Entity<BufferStore>,
3695 cx: &mut Context<Self>,
3696 ) -> Task<Result<Entity<Buffer>>> {
3697 cx.spawn(async move |repository, cx| {
3698 let buffer = buffer_store
3699 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3700 .await?;
3701
3702 if let Some(language_registry) = language_registry {
3703 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3704 buffer.update(cx, |buffer, cx| {
3705 buffer.set_language(Some(git_commit_language), cx);
3706 })?;
3707 }
3708
3709 repository.update(cx, |repository, _| {
3710 repository.commit_message_buffer = Some(buffer.clone());
3711 })?;
3712 Ok(buffer)
3713 })
3714 }
3715
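    /// Restores the given paths to their contents at `commit`, recording a pending
    /// `Reverted` operation for each path while the job runs. Locally this performs the
    /// checkout via the git backend; for remote repositories the request is forwarded
    /// over RPC.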
3716 pub fn checkout_files(
3717 &mut self,
3718 commit: &str,
3719 paths: Vec<RepoPath>,
3720 cx: &mut Context<Self>,
3721 ) -> Task<Result<()>> {
3722 let commit = commit.to_string();
3723 let id = self.id;
3724
3725 self.spawn_job_with_tracking(
3726 paths.clone(),
3727 pending_op::GitStatus::Reverted,
3728 cx,
3729 async move |this, cx| {
3730 this.update(cx, |this, _cx| {
3731 this.send_job(
3732 Some(format!("git checkout {}", commit).into()),
3733 move |git_repo, _| async move {
3734 match git_repo {
3735 RepositoryState::Local {
3736 backend,
3737 environment,
3738 ..
3739 } => {
3740 backend
3741 .checkout_files(commit, paths, environment.clone())
3742 .await
3743 }
3744 RepositoryState::Remote { project_id, client } => {
3745 client
3746 .request(proto::GitCheckoutFiles {
3747 project_id: project_id.0,
3748 repository_id: id.to_proto(),
3749 commit,
3750 paths: paths
3751 .into_iter()
3752 .map(|p| p.to_proto())
3753 .collect(),
3754 })
3755 .await?;
3756
3757 Ok(())
3758 }
3759 }
3760 },
3761 )
3762 })?
3763 .await?
3764 },
3765 )
3766 }
3767
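    /// Resets HEAD to `commit` using the given mode (soft keeps the index,
    /// mixed resets it).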
3768 pub fn reset(
3769 &mut self,
3770 commit: String,
3771 reset_mode: ResetMode,
3772 _cx: &mut App,
3773 ) -> oneshot::Receiver<Result<()>> {
3774 let id = self.id;
3775
3776 self.send_job(None, move |git_repo, _| async move {
3777 match git_repo {
3778 RepositoryState::Local {
3779 backend,
3780 environment,
3781 ..
3782 } => backend.reset(commit, reset_mode, environment).await,
3783 RepositoryState::Remote { project_id, client } => {
3784 client
3785 .request(proto::GitReset {
3786 project_id: project_id.0,
3787 repository_id: id.to_proto(),
3788 commit,
3789 mode: match reset_mode {
3790 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3791 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3792 },
3793 })
3794 .await?;
3795
3796 Ok(())
3797 }
3798 }
3799 })
3800 }
3801
3802 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3803 let id = self.id;
3804 self.send_job(None, move |git_repo, _cx| async move {
3805 match git_repo {
3806 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3807 RepositoryState::Remote { project_id, client } => {
3808 let resp = client
3809 .request(proto::GitShow {
3810 project_id: project_id.0,
3811 repository_id: id.to_proto(),
3812 commit,
3813 })
3814 .await?;
3815
3816 Ok(CommitDetails {
3817 sha: resp.sha.into(),
3818 message: resp.message.into(),
3819 commit_timestamp: resp.commit_timestamp,
3820 author_email: resp.author_email.into(),
3821 author_name: resp.author_name.into(),
3822 })
3823 }
3824 }
3825 })
3826 }
3827
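    /// Loads the diff introduced by `commit`, including the old and new text of
    /// each changed file.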
3828 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3829 let id = self.id;
3830 self.send_job(None, move |git_repo, cx| async move {
3831 match git_repo {
3832 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3833 RepositoryState::Remote {
3834 client, project_id, ..
3835 } => {
3836 let response = client
3837 .request(proto::LoadCommitDiff {
3838 project_id: project_id.0,
3839 repository_id: id.to_proto(),
3840 commit,
3841 })
3842 .await?;
3843 Ok(CommitDiff {
3844 files: response
3845 .files
3846 .into_iter()
3847 .map(|file| {
3848 Ok(CommitFile {
3849 path: RepoPath::from_proto(&file.path)?,
3850 old_text: file.old_text,
3851 new_text: file.new_text,
3852 })
3853 })
3854 .collect::<Result<Vec<_>>>()?,
3855 })
3856 }
3857 }
3858 })
3859 }
3860
3861 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3862 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3863 }
3864
3865 fn save_buffers<'a>(
3866 &self,
3867 entries: impl IntoIterator<Item = &'a RepoPath>,
3868 cx: &mut Context<Self>,
3869 ) -> Vec<Task<anyhow::Result<()>>> {
3870 let mut save_futures = Vec::new();
3871 if let Some(buffer_store) = self.buffer_store(cx) {
3872 buffer_store.update(cx, |buffer_store, cx| {
3873 for path in entries {
3874 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3875 continue;
3876 };
3877 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3878 && buffer
3879 .read(cx)
3880 .file()
3881 .is_some_and(|file| file.disk_state().exists())
3882 && buffer.read(cx).has_unsaved_edits()
3883 {
3884 save_futures.push(buffer_store.save_buffer(buffer, cx));
3885 }
3886 }
3887 })
3888 }
3889 save_futures
3890 }
3891
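    /// Saves any dirty buffers among `entries`, then stages the paths, tracking
    /// them as pending `Staged` ops while the job runs.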
3892 pub fn stage_entries(
3893 &mut self,
3894 entries: Vec<RepoPath>,
3895 cx: &mut Context<Self>,
3896 ) -> Task<anyhow::Result<()>> {
3897 if entries.is_empty() {
3898 return Task::ready(Ok(()));
3899 }
3900 let id = self.id;
3901 let save_tasks = self.save_buffers(&entries, cx);
3902 let paths = entries
3903 .iter()
3904 .map(|p| p.as_unix_str())
3905 .collect::<Vec<_>>()
3906 .join(" ");
3907 let status = format!("git add {paths}");
3908 let job_key = GitJobKey::WriteIndex(entries.clone());
3909
3910 self.spawn_job_with_tracking(
3911 entries.clone(),
3912 pending_op::GitStatus::Staged,
3913 cx,
3914 async move |this, cx| {
3915 for save_task in save_tasks {
3916 save_task.await?;
3917 }
3918
3919 this.update(cx, |this, _| {
3920 this.send_keyed_job(
3921 Some(job_key),
3922 Some(status.into()),
3923 move |git_repo, _cx| async move {
3924 match git_repo {
3925 RepositoryState::Local {
3926 backend,
3927 environment,
3928 ..
3929 } => backend.stage_paths(entries, environment.clone()).await,
3930 RepositoryState::Remote { project_id, client } => {
3931 client
3932 .request(proto::Stage {
3933 project_id: project_id.0,
3934 repository_id: id.to_proto(),
3935 paths: entries
3936 .into_iter()
3937 .map(|repo_path| repo_path.to_proto())
3938 .collect(),
3939 })
3940 .await
3941 .context("sending stage request")?;
3942
3943 Ok(())
3944 }
3945 }
3946 },
3947 )
3948 })?
3949 .await?
3950 },
3951 )
3952 }
3953
3954 pub fn unstage_entries(
3955 &mut self,
3956 entries: Vec<RepoPath>,
3957 cx: &mut Context<Self>,
3958 ) -> Task<anyhow::Result<()>> {
3959 if entries.is_empty() {
3960 return Task::ready(Ok(()));
3961 }
3962 let id = self.id;
3963 let save_tasks = self.save_buffers(&entries, cx);
3964 let paths = entries
3965 .iter()
3966 .map(|p| p.as_unix_str())
3967 .collect::<Vec<_>>()
3968 .join(" ");
3969 let status = format!("git reset {paths}");
3970 let job_key = GitJobKey::WriteIndex(entries.clone());
3971
3972 self.spawn_job_with_tracking(
3973 entries.clone(),
3974 pending_op::GitStatus::Unstaged,
3975 cx,
3976 async move |this, cx| {
3977 for save_task in save_tasks {
3978 save_task.await?;
3979 }
3980
3981 this.update(cx, |this, _| {
3982 this.send_keyed_job(
3983 Some(job_key),
3984 Some(status.into()),
3985 move |git_repo, _cx| async move {
3986 match git_repo {
3987 RepositoryState::Local {
3988 backend,
3989 environment,
3990 ..
3991 } => backend.unstage_paths(entries, environment).await,
3992 RepositoryState::Remote { project_id, client } => {
3993 client
3994 .request(proto::Unstage {
3995 project_id: project_id.0,
3996 repository_id: id.to_proto(),
3997 paths: entries
3998 .into_iter()
3999 .map(|repo_path| repo_path.to_proto())
4000 .collect(),
4001 })
4002 .await
4003 .context("sending unstage request")?;
4004
4005 Ok(())
4006 }
4007 }
4008 },
4009 )
4010 })?
4011 .await?
4012 },
4013 )
4014 }
4015
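    /// Stages every file in the cached status that isn't already staged or in the
    /// process of being staged.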
4016 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4017 let to_stage = self
4018 .cached_status()
4019 .filter_map(|entry| {
4020 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4021 if ops.staging() || ops.staged() {
4022 None
4023 } else {
4024 Some(entry.repo_path)
4025 }
4026 } else if entry.status.staging().has_staged() {
4027 None
4028 } else {
4029 Some(entry.repo_path)
4030 }
4031 })
4032 .collect();
4033 self.stage_entries(to_stage, cx)
4034 }
4035
4036 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4037 let to_unstage = self
4038 .cached_status()
4039 .filter_map(|entry| {
4040 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4041 if !ops.staging() && !ops.staged() {
4042 None
4043 } else {
4044 Some(entry.repo_path)
4045 }
4046 } else if entry.status.staging().has_unstaged() {
4047 None
4048 } else {
4049 Some(entry.repo_path)
4050 }
4051 })
4052 .collect();
4053 self.unstage_entries(to_unstage, cx)
4054 }
4055
4056 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4057 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4058
4059 self.stash_entries(to_stash, cx)
4060 }
4061
4062 pub fn stash_entries(
4063 &mut self,
4064 entries: Vec<RepoPath>,
4065 cx: &mut Context<Self>,
4066 ) -> Task<anyhow::Result<()>> {
4067 let id = self.id;
4068
4069 cx.spawn(async move |this, cx| {
4070 this.update(cx, |this, _| {
4071 this.send_job(None, move |git_repo, _cx| async move {
4072 match git_repo {
4073 RepositoryState::Local {
4074 backend,
4075 environment,
4076 ..
4077 } => backend.stash_paths(entries, environment).await,
4078 RepositoryState::Remote { project_id, client } => {
4079 client
4080 .request(proto::Stash {
4081 project_id: project_id.0,
4082 repository_id: id.to_proto(),
4083 paths: entries
4084 .into_iter()
4085 .map(|repo_path| repo_path.to_proto())
4086 .collect(),
4087 })
4088 .await
4089 .context("sending stash request")?;
4090 Ok(())
4091 }
4092 }
4093 })
4094 })?
4095 .await??;
4096 Ok(())
4097 })
4098 }
4099
4100 pub fn stash_pop(
4101 &mut self,
4102 index: Option<usize>,
4103 cx: &mut Context<Self>,
4104 ) -> Task<anyhow::Result<()>> {
4105 let id = self.id;
4106 cx.spawn(async move |this, cx| {
4107 this.update(cx, |this, _| {
4108 this.send_job(None, move |git_repo, _cx| async move {
4109 match git_repo {
4110 RepositoryState::Local {
4111 backend,
4112 environment,
4113 ..
4114 } => backend.stash_pop(index, environment).await,
4115 RepositoryState::Remote { project_id, client } => {
4116 client
4117 .request(proto::StashPop {
4118 project_id: project_id.0,
4119 repository_id: id.to_proto(),
4120 stash_index: index.map(|i| i as u64),
4121 })
4122 .await
4123 .context("sending stash pop request")?;
4124 Ok(())
4125 }
4126 }
4127 })
4128 })?
4129 .await??;
4130 Ok(())
4131 })
4132 }
4133
4134 pub fn stash_apply(
4135 &mut self,
4136 index: Option<usize>,
4137 cx: &mut Context<Self>,
4138 ) -> Task<anyhow::Result<()>> {
4139 let id = self.id;
4140 cx.spawn(async move |this, cx| {
4141 this.update(cx, |this, _| {
4142 this.send_job(None, move |git_repo, _cx| async move {
4143 match git_repo {
4144 RepositoryState::Local {
4145 backend,
4146 environment,
4147 ..
4148 } => backend.stash_apply(index, environment).await,
4149 RepositoryState::Remote { project_id, client } => {
4150 client
4151 .request(proto::StashApply {
4152 project_id: project_id.0,
4153 repository_id: id.to_proto(),
4154 stash_index: index.map(|i| i as u64),
4155 })
4156 .await
4157 .context("sending stash apply request")?;
4158 Ok(())
4159 }
4160 }
4161 })
4162 })?
4163 .await??;
4164 Ok(())
4165 })
4166 }
4167
4168 pub fn stash_drop(
4169 &mut self,
4170 index: Option<usize>,
4171 cx: &mut Context<Self>,
4172 ) -> oneshot::Receiver<anyhow::Result<()>> {
4173 let id = self.id;
4174 let updates_tx = self
4175 .git_store()
4176 .and_then(|git_store| match &git_store.read(cx).state {
4177 GitStoreState::Local { downstream, .. } => downstream
4178 .as_ref()
4179 .map(|downstream| downstream.updates_tx.clone()),
4180 _ => None,
4181 });
4182 let this = cx.weak_entity();
4183 self.send_job(None, move |git_repo, mut cx| async move {
4184 match git_repo {
4185 RepositoryState::Local {
4186 backend,
4187 environment,
4188 ..
4189 } => {
4190 // TODO would be nice to not have to do this manually
4191 let result = backend.stash_drop(index, environment).await;
4192 if result.is_ok()
4193 && let Ok(stash_entries) = backend.stash_entries().await
4194 {
4195 let snapshot = this.update(&mut cx, |this, cx| {
4196 this.snapshot.stash_entries = stash_entries;
4197 cx.emit(RepositoryEvent::StashEntriesChanged);
4198 this.snapshot.clone()
4199 })?;
4200 if let Some(updates_tx) = updates_tx {
4201 updates_tx
4202 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4203 .ok();
4204 }
4205 }
4206
4207 result
4208 }
4209 RepositoryState::Remote { project_id, client } => {
4210 client
4211 .request(proto::StashDrop {
4212 project_id: project_id.0,
4213 repository_id: id.to_proto(),
4214 stash_index: index.map(|i| i as u64),
4215 })
4216 .await
                        .context("sending stash drop request")?;
4218 Ok(())
4219 }
4220 }
4221 })
4222 }
4223
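    /// Commits the currently staged changes with `message`; `name_and_email`
    /// overrides the author, and `options` controls amend/signoff. The askpass
    /// delegate services any prompts raised by git (forwarded by id when the
    /// repository is remote).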
4224 pub fn commit(
4225 &mut self,
4226 message: SharedString,
4227 name_and_email: Option<(SharedString, SharedString)>,
4228 options: CommitOptions,
4229 askpass: AskPassDelegate,
4230 _cx: &mut App,
4231 ) -> oneshot::Receiver<Result<()>> {
4232 let id = self.id;
4233 let askpass_delegates = self.askpass_delegates.clone();
4234 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4235
4236 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4237 match git_repo {
4238 RepositoryState::Local {
4239 backend,
4240 environment,
4241 ..
4242 } => {
4243 backend
4244 .commit(message, name_and_email, options, askpass, environment)
4245 .await
4246 }
4247 RepositoryState::Remote { project_id, client } => {
4248 askpass_delegates.lock().insert(askpass_id, askpass);
4249 let _defer = util::defer(|| {
4250 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4251 debug_assert!(askpass_delegate.is_some());
4252 });
4253 let (name, email) = name_and_email.unzip();
4254 client
4255 .request(proto::Commit {
4256 project_id: project_id.0,
4257 repository_id: id.to_proto(),
4258 message: String::from(message),
4259 name: name.map(String::from),
4260 email: email.map(String::from),
4261 options: Some(proto::commit::CommitOptions {
4262 amend: options.amend,
4263 signoff: options.signoff,
4264 }),
4265 askpass_id,
4266 })
4267 .await
4268 .context("sending commit request")?;
4269
4270 Ok(())
4271 }
4272 }
4273 })
4274 }
4275
4276 pub fn fetch(
4277 &mut self,
4278 fetch_options: FetchOptions,
4279 askpass: AskPassDelegate,
4280 _cx: &mut App,
4281 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4282 let askpass_delegates = self.askpass_delegates.clone();
4283 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4284 let id = self.id;
4285
4286 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4287 match git_repo {
4288 RepositoryState::Local {
4289 backend,
4290 environment,
4291 ..
4292 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4293 RepositoryState::Remote { project_id, client } => {
4294 askpass_delegates.lock().insert(askpass_id, askpass);
4295 let _defer = util::defer(|| {
4296 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4297 debug_assert!(askpass_delegate.is_some());
4298 });
4299
4300 let response = client
4301 .request(proto::Fetch {
4302 project_id: project_id.0,
4303 repository_id: id.to_proto(),
4304 askpass_id,
4305 remote: fetch_options.to_proto(),
4306 })
4307 .await
4308 .context("sending fetch request")?;
4309
4310 Ok(RemoteCommandOutput {
4311 stdout: response.stdout,
4312 stderr: response.stderr,
4313 })
4314 }
4315 }
4316 })
4317 }
4318
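    /// Pushes `branch` to `remote`, optionally with `--set-upstream` or
    /// `--force-with-lease`. On success the cached head branch is refreshed and
    /// the updated snapshot is forwarded downstream.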
4319 pub fn push(
4320 &mut self,
4321 branch: SharedString,
4322 remote: SharedString,
4323 options: Option<PushOptions>,
4324 askpass: AskPassDelegate,
4325 cx: &mut Context<Self>,
4326 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4327 let askpass_delegates = self.askpass_delegates.clone();
4328 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4329 let id = self.id;
4330
4331 let args = options
4332 .map(|option| match option {
4333 PushOptions::SetUpstream => " --set-upstream",
4334 PushOptions::Force => " --force-with-lease",
4335 })
4336 .unwrap_or("");
4337
4338 let updates_tx = self
4339 .git_store()
4340 .and_then(|git_store| match &git_store.read(cx).state {
4341 GitStoreState::Local { downstream, .. } => downstream
4342 .as_ref()
4343 .map(|downstream| downstream.updates_tx.clone()),
4344 _ => None,
4345 });
4346
4347 let this = cx.weak_entity();
4348 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4350 move |git_repo, mut cx| async move {
4351 match git_repo {
4352 RepositoryState::Local {
4353 backend,
4354 environment,
4355 ..
4356 } => {
4357 let result = backend
4358 .push(
4359 branch.to_string(),
4360 remote.to_string(),
4361 options,
4362 askpass,
4363 environment.clone(),
4364 cx.clone(),
4365 )
4366 .await;
4367 // TODO would be nice to not have to do this manually
4368 if result.is_ok() {
4369 let branches = backend.branches().await?;
4370 let branch = branches.into_iter().find(|branch| branch.is_head);
4371 log::info!("head branch after scan is {branch:?}");
4372 let snapshot = this.update(&mut cx, |this, cx| {
4373 this.snapshot.branch = branch;
4374 cx.emit(RepositoryEvent::BranchChanged);
4375 this.snapshot.clone()
4376 })?;
4377 if let Some(updates_tx) = updates_tx {
4378 updates_tx
4379 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4380 .ok();
4381 }
4382 }
4383 result
4384 }
4385 RepositoryState::Remote { project_id, client } => {
4386 askpass_delegates.lock().insert(askpass_id, askpass);
4387 let _defer = util::defer(|| {
4388 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4389 debug_assert!(askpass_delegate.is_some());
4390 });
4391 let response = client
4392 .request(proto::Push {
4393 project_id: project_id.0,
4394 repository_id: id.to_proto(),
4395 askpass_id,
4396 branch_name: branch.to_string(),
4397 remote_name: remote.to_string(),
4398 options: options.map(|options| match options {
4399 PushOptions::Force => proto::push::PushOptions::Force,
4400 PushOptions::SetUpstream => {
4401 proto::push::PushOptions::SetUpstream
4402 }
4403 }
4404 as i32),
4405 })
4406 .await
4407 .context("sending push request")?;
4408
4409 Ok(RemoteCommandOutput {
4410 stdout: response.stdout,
4411 stderr: response.stderr,
4412 })
4413 }
4414 }
4415 },
4416 )
4417 }
4418
4419 pub fn pull(
4420 &mut self,
4421 branch: Option<SharedString>,
4422 remote: SharedString,
4423 rebase: bool,
4424 askpass: AskPassDelegate,
4425 _cx: &mut App,
4426 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4427 let askpass_delegates = self.askpass_delegates.clone();
4428 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4429 let id = self.id;
4430
4431 let mut status = "git pull".to_string();
4432 if rebase {
4433 status.push_str(" --rebase");
4434 }
4435 status.push_str(&format!(" {}", remote));
4436 if let Some(b) = &branch {
4437 status.push_str(&format!(" {}", b));
4438 }
4439
4440 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4441 match git_repo {
4442 RepositoryState::Local {
4443 backend,
4444 environment,
4445 ..
4446 } => {
4447 backend
4448 .pull(
4449 branch.as_ref().map(|b| b.to_string()),
4450 remote.to_string(),
4451 rebase,
4452 askpass,
4453 environment.clone(),
4454 cx,
4455 )
4456 .await
4457 }
4458 RepositoryState::Remote { project_id, client } => {
4459 askpass_delegates.lock().insert(askpass_id, askpass);
4460 let _defer = util::defer(|| {
4461 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4462 debug_assert!(askpass_delegate.is_some());
4463 });
4464 let response = client
4465 .request(proto::Pull {
4466 project_id: project_id.0,
4467 repository_id: id.to_proto(),
4468 askpass_id,
4469 rebase,
4470 branch_name: branch.as_ref().map(|b| b.to_string()),
4471 remote_name: remote.to_string(),
4472 })
4473 .await
4474 .context("sending pull request")?;
4475
4476 Ok(RemoteCommandOutput {
4477 stdout: response.stdout,
4478 stderr: response.stderr,
4479 })
4480 }
4481 }
4482 })
4483 }
4484
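    /// Enqueues a keyed job that overwrites the index (staged) contents of `path`
    /// with `content` (clearing the entry when `None`), then, if requested,
    /// records `hunk_staging_operation_count` on the corresponding buffer's diff
    /// state.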
4485 fn spawn_set_index_text_job(
4486 &mut self,
4487 path: RepoPath,
4488 content: Option<String>,
4489 hunk_staging_operation_count: Option<usize>,
4490 cx: &mut Context<Self>,
4491 ) -> oneshot::Receiver<anyhow::Result<()>> {
4492 let id = self.id;
4493 let this = cx.weak_entity();
4494 let git_store = self.git_store.clone();
4495 self.send_keyed_job(
4496 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4497 None,
4498 move |git_repo, mut cx| async move {
4499 log::debug!(
4500 "start updating index text for buffer {}",
4501 path.as_unix_str()
4502 );
4503 match git_repo {
4504 RepositoryState::Local {
4505 backend,
4506 environment,
4507 ..
4508 } => {
4509 backend
4510 .set_index_text(path.clone(), content, environment.clone())
4511 .await?;
4512 }
4513 RepositoryState::Remote { project_id, client } => {
4514 client
4515 .request(proto::SetIndexText {
4516 project_id: project_id.0,
4517 repository_id: id.to_proto(),
4518 path: path.to_proto(),
4519 text: content,
4520 })
4521 .await?;
4522 }
4523 }
4524 log::debug!(
4525 "finish updating index text for buffer {}",
4526 path.as_unix_str()
4527 );
4528
4529 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4530 let project_path = this
4531 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4532 .ok()
4533 .flatten();
4534 git_store.update(&mut cx, |git_store, cx| {
4535 let buffer_id = git_store
4536 .buffer_store
4537 .read(cx)
4538 .get_by_path(&project_path?)?
4539 .read(cx)
4540 .remote_id();
4541 let diff_state = git_store.diffs.get(&buffer_id)?;
4542 diff_state.update(cx, |diff_state, _| {
4543 diff_state.hunk_staging_operation_count_as_of_write =
4544 hunk_staging_operation_count;
4545 });
4546 Some(())
4547 })?;
4548 }
4549 Ok(())
4550 },
4551 )
4552 }
4553
4554 pub fn get_remotes(
4555 &mut self,
4556 branch_name: Option<String>,
4557 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4558 let id = self.id;
4559 self.send_job(None, move |repo, _cx| async move {
4560 match repo {
4561 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4562 RepositoryState::Remote { project_id, client } => {
4563 let response = client
4564 .request(proto::GetRemotes {
4565 project_id: project_id.0,
4566 repository_id: id.to_proto(),
4567 branch_name,
4568 })
4569 .await?;
4570
4571 let remotes = response
4572 .remotes
4573 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4577 .collect();
4578
4579 Ok(remotes)
4580 }
4581 }
4582 })
4583 }
4584
4585 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4586 let id = self.id;
4587 self.send_job(None, move |repo, _| async move {
4588 match repo {
4589 RepositoryState::Local { backend, .. } => backend.branches().await,
4590 RepositoryState::Remote { project_id, client } => {
4591 let response = client
4592 .request(proto::GitGetBranches {
4593 project_id: project_id.0,
4594 repository_id: id.to_proto(),
4595 })
4596 .await?;
4597
4598 let branches = response
4599 .branches
4600 .into_iter()
4601 .map(|branch| proto_to_branch(&branch))
4602 .collect();
4603
4604 Ok(branches)
4605 }
4606 }
4607 })
4608 }
4609
4610 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4611 let id = self.id;
4612 self.send_job(None, move |repo, _| async move {
4613 match repo {
4614 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4615 RepositoryState::Remote { project_id, client } => {
4616 let response = client
4617 .request(proto::GitGetWorktrees {
4618 project_id: project_id.0,
4619 repository_id: id.to_proto(),
4620 })
4621 .await?;
4622
4623 let worktrees = response
4624 .worktrees
4625 .into_iter()
4626 .map(|worktree| proto_to_worktree(&worktree))
4627 .collect();
4628
4629 Ok(worktrees)
4630 }
4631 }
4632 })
4633 }
4634
4635 pub fn create_worktree(
4636 &mut self,
4637 name: String,
4638 path: PathBuf,
4639 commit: Option<String>,
4640 ) -> oneshot::Receiver<Result<()>> {
4641 let id = self.id;
4642 self.send_job(
4643 Some("git worktree add".into()),
4644 move |repo, _cx| async move {
4645 match repo {
4646 RepositoryState::Local { backend, .. } => {
4647 backend.create_worktree(name, path, commit).await
4648 }
4649 RepositoryState::Remote { project_id, client } => {
4650 client
4651 .request(proto::GitCreateWorktree {
4652 project_id: project_id.0,
4653 repository_id: id.to_proto(),
4654 name,
4655 directory: path.to_string_lossy().to_string(),
4656 commit,
4657 })
4658 .await?;
4659
4660 Ok(())
4661 }
4662 }
4663 },
4664 )
4665 }
4666
4667 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4668 let id = self.id;
4669 self.send_job(None, move |repo, _| async move {
4670 match repo {
4671 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4672 RepositoryState::Remote { project_id, client } => {
4673 let response = client
4674 .request(proto::GetDefaultBranch {
4675 project_id: project_id.0,
4676 repository_id: id.to_proto(),
4677 })
4678 .await?;
4679
4680 anyhow::Ok(response.branch.map(SharedString::from))
4681 }
4682 }
4683 })
4684 }
4685
4686 pub fn diff_tree(
4687 &mut self,
4688 diff_type: DiffTreeType,
4689 _cx: &App,
4690 ) -> oneshot::Receiver<Result<TreeDiff>> {
4691 let repository_id = self.snapshot.id;
4692 self.send_job(None, move |repo, _cx| async move {
4693 match repo {
4694 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4695 RepositoryState::Remote { client, project_id } => {
4696 let response = client
4697 .request(proto::GetTreeDiff {
4698 project_id: project_id.0,
4699 repository_id: repository_id.0,
4700 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4701 base: diff_type.base().to_string(),
4702 head: diff_type.head().to_string(),
4703 })
4704 .await?;
4705
4706 let entries = response
4707 .entries
4708 .into_iter()
4709 .filter_map(|entry| {
4710 let status = match entry.status() {
4711 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4712 proto::tree_diff_status::Status::Modified => {
4713 TreeDiffStatus::Modified {
4714 old: git::Oid::from_str(
4715 &entry.oid.context("missing oid").log_err()?,
4716 )
4717 .log_err()?,
4718 }
4719 }
4720 proto::tree_diff_status::Status::Deleted => {
4721 TreeDiffStatus::Deleted {
4722 old: git::Oid::from_str(
4723 &entry.oid.context("missing oid").log_err()?,
4724 )
4725 .log_err()?,
4726 }
4727 }
4728 };
4729 Some((
4730 RepoPath(RelPath::from_proto(&entry.path).log_err()?),
4731 status,
4732 ))
4733 })
4734 .collect();
4735
4736 Ok(TreeDiff { entries })
4737 }
4738 }
4739 })
4740 }
4741
4742 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4743 let id = self.id;
4744 self.send_job(None, move |repo, _cx| async move {
4745 match repo {
4746 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4747 RepositoryState::Remote { project_id, client } => {
4748 let response = client
4749 .request(proto::GitDiff {
4750 project_id: project_id.0,
4751 repository_id: id.to_proto(),
4752 diff_type: match diff_type {
4753 DiffType::HeadToIndex => {
4754 proto::git_diff::DiffType::HeadToIndex.into()
4755 }
4756 DiffType::HeadToWorktree => {
4757 proto::git_diff::DiffType::HeadToWorktree.into()
4758 }
4759 },
4760 })
4761 .await?;
4762
4763 Ok(response.diff)
4764 }
4765 }
4766 })
4767 }
4768
4769 pub fn create_branch(
4770 &mut self,
4771 branch_name: String,
4772 base_branch: Option<String>,
4773 ) -> oneshot::Receiver<Result<()>> {
4774 let id = self.id;
4775 let status_msg = if let Some(ref base) = base_branch {
4776 format!("git switch -c {branch_name} {base}").into()
4777 } else {
4778 format!("git switch -c {branch_name}").into()
4779 };
4780 self.send_job(Some(status_msg), move |repo, _cx| async move {
4781 match repo {
4782 RepositoryState::Local { backend, .. } => {
4783 backend.create_branch(branch_name, base_branch).await
4784 }
4785 RepositoryState::Remote { project_id, client } => {
4786 client
4787 .request(proto::GitCreateBranch {
4788 project_id: project_id.0,
4789 repository_id: id.to_proto(),
4790 branch_name,
4791 })
4792 .await?;
4793
4794 Ok(())
4795 }
4796 }
4797 })
4798 }
4799
4800 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4801 let id = self.id;
4802 self.send_job(
4803 Some(format!("git switch {branch_name}").into()),
4804 move |repo, _cx| async move {
4805 match repo {
4806 RepositoryState::Local { backend, .. } => {
4807 backend.change_branch(branch_name).await
4808 }
4809 RepositoryState::Remote { project_id, client } => {
4810 client
4811 .request(proto::GitChangeBranch {
4812 project_id: project_id.0,
4813 repository_id: id.to_proto(),
4814 branch_name,
4815 })
4816 .await?;
4817
4818 Ok(())
4819 }
4820 }
4821 },
4822 )
4823 }
4824
4825 pub fn rename_branch(
4826 &mut self,
4827 branch: String,
4828 new_name: String,
4829 ) -> oneshot::Receiver<Result<()>> {
4830 let id = self.id;
4831 self.send_job(
4832 Some(format!("git branch -m {branch} {new_name}").into()),
4833 move |repo, _cx| async move {
4834 match repo {
4835 RepositoryState::Local { backend, .. } => {
4836 backend.rename_branch(branch, new_name).await
4837 }
4838 RepositoryState::Remote { project_id, client } => {
4839 client
4840 .request(proto::GitRenameBranch {
4841 project_id: project_id.0,
4842 repository_id: id.to_proto(),
4843 branch,
4844 new_name,
4845 })
4846 .await?;
4847
4848 Ok(())
4849 }
4850 }
4851 },
4852 )
4853 }
4854
4855 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4856 let id = self.id;
4857 self.send_job(None, move |repo, _cx| async move {
4858 match repo {
4859 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4860 RepositoryState::Remote { project_id, client } => {
4861 let response = client
4862 .request(proto::CheckForPushedCommits {
4863 project_id: project_id.0,
4864 repository_id: id.to_proto(),
4865 })
4866 .await?;
4867
4868 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4869
4870 Ok(branches)
4871 }
4872 }
4873 })
4874 }
4875
4876 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4877 self.send_job(None, |repo, _cx| async move {
4878 match repo {
4879 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4880 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4881 }
4882 })
4883 }
4884
4885 pub fn restore_checkpoint(
4886 &mut self,
4887 checkpoint: GitRepositoryCheckpoint,
4888 ) -> oneshot::Receiver<Result<()>> {
4889 self.send_job(None, move |repo, _cx| async move {
4890 match repo {
4891 RepositoryState::Local { backend, .. } => {
4892 backend.restore_checkpoint(checkpoint).await
4893 }
4894 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4895 }
4896 })
4897 }
4898
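    /// Applies an `UpdateRepository` message from the remote host to this
    /// snapshot, updating the branch, head commit, merge state, stash entries,
    /// and file statuses, and emitting the corresponding repository events.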
4899 pub(crate) fn apply_remote_update(
4900 &mut self,
4901 update: proto::UpdateRepository,
4902 cx: &mut Context<Self>,
4903 ) -> Result<()> {
4904 let conflicted_paths = TreeSet::from_ordered_entries(
4905 update
4906 .current_merge_conflicts
4907 .into_iter()
4908 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4909 );
4910 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4911 let new_head_commit = update
4912 .head_commit_details
4913 .as_ref()
4914 .map(proto_to_commit_details);
4915 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4916 cx.emit(RepositoryEvent::BranchChanged)
4917 }
4918 self.snapshot.branch = new_branch;
4919 self.snapshot.head_commit = new_head_commit;
4920
4921 self.snapshot.merge.conflicted_paths = conflicted_paths;
4922 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4923 let new_stash_entries = GitStash {
4924 entries: update
4925 .stash_entries
4926 .iter()
4927 .filter_map(|entry| proto_to_stash(entry).ok())
4928 .collect(),
4929 };
4930 if self.snapshot.stash_entries != new_stash_entries {
4931 cx.emit(RepositoryEvent::StashEntriesChanged)
4932 }
4933 self.snapshot.stash_entries = new_stash_entries;
4934
4935 let edits = update
4936 .removed_statuses
4937 .into_iter()
4938 .filter_map(|path| {
4939 Some(sum_tree::Edit::Remove(PathKey(
4940 RelPath::from_proto(&path).log_err()?,
4941 )))
4942 })
4943 .chain(
4944 update
4945 .updated_statuses
4946 .into_iter()
4947 .filter_map(|updated_status| {
4948 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4949 }),
4950 )
4951 .collect::<Vec<_>>();
4952 if !edits.is_empty() {
4953 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4954 }
4955 self.snapshot.statuses_by_path.edit(edits, ());
4956 if update.is_last_update {
4957 self.snapshot.scan_id = update.scan_id;
4958 }
4959 Ok(())
4960 }
4961
4962 pub fn compare_checkpoints(
4963 &mut self,
4964 left: GitRepositoryCheckpoint,
4965 right: GitRepositoryCheckpoint,
4966 ) -> oneshot::Receiver<Result<bool>> {
4967 self.send_job(None, move |repo, _cx| async move {
4968 match repo {
4969 RepositoryState::Local { backend, .. } => {
4970 backend.compare_checkpoints(left, right).await
4971 }
4972 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4973 }
4974 })
4975 }
4976
4977 pub fn diff_checkpoints(
4978 &mut self,
4979 base_checkpoint: GitRepositoryCheckpoint,
4980 target_checkpoint: GitRepositoryCheckpoint,
4981 ) -> oneshot::Receiver<Result<String>> {
4982 self.send_job(None, move |repo, _cx| async move {
4983 match repo {
4984 RepositoryState::Local { backend, .. } => {
4985 backend
4986 .diff_checkpoints(base_checkpoint, target_checkpoint)
4987 .await
4988 }
4989 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4990 }
4991 })
4992 }
4993
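    /// Enqueues a keyed job that recomputes the full repository snapshot from the
    /// git backend, emits change events, and forwards the new snapshot downstream
    /// when the project is shared.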
4994 fn schedule_scan(
4995 &mut self,
4996 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
4997 cx: &mut Context<Self>,
4998 ) {
4999 let this = cx.weak_entity();
5000 let _ = self.send_keyed_job(
5001 Some(GitJobKey::ReloadGitState),
5002 None,
5003 |state, mut cx| async move {
5004 log::debug!("run scheduled git status scan");
5005
5006 let Some(this) = this.upgrade() else {
5007 return Ok(());
5008 };
5009 let RepositoryState::Local { backend, .. } = state else {
5010 bail!("not a local repository")
5011 };
5012 let (snapshot, events) = this
5013 .update(&mut cx, |this, _| {
5014 this.paths_needing_status_update.clear();
5015 compute_snapshot(
5016 this.id,
5017 this.work_directory_abs_path.clone(),
5018 this.snapshot.clone(),
5019 backend.clone(),
5020 )
5021 })?
5022 .await?;
5023 this.update(&mut cx, |this, cx| {
5024 this.snapshot = snapshot.clone();
5025 for event in events {
5026 cx.emit(event);
5027 }
5028 })?;
5029 if let Some(updates_tx) = updates_tx {
5030 updates_tx
5031 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5032 .ok();
5033 }
5034 Ok(())
5035 },
5036 );
5037 }
5038
5039 fn spawn_local_git_worker(
5040 work_directory_abs_path: Arc<Path>,
5041 dot_git_abs_path: Arc<Path>,
5042 _repository_dir_abs_path: Arc<Path>,
5043 _common_dir_abs_path: Arc<Path>,
5044 project_environment: WeakEntity<ProjectEnvironment>,
5045 fs: Arc<dyn Fs>,
5046 cx: &mut Context<Self>,
5047 ) -> mpsc::UnboundedSender<GitJob> {
5048 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5049
5050 cx.spawn(async move |_, cx| {
5051 let environment = project_environment
5052 .upgrade()
5053 .context("missing project environment")?
5054 .update(cx, |project_environment, cx| {
                    project_environment.local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
5056 })?
5057 .await
5058 .unwrap_or_else(|| {
5059 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5060 HashMap::default()
5061 });
5062 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5063 let backend = cx
5064 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
5067 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5068 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5069 })
5070 .await?;
5071
5072 if let Some(git_hosting_provider_registry) =
5073 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5074 {
5075 git_hosting_providers::register_additional_providers(
5076 git_hosting_provider_registry,
5077 backend.clone(),
5078 );
5079 }
5080
5081 let state = RepositoryState::Local {
5082 backend,
5083 environment: Arc::new(environment),
5084 };
5085 let mut jobs = VecDeque::new();
5086 loop {
5087 while let Ok(Some(next_job)) = job_rx.try_next() {
5088 jobs.push_back(next_job);
5089 }
5090
5091 if let Some(job) = jobs.pop_front() {
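                    // If a newer job with the same key is already queued, drop this
                    // one so that only the most recent keyed job actually runs.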
5092 if let Some(current_key) = &job.key
5093 && jobs
5094 .iter()
5095 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5096 {
5097 continue;
5098 }
5099 (job.job)(state.clone(), cx).await;
5100 } else if let Some(job) = job_rx.next().await {
5101 jobs.push_back(job);
5102 } else {
5103 break;
5104 }
5105 }
5106 anyhow::Ok(())
5107 })
5108 .detach_and_log_err(cx);
5109
5110 job_tx
5111 }
5112
5113 fn spawn_remote_git_worker(
5114 project_id: ProjectId,
5115 client: AnyProtoClient,
5116 cx: &mut Context<Self>,
5117 ) -> mpsc::UnboundedSender<GitJob> {
5118 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5119
5120 cx.spawn(async move |_, cx| {
5121 let state = RepositoryState::Remote { project_id, client };
5122 let mut jobs = VecDeque::new();
5123 loop {
5124 while let Ok(Some(next_job)) = job_rx.try_next() {
5125 jobs.push_back(next_job);
5126 }
5127
5128 if let Some(job) = jobs.pop_front() {
5129 if let Some(current_key) = &job.key
5130 && jobs
5131 .iter()
5132 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5133 {
5134 continue;
5135 }
5136 (job.job)(state.clone(), cx).await;
5137 } else if let Some(job) = job_rx.next().await {
5138 jobs.push_back(job);
5139 } else {
5140 break;
5141 }
5142 }
5143 anyhow::Ok(())
5144 })
5145 .detach_and_log_err(cx);
5146
5147 job_tx
5148 }
5149
5150 fn load_staged_text(
5151 &mut self,
5152 buffer_id: BufferId,
5153 repo_path: RepoPath,
5154 cx: &App,
5155 ) -> Task<Result<Option<String>>> {
5156 let rx = self.send_job(None, move |state, _| async move {
5157 match state {
5158 RepositoryState::Local { backend, .. } => {
5159 anyhow::Ok(backend.load_index_text(repo_path).await)
5160 }
5161 RepositoryState::Remote { project_id, client } => {
5162 let response = client
5163 .request(proto::OpenUnstagedDiff {
5164 project_id: project_id.to_proto(),
5165 buffer_id: buffer_id.to_proto(),
5166 })
5167 .await?;
5168 Ok(response.staged_text)
5169 }
5170 }
5171 });
5172 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5173 }
5174
5175 fn load_committed_text(
5176 &mut self,
5177 buffer_id: BufferId,
5178 repo_path: RepoPath,
5179 cx: &App,
5180 ) -> Task<Result<DiffBasesChange>> {
5181 let rx = self.send_job(None, move |state, _| async move {
5182 match state {
5183 RepositoryState::Local { backend, .. } => {
5184 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5185 let staged_text = backend.load_index_text(repo_path).await;
5186 let diff_bases_change = if committed_text == staged_text {
5187 DiffBasesChange::SetBoth(committed_text)
5188 } else {
5189 DiffBasesChange::SetEach {
5190 index: staged_text,
5191 head: committed_text,
5192 }
5193 };
5194 anyhow::Ok(diff_bases_change)
5195 }
5196 RepositoryState::Remote { project_id, client } => {
5197 use proto::open_uncommitted_diff_response::Mode;
5198
5199 let response = client
5200 .request(proto::OpenUncommittedDiff {
5201 project_id: project_id.to_proto(),
5202 buffer_id: buffer_id.to_proto(),
5203 })
5204 .await?;
5205 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5206 let bases = match mode {
5207 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5208 Mode::IndexAndHead => DiffBasesChange::SetEach {
5209 head: response.committed_text,
5210 index: response.staged_text,
5211 },
5212 };
5213 Ok(bases)
5214 }
5215 }
5216 });
5217
5218 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5219 }
5220 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5221 let repository_id = self.snapshot.id;
5222 let rx = self.send_job(None, move |state, _| async move {
5223 match state {
5224 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5225 RepositoryState::Remote { client, project_id } => {
5226 let response = client
5227 .request(proto::GetBlobContent {
5228 project_id: project_id.to_proto(),
5229 repository_id: repository_id.0,
5230 oid: oid.to_string(),
5231 })
5232 .await?;
5233 Ok(response.content)
5234 }
5235 }
5236 });
5237 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5238 }
5239
5240 fn paths_changed(
5241 &mut self,
5242 paths: Vec<RepoPath>,
5243 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5244 cx: &mut Context<Self>,
5245 ) {
5246 self.paths_needing_status_update.extend(paths);
5247
5248 let this = cx.weak_entity();
5249 let _ = self.send_keyed_job(
5250 Some(GitJobKey::RefreshStatuses),
5251 None,
5252 |state, mut cx| async move {
5253 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5254 (
5255 this.snapshot.clone(),
5256 mem::take(&mut this.paths_needing_status_update),
5257 )
5258 })?;
5259 let RepositoryState::Local { backend, .. } = state else {
5260 bail!("not a local repository")
5261 };
5262
5263 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5264 if paths.is_empty() {
5265 return Ok(());
5266 }
5267 let statuses = backend.status(&paths).await?;
5268 let stash_entries = backend.stash_entries().await?;
5269
5270 let changed_path_statuses = cx
5271 .background_spawn(async move {
5272 let mut changed_path_statuses = Vec::new();
5273 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5274 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5275
5276 for (repo_path, status) in &*statuses.entries {
5277 changed_paths.remove(repo_path);
5278 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5279 && cursor.item().is_some_and(|entry| entry.status == *status)
5280 {
5281 continue;
5282 }
5283
5284 changed_path_statuses.push(Edit::Insert(StatusEntry {
5285 repo_path: repo_path.clone(),
5286 status: *status,
5287 }));
5288 }
5289 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5290 for path in changed_paths.into_iter() {
5291 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5292 changed_path_statuses.push(Edit::Remove(PathKey(path.0)));
5293 }
5294 }
5295 changed_path_statuses
5296 })
5297 .await;
5298
5299 this.update(&mut cx, |this, cx| {
5300 if this.snapshot.stash_entries != stash_entries {
5301 cx.emit(RepositoryEvent::StashEntriesChanged);
5302 this.snapshot.stash_entries = stash_entries;
5303 }
5304
5305 if !changed_path_statuses.is_empty() {
5306 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5307 this.snapshot
5308 .statuses_by_path
5309 .edit(changed_path_statuses, ());
5310 this.snapshot.scan_id += 1;
5311 }
5312
5313 if let Some(updates_tx) = updates_tx {
5314 updates_tx
5315 .unbounded_send(DownstreamUpdate::UpdateRepository(
5316 this.snapshot.clone(),
5317 ))
5318 .ok();
5319 }
5320 })
5321 },
5322 );
5323 }
5324
    /// Returns information about the currently running git command, if any, and when it started.
5326 pub fn current_job(&self) -> Option<JobInfo> {
5327 self.active_jobs.values().next().cloned()
5328 }
5329
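    /// Enqueues a no-op job; the receiver resolves once all previously enqueued
    /// jobs have completed.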
5330 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5331 self.send_job(None, |_, _| async {})
5332 }
5333
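    /// Registers pending ops with the given git status for `paths`, runs `f`, and
    /// then marks those ops as finished, skipped (on cancellation), or errored.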
5334 fn spawn_job_with_tracking<AsyncFn>(
5335 &mut self,
5336 paths: Vec<RepoPath>,
5337 git_status: pending_op::GitStatus,
5338 cx: &mut Context<Self>,
5339 f: AsyncFn,
5340 ) -> Task<Result<()>>
5341 where
5342 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5343 {
5344 let ids = self.new_pending_ops_for_paths(paths, git_status);
5345
5346 cx.spawn(async move |this, cx| {
5347 let (job_status, result) = match f(this.clone(), cx).await {
5348 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5349 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5350 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5351 };
5352
5353 this.update(cx, |this, _| {
5354 let mut edits = Vec::with_capacity(ids.len());
5355 for (id, entry) in ids {
5356 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5357 if let Some(op) = ops.op_by_id_mut(id) {
5358 op.job_status = job_status;
5359 }
5360 edits.push(sum_tree::Edit::Insert(ops));
5361 }
5362 }
5363 this.snapshot.pending_ops_by_path.edit(edits, ());
5364 })?;
5365
5366 result
5367 })
5368 }
5369
5370 fn new_pending_ops_for_paths(
5371 &mut self,
5372 paths: Vec<RepoPath>,
5373 git_status: pending_op::GitStatus,
5374 ) -> Vec<(PendingOpId, RepoPath)> {
5375 let mut edits = Vec::with_capacity(paths.len());
5376 let mut ids = Vec::with_capacity(paths.len());
5377 for path in paths {
5378 let mut ops = self
5379 .snapshot
5380 .pending_ops_for_path(&path)
5381 .unwrap_or_else(|| PendingOps::new(&path));
5382 let id = ops.max_id() + 1;
5383 ops.ops.push(PendingOp {
5384 id,
5385 git_status,
5386 job_status: pending_op::JobStatus::Running,
5387 });
5388 edits.push(sum_tree::Edit::Insert(ops));
5389 ids.push((id, path));
5390 }
5391 self.snapshot.pending_ops_by_path.edit(edits, ());
5392 ids
5393 }
5394}
5395
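/// Builds a permalink URL for a file inside a Cargo registry source checkout by
/// reading the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the
/// upstream repository and commit.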
5396fn get_permalink_in_rust_registry_src(
5397 provider_registry: Arc<GitHostingProviderRegistry>,
5398 path: PathBuf,
5399 selection: Range<u32>,
5400) -> Result<url::Url> {
5401 #[derive(Deserialize)]
5402 struct CargoVcsGit {
5403 sha1: String,
5404 }
5405
5406 #[derive(Deserialize)]
5407 struct CargoVcsInfo {
5408 git: CargoVcsGit,
5409 path_in_vcs: String,
5410 }
5411
5412 #[derive(Deserialize)]
5413 struct CargoPackage {
5414 repository: String,
5415 }
5416
5417 #[derive(Deserialize)]
5418 struct CargoToml {
5419 package: CargoPackage,
5420 }
5421
5422 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5423 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5424 Some((dir, json))
5425 }) else {
5426 bail!("No .cargo_vcs_info.json found in parent directories")
5427 };
5428 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5429 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5430 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5431 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5432 .context("parsing package.repository field of manifest")?;
5433 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5434 let permalink = provider.build_permalink(
5435 remote,
5436 BuildPermalinkParams::new(
5437 &cargo_vcs_info.git.sha1,
5438 &RepoPath(
5439 RelPath::new(&path, PathStyle::local())
5440 .context("invalid path")?
5441 .into_arc(),
5442 ),
5443 Some(selection),
5444 ),
5445 );
5446 Ok(permalink)
5447}
5448
5449fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5450 let Some(blame) = blame else {
5451 return proto::BlameBufferResponse {
5452 blame_response: None,
5453 };
5454 };
5455
5456 let entries = blame
5457 .entries
5458 .into_iter()
5459 .map(|entry| proto::BlameEntry {
5460 sha: entry.sha.as_bytes().into(),
5461 start_line: entry.range.start,
5462 end_line: entry.range.end,
5463 original_line_number: entry.original_line_number,
5464 author: entry.author,
5465 author_mail: entry.author_mail,
5466 author_time: entry.author_time,
5467 author_tz: entry.author_tz,
5468 committer: entry.committer_name,
5469 committer_mail: entry.committer_email,
5470 committer_time: entry.committer_time,
5471 committer_tz: entry.committer_tz,
5472 summary: entry.summary,
5473 previous: entry.previous,
5474 filename: entry.filename,
5475 })
5476 .collect::<Vec<_>>();
5477
5478 let messages = blame
5479 .messages
5480 .into_iter()
5481 .map(|(oid, message)| proto::CommitMessage {
5482 oid: oid.as_bytes().into(),
5483 message,
5484 })
5485 .collect::<Vec<_>>();
5486
5487 proto::BlameBufferResponse {
5488 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5489 entries,
5490 messages,
5491 remote_url: blame.remote_url,
5492 }),
5493 }
5494}
5495
5496fn deserialize_blame_buffer_response(
5497 response: proto::BlameBufferResponse,
5498) -> Option<git::blame::Blame> {
5499 let response = response.blame_response?;
5500 let entries = response
5501 .entries
5502 .into_iter()
5503 .filter_map(|entry| {
5504 Some(git::blame::BlameEntry {
5505 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5506 range: entry.start_line..entry.end_line,
5507 original_line_number: entry.original_line_number,
5508 committer_name: entry.committer,
5509 committer_time: entry.committer_time,
5510 committer_tz: entry.committer_tz,
5511 committer_email: entry.committer_mail,
5512 author: entry.author,
5513 author_mail: entry.author_mail,
5514 author_time: entry.author_time,
5515 author_tz: entry.author_tz,
5516 summary: entry.summary,
5517 previous: entry.previous,
5518 filename: entry.filename,
5519 })
5520 })
5521 .collect::<Vec<_>>();
5522
5523 let messages = response
5524 .messages
5525 .into_iter()
5526 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5527 .collect::<HashMap<_, _>>();
5528
5529 Some(Blame {
5530 entries,
5531 messages,
5532 remote_url: response.remote_url,
5533 })
5534}
5535
5536fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5537 proto::Branch {
5538 is_head: branch.is_head,
5539 ref_name: branch.ref_name.to_string(),
5540 unix_timestamp: branch
5541 .most_recent_commit
5542 .as_ref()
5543 .map(|commit| commit.commit_timestamp as u64),
5544 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5545 ref_name: upstream.ref_name.to_string(),
5546 tracking: upstream
5547 .tracking
5548 .status()
5549 .map(|upstream| proto::UpstreamTracking {
5550 ahead: upstream.ahead as u64,
5551 behind: upstream.behind as u64,
5552 }),
5553 }),
5554 most_recent_commit: branch
5555 .most_recent_commit
5556 .as_ref()
5557 .map(|commit| proto::CommitSummary {
5558 sha: commit.sha.to_string(),
5559 subject: commit.subject.to_string(),
5560 commit_timestamp: commit.commit_timestamp,
5561 author_name: commit.author_name.to_string(),
5562 }),
5563 }
5564}
5565
5566fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5567 proto::Worktree {
5568 path: worktree.path.to_string_lossy().to_string(),
5569 ref_name: worktree.ref_name.to_string(),
5570 sha: worktree.sha.to_string(),
5571 }
5572}
5573
5574fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5575 git::repository::Worktree {
5576 path: PathBuf::from(proto.path.clone()),
5577 ref_name: proto.ref_name.clone().into(),
5578 sha: proto.sha.clone().into(),
5579 }
5580}
5581
5582fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5583 git::repository::Branch {
5584 is_head: proto.is_head,
5585 ref_name: proto.ref_name.clone().into(),
5586 upstream: proto
5587 .upstream
5588 .as_ref()
5589 .map(|upstream| git::repository::Upstream {
5590 ref_name: upstream.ref_name.to_string().into(),
5591 tracking: upstream
5592 .tracking
5593 .as_ref()
5594 .map(|tracking| {
5595 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5596 ahead: tracking.ahead as u32,
5597 behind: tracking.behind as u32,
5598 })
5599 })
5600 .unwrap_or(git::repository::UpstreamTracking::Gone),
5601 }),
5602 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5603 git::repository::CommitSummary {
5604 sha: commit.sha.to_string().into(),
5605 subject: commit.subject.to_string().into(),
5606 commit_timestamp: commit.commit_timestamp,
5607 author_name: commit.author_name.to_string().into(),
5608 has_parent: true,
5609 }
5610 }),
5611 }
5612}
5613
5614fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5615 proto::GitCommitDetails {
5616 sha: commit.sha.to_string(),
5617 message: commit.message.to_string(),
5618 commit_timestamp: commit.commit_timestamp,
5619 author_email: commit.author_email.to_string(),
5620 author_name: commit.author_name.to_string(),
5621 }
5622}
5623
5624fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5625 CommitDetails {
5626 sha: proto.sha.clone().into(),
5627 message: proto.message.clone().into(),
5628 commit_timestamp: proto.commit_timestamp,
5629 author_email: proto.author_email.clone().into(),
5630 author_name: proto.author_name.clone().into(),
5631 }
5632}
5633
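/// Recomputes a repository snapshot from the git backend (branch, statuses, stash,
/// and merge details), carrying over still-running pending ops, and returns the
/// events describing how it differs from `prev_snapshot`.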
5634async fn compute_snapshot(
5635 id: RepositoryId,
5636 work_directory_abs_path: Arc<Path>,
5637 prev_snapshot: RepositorySnapshot,
5638 backend: Arc<dyn GitRepository>,
5639) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5640 let mut events = Vec::new();
5641 let branches = backend.branches().await?;
5642 let branch = branches.into_iter().find(|branch| branch.is_head);
5643 let statuses = backend.status(&[RelPath::empty().into()]).await?;
5644 let stash_entries = backend.stash_entries().await?;
5645 let statuses_by_path = SumTree::from_iter(
5646 statuses
5647 .entries
5648 .iter()
5649 .map(|(repo_path, status)| StatusEntry {
5650 repo_path: repo_path.clone(),
5651 status: *status,
5652 }),
5653 (),
5654 );
5655 let (merge_details, merge_heads_changed) =
5656 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5657 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5658
5659 let pending_ops_by_path = SumTree::from_iter(
5660 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5661 let inner_ops: Vec<PendingOp> =
5662 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5663 if inner_ops.is_empty() {
5664 None
5665 } else {
5666 Some(PendingOps {
5667 repo_path: ops.repo_path.clone(),
5668 ops: inner_ops,
5669 })
5670 }
5671 }),
5672 (),
5673 );
5674
5675 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5676 events.push(RepositoryEvent::PendingOpsChanged {
5677 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5678 })
5679 }
5680
5681 if merge_heads_changed {
5682 events.push(RepositoryEvent::MergeHeadsChanged);
5683 }
5684
5685 if statuses_by_path != prev_snapshot.statuses_by_path {
5686 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5687 }
5688
    // Load the head commit explicitly; useful when the branch is None (detached HEAD state).
5690 let head_commit = match backend.head_sha().await {
5691 Some(head_sha) => backend.show(head_sha).await.log_err(),
5692 None => None,
5693 };
5694
5695 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5696 events.push(RepositoryEvent::BranchChanged);
5697 }
5698
5699 // Used by edit prediction data collection
5700 let remote_origin_url = backend.remote_url("origin");
5701 let remote_upstream_url = backend.remote_url("upstream");
5702
5703 let snapshot = RepositorySnapshot {
5704 id,
5705 statuses_by_path,
5706 pending_ops_by_path,
5707 work_directory_abs_path,
5708 path_style: prev_snapshot.path_style,
5709 scan_id: prev_snapshot.scan_id + 1,
5710 branch,
5711 head_commit,
5712 merge: merge_details,
5713 remote_origin_url,
5714 remote_upstream_url,
5715 stash_entries,
5716 };
5717
5718 Ok((snapshot, events))
5719}
5720
5721fn status_from_proto(
5722 simple_status: i32,
5723 status: Option<proto::GitFileStatus>,
5724) -> anyhow::Result<FileStatus> {
5725 use proto::git_file_status::Variant;
5726
5727 let Some(variant) = status.and_then(|status| status.variant) else {
5728 let code = proto::GitStatus::from_i32(simple_status)
5729 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5730 let result = match code {
5731 proto::GitStatus::Added => TrackedStatus {
5732 worktree_status: StatusCode::Added,
5733 index_status: StatusCode::Unmodified,
5734 }
5735 .into(),
5736 proto::GitStatus::Modified => TrackedStatus {
5737 worktree_status: StatusCode::Modified,
5738 index_status: StatusCode::Unmodified,
5739 }
5740 .into(),
5741 proto::GitStatus::Conflict => UnmergedStatus {
5742 first_head: UnmergedStatusCode::Updated,
5743 second_head: UnmergedStatusCode::Updated,
5744 }
5745 .into(),
5746 proto::GitStatus::Deleted => TrackedStatus {
5747 worktree_status: StatusCode::Deleted,
5748 index_status: StatusCode::Unmodified,
5749 }
5750 .into(),
5751 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5752 };
5753 return Ok(result);
5754 };
5755
5756 let result = match variant {
5757 Variant::Untracked(_) => FileStatus::Untracked,
5758 Variant::Ignored(_) => FileStatus::Ignored,
5759 Variant::Unmerged(unmerged) => {
5760 let [first_head, second_head] =
5761 [unmerged.first_head, unmerged.second_head].map(|head| {
5762 let code = proto::GitStatus::from_i32(head)
5763 .with_context(|| format!("Invalid git status code: {head}"))?;
5764 let result = match code {
5765 proto::GitStatus::Added => UnmergedStatusCode::Added,
5766 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5767 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5768 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5769 };
5770 Ok(result)
5771 });
5772 let [first_head, second_head] = [first_head?, second_head?];
5773 UnmergedStatus {
5774 first_head,
5775 second_head,
5776 }
5777 .into()
5778 }
5779 Variant::Tracked(tracked) => {
5780 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5781 .map(|status| {
5782 let code = proto::GitStatus::from_i32(status)
5783 .with_context(|| format!("Invalid git status code: {status}"))?;
5784 let result = match code {
5785 proto::GitStatus::Modified => StatusCode::Modified,
5786 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5787 proto::GitStatus::Added => StatusCode::Added,
5788 proto::GitStatus::Deleted => StatusCode::Deleted,
5789 proto::GitStatus::Renamed => StatusCode::Renamed,
5790 proto::GitStatus::Copied => StatusCode::Copied,
5791 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5792 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5793 };
5794 Ok(result)
5795 });
5796 let [index_status, worktree_status] = [index_status?, worktree_status?];
5797 TrackedStatus {
5798 index_status,
5799 worktree_status,
5800 }
5801 .into()
5802 }
5803 };
5804 Ok(result)
5805}
5806
5807fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5808 use proto::git_file_status::{Tracked, Unmerged, Variant};
5809
5810 let variant = match status {
5811 FileStatus::Untracked => Variant::Untracked(Default::default()),
5812 FileStatus::Ignored => Variant::Ignored(Default::default()),
5813 FileStatus::Unmerged(UnmergedStatus {
5814 first_head,
5815 second_head,
5816 }) => Variant::Unmerged(Unmerged {
5817 first_head: unmerged_status_to_proto(first_head),
5818 second_head: unmerged_status_to_proto(second_head),
5819 }),
5820 FileStatus::Tracked(TrackedStatus {
5821 index_status,
5822 worktree_status,
5823 }) => Variant::Tracked(Tracked {
5824 index_status: tracked_status_to_proto(index_status),
5825 worktree_status: tracked_status_to_proto(worktree_status),
5826 }),
5827 };
5828 proto::GitFileStatus {
5829 variant: Some(variant),
5830 }
5831}
5832
5833fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5834 match code {
5835 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5836 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5837 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5838 }
5839}
5840
5841fn tracked_status_to_proto(code: StatusCode) -> i32 {
5842 match code {
5843 StatusCode::Added => proto::GitStatus::Added as _,
5844 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5845 StatusCode::Modified => proto::GitStatus::Modified as _,
5846 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5847 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5848 StatusCode::Copied => proto::GitStatus::Copied as _,
5849 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5850 }
5851}