1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
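/// Central store for a project's git state.
///
/// Tracks the repositories discovered across the project's worktrees, the
/// per-buffer diff and conflict state, and any downstream clients that
/// repository updates are shared with.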
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
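/// Diffs that have been shared with a downstream peer for a single buffer.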
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
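/// Git-related state tracked for a single open buffer: weak handles to its
/// unstaged and uncommitted diffs and its conflict set, together with the
/// base texts and background tasks used to keep them up to date.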
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// `hunk_staging_operation_count` is incremented immediately when the user
    /// initiates a hunk stage/unstage operation. Then, once the new index text
    /// has been written to disk, `hunk_staging_operation_count_as_of_write` is
    /// updated to record the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
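/// Describes which diff base texts (index and/or HEAD) changed for a buffer.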
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
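/// The kind of diff tracked for a buffer: against the index (unstaged) or
/// against HEAD (uncommitted).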
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
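/// Whether this store is backed by repositories on the local machine or
/// mirrors the repositories of an upstream project.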
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
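/// An update forwarded to downstream clients of a shared local project.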
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
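/// A checkpoint of every repository in the store, keyed by each repository's
/// working directory path.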
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
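/// The git status of a single path within a repository.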
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
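/// A stable identifier for a repository within a project.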
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
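/// Details of an in-progress merge: the conflicted paths, the prepared merge
/// message, and the merge heads.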
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
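/// An immutable snapshot of a repository's state: path statuses, pending
/// operations, branch and HEAD information, merge details, remote URLs, and
/// stash entries.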
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub pending_ops_by_path: SumTree<PendingOps>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
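/// Identifier for a job queued against a repository.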
270type JobId = u64;
271
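/// Metadata about a running git job: when it started and a human-readable
/// description of what it is doing.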
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
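/// A single git repository tracked by the [`GitStore`].
///
/// Wraps a [`RepositorySnapshot`] and a queue of jobs that run against the
/// underlying repository, which may live locally or on a remote host.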
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291}
292
293impl std::ops::Deref for Repository {
294 type Target = RepositorySnapshot;
295
296 fn deref(&self) -> &Self::Target {
297 &self.snapshot
298 }
299}
300
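/// The backend that a repository's jobs run against: a local git repository
/// plus its environment, or a remote project reached through an RPC client.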
301#[derive(Clone)]
302pub enum RepositoryState {
303 Local {
304 backend: Arc<dyn GitRepository>,
305 environment: Arc<HashMap<String, String>>,
306 },
307 Remote {
308 project_id: ProjectId,
309 client: AnyProtoClient,
310 },
311}
312
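/// Events emitted by a [`Repository`] as its snapshot changes.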
313#[derive(Clone, Debug, PartialEq, Eq)]
314pub enum RepositoryEvent {
315 StatusesChanged {
316 // TODO could report which statuses changed here
317 full_scan: bool,
318 },
319 MergeHeadsChanged,
320 BranchChanged,
321 StashEntriesChanged,
322 PendingOpsChanged {
323 pending_ops: SumTree<pending_op::PendingOps>,
324 },
325}
326
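/// Event emitted by a [`Repository`] when its set of active jobs changes.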
327#[derive(Clone, Debug)]
328pub struct JobsUpdated;
329
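/// Events emitted by the [`GitStore`] itself.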
330#[derive(Debug)]
331pub enum GitStoreEvent {
332 ActiveRepositoryChanged(Option<RepositoryId>),
333 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
334 RepositoryAdded,
335 RepositoryRemoved(RepositoryId),
336 IndexWriteError(anyhow::Error),
337 JobsUpdated,
338 ConflictsUpdated,
339}
340
341impl EventEmitter<RepositoryEvent> for Repository {}
342impl EventEmitter<JobsUpdated> for Repository {}
343impl EventEmitter<GitStoreEvent> for GitStore {}
344
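/// A unit of work queued to run against a repository's backend.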
345pub struct GitJob {
346 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
347 key: Option<GitJobKey>,
348}
349
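/// A key identifying a class of queued git jobs.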
350#[derive(PartialEq, Eq)]
351enum GitJobKey {
352 WriteIndex(Vec<RepoPath>),
353 ReloadBufferDiffBases,
354 RefreshStatuses,
355 ReloadGitState,
356}
357
358impl GitStore {
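    /// Creates a [`GitStore`] backed by repositories on the local filesystem.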
359 pub fn local(
360 worktree_store: &Entity<WorktreeStore>,
361 buffer_store: Entity<BufferStore>,
362 environment: Entity<ProjectEnvironment>,
363 fs: Arc<dyn Fs>,
364 cx: &mut Context<Self>,
365 ) -> Self {
366 Self::new(
367 worktree_store.clone(),
368 buffer_store,
369 GitStoreState::Local {
370 next_repository_id: Arc::new(AtomicU64::new(1)),
371 downstream: None,
372 project_environment: environment,
373 fs,
374 },
375 cx,
376 )
377 }
378
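    /// Creates a [`GitStore`] that mirrors the repositories of an upstream project.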
379 pub fn remote(
380 worktree_store: &Entity<WorktreeStore>,
381 buffer_store: Entity<BufferStore>,
382 upstream_client: AnyProtoClient,
383 project_id: u64,
384 cx: &mut Context<Self>,
385 ) -> Self {
386 Self::new(
387 worktree_store.clone(),
388 buffer_store,
389 GitStoreState::Remote {
390 upstream_client,
391 upstream_project_id: project_id,
392 downstream: None,
393 },
394 cx,
395 )
396 }
397
398 fn new(
399 worktree_store: Entity<WorktreeStore>,
400 buffer_store: Entity<BufferStore>,
401 state: GitStoreState,
402 cx: &mut Context<Self>,
403 ) -> Self {
404 let _subscriptions = vec![
405 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
406 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
407 ];
408
409 GitStore {
410 state,
411 buffer_store,
412 worktree_store,
413 repositories: HashMap::default(),
414 worktree_ids: HashMap::default(),
415 active_repo_id: None,
416 _subscriptions,
417 loading_diffs: HashMap::default(),
418 shared_diffs: HashMap::default(),
419 diffs: HashMap::default(),
420 }
421 }
422
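    /// Registers the RPC handlers for git requests on the given client.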
423 pub fn init(client: &AnyProtoClient) {
424 client.add_entity_request_handler(Self::handle_get_remotes);
425 client.add_entity_request_handler(Self::handle_get_branches);
426 client.add_entity_request_handler(Self::handle_get_default_branch);
427 client.add_entity_request_handler(Self::handle_change_branch);
428 client.add_entity_request_handler(Self::handle_create_branch);
429 client.add_entity_request_handler(Self::handle_rename_branch);
430 client.add_entity_request_handler(Self::handle_git_init);
431 client.add_entity_request_handler(Self::handle_push);
432 client.add_entity_request_handler(Self::handle_pull);
433 client.add_entity_request_handler(Self::handle_fetch);
434 client.add_entity_request_handler(Self::handle_stage);
435 client.add_entity_request_handler(Self::handle_unstage);
436 client.add_entity_request_handler(Self::handle_stash);
437 client.add_entity_request_handler(Self::handle_stash_pop);
438 client.add_entity_request_handler(Self::handle_stash_apply);
439 client.add_entity_request_handler(Self::handle_stash_drop);
440 client.add_entity_request_handler(Self::handle_commit);
441 client.add_entity_request_handler(Self::handle_reset);
442 client.add_entity_request_handler(Self::handle_show);
443 client.add_entity_request_handler(Self::handle_load_commit_diff);
444 client.add_entity_request_handler(Self::handle_checkout_files);
445 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
446 client.add_entity_request_handler(Self::handle_set_index_text);
447 client.add_entity_request_handler(Self::handle_askpass);
448 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
449 client.add_entity_request_handler(Self::handle_git_diff);
450 client.add_entity_request_handler(Self::handle_tree_diff);
451 client.add_entity_request_handler(Self::handle_get_blob_content);
452 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
453 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
454 client.add_entity_message_handler(Self::handle_update_diff_bases);
455 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
456 client.add_entity_request_handler(Self::handle_blame_buffer);
457 client.add_entity_message_handler(Self::handle_update_repository);
458 client.add_entity_message_handler(Self::handle_remove_repository);
459 client.add_entity_request_handler(Self::handle_git_clone);
460 client.add_entity_request_handler(Self::handle_get_worktrees);
461 client.add_entity_request_handler(Self::handle_create_worktree);
462 }
463
464 pub fn is_local(&self) -> bool {
465 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
468 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
469 let id = repo.read(cx).id;
470 if self.active_repo_id != Some(id) {
471 self.active_repo_id = Some(id);
472 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
473 }
474 }
475 }
476
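    /// Starts sharing this store's repositories with a downstream client,
    /// sending an initial snapshot of each repository and forwarding
    /// subsequent updates.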
477 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
478 match &mut self.state {
479 GitStoreState::Remote {
480 downstream: downstream_client,
481 ..
482 } => {
483 for repo in self.repositories.values() {
484 let update = repo.read(cx).snapshot.initial_update(project_id);
485 for update in split_repository_update(update) {
486 client.send(update).log_err();
487 }
488 }
489 *downstream_client = Some((client, ProjectId(project_id)));
490 }
491 GitStoreState::Local {
492 downstream: downstream_client,
493 ..
494 } => {
495 let mut snapshots = HashMap::default();
496 let (updates_tx, mut updates_rx) = mpsc::unbounded();
497 for repo in self.repositories.values() {
498 updates_tx
499 .unbounded_send(DownstreamUpdate::UpdateRepository(
500 repo.read(cx).snapshot.clone(),
501 ))
502 .ok();
503 }
504 *downstream_client = Some(LocalDownstreamState {
505 client: client.clone(),
506 project_id: ProjectId(project_id),
507 updates_tx,
508 _task: cx.spawn(async move |this, cx| {
509 cx.background_spawn(async move {
510 while let Some(update) = updates_rx.next().await {
511 match update {
512 DownstreamUpdate::UpdateRepository(snapshot) => {
513 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
514 {
515 let update =
516 snapshot.build_update(old_snapshot, project_id);
517 *old_snapshot = snapshot;
518 for update in split_repository_update(update) {
519 client.send(update)?;
520 }
521 } else {
522 let update = snapshot.initial_update(project_id);
523 for update in split_repository_update(update) {
524 client.send(update)?;
525 }
526 snapshots.insert(snapshot.id, snapshot);
527 }
528 }
529 DownstreamUpdate::RemoveRepository(id) => {
530 client.send(proto::RemoveRepository {
531 project_id,
532 id: id.to_proto(),
533 })?;
534 }
535 }
536 }
537 anyhow::Ok(())
538 })
539 .await
540 .ok();
541 this.update(cx, |this, _| {
542 if let GitStoreState::Local {
543 downstream: downstream_client,
544 ..
545 } = &mut this.state
546 {
547 downstream_client.take();
548 } else {
549 unreachable!("unshared called on remote store");
550 }
551 })
552 }),
553 });
554 }
555 }
556 }
557
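    /// Stops sharing repository state with the downstream client.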
558 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
559 match &mut self.state {
560 GitStoreState::Local {
561 downstream: downstream_client,
562 ..
563 } => {
564 downstream_client.take();
565 }
566 GitStoreState::Remote {
567 downstream: downstream_client,
568 ..
569 } => {
570 downstream_client.take();
571 }
572 }
573 self.shared_diffs.clear();
574 }
575
576 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
577 self.shared_diffs.remove(peer_id);
578 }
579
580 pub fn active_repository(&self) -> Option<Entity<Repository>> {
581 self.active_repo_id
582 .as_ref()
583 .map(|id| self.repositories[id].clone())
584 }
585
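    /// Returns a [`BufferDiff`] comparing the buffer's contents against the
    /// corresponding text in the git index, creating and loading it if
    /// necessary.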
586 pub fn open_unstaged_diff(
587 &mut self,
588 buffer: Entity<Buffer>,
589 cx: &mut Context<Self>,
590 ) -> Task<Result<Entity<BufferDiff>>> {
591 let buffer_id = buffer.read(cx).remote_id();
592 if let Some(diff_state) = self.diffs.get(&buffer_id)
593 && let Some(unstaged_diff) = diff_state
594 .read(cx)
595 .unstaged_diff
596 .as_ref()
597 .and_then(|weak| weak.upgrade())
598 {
599 if let Some(task) =
600 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
601 {
602 return cx.background_executor().spawn(async move {
603 task.await;
604 Ok(unstaged_diff)
605 });
606 }
607 return Task::ready(Ok(unstaged_diff));
608 }
609
610 let Some((repo, repo_path)) =
611 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
612 else {
613 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
614 };
615
616 let task = self
617 .loading_diffs
618 .entry((buffer_id, DiffKind::Unstaged))
619 .or_insert_with(|| {
620 let staged_text = repo.update(cx, |repo, cx| {
621 repo.load_staged_text(buffer_id, repo_path, cx)
622 });
623 cx.spawn(async move |this, cx| {
624 Self::open_diff_internal(
625 this,
626 DiffKind::Unstaged,
627 staged_text.await.map(DiffBasesChange::SetIndex),
628 buffer,
629 cx,
630 )
631 .await
632 .map_err(Arc::new)
633 })
634 .shared()
635 })
636 .clone();
637
638 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
639 }
640
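    /// Returns a [`BufferDiff`] comparing the buffer's contents against the
    /// blob at the given commit (or against no base text when `oid` is
    /// `None`), with the unstaged diff attached as its secondary diff.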
641 pub fn open_diff_since(
642 &mut self,
643 oid: Option<git::Oid>,
644 buffer: Entity<Buffer>,
645 repo: Entity<Repository>,
646 languages: Arc<LanguageRegistry>,
647 cx: &mut Context<Self>,
648 ) -> Task<Result<Entity<BufferDiff>>> {
649 cx.spawn(async move |this, cx| {
650 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
651 let content = match oid {
652 None => None,
653 Some(oid) => Some(
654 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
655 .await?,
656 ),
657 };
658 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
659
660 buffer_diff
661 .update(cx, |buffer_diff, cx| {
662 buffer_diff.set_base_text(
663 content.map(Arc::new),
664 buffer_snapshot.language().cloned(),
665 Some(languages.clone()),
666 buffer_snapshot.text,
667 cx,
668 )
669 })?
670 .await?;
671 let unstaged_diff = this
672 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
673 .await?;
674 buffer_diff.update(cx, |buffer_diff, _| {
675 buffer_diff.set_secondary_diff(unstaged_diff);
676 })?;
677
678 this.update(cx, |_, cx| {
679 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
680 .detach();
681 })?;
682
683 Ok(buffer_diff)
684 })
685 }
686
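    /// Returns a [`BufferDiff`] comparing the buffer's contents against the
    /// text committed at HEAD, creating and loading it if necessary. The
    /// unstaged diff is attached as its secondary diff.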
687 pub fn open_uncommitted_diff(
688 &mut self,
689 buffer: Entity<Buffer>,
690 cx: &mut Context<Self>,
691 ) -> Task<Result<Entity<BufferDiff>>> {
692 let buffer_id = buffer.read(cx).remote_id();
693
694 if let Some(diff_state) = self.diffs.get(&buffer_id)
695 && let Some(uncommitted_diff) = diff_state
696 .read(cx)
697 .uncommitted_diff
698 .as_ref()
699 .and_then(|weak| weak.upgrade())
700 {
701 if let Some(task) =
702 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
703 {
704 return cx.background_executor().spawn(async move {
705 task.await;
706 Ok(uncommitted_diff)
707 });
708 }
709 return Task::ready(Ok(uncommitted_diff));
710 }
711
712 let Some((repo, repo_path)) =
713 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
714 else {
715 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
716 };
717
718 let task = self
719 .loading_diffs
720 .entry((buffer_id, DiffKind::Uncommitted))
721 .or_insert_with(|| {
722 let changes = repo.update(cx, |repo, cx| {
723 repo.load_committed_text(buffer_id, repo_path, cx)
724 });
725
726 // todo(lw): hot foreground spawn
727 cx.spawn(async move |this, cx| {
728 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
729 .await
730 .map_err(Arc::new)
731 })
732 .shared()
733 })
734 .clone();
735
736 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
737 }
738
739 async fn open_diff_internal(
740 this: WeakEntity<Self>,
741 kind: DiffKind,
742 texts: Result<DiffBasesChange>,
743 buffer_entity: Entity<Buffer>,
744 cx: &mut AsyncApp,
745 ) -> Result<Entity<BufferDiff>> {
746 let diff_bases_change = match texts {
747 Err(e) => {
748 this.update(cx, |this, cx| {
749 let buffer = buffer_entity.read(cx);
750 let buffer_id = buffer.remote_id();
751 this.loading_diffs.remove(&(buffer_id, kind));
752 })?;
753 return Err(e);
754 }
755 Ok(change) => change,
756 };
757
758 this.update(cx, |this, cx| {
759 let buffer = buffer_entity.read(cx);
760 let buffer_id = buffer.remote_id();
761 let language = buffer.language().cloned();
762 let language_registry = buffer.language_registry();
763 let text_snapshot = buffer.text_snapshot();
764 this.loading_diffs.remove(&(buffer_id, kind));
765
766 let git_store = cx.weak_entity();
767 let diff_state = this
768 .diffs
769 .entry(buffer_id)
770 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
771
772 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
773
774 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
775 diff_state.update(cx, |diff_state, cx| {
776 diff_state.language = language;
777 diff_state.language_registry = language_registry;
778
779 match kind {
780 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
781 DiffKind::Uncommitted => {
782 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
783 diff
784 } else {
785 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
786 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
787 unstaged_diff
788 };
789
790 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
791 diff_state.uncommitted_diff = Some(diff.downgrade())
792 }
793 }
794
795 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
796 let rx = diff_state.wait_for_recalculation();
797
798 anyhow::Ok(async move {
799 if let Some(rx) = rx {
800 rx.await;
801 }
802 Ok(diff)
803 })
804 })
805 })??
806 .await
807 }
808
809 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
810 let diff_state = self.diffs.get(&buffer_id)?;
811 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
812 }
813
814 pub fn get_uncommitted_diff(
815 &self,
816 buffer_id: BufferId,
817 cx: &App,
818 ) -> Option<Entity<BufferDiff>> {
819 let diff_state = self.diffs.get(&buffer_id)?;
820 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
821 }
822
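    /// Returns the [`ConflictSet`] tracking merge conflict markers in the
    /// given buffer, creating it and scheduling a reparse if necessary.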
823 pub fn open_conflict_set(
824 &mut self,
825 buffer: Entity<Buffer>,
826 cx: &mut Context<Self>,
827 ) -> Entity<ConflictSet> {
828 log::debug!("open conflict set");
829 let buffer_id = buffer.read(cx).remote_id();
830
831 if let Some(git_state) = self.diffs.get(&buffer_id)
832 && let Some(conflict_set) = git_state
833 .read(cx)
834 .conflict_set
835 .as_ref()
836 .and_then(|weak| weak.upgrade())
837 {
838 let conflict_set = conflict_set;
839 let buffer_snapshot = buffer.read(cx).text_snapshot();
840
841 git_state.update(cx, |state, cx| {
842 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
843 });
844
845 return conflict_set;
846 }
847
848 let is_unmerged = self
849 .repository_and_path_for_buffer_id(buffer_id, cx)
850 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
851 let git_store = cx.weak_entity();
852 let buffer_git_state = self
853 .diffs
854 .entry(buffer_id)
855 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
856 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
857
858 self._subscriptions
859 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
860 cx.emit(GitStoreEvent::ConflictsUpdated);
861 }));
862
863 buffer_git_state.update(cx, |state, cx| {
864 state.conflict_set = Some(conflict_set.downgrade());
865 let buffer_snapshot = buffer.read(cx).text_snapshot();
866 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
867 });
868
869 conflict_set
870 }
871
872 pub fn project_path_git_status(
873 &self,
874 project_path: &ProjectPath,
875 cx: &App,
876 ) -> Option<FileStatus> {
877 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
878 Some(repo.read(cx).status_for_path(&repo_path)?.status)
879 }
880
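    /// Captures a checkpoint of every repository in the store.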
881 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
882 let mut work_directory_abs_paths = Vec::new();
883 let mut checkpoints = Vec::new();
884 for repository in self.repositories.values() {
885 repository.update(cx, |repository, _| {
886 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
887 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
888 });
889 }
890
891 cx.background_executor().spawn(async move {
892 let checkpoints = future::try_join_all(checkpoints).await?;
893 Ok(GitStoreCheckpoint {
894 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
895 .into_iter()
896 .zip(checkpoints)
897 .collect(),
898 })
899 })
900 }
901
902 pub fn restore_checkpoint(
903 &self,
904 checkpoint: GitStoreCheckpoint,
905 cx: &mut App,
906 ) -> Task<Result<()>> {
907 let repositories_by_work_dir_abs_path = self
908 .repositories
909 .values()
910 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
911 .collect::<HashMap<_, _>>();
912
913 let mut tasks = Vec::new();
914 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
915 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
916 let restore = repository.update(cx, |repository, _| {
917 repository.restore_checkpoint(checkpoint)
918 });
919 tasks.push(async move { restore.await? });
920 }
921 }
922 cx.background_spawn(async move {
923 future::try_join_all(tasks).await?;
924 Ok(())
925 })
926 }
927
928 /// Compares two checkpoints, returning true if they are equal.
929 pub fn compare_checkpoints(
930 &self,
931 left: GitStoreCheckpoint,
932 mut right: GitStoreCheckpoint,
933 cx: &mut App,
934 ) -> Task<Result<bool>> {
935 let repositories_by_work_dir_abs_path = self
936 .repositories
937 .values()
938 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
939 .collect::<HashMap<_, _>>();
940
941 let mut tasks = Vec::new();
942 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
943 if let Some(right_checkpoint) = right
944 .checkpoints_by_work_dir_abs_path
945 .remove(&work_dir_abs_path)
946 {
947 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
948 {
949 let compare = repository.update(cx, |repository, _| {
950 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
951 });
952
953 tasks.push(async move { compare.await? });
954 }
955 } else {
956 return Task::ready(Ok(false));
957 }
958 }
959 cx.background_spawn(async move {
960 Ok(future::try_join_all(tasks)
961 .await?
962 .into_iter()
963 .all(|result| result))
964 })
965 }
966
967 /// Blames a buffer.
968 pub fn blame_buffer(
969 &self,
970 buffer: &Entity<Buffer>,
971 version: Option<clock::Global>,
972 cx: &mut App,
973 ) -> Task<Result<Option<Blame>>> {
974 let buffer = buffer.read(cx);
975 let Some((repo, repo_path)) =
976 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
977 else {
978 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
979 };
980 let content = match &version {
981 Some(version) => buffer.rope_for_version(version),
982 None => buffer.as_rope().clone(),
983 };
984 let version = version.unwrap_or(buffer.version());
985 let buffer_id = buffer.remote_id();
986
987 let rx = repo.update(cx, |repo, _| {
988 repo.send_job(None, move |state, _| async move {
989 match state {
990 RepositoryState::Local { backend, .. } => backend
991 .blame(repo_path.clone(), content)
992 .await
993 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
994 .map(Some),
995 RepositoryState::Remote { project_id, client } => {
996 let response = client
997 .request(proto::BlameBuffer {
998 project_id: project_id.to_proto(),
999 buffer_id: buffer_id.into(),
1000 version: serialize_version(&version),
1001 })
1002 .await?;
1003 Ok(deserialize_blame_buffer_response(response))
1004 }
1005 }
1006 })
1007 });
1008
1009 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1010 }
1011
1012 pub fn file_history(
1013 &self,
1014 repo: &Entity<Repository>,
1015 path: RepoPath,
1016 cx: &mut App,
1017 ) -> Task<Result<git::repository::FileHistory>> {
1018 let rx = repo.update(cx, |repo, _| {
1019 repo.send_job(None, move |state, _| async move {
1020 match state {
1021 RepositoryState::Local { backend, .. } => backend.file_history(path).await,
1022 RepositoryState::Remote { .. } => Err(anyhow!(
1023 "file history not supported for remote repositories yet"
1024 )),
1025 }
1026 })
1027 });
1028
1029 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1030 }
1031
1032 pub fn get_permalink_to_line(
1033 &self,
1034 buffer: &Entity<Buffer>,
1035 selection: Range<u32>,
1036 cx: &mut App,
1037 ) -> Task<Result<url::Url>> {
1038 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1039 return Task::ready(Err(anyhow!("buffer has no file")));
1040 };
1041
1042 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1043 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1044 cx,
1045 ) else {
1046 // If we're not in a Git repo, check whether this is a Rust source
1047 // file in the Cargo registry (presumably opened with go-to-definition
1048 // from a normal Rust file). If so, we can put together a permalink
1049 // using crate metadata.
1050 if buffer
1051 .read(cx)
1052 .language()
1053 .is_none_or(|lang| lang.name() != "Rust".into())
1054 {
1055 return Task::ready(Err(anyhow!("no permalink available")));
1056 }
1057 let file_path = file.worktree.read(cx).absolutize(&file.path);
1058 return cx.spawn(async move |cx| {
1059 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1060 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1061 .context("no permalink available")
1062 });
1063 };
1064
1065 let buffer_id = buffer.read(cx).remote_id();
1066 let branch = repo.read(cx).branch.clone();
1067 let remote = branch
1068 .as_ref()
1069 .and_then(|b| b.upstream.as_ref())
1070 .and_then(|b| b.remote_name())
1071 .unwrap_or("origin")
1072 .to_string();
1073
1074 let rx = repo.update(cx, |repo, _| {
1075 repo.send_job(None, move |state, cx| async move {
1076 match state {
1077 RepositoryState::Local { backend, .. } => {
1078 let origin_url = backend
1079 .remote_url(&remote)
1080 .with_context(|| format!("remote \"{remote}\" not found"))?;
1081
1082 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1083
1084 let provider_registry =
1085 cx.update(GitHostingProviderRegistry::default_global)?;
1086
1087 let (provider, remote) =
1088 parse_git_remote_url(provider_registry, &origin_url)
1089 .context("parsing Git remote URL")?;
1090
1091 Ok(provider.build_permalink(
1092 remote,
1093 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1094 ))
1095 }
1096 RepositoryState::Remote { project_id, client } => {
1097 let response = client
1098 .request(proto::GetPermalinkToLine {
1099 project_id: project_id.to_proto(),
1100 buffer_id: buffer_id.into(),
1101 selection: Some(proto::Range {
1102 start: selection.start as u64,
1103 end: selection.end as u64,
1104 }),
1105 })
1106 .await?;
1107
1108 url::Url::parse(&response.permalink).context("failed to parse permalink")
1109 }
1110 }
1111 })
1112 });
1113 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1114 }
1115
1116 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1117 match &self.state {
1118 GitStoreState::Local {
1119 downstream: downstream_client,
1120 ..
1121 } => downstream_client
1122 .as_ref()
1123 .map(|state| (state.client.clone(), state.project_id)),
1124 GitStoreState::Remote {
1125 downstream: downstream_client,
1126 ..
1127 } => downstream_client.clone(),
1128 }
1129 }
1130
1131 fn upstream_client(&self) -> Option<AnyProtoClient> {
1132 match &self.state {
1133 GitStoreState::Local { .. } => None,
1134 GitStoreState::Remote {
1135 upstream_client, ..
1136 } => Some(upstream_client.clone()),
1137 }
1138 }
1139
1140 fn on_worktree_store_event(
1141 &mut self,
1142 worktree_store: Entity<WorktreeStore>,
1143 event: &WorktreeStoreEvent,
1144 cx: &mut Context<Self>,
1145 ) {
1146 let GitStoreState::Local {
1147 project_environment,
1148 downstream,
1149 next_repository_id,
1150 fs,
1151 } = &self.state
1152 else {
1153 return;
1154 };
1155
1156 match event {
1157 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1158 if let Some(worktree) = self
1159 .worktree_store
1160 .read(cx)
1161 .worktree_for_id(*worktree_id, cx)
1162 {
1163 let paths_by_git_repo =
1164 self.process_updated_entries(&worktree, updated_entries, cx);
1165 let downstream = downstream
1166 .as_ref()
1167 .map(|downstream| downstream.updates_tx.clone());
1168 cx.spawn(async move |_, cx| {
1169 let paths_by_git_repo = paths_by_git_repo.await;
1170 for (repo, paths) in paths_by_git_repo {
1171 repo.update(cx, |repo, cx| {
1172 repo.paths_changed(paths, downstream.clone(), cx);
1173 })
1174 .ok();
1175 }
1176 })
1177 .detach();
1178 }
1179 }
1180 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1181 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1182 else {
1183 return;
1184 };
1185 if !worktree.read(cx).is_visible() {
1186 log::debug!(
1187 "not adding repositories for local worktree {:?} because it's not visible",
1188 worktree.read(cx).abs_path()
1189 );
1190 return;
1191 }
1192 self.update_repositories_from_worktree(
1193 *worktree_id,
1194 project_environment.clone(),
1195 next_repository_id.clone(),
1196 downstream
1197 .as_ref()
1198 .map(|downstream| downstream.updates_tx.clone()),
1199 changed_repos.clone(),
1200 fs.clone(),
1201 cx,
1202 );
1203 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1204 }
1205 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1206 let repos_without_worktree: Vec<RepositoryId> = self
1207 .worktree_ids
1208 .iter_mut()
1209 .filter_map(|(repo_id, worktree_ids)| {
1210 worktree_ids.remove(worktree_id);
1211 if worktree_ids.is_empty() {
1212 Some(*repo_id)
1213 } else {
1214 None
1215 }
1216 })
1217 .collect();
1218 let is_active_repo_removed = repos_without_worktree
1219 .iter()
1220 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1221
1222 for repo_id in repos_without_worktree {
1223 self.repositories.remove(&repo_id);
1224 self.worktree_ids.remove(&repo_id);
1225 if let Some(updates_tx) =
1226 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1227 {
1228 updates_tx
1229 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1230 .ok();
1231 }
1232 }
1233
1234 if is_active_repo_removed {
1235 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1236 self.active_repo_id = Some(repo_id);
1237 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1238 } else {
1239 self.active_repo_id = None;
1240 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1241 }
1242 }
1243 }
1244 _ => {}
1245 }
    }

    fn on_repository_event(
1248 &mut self,
1249 repo: Entity<Repository>,
1250 event: &RepositoryEvent,
1251 cx: &mut Context<Self>,
1252 ) {
1253 let id = repo.read(cx).id;
1254 let repo_snapshot = repo.read(cx).snapshot.clone();
1255 for (buffer_id, diff) in self.diffs.iter() {
1256 if let Some((buffer_repo, repo_path)) =
1257 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1258 && buffer_repo == repo
1259 {
1260 diff.update(cx, |diff, cx| {
1261 if let Some(conflict_set) = &diff.conflict_set {
1262 let conflict_status_changed =
1263 conflict_set.update(cx, |conflict_set, cx| {
1264 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1265 conflict_set.set_has_conflict(has_conflict, cx)
1266 })?;
1267 if conflict_status_changed {
1268 let buffer_store = self.buffer_store.read(cx);
1269 if let Some(buffer) = buffer_store.get(*buffer_id) {
1270 let _ = diff
1271 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1272 }
1273 }
1274 }
1275 anyhow::Ok(())
1276 })
1277 .ok();
1278 }
1279 }
1280 cx.emit(GitStoreEvent::RepositoryUpdated(
1281 id,
1282 event.clone(),
1283 self.active_repo_id == Some(id),
1284 ))
1285 }
1286
1287 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1288 cx.emit(GitStoreEvent::JobsUpdated)
1289 }
1290
    /// Updates our list of repositories and schedules git scans in response to a notification from a worktree.
1292 fn update_repositories_from_worktree(
1293 &mut self,
1294 worktree_id: WorktreeId,
1295 project_environment: Entity<ProjectEnvironment>,
1296 next_repository_id: Arc<AtomicU64>,
1297 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1298 updated_git_repositories: UpdatedGitRepositoriesSet,
1299 fs: Arc<dyn Fs>,
1300 cx: &mut Context<Self>,
1301 ) {
1302 let mut removed_ids = Vec::new();
1303 for update in updated_git_repositories.iter() {
1304 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1305 let existing_work_directory_abs_path =
1306 repo.read(cx).work_directory_abs_path.clone();
1307 Some(&existing_work_directory_abs_path)
1308 == update.old_work_directory_abs_path.as_ref()
1309 || Some(&existing_work_directory_abs_path)
1310 == update.new_work_directory_abs_path.as_ref()
1311 }) {
1312 let repo_id = *id;
1313 if let Some(new_work_directory_abs_path) =
1314 update.new_work_directory_abs_path.clone()
1315 {
1316 self.worktree_ids
1317 .entry(repo_id)
1318 .or_insert_with(HashSet::new)
1319 .insert(worktree_id);
1320 existing.update(cx, |existing, cx| {
1321 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1322 existing.schedule_scan(updates_tx.clone(), cx);
1323 });
1324 } else {
1325 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1326 worktree_ids.remove(&worktree_id);
1327 if worktree_ids.is_empty() {
1328 removed_ids.push(repo_id);
1329 }
1330 }
1331 }
1332 } else if let UpdatedGitRepository {
1333 new_work_directory_abs_path: Some(work_directory_abs_path),
1334 dot_git_abs_path: Some(dot_git_abs_path),
1335 repository_dir_abs_path: Some(repository_dir_abs_path),
1336 common_dir_abs_path: Some(common_dir_abs_path),
1337 ..
1338 } = update
1339 {
1340 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1341 let git_store = cx.weak_entity();
1342 let repo = cx.new(|cx| {
1343 let mut repo = Repository::local(
1344 id,
1345 work_directory_abs_path.clone(),
1346 dot_git_abs_path.clone(),
1347 repository_dir_abs_path.clone(),
1348 common_dir_abs_path.clone(),
1349 project_environment.downgrade(),
1350 fs.clone(),
1351 git_store,
1352 cx,
1353 );
1354 if let Some(updates_tx) = updates_tx.as_ref() {
1355 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1356 updates_tx
1357 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1358 .ok();
1359 }
1360 repo.schedule_scan(updates_tx.clone(), cx);
1361 repo
1362 });
1363 self._subscriptions
1364 .push(cx.subscribe(&repo, Self::on_repository_event));
1365 self._subscriptions
1366 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1367 self.repositories.insert(id, repo);
1368 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1369 cx.emit(GitStoreEvent::RepositoryAdded);
1370 self.active_repo_id.get_or_insert_with(|| {
1371 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1372 id
1373 });
1374 }
1375 }
1376
1377 for id in removed_ids {
1378 if self.active_repo_id == Some(id) {
1379 self.active_repo_id = None;
1380 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1381 }
1382 self.repositories.remove(&id);
1383 if let Some(updates_tx) = updates_tx.as_ref() {
1384 updates_tx
1385 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1386 .ok();
1387 }
1388 }
1389 }
1390
1391 fn on_buffer_store_event(
1392 &mut self,
1393 _: Entity<BufferStore>,
1394 event: &BufferStoreEvent,
1395 cx: &mut Context<Self>,
1396 ) {
1397 match event {
1398 BufferStoreEvent::BufferAdded(buffer) => {
1399 cx.subscribe(buffer, |this, buffer, event, cx| {
1400 if let BufferEvent::LanguageChanged = event {
1401 let buffer_id = buffer.read(cx).remote_id();
1402 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1403 diff_state.update(cx, |diff_state, cx| {
1404 diff_state.buffer_language_changed(buffer, cx);
1405 });
1406 }
1407 }
1408 })
1409 .detach();
1410 }
1411 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1412 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1413 diffs.remove(buffer_id);
1414 }
1415 }
1416 BufferStoreEvent::BufferDropped(buffer_id) => {
1417 self.diffs.remove(buffer_id);
1418 for diffs in self.shared_diffs.values_mut() {
1419 diffs.remove(buffer_id);
1420 }
1421 }
1422
1423 _ => {}
1424 }
1425 }
1426
1427 pub fn recalculate_buffer_diffs(
1428 &mut self,
1429 buffers: Vec<Entity<Buffer>>,
1430 cx: &mut Context<Self>,
1431 ) -> impl Future<Output = ()> + use<> {
1432 let mut futures = Vec::new();
1433 for buffer in buffers {
1434 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1435 let buffer = buffer.read(cx).text_snapshot();
1436 diff_state.update(cx, |diff_state, cx| {
1437 diff_state.recalculate_diffs(buffer.clone(), cx);
1438 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1439 });
1440 futures.push(diff_state.update(cx, |diff_state, cx| {
1441 diff_state
1442 .reparse_conflict_markers(buffer, cx)
1443 .map(|_| {})
1444 .boxed()
1445 }));
1446 }
1447 }
1448 async move {
1449 futures::future::join_all(futures).await;
1450 }
1451 }
1452
1453 fn on_buffer_diff_event(
1454 &mut self,
1455 diff: Entity<buffer_diff::BufferDiff>,
1456 event: &BufferDiffEvent,
1457 cx: &mut Context<Self>,
1458 ) {
1459 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1460 let buffer_id = diff.read(cx).buffer_id;
1461 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1462 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1463 diff_state.hunk_staging_operation_count += 1;
1464 diff_state.hunk_staging_operation_count
1465 });
1466 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1467 let recv = repo.update(cx, |repo, cx| {
1468 log::debug!("hunks changed for {}", path.as_unix_str());
1469 repo.spawn_set_index_text_job(
1470 path,
1471 new_index_text.as_ref().map(|rope| rope.to_string()),
1472 Some(hunk_staging_operation_count),
1473 cx,
1474 )
1475 });
1476 let diff = diff.downgrade();
1477 cx.spawn(async move |this, cx| {
1478 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1479 diff.update(cx, |diff, cx| {
1480 diff.clear_pending_hunks(cx);
1481 })
1482 .ok();
1483 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1484 .ok();
1485 }
1486 })
1487 .detach();
1488 }
1489 }
1490 }
1491 }
1492
1493 fn local_worktree_git_repos_changed(
1494 &mut self,
1495 worktree: Entity<Worktree>,
1496 changed_repos: &UpdatedGitRepositoriesSet,
1497 cx: &mut Context<Self>,
1498 ) {
1499 log::debug!("local worktree repos changed");
1500 debug_assert!(worktree.read(cx).is_local());
1501
1502 for repository in self.repositories.values() {
1503 repository.update(cx, |repository, cx| {
1504 let repo_abs_path = &repository.work_directory_abs_path;
1505 if changed_repos.iter().any(|update| {
1506 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1507 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1508 }) {
1509 repository.reload_buffer_diff_bases(cx);
1510 }
1511 });
1512 }
1513 }
1514
1515 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1516 &self.repositories
1517 }
1518
1519 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1520 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1521 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1522 Some(status.status)
1523 }
1524
1525 pub fn repository_and_path_for_buffer_id(
1526 &self,
1527 buffer_id: BufferId,
1528 cx: &App,
1529 ) -> Option<(Entity<Repository>, RepoPath)> {
1530 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1531 let project_path = buffer.read(cx).project_path(cx)?;
1532 self.repository_and_path_for_project_path(&project_path, cx)
1533 }
1534
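    /// Returns the repository containing the given project path, along with
    /// that path expressed relative to the repository's working directory.
    /// When nested repositories both contain the path, the innermost one wins.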
1535 pub fn repository_and_path_for_project_path(
1536 &self,
1537 path: &ProjectPath,
1538 cx: &App,
1539 ) -> Option<(Entity<Repository>, RepoPath)> {
1540 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1541 self.repositories
1542 .values()
1543 .filter_map(|repo| {
1544 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1545 Some((repo.clone(), repo_path))
1546 })
1547 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1548 }
1549
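    /// Initializes a new git repository at the given path, either on the local
    /// filesystem or by forwarding the request to the upstream project.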
1550 pub fn git_init(
1551 &self,
1552 path: Arc<Path>,
1553 fallback_branch_name: String,
1554 cx: &App,
1555 ) -> Task<Result<()>> {
1556 match &self.state {
1557 GitStoreState::Local { fs, .. } => {
1558 let fs = fs.clone();
1559 cx.background_executor()
1560 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1561 }
1562 GitStoreState::Remote {
1563 upstream_client,
1564 upstream_project_id: project_id,
1565 ..
1566 } => {
1567 let client = upstream_client.clone();
1568 let project_id = *project_id;
1569 cx.background_executor().spawn(async move {
1570 client
1571 .request(proto::GitInit {
                            project_id,
1573 abs_path: path.to_string_lossy().into_owned(),
1574 fallback_branch_name,
1575 })
1576 .await?;
1577 Ok(())
1578 })
1579 }
1580 }
1581 }
1582
1583 pub fn git_clone(
1584 &self,
1585 repo: String,
1586 path: impl Into<Arc<std::path::Path>>,
1587 cx: &App,
1588 ) -> Task<Result<()>> {
1589 let path = path.into();
1590 match &self.state {
1591 GitStoreState::Local { fs, .. } => {
1592 let fs = fs.clone();
1593 cx.background_executor()
1594 .spawn(async move { fs.git_clone(&repo, &path).await })
1595 }
1596 GitStoreState::Remote {
1597 upstream_client,
1598 upstream_project_id,
1599 ..
1600 } => {
1601 if upstream_client.is_via_collab() {
1602 return Task::ready(Err(anyhow!(
1603 "Git Clone isn't supported for project guests"
1604 )));
1605 }
1606 let request = upstream_client.request(proto::GitClone {
1607 project_id: *upstream_project_id,
1608 abs_path: path.to_string_lossy().into_owned(),
1609 remote_repo: repo,
1610 });
1611
1612 cx.background_spawn(async move {
1613 let result = request.await?;
1614
1615 match result.success {
1616 true => Ok(()),
1617 false => Err(anyhow!("Git Clone failed")),
1618 }
1619 })
1620 }
1621 }
1622 }
1623
1624 async fn handle_update_repository(
1625 this: Entity<Self>,
1626 envelope: TypedEnvelope<proto::UpdateRepository>,
1627 mut cx: AsyncApp,
1628 ) -> Result<()> {
1629 this.update(&mut cx, |this, cx| {
1630 let path_style = this.worktree_store.read(cx).path_style();
1631 let mut update = envelope.payload;
1632
1633 let id = RepositoryId::from_proto(update.id);
1634 let client = this.upstream_client().context("no upstream client")?;
1635
1636 let mut repo_subscription = None;
1637 let repo = this.repositories.entry(id).or_insert_with(|| {
1638 let git_store = cx.weak_entity();
1639 let repo = cx.new(|cx| {
1640 Repository::remote(
1641 id,
1642 Path::new(&update.abs_path).into(),
1643 path_style,
1644 ProjectId(update.project_id),
1645 client,
1646 git_store,
1647 cx,
1648 )
1649 });
1650 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1651 cx.emit(GitStoreEvent::RepositoryAdded);
1652 repo
1653 });
1654 this._subscriptions.extend(repo_subscription);
1655
1656 repo.update(cx, {
1657 let update = update.clone();
1658 |repo, cx| repo.apply_remote_update(update, cx)
1659 })?;
1660
1661 this.active_repo_id.get_or_insert_with(|| {
1662 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1663 id
1664 });
1665
1666 if let Some((client, project_id)) = this.downstream_client() {
1667 update.project_id = project_id.to_proto();
1668 client.send(update).log_err();
1669 }
1670 Ok(())
1671 })?
1672 }
1673
1674 async fn handle_remove_repository(
1675 this: Entity<Self>,
1676 envelope: TypedEnvelope<proto::RemoveRepository>,
1677 mut cx: AsyncApp,
1678 ) -> Result<()> {
1679 this.update(&mut cx, |this, cx| {
1680 let mut update = envelope.payload;
1681 let id = RepositoryId::from_proto(update.id);
1682 this.repositories.remove(&id);
1683 if let Some((client, project_id)) = this.downstream_client() {
1684 update.project_id = project_id.to_proto();
1685 client.send(update).log_err();
1686 }
1687 if this.active_repo_id == Some(id) {
1688 this.active_repo_id = None;
1689 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1690 }
1691 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1692 })
1693 }
1694
1695 async fn handle_git_init(
1696 this: Entity<Self>,
1697 envelope: TypedEnvelope<proto::GitInit>,
1698 cx: AsyncApp,
1699 ) -> Result<proto::Ack> {
1700 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1701 let name = envelope.payload.fallback_branch_name;
1702 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1703 .await?;
1704
1705 Ok(proto::Ack {})
1706 }
1707
1708 async fn handle_git_clone(
1709 this: Entity<Self>,
1710 envelope: TypedEnvelope<proto::GitClone>,
1711 cx: AsyncApp,
1712 ) -> Result<proto::GitCloneResponse> {
1713 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1714 let repo_name = envelope.payload.remote_repo;
1715 let result = cx
1716 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1717 .await;
1718
1719 Ok(proto::GitCloneResponse {
1720 success: result.is_ok(),
1721 })
1722 }
1723
1724 async fn handle_fetch(
1725 this: Entity<Self>,
1726 envelope: TypedEnvelope<proto::Fetch>,
1727 mut cx: AsyncApp,
1728 ) -> Result<proto::RemoteMessageResponse> {
1729 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1730 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1731 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1732 let askpass_id = envelope.payload.askpass_id;
1733
1734 let askpass = make_remote_delegate(
1735 this,
1736 envelope.payload.project_id,
1737 repository_id,
1738 askpass_id,
1739 &mut cx,
1740 );
1741
1742 let remote_output = repository_handle
1743 .update(&mut cx, |repository_handle, cx| {
1744 repository_handle.fetch(fetch_options, askpass, cx)
1745 })?
1746 .await??;
1747
1748 Ok(proto::RemoteMessageResponse {
1749 stdout: remote_output.stdout,
1750 stderr: remote_output.stderr,
1751 })
1752 }
1753
1754 async fn handle_push(
1755 this: Entity<Self>,
1756 envelope: TypedEnvelope<proto::Push>,
1757 mut cx: AsyncApp,
1758 ) -> Result<proto::RemoteMessageResponse> {
1759 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1760 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1761
1762 let askpass_id = envelope.payload.askpass_id;
1763 let askpass = make_remote_delegate(
1764 this,
1765 envelope.payload.project_id,
1766 repository_id,
1767 askpass_id,
1768 &mut cx,
1769 );
1770
1771 let options = envelope
1772 .payload
1773 .options
1774 .as_ref()
1775 .map(|_| match envelope.payload.options() {
1776 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1777 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1778 });
1779
1780 let branch_name = envelope.payload.branch_name.into();
1781 let remote_name = envelope.payload.remote_name.into();
1782
1783 let remote_output = repository_handle
1784 .update(&mut cx, |repository_handle, cx| {
1785 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1786 })?
1787 .await??;
1788 Ok(proto::RemoteMessageResponse {
1789 stdout: remote_output.stdout,
1790 stderr: remote_output.stderr,
1791 })
1792 }
1793
1794 async fn handle_pull(
1795 this: Entity<Self>,
1796 envelope: TypedEnvelope<proto::Pull>,
1797 mut cx: AsyncApp,
1798 ) -> Result<proto::RemoteMessageResponse> {
1799 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1800 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1801 let askpass_id = envelope.payload.askpass_id;
1802 let askpass = make_remote_delegate(
1803 this,
1804 envelope.payload.project_id,
1805 repository_id,
1806 askpass_id,
1807 &mut cx,
1808 );
1809
1810 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1811 let remote_name = envelope.payload.remote_name.into();
1812 let rebase = envelope.payload.rebase;
1813
1814 let remote_message = repository_handle
1815 .update(&mut cx, |repository_handle, cx| {
1816 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1817 })?
1818 .await??;
1819
1820 Ok(proto::RemoteMessageResponse {
1821 stdout: remote_message.stdout,
1822 stderr: remote_message.stderr,
1823 })
1824 }
1825
1826 async fn handle_stage(
1827 this: Entity<Self>,
1828 envelope: TypedEnvelope<proto::Stage>,
1829 mut cx: AsyncApp,
1830 ) -> Result<proto::Ack> {
1831 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1832 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1833
1834 let entries = envelope
1835 .payload
1836 .paths
1837 .into_iter()
1838 .map(|path| RepoPath::new(&path))
1839 .collect::<Result<Vec<_>>>()?;
1840
1841 repository_handle
1842 .update(&mut cx, |repository_handle, cx| {
1843 repository_handle.stage_entries(entries, cx)
1844 })?
1845 .await?;
1846 Ok(proto::Ack {})
1847 }
1848
1849 async fn handle_unstage(
1850 this: Entity<Self>,
1851 envelope: TypedEnvelope<proto::Unstage>,
1852 mut cx: AsyncApp,
1853 ) -> Result<proto::Ack> {
1854 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1855 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1856
1857 let entries = envelope
1858 .payload
1859 .paths
1860 .into_iter()
1861 .map(|path| RepoPath::new(&path))
1862 .collect::<Result<Vec<_>>>()?;
1863
1864 repository_handle
1865 .update(&mut cx, |repository_handle, cx| {
1866 repository_handle.unstage_entries(entries, cx)
1867 })?
1868 .await?;
1869
1870 Ok(proto::Ack {})
1871 }
1872
1873 async fn handle_stash(
1874 this: Entity<Self>,
1875 envelope: TypedEnvelope<proto::Stash>,
1876 mut cx: AsyncApp,
1877 ) -> Result<proto::Ack> {
1878 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1879 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1880
1881 let entries = envelope
1882 .payload
1883 .paths
1884 .into_iter()
1885 .map(|path| RepoPath::new(&path))
1886 .collect::<Result<Vec<_>>>()?;
1887
1888 repository_handle
1889 .update(&mut cx, |repository_handle, cx| {
1890 repository_handle.stash_entries(entries, cx)
1891 })?
1892 .await?;
1893
1894 Ok(proto::Ack {})
1895 }
1896
1897 async fn handle_stash_pop(
1898 this: Entity<Self>,
1899 envelope: TypedEnvelope<proto::StashPop>,
1900 mut cx: AsyncApp,
1901 ) -> Result<proto::Ack> {
1902 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1903 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1904 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1905
1906 repository_handle
1907 .update(&mut cx, |repository_handle, cx| {
1908 repository_handle.stash_pop(stash_index, cx)
1909 })?
1910 .await?;
1911
1912 Ok(proto::Ack {})
1913 }
1914
1915 async fn handle_stash_apply(
1916 this: Entity<Self>,
1917 envelope: TypedEnvelope<proto::StashApply>,
1918 mut cx: AsyncApp,
1919 ) -> Result<proto::Ack> {
1920 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1921 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1922 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1923
1924 repository_handle
1925 .update(&mut cx, |repository_handle, cx| {
1926 repository_handle.stash_apply(stash_index, cx)
1927 })?
1928 .await?;
1929
1930 Ok(proto::Ack {})
1931 }
1932
1933 async fn handle_stash_drop(
1934 this: Entity<Self>,
1935 envelope: TypedEnvelope<proto::StashDrop>,
1936 mut cx: AsyncApp,
1937 ) -> Result<proto::Ack> {
1938 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1939 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1940 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1941
1942 repository_handle
1943 .update(&mut cx, |repository_handle, cx| {
1944 repository_handle.stash_drop(stash_index, cx)
1945 })?
1946 .await??;
1947
1948 Ok(proto::Ack {})
1949 }
1950
1951 async fn handle_set_index_text(
1952 this: Entity<Self>,
1953 envelope: TypedEnvelope<proto::SetIndexText>,
1954 mut cx: AsyncApp,
1955 ) -> Result<proto::Ack> {
1956 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1957 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1958 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
1959
1960 repository_handle
1961 .update(&mut cx, |repository_handle, cx| {
1962 repository_handle.spawn_set_index_text_job(
1963 repo_path,
1964 envelope.payload.text,
1965 None,
1966 cx,
1967 )
1968 })?
1969 .await??;
1970 Ok(proto::Ack {})
1971 }
1972
1973 async fn handle_commit(
1974 this: Entity<Self>,
1975 envelope: TypedEnvelope<proto::Commit>,
1976 mut cx: AsyncApp,
1977 ) -> Result<proto::Ack> {
1978 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1979 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1980 let askpass_id = envelope.payload.askpass_id;
1981
1982 let askpass = make_remote_delegate(
1983 this,
1984 envelope.payload.project_id,
1985 repository_id,
1986 askpass_id,
1987 &mut cx,
1988 );
1989
1990 let message = SharedString::from(envelope.payload.message);
1991 let name = envelope.payload.name.map(SharedString::from);
1992 let email = envelope.payload.email.map(SharedString::from);
1993 let options = envelope.payload.options.unwrap_or_default();
1994
1995 repository_handle
1996 .update(&mut cx, |repository_handle, cx| {
1997 repository_handle.commit(
1998 message,
1999 name.zip(email),
2000 CommitOptions {
2001 amend: options.amend,
2002 signoff: options.signoff,
2003 },
2004 askpass,
2005 cx,
2006 )
2007 })?
2008 .await??;
2009 Ok(proto::Ack {})
2010 }
2011
2012 async fn handle_get_remotes(
2013 this: Entity<Self>,
2014 envelope: TypedEnvelope<proto::GetRemotes>,
2015 mut cx: AsyncApp,
2016 ) -> Result<proto::GetRemotesResponse> {
2017 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2018 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2019
2020 let branch_name = envelope.payload.branch_name;
2021
2022 let remotes = repository_handle
2023 .update(&mut cx, |repository_handle, _| {
2024 repository_handle.get_remotes(branch_name)
2025 })?
2026 .await??;
2027
2028 Ok(proto::GetRemotesResponse {
2029 remotes: remotes
2030 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2033 })
2034 .collect::<Vec<_>>(),
2035 })
2036 }
2037
2038 async fn handle_get_worktrees(
2039 this: Entity<Self>,
2040 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2041 mut cx: AsyncApp,
2042 ) -> Result<proto::GitWorktreesResponse> {
2043 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2044 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2045
2046 let worktrees = repository_handle
2047 .update(&mut cx, |repository_handle, _| {
2048 repository_handle.worktrees()
2049 })?
2050 .await??;
2051
2052 Ok(proto::GitWorktreesResponse {
2053 worktrees: worktrees
2054 .into_iter()
2055 .map(|worktree| worktree_to_proto(&worktree))
2056 .collect::<Vec<_>>(),
2057 })
2058 }
2059
2060 async fn handle_create_worktree(
2061 this: Entity<Self>,
2062 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2063 mut cx: AsyncApp,
2064 ) -> Result<proto::Ack> {
2065 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2066 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2067 let directory = PathBuf::from(envelope.payload.directory);
2068 let name = envelope.payload.name;
2069 let commit = envelope.payload.commit;
2070
2071 repository_handle
2072 .update(&mut cx, |repository_handle, _| {
2073 repository_handle.create_worktree(name, directory, commit)
2074 })?
2075 .await??;
2076
2077 Ok(proto::Ack {})
2078 }
2079
2080 async fn handle_get_branches(
2081 this: Entity<Self>,
2082 envelope: TypedEnvelope<proto::GitGetBranches>,
2083 mut cx: AsyncApp,
2084 ) -> Result<proto::GitBranchesResponse> {
2085 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2086 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2087
2088 let branches = repository_handle
2089 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2090 .await??;
2091
2092 Ok(proto::GitBranchesResponse {
2093 branches: branches
2094 .into_iter()
2095 .map(|branch| branch_to_proto(&branch))
2096 .collect::<Vec<_>>(),
2097 })
2098 }

    async fn handle_get_default_branch(
2100 this: Entity<Self>,
2101 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2102 mut cx: AsyncApp,
2103 ) -> Result<proto::GetDefaultBranchResponse> {
2104 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2105 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2106
2107 let branch = repository_handle
2108 .update(&mut cx, |repository_handle, _| {
2109 repository_handle.default_branch()
2110 })?
2111 .await??
2112 .map(Into::into);
2113
2114 Ok(proto::GetDefaultBranchResponse { branch })
2115 }

    async fn handle_create_branch(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::GitCreateBranch>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::Ack> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123 let branch_name = envelope.payload.branch_name;
2124
2125 repository_handle
2126 .update(&mut cx, |repository_handle, _| {
2127 repository_handle.create_branch(branch_name, None)
2128 })?
2129 .await??;
2130
2131 Ok(proto::Ack {})
2132 }
2133
2134 async fn handle_change_branch(
2135 this: Entity<Self>,
2136 envelope: TypedEnvelope<proto::GitChangeBranch>,
2137 mut cx: AsyncApp,
2138 ) -> Result<proto::Ack> {
2139 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2140 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2141 let branch_name = envelope.payload.branch_name;
2142
2143 repository_handle
2144 .update(&mut cx, |repository_handle, _| {
2145 repository_handle.change_branch(branch_name)
2146 })?
2147 .await??;
2148
2149 Ok(proto::Ack {})
2150 }
2151
2152 async fn handle_rename_branch(
2153 this: Entity<Self>,
2154 envelope: TypedEnvelope<proto::GitRenameBranch>,
2155 mut cx: AsyncApp,
2156 ) -> Result<proto::Ack> {
2157 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2158 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2159 let branch = envelope.payload.branch;
2160 let new_name = envelope.payload.new_name;
2161
2162 repository_handle
2163 .update(&mut cx, |repository_handle, _| {
2164 repository_handle.rename_branch(branch, new_name)
2165 })?
2166 .await??;
2167
2168 Ok(proto::Ack {})
2169 }
2170
2171 async fn handle_show(
2172 this: Entity<Self>,
2173 envelope: TypedEnvelope<proto::GitShow>,
2174 mut cx: AsyncApp,
2175 ) -> Result<proto::GitCommitDetails> {
2176 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2177 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2178
2179 let commit = repository_handle
2180 .update(&mut cx, |repository_handle, _| {
2181 repository_handle.show(envelope.payload.commit)
2182 })?
2183 .await??;
2184 Ok(proto::GitCommitDetails {
2185 sha: commit.sha.into(),
2186 message: commit.message.into(),
2187 commit_timestamp: commit.commit_timestamp,
2188 author_email: commit.author_email.into(),
2189 author_name: commit.author_name.into(),
2190 })
2191 }
2192
2193 async fn handle_load_commit_diff(
2194 this: Entity<Self>,
2195 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2196 mut cx: AsyncApp,
2197 ) -> Result<proto::LoadCommitDiffResponse> {
2198 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2199 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2200
2201 let commit_diff = repository_handle
2202 .update(&mut cx, |repository_handle, _| {
2203 repository_handle.load_commit_diff(envelope.payload.commit)
2204 })?
2205 .await??;
2206 Ok(proto::LoadCommitDiffResponse {
2207 files: commit_diff
2208 .files
2209 .into_iter()
2210 .map(|file| proto::CommitFile {
2211 path: file.path.to_proto(),
2212 old_text: file.old_text,
2213 new_text: file.new_text,
2214 })
2215 .collect(),
2216 })
2217 }
2218
2219 async fn handle_reset(
2220 this: Entity<Self>,
2221 envelope: TypedEnvelope<proto::GitReset>,
2222 mut cx: AsyncApp,
2223 ) -> Result<proto::Ack> {
2224 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2225 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2226
2227 let mode = match envelope.payload.mode() {
2228 git_reset::ResetMode::Soft => ResetMode::Soft,
2229 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2230 };
2231
2232 repository_handle
2233 .update(&mut cx, |repository_handle, cx| {
2234 repository_handle.reset(envelope.payload.commit, mode, cx)
2235 })?
2236 .await??;
2237 Ok(proto::Ack {})
2238 }
2239
2240 async fn handle_checkout_files(
2241 this: Entity<Self>,
2242 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2243 mut cx: AsyncApp,
2244 ) -> Result<proto::Ack> {
2245 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2246 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2247 let paths = envelope
2248 .payload
2249 .paths
2250 .iter()
2251 .map(|s| RepoPath::from_proto(s))
2252 .collect::<Result<Vec<_>>>()?;
2253
2254 repository_handle
2255 .update(&mut cx, |repository_handle, cx| {
2256 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2257 })?
2258 .await?;
2259 Ok(proto::Ack {})
2260 }
2261
2262 async fn handle_open_commit_message_buffer(
2263 this: Entity<Self>,
2264 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2265 mut cx: AsyncApp,
2266 ) -> Result<proto::OpenBufferResponse> {
2267 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2268 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2269 let buffer = repository
2270 .update(&mut cx, |repository, cx| {
2271 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2272 })?
2273 .await?;
2274
2275 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2276 this.update(&mut cx, |this, cx| {
2277 this.buffer_store.update(cx, |buffer_store, cx| {
2278 buffer_store
2279 .create_buffer_for_peer(
2280 &buffer,
2281 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2282 cx,
2283 )
2284 .detach_and_log_err(cx);
2285 })
2286 })?;
2287
2288 Ok(proto::OpenBufferResponse {
2289 buffer_id: buffer_id.to_proto(),
2290 })
2291 }
2292
2293 async fn handle_askpass(
2294 this: Entity<Self>,
2295 envelope: TypedEnvelope<proto::AskPassRequest>,
2296 mut cx: AsyncApp,
2297 ) -> Result<proto::AskPassResponse> {
2298 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2299 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2300
2301 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2302 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2303 debug_panic!("no askpass found");
2304 anyhow::bail!("no askpass found");
2305 };
2306
2307 let response = askpass
2308 .ask_password(envelope.payload.prompt)
2309 .await
2310 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2311
2312 delegates
2313 .lock()
2314 .insert(envelope.payload.askpass_id, askpass);
2315
        // Note: the marker type notwithstanding, we send the askpass response back to the requester unencrypted here.
2317 Ok(proto::AskPassResponse {
2318 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2319 })
2320 }
2321
2322 async fn handle_check_for_pushed_commits(
2323 this: Entity<Self>,
2324 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2325 mut cx: AsyncApp,
2326 ) -> Result<proto::CheckForPushedCommitsResponse> {
2327 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2328 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2329
2330 let branches = repository_handle
2331 .update(&mut cx, |repository_handle, _| {
2332 repository_handle.check_for_pushed_commits()
2333 })?
2334 .await??;
2335 Ok(proto::CheckForPushedCommitsResponse {
2336 pushed_to: branches
2337 .into_iter()
2338 .map(|commit| commit.to_string())
2339 .collect(),
2340 })
2341 }
2342
2343 async fn handle_git_diff(
2344 this: Entity<Self>,
2345 envelope: TypedEnvelope<proto::GitDiff>,
2346 mut cx: AsyncApp,
2347 ) -> Result<proto::GitDiffResponse> {
2348 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2349 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2350 let diff_type = match envelope.payload.diff_type() {
2351 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2352 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2353 };
2354
2355 let mut diff = repository_handle
2356 .update(&mut cx, |repository_handle, cx| {
2357 repository_handle.diff(diff_type, cx)
2358 })?
2359 .await??;
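        // Cap very large diffs before sending them over RPC; the cutoff below is counted
        // in characters rather than bytes.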
2360 const ONE_MB: usize = 1_000_000;
2361 if diff.len() > ONE_MB {
            diff = diff.chars().take(ONE_MB).collect();
2363 }
2364
2365 Ok(proto::GitDiffResponse { diff })
2366 }
2367
2368 async fn handle_tree_diff(
2369 this: Entity<Self>,
2370 request: TypedEnvelope<proto::GetTreeDiff>,
2371 mut cx: AsyncApp,
2372 ) -> Result<proto::GetTreeDiffResponse> {
2373 let repository_id = RepositoryId(request.payload.repository_id);
2374 let diff_type = if request.payload.is_merge {
2375 DiffTreeType::MergeBase {
2376 base: request.payload.base.into(),
2377 head: request.payload.head.into(),
2378 }
2379 } else {
2380 DiffTreeType::Since {
2381 base: request.payload.base.into(),
2382 head: request.payload.head.into(),
2383 }
2384 };
2385
2386 let diff = this
2387 .update(&mut cx, |this, cx| {
2388 let repository = this.repositories().get(&repository_id)?;
2389 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2390 })?
2391 .context("missing repository")?
2392 .await??;
2393
2394 Ok(proto::GetTreeDiffResponse {
2395 entries: diff
2396 .entries
2397 .into_iter()
2398 .map(|(path, status)| proto::TreeDiffStatus {
2399 path: path.as_ref().to_proto(),
2400 status: match status {
2401 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2402 TreeDiffStatus::Modified { .. } => {
2403 proto::tree_diff_status::Status::Modified.into()
2404 }
2405 TreeDiffStatus::Deleted { .. } => {
2406 proto::tree_diff_status::Status::Deleted.into()
2407 }
2408 },
2409 oid: match status {
2410 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2411 Some(old.to_string())
2412 }
2413 TreeDiffStatus::Added => None,
2414 },
2415 })
2416 .collect(),
2417 })
2418 }
2419
2420 async fn handle_get_blob_content(
2421 this: Entity<Self>,
2422 request: TypedEnvelope<proto::GetBlobContent>,
2423 mut cx: AsyncApp,
2424 ) -> Result<proto::GetBlobContentResponse> {
2425 let oid = git::Oid::from_str(&request.payload.oid)?;
2426 let repository_id = RepositoryId(request.payload.repository_id);
2427 let content = this
2428 .update(&mut cx, |this, cx| {
2429 let repository = this.repositories().get(&repository_id)?;
2430 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2431 })?
2432 .context("missing repository")?
2433 .await?;
2434 Ok(proto::GetBlobContentResponse { content })
2435 }
2436
2437 async fn handle_open_unstaged_diff(
2438 this: Entity<Self>,
2439 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2440 mut cx: AsyncApp,
2441 ) -> Result<proto::OpenUnstagedDiffResponse> {
2442 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2443 let diff = this
2444 .update(&mut cx, |this, cx| {
2445 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2446 Some(this.open_unstaged_diff(buffer, cx))
2447 })?
2448 .context("missing buffer")?
2449 .await?;
2450 this.update(&mut cx, |this, _| {
2451 let shared_diffs = this
2452 .shared_diffs
2453 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2454 .or_default();
2455 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2456 })?;
2457 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2458 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2459 }
2460
2461 async fn handle_open_uncommitted_diff(
2462 this: Entity<Self>,
2463 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2464 mut cx: AsyncApp,
2465 ) -> Result<proto::OpenUncommittedDiffResponse> {
2466 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2467 let diff = this
2468 .update(&mut cx, |this, cx| {
2469 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2470 Some(this.open_uncommitted_diff(buffer, cx))
2471 })?
2472 .context("missing buffer")?
2473 .await?;
2474 this.update(&mut cx, |this, _| {
2475 let shared_diffs = this
2476 .shared_diffs
2477 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2478 .or_default();
2479 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2480 })?;
2481 diff.read_with(&cx, |diff, cx| {
2482 use proto::open_uncommitted_diff_response::Mode;
2483
2484 let unstaged_diff = diff.secondary_diff();
2485 let index_snapshot = unstaged_diff.and_then(|diff| {
2486 let diff = diff.read(cx);
2487 diff.base_text_exists().then(|| diff.base_text())
2488 });
2489
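            // Pick the response mode: when the index base text is the same underlying
            // snapshot as the committed one, report `IndexMatchesHead` and omit the staged
            // text; otherwise send `IndexAndHead` along with whichever base texts exist.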
2490 let mode;
2491 let staged_text;
2492 let committed_text;
2493 if diff.base_text_exists() {
2494 let committed_snapshot = diff.base_text();
2495 committed_text = Some(committed_snapshot.text());
2496 if let Some(index_text) = index_snapshot {
2497 if index_text.remote_id() == committed_snapshot.remote_id() {
2498 mode = Mode::IndexMatchesHead;
2499 staged_text = None;
2500 } else {
2501 mode = Mode::IndexAndHead;
2502 staged_text = Some(index_text.text());
2503 }
2504 } else {
2505 mode = Mode::IndexAndHead;
2506 staged_text = None;
2507 }
2508 } else {
2509 mode = Mode::IndexAndHead;
2510 committed_text = None;
2511 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2512 }
2513
2514 proto::OpenUncommittedDiffResponse {
2515 committed_text,
2516 staged_text,
2517 mode: mode.into(),
2518 }
2519 })
2520 }
2521
2522 async fn handle_update_diff_bases(
2523 this: Entity<Self>,
2524 request: TypedEnvelope<proto::UpdateDiffBases>,
2525 mut cx: AsyncApp,
2526 ) -> Result<()> {
2527 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2528 this.update(&mut cx, |this, cx| {
2529 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2530 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2531 {
2532 let buffer = buffer.read(cx).text_snapshot();
2533 diff_state.update(cx, |diff_state, cx| {
2534 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2535 })
2536 }
2537 })
2538 }
2539
2540 async fn handle_blame_buffer(
2541 this: Entity<Self>,
2542 envelope: TypedEnvelope<proto::BlameBuffer>,
2543 mut cx: AsyncApp,
2544 ) -> Result<proto::BlameBufferResponse> {
2545 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2546 let version = deserialize_version(&envelope.payload.version);
2547 let buffer = this.read_with(&cx, |this, cx| {
2548 this.buffer_store.read(cx).get_existing(buffer_id)
2549 })??;
2550 buffer
2551 .update(&mut cx, |buffer, _| {
2552 buffer.wait_for_version(version.clone())
2553 })?
2554 .await?;
2555 let blame = this
2556 .update(&mut cx, |this, cx| {
2557 this.blame_buffer(&buffer, Some(version), cx)
2558 })?
2559 .await?;
2560 Ok(serialize_blame_buffer_response(blame))
2561 }
2562
2563 async fn handle_get_permalink_to_line(
2564 this: Entity<Self>,
2565 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2566 mut cx: AsyncApp,
2567 ) -> Result<proto::GetPermalinkToLineResponse> {
2568 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2569 // let version = deserialize_version(&envelope.payload.version);
2570 let selection = {
2571 let proto_selection = envelope
2572 .payload
2573 .selection
                .context("no selection defined to get permalink for")?;
2575 proto_selection.start as u32..proto_selection.end as u32
2576 };
2577 let buffer = this.read_with(&cx, |this, cx| {
2578 this.buffer_store.read(cx).get_existing(buffer_id)
2579 })??;
2580 let permalink = this
2581 .update(&mut cx, |this, cx| {
2582 this.get_permalink_to_line(&buffer, selection, cx)
2583 })?
2584 .await?;
2585 Ok(proto::GetPermalinkToLineResponse {
2586 permalink: permalink.to_string(),
2587 })
2588 }
2589
2590 fn repository_for_request(
2591 this: &Entity<Self>,
2592 id: RepositoryId,
2593 cx: &mut AsyncApp,
2594 ) -> Result<Entity<Repository>> {
2595 this.read_with(cx, |this, _| {
2596 this.repositories
2597 .get(&id)
2598 .context("missing repository handle")
2599 .cloned()
2600 })?
2601 }
2602
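    /// Returns a snapshot of every repository currently tracked by the store, keyed by id.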
2603 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2604 self.repositories
2605 .iter()
2606 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2607 .collect()
2608 }
2609
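    /// Groups the updated worktree entries by the repository whose work directory
    /// contains them; every path is attributed to its innermost containing repository
    /// (see the worked example in the comments below).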
2610 fn process_updated_entries(
2611 &self,
2612 worktree: &Entity<Worktree>,
2613 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2614 cx: &mut App,
2615 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2616 let path_style = worktree.read(cx).path_style();
2617 let mut repo_paths = self
2618 .repositories
2619 .values()
2620 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2621 .collect::<Vec<_>>();
2622 let mut entries: Vec<_> = updated_entries
2623 .iter()
2624 .map(|(path, _, _)| path.clone())
2625 .collect();
2626 entries.sort();
2627 let worktree = worktree.read(cx);
2628
2629 let entries = entries
2630 .into_iter()
2631 .map(|path| worktree.absolutize(&path))
2632 .collect::<Arc<[_]>>();
2633
2634 let executor = cx.background_executor().clone();
2635 cx.background_executor().spawn(async move {
2636 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2637 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2638 let mut tasks = FuturesOrdered::new();
2639 for (repo_path, repo) in repo_paths.into_iter().rev() {
2640 let entries = entries.clone();
2641 let task = executor.spawn(async move {
                    // Find all updated paths that fall within this repository's work directory.
2643 let mut ix = entries.partition_point(|path| path < &*repo_path);
2644 if ix == entries.len() {
2645 return None;
2646 };
2647
2648 let mut paths = Vec::new();
                    // All paths under a given repository form a contiguous range in the sorted list.
2650 while let Some(path) = entries.get(ix)
2651 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2652 &repo_path, path, path_style,
2653 )
2654 {
2655 paths.push((repo_path, ix));
2656 ix += 1;
2657 }
2658 if paths.is_empty() {
2659 None
2660 } else {
2661 Some((repo, paths))
2662 }
2663 });
2664 tasks.push_back(task);
2665 }
2666
            // Filter out the "duplicate" entries that were matched by more than one repository.
2668 let mut path_was_used = vec![false; entries.len()];
2669 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were spawned over the repositories in reverse order (deepest work
            // directory first), so more-specific repositories claim their paths before their
            // ancestors do: each path is always assigned to its innermost repository.
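            // For example (hypothetical layout): with repositories rooted at `/work/repo`
            // and `/work/repo/vendored`, an updated path `/work/repo/vendored/lib.rs` is
            // matched by both work directories; the inner repository's task appears first
            // here, marks the entry as used, and the outer repository's duplicate is skipped.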
2672 for t in tasks {
2673 let Some((repo, paths)) = t else {
2674 continue;
2675 };
2676 let entry = paths_by_git_repo.entry(repo).or_default();
2677 for (repo_path, ix) in paths {
2678 if path_was_used[ix] {
2679 continue;
2680 }
2681 path_was_used[ix] = true;
2682 entry.push(repo_path);
2683 }
2684 }
2685
2686 paths_by_git_repo
2687 })
2688 }
2689}
2690
2691impl BufferGitState {
2692 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2693 Self {
2694 unstaged_diff: Default::default(),
2695 uncommitted_diff: Default::default(),
2696 recalculate_diff_task: Default::default(),
2697 language: Default::default(),
2698 language_registry: Default::default(),
2699 recalculating_tx: postage::watch::channel_with(false).0,
2700 hunk_staging_operation_count: 0,
2701 hunk_staging_operation_count_as_of_write: 0,
2702 head_text: Default::default(),
2703 index_text: Default::default(),
2704 head_changed: Default::default(),
2705 index_changed: Default::default(),
2706 language_changed: Default::default(),
2707 conflict_updated_futures: Default::default(),
2708 conflict_set: Default::default(),
2709 reparse_conflict_markers_task: Default::default(),
2710 }
2711 }
2712
2713 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2714 self.language = buffer.read(cx).language().cloned();
2715 self.language_changed = true;
2716 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2717 }
2718
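    /// Re-scans the buffer for conflict markers and applies the new snapshot to the
    /// associated `ConflictSet`, returning a receiver that fires once the update has
    /// been applied. If there is no live conflict set, or no conflicts were previously
    /// detected, the sender is dropped and the receiver resolves as canceled.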
2719 fn reparse_conflict_markers(
2720 &mut self,
2721 buffer: text::BufferSnapshot,
2722 cx: &mut Context<Self>,
2723 ) -> oneshot::Receiver<()> {
2724 let (tx, rx) = oneshot::channel();
2725
2726 let Some(conflict_set) = self
2727 .conflict_set
2728 .as_ref()
2729 .and_then(|conflict_set| conflict_set.upgrade())
2730 else {
2731 return rx;
2732 };
2733
2734 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2735 if conflict_set.has_conflict {
2736 Some(conflict_set.snapshot())
2737 } else {
2738 None
2739 }
2740 });
2741
2742 if let Some(old_snapshot) = old_snapshot {
2743 self.conflict_updated_futures.push(tx);
2744 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2745 let (snapshot, changed_range) = cx
2746 .background_spawn(async move {
2747 let new_snapshot = ConflictSet::parse(&buffer);
2748 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2749 (new_snapshot, changed_range)
2750 })
2751 .await;
2752 this.update(cx, |this, cx| {
2753 if let Some(conflict_set) = &this.conflict_set {
2754 conflict_set
2755 .update(cx, |conflict_set, cx| {
2756 conflict_set.set_snapshot(snapshot, changed_range, cx);
2757 })
2758 .ok();
2759 }
2760 let futures = std::mem::take(&mut this.conflict_updated_futures);
2761 for tx in futures {
2762 tx.send(()).ok();
2763 }
2764 })
2765 }))
2766 }
2767
2768 rx
2769 }
2770
2771 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2772 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2773 }
2774
2775 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2776 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2777 }
2778
2779 fn handle_base_texts_updated(
2780 &mut self,
2781 buffer: text::BufferSnapshot,
2782 message: proto::UpdateDiffBases,
2783 cx: &mut Context<Self>,
2784 ) {
2785 use proto::update_diff_bases::Mode;
2786
2787 let Some(mode) = Mode::from_i32(message.mode) else {
2788 return;
2789 };
2790
2791 let diff_bases_change = match mode {
2792 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2793 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2794 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2795 Mode::IndexAndHead => DiffBasesChange::SetEach {
2796 index: message.staged_text,
2797 head: message.committed_text,
2798 },
2799 };
2800
2801 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2802 }
2803
2804 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2805 if *self.recalculating_tx.borrow() {
2806 let mut rx = self.recalculating_tx.subscribe();
2807 Some(async move {
2808 loop {
2809 let is_recalculating = rx.recv().await;
2810 if is_recalculating != Some(true) {
2811 break;
2812 }
2813 }
2814 })
2815 } else {
2816 None
2817 }
2818 }
2819
2820 fn diff_bases_changed(
2821 &mut self,
2822 buffer: text::BufferSnapshot,
2823 diff_bases_change: Option<DiffBasesChange>,
2824 cx: &mut Context<Self>,
2825 ) {
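        // Normalize line endings on any new base text and record which bases changed;
        // the actual diff recomputation happens in `recalculate_diffs` below.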
2826 match diff_bases_change {
2827 Some(DiffBasesChange::SetIndex(index)) => {
2828 self.index_text = index.map(|mut index| {
2829 text::LineEnding::normalize(&mut index);
2830 Arc::new(index)
2831 });
2832 self.index_changed = true;
2833 }
2834 Some(DiffBasesChange::SetHead(head)) => {
2835 self.head_text = head.map(|mut head| {
2836 text::LineEnding::normalize(&mut head);
2837 Arc::new(head)
2838 });
2839 self.head_changed = true;
2840 }
2841 Some(DiffBasesChange::SetBoth(text)) => {
2842 let text = text.map(|mut text| {
2843 text::LineEnding::normalize(&mut text);
2844 Arc::new(text)
2845 });
2846 self.head_text = text.clone();
2847 self.index_text = text;
2848 self.head_changed = true;
2849 self.index_changed = true;
2850 }
2851 Some(DiffBasesChange::SetEach { index, head }) => {
2852 self.index_text = index.map(|mut index| {
2853 text::LineEnding::normalize(&mut index);
2854 Arc::new(index)
2855 });
2856 self.index_changed = true;
2857 self.head_text = head.map(|mut head| {
2858 text::LineEnding::normalize(&mut head);
2859 Arc::new(head)
2860 });
2861 self.head_changed = true;
2862 }
2863 None => {}
2864 }
2865
2866 self.recalculate_diffs(buffer, cx)
2867 }
2868
2869 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2870 *self.recalculating_tx.borrow_mut() = true;
2871
2872 let language = self.language.clone();
2873 let language_registry = self.language_registry.clone();
2874 let unstaged_diff = self.unstaged_diff();
2875 let uncommitted_diff = self.uncommitted_diff();
2876 let head = self.head_text.clone();
2877 let index = self.index_text.clone();
2878 let index_changed = self.index_changed;
2879 let head_changed = self.head_changed;
2880 let language_changed = self.language_changed;
2881 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2882 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2883 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2884 (None, None) => true,
2885 _ => false,
2886 };
2887 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2888 log::debug!(
2889 "start recalculating diffs for buffer {}",
2890 buffer.remote_id()
2891 );
2892
2893 let mut new_unstaged_diff = None;
2894 if let Some(unstaged_diff) = &unstaged_diff {
2895 new_unstaged_diff = Some(
2896 BufferDiff::update_diff(
2897 unstaged_diff.clone(),
2898 buffer.clone(),
2899 index,
2900 index_changed,
2901 language_changed,
2902 language.clone(),
2903 language_registry.clone(),
2904 cx,
2905 )
2906 .await?,
2907 );
2908 }
2909
2910 let mut new_uncommitted_diff = None;
2911 if let Some(uncommitted_diff) = &uncommitted_diff {
2912 new_uncommitted_diff = if index_matches_head {
2913 new_unstaged_diff.clone()
2914 } else {
2915 Some(
2916 BufferDiff::update_diff(
2917 uncommitted_diff.clone(),
2918 buffer.clone(),
2919 head,
2920 head_changed,
2921 language_changed,
2922 language.clone(),
2923 language_registry.clone(),
2924 cx,
2925 )
2926 .await?,
2927 )
2928 }
2929 }
2930
2931 let cancel = this.update(cx, |this, _| {
2932 // This checks whether all pending stage/unstage operations
2933 // have quiesced (i.e. both the corresponding write and the
2934 // read of that write have completed). If not, then we cancel
2935 // this recalculation attempt to avoid invalidating pending
2936 // state too quickly; another recalculation will come along
2937 // later and clear the pending state once the state of the index has settled.
2938 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
2939 *this.recalculating_tx.borrow_mut() = false;
2940 true
2941 } else {
2942 false
2943 }
2944 })?;
2945 if cancel {
2946 log::debug!(
                    concat!(
                        "aborting recalculating diffs for buffer {} ",
                        "due to subsequent hunk operations",
                    ),
2951 buffer.remote_id()
2952 );
2953 return Ok(());
2954 }
2955
2956 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
2957 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
2958 {
2959 unstaged_diff.update(cx, |diff, cx| {
2960 if language_changed {
2961 diff.language_changed(cx);
2962 }
2963 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
2964 })?
2965 } else {
2966 None
2967 };
2968
2969 if let Some((uncommitted_diff, new_uncommitted_diff)) =
2970 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
2971 {
2972 uncommitted_diff.update(cx, |diff, cx| {
2973 if language_changed {
2974 diff.language_changed(cx);
2975 }
2976 diff.set_snapshot_with_secondary(
2977 new_uncommitted_diff,
2978 &buffer,
2979 unstaged_changed_range,
2980 true,
2981 cx,
2982 );
2983 })?;
2984 }
2985
2986 log::debug!(
2987 "finished recalculating diffs for buffer {}",
2988 buffer.remote_id()
2989 );
2990
2991 if let Some(this) = this.upgrade() {
2992 this.update(cx, |this, _| {
2993 this.index_changed = false;
2994 this.head_changed = false;
2995 this.language_changed = false;
2996 *this.recalculating_tx.borrow_mut() = false;
2997 })?;
2998 }
2999
3000 Ok(())
3001 }));
3002 }
3003}
3004
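/// Builds an askpass delegate that forwards credential prompts from a local git
/// operation to the downstream client for `project_id`, and relays the client's
/// response back to the waiting operation.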
3005fn make_remote_delegate(
3006 this: Entity<GitStore>,
3007 project_id: u64,
3008 repository_id: RepositoryId,
3009 askpass_id: u64,
3010 cx: &mut AsyncApp,
3011) -> AskPassDelegate {
3012 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3013 this.update(cx, |this, cx| {
3014 let Some((client, _)) = this.downstream_client() else {
3015 return;
3016 };
3017 let response = client.request(proto::AskPassRequest {
3018 project_id,
3019 repository_id: repository_id.to_proto(),
3020 askpass_id,
3021 prompt,
3022 });
3023 cx.spawn(async move |_, _| {
3024 let mut response = response.await?.response;
3025 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3026 .ok();
3027 response.zeroize();
3028 anyhow::Ok(())
3029 })
3030 .detach_and_log_err(cx);
3031 })
3032 .log_err();
3033 })
3034}
3035
3036impl RepositoryId {
3037 pub fn to_proto(self) -> u64 {
3038 self.0
3039 }
3040
3041 pub fn from_proto(id: u64) -> Self {
3042 RepositoryId(id)
3043 }
3044}
3045
3046impl RepositorySnapshot {
3047 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3048 Self {
3049 id,
3050 statuses_by_path: Default::default(),
3051 pending_ops_by_path: Default::default(),
3052 work_directory_abs_path,
3053 branch: None,
3054 head_commit: None,
3055 scan_id: 0,
3056 merge: Default::default(),
3057 remote_origin_url: None,
3058 remote_upstream_url: None,
3059 stash_entries: Default::default(),
3060 path_style,
3061 }
3062 }
3063
3064 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3065 proto::UpdateRepository {
3066 branch_summary: self.branch.as_ref().map(branch_to_proto),
3067 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3068 updated_statuses: self
3069 .statuses_by_path
3070 .iter()
3071 .map(|entry| entry.to_proto())
3072 .collect(),
3073 removed_statuses: Default::default(),
3074 current_merge_conflicts: self
3075 .merge
3076 .conflicted_paths
3077 .iter()
3078 .map(|repo_path| repo_path.to_proto())
3079 .collect(),
3080 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3081 project_id,
3082 id: self.id.to_proto(),
3083 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3084 entry_ids: vec![self.id.to_proto()],
3085 scan_id: self.scan_id,
3086 is_last_update: true,
3087 stash_entries: self
3088 .stash_entries
3089 .entries
3090 .iter()
3091 .map(stash_to_proto)
3092 .collect(),
3093 }
3094 }
3095
3096 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3097 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3098 let mut removed_statuses: Vec<String> = Vec::new();
3099
3100 let mut new_statuses = self.statuses_by_path.iter().peekable();
3101 let mut old_statuses = old.statuses_by_path.iter().peekable();
3102
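        // Walk both sorted status lists in lockstep (a merge-join): entries only in
        // `self` become updates, entries only in `old` become removals, and entries
        // present in both are re-sent only when their status changed.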
3103 let mut current_new_entry = new_statuses.next();
3104 let mut current_old_entry = old_statuses.next();
3105 loop {
3106 match (current_new_entry, current_old_entry) {
3107 (Some(new_entry), Some(old_entry)) => {
3108 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3109 Ordering::Less => {
3110 updated_statuses.push(new_entry.to_proto());
3111 current_new_entry = new_statuses.next();
3112 }
3113 Ordering::Equal => {
3114 if new_entry.status != old_entry.status {
3115 updated_statuses.push(new_entry.to_proto());
3116 }
3117 current_old_entry = old_statuses.next();
3118 current_new_entry = new_statuses.next();
3119 }
3120 Ordering::Greater => {
3121 removed_statuses.push(old_entry.repo_path.to_proto());
3122 current_old_entry = old_statuses.next();
3123 }
3124 }
3125 }
3126 (None, Some(old_entry)) => {
3127 removed_statuses.push(old_entry.repo_path.to_proto());
3128 current_old_entry = old_statuses.next();
3129 }
3130 (Some(new_entry), None) => {
3131 updated_statuses.push(new_entry.to_proto());
3132 current_new_entry = new_statuses.next();
3133 }
3134 (None, None) => break,
3135 }
3136 }
3137
3138 proto::UpdateRepository {
3139 branch_summary: self.branch.as_ref().map(branch_to_proto),
3140 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3141 updated_statuses,
3142 removed_statuses,
3143 current_merge_conflicts: self
3144 .merge
3145 .conflicted_paths
3146 .iter()
3147 .map(|path| path.to_proto())
3148 .collect(),
3149 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3150 project_id,
3151 id: self.id.to_proto(),
3152 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3153 entry_ids: vec![],
3154 scan_id: self.scan_id,
3155 is_last_update: true,
3156 stash_entries: self
3157 .stash_entries
3158 .entries
3159 .iter()
3160 .map(stash_to_proto)
3161 .collect(),
3162 }
3163 }
3164
3165 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3166 self.statuses_by_path.iter().cloned()
3167 }
3168
3169 pub fn status_summary(&self) -> GitSummary {
3170 self.statuses_by_path.summary().item_summary
3171 }
3172
3173 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3174 self.statuses_by_path
3175 .get(&PathKey(path.as_ref().clone()), ())
3176 .cloned()
3177 }
3178
3179 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3180 self.pending_ops_by_path
3181 .get(&PathKey(path.as_ref().clone()), ())
3182 .cloned()
3183 }
3184
3185 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3186 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3187 }
3188
3189 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3190 self.path_style
3191 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3192 .unwrap()
3193 .into()
3194 }
3195
3196 #[inline]
3197 fn abs_path_to_repo_path_inner(
3198 work_directory_abs_path: &Path,
3199 abs_path: &Path,
3200 path_style: PathStyle,
3201 ) -> Option<RepoPath> {
3202 abs_path
3203 .strip_prefix(&work_directory_abs_path)
3204 .ok()
3205 .and_then(|path| RepoPath::from_std_path(path, path_style).ok())
3206 }
3207
3208 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3209 self.merge.conflicted_paths.contains(repo_path)
3210 }
3211
3212 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3213 let had_conflict_on_last_merge_head_change =
3214 self.merge.conflicted_paths.contains(repo_path);
3215 let has_conflict_currently = self
3216 .status_for_path(repo_path)
3217 .is_some_and(|entry| entry.status.is_conflicted());
3218 had_conflict_on_last_merge_head_change || has_conflict_currently
3219 }
3220
3221 /// This is the name that will be displayed in the repository selector for this repository.
3222 pub fn display_name(&self) -> SharedString {
3223 self.work_directory_abs_path
3224 .file_name()
3225 .unwrap_or_default()
3226 .to_string_lossy()
3227 .to_string()
3228 .into()
3229 }
3230}
3231
3232pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3233 proto::StashEntry {
3234 oid: entry.oid.as_bytes().to_vec(),
3235 message: entry.message.clone(),
3236 branch: entry.branch.clone(),
3237 index: entry.index as u64,
3238 timestamp: entry.timestamp,
3239 }
3240}
3241
3242pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3243 Ok(StashEntry {
3244 oid: Oid::from_bytes(&entry.oid)?,
3245 message: entry.message.clone(),
3246 index: entry.index as usize,
3247 branch: entry.branch.clone(),
3248 timestamp: entry.timestamp,
3249 })
3250}
3251
3252impl MergeDetails {
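    /// Reads the merge state (message, in-progress heads, and conflicted paths) from the
    /// repository backend, reusing `prev_snapshot`'s conflicted paths when the recorded
    /// merge heads are unchanged. Returns the details together with a flag indicating
    /// whether the recorded merge heads changed.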
3253 async fn load(
3254 backend: &Arc<dyn GitRepository>,
3255 status: &SumTree<StatusEntry>,
3256 prev_snapshot: &RepositorySnapshot,
3257 ) -> Result<(MergeDetails, bool)> {
3258 log::debug!("load merge details");
3259 let message = backend.merge_message().await;
3260 let heads = backend
3261 .revparse_batch(vec![
3262 "MERGE_HEAD".into(),
3263 "CHERRY_PICK_HEAD".into(),
3264 "REBASE_HEAD".into(),
3265 "REVERT_HEAD".into(),
3266 "APPLY_HEAD".into(),
3267 ])
3268 .await
3269 .log_err()
3270 .unwrap_or_default()
3271 .into_iter()
3272 .map(|opt| opt.map(SharedString::from))
3273 .collect::<Vec<_>>();
3274 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3275 let conflicted_paths = if merge_heads_changed {
3276 let current_conflicted_paths = TreeSet::from_ordered_entries(
3277 status
3278 .iter()
3279 .filter(|entry| entry.status.is_conflicted())
3280 .map(|entry| entry.repo_path.clone()),
3281 );
3282
            // A scan can run while a lengthy merge is in progress, after the merge heads
            // appear but before the resulting conflicts are reported by `git status`.
            // Since we currently track the merge heads state only for conflict detection,
            // don't update it until some conflicts actually show up.
3288 if heads.iter().any(Option::is_some)
3289 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3290 && current_conflicted_paths.is_empty()
3291 {
3292 log::debug!("not updating merge heads because no conflicts found");
3293 return Ok((
3294 MergeDetails {
3295 message: message.map(SharedString::from),
3296 ..prev_snapshot.merge.clone()
3297 },
3298 false,
3299 ));
3300 }
3301
3302 current_conflicted_paths
3303 } else {
3304 prev_snapshot.merge.conflicted_paths.clone()
3305 };
3306 let details = MergeDetails {
3307 conflicted_paths,
3308 message: message.map(SharedString::from),
3309 heads,
3310 };
3311 Ok((details, merge_heads_changed))
3312 }
3313}
3314
3315impl Repository {
3316 pub fn snapshot(&self) -> RepositorySnapshot {
3317 self.snapshot.clone()
3318 }
3319
3320 fn local(
3321 id: RepositoryId,
3322 work_directory_abs_path: Arc<Path>,
3323 dot_git_abs_path: Arc<Path>,
3324 repository_dir_abs_path: Arc<Path>,
3325 common_dir_abs_path: Arc<Path>,
3326 project_environment: WeakEntity<ProjectEnvironment>,
3327 fs: Arc<dyn Fs>,
3328 git_store: WeakEntity<GitStore>,
3329 cx: &mut Context<Self>,
3330 ) -> Self {
3331 let snapshot =
3332 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3333 Repository {
3334 this: cx.weak_entity(),
3335 git_store,
3336 snapshot,
3337 commit_message_buffer: None,
3338 askpass_delegates: Default::default(),
3339 paths_needing_status_update: Default::default(),
3340 latest_askpass_id: 0,
3341 job_sender: Repository::spawn_local_git_worker(
3342 work_directory_abs_path,
3343 dot_git_abs_path,
3344 repository_dir_abs_path,
3345 common_dir_abs_path,
3346 project_environment,
3347 fs,
3348 cx,
3349 ),
3350 job_id: 0,
3351 active_jobs: Default::default(),
3352 }
3353 }
3354
3355 fn remote(
3356 id: RepositoryId,
3357 work_directory_abs_path: Arc<Path>,
3358 path_style: PathStyle,
3359 project_id: ProjectId,
3360 client: AnyProtoClient,
3361 git_store: WeakEntity<GitStore>,
3362 cx: &mut Context<Self>,
3363 ) -> Self {
3364 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3365 Self {
3366 this: cx.weak_entity(),
3367 snapshot,
3368 commit_message_buffer: None,
3369 git_store,
3370 paths_needing_status_update: Default::default(),
3371 job_sender: Self::spawn_remote_git_worker(project_id, client, cx),
3372 askpass_delegates: Default::default(),
3373 latest_askpass_id: 0,
3374 active_jobs: Default::default(),
3375 job_id: 0,
3376 }
3377 }
3378
3379 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3380 self.git_store.upgrade()
3381 }
3382
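    /// Re-reads the index and HEAD base texts for every open buffer belonging to this
    /// repository, forwards any changes to the corresponding diff state, and notifies
    /// the downstream client when one is connected.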
3383 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3384 let this = cx.weak_entity();
3385 let git_store = self.git_store.clone();
3386 let _ = self.send_keyed_job(
3387 Some(GitJobKey::ReloadBufferDiffBases),
3388 None,
3389 |state, mut cx| async move {
3390 let RepositoryState::Local { backend, .. } = state else {
3391 log::error!("tried to recompute diffs for a non-local repository");
3392 return Ok(());
3393 };
3394
3395 let Some(this) = this.upgrade() else {
3396 return Ok(());
3397 };
3398
3399 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3400 git_store.update(cx, |git_store, cx| {
3401 git_store
3402 .diffs
3403 .iter()
3404 .filter_map(|(buffer_id, diff_state)| {
3405 let buffer_store = git_store.buffer_store.read(cx);
3406 let buffer = buffer_store.get(*buffer_id)?;
3407 let file = File::from_dyn(buffer.read(cx).file())?;
3408 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3409 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3410 log::debug!(
3411 "start reload diff bases for repo path {}",
3412 repo_path.as_unix_str()
3413 );
3414 diff_state.update(cx, |diff_state, _| {
3415 let has_unstaged_diff = diff_state
3416 .unstaged_diff
3417 .as_ref()
3418 .is_some_and(|diff| diff.is_upgradable());
3419 let has_uncommitted_diff = diff_state
3420 .uncommitted_diff
3421 .as_ref()
3422 .is_some_and(|set| set.is_upgradable());
3423
3424 Some((
3425 buffer,
3426 repo_path,
3427 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3428 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3429 ))
3430 })
3431 })
3432 .collect::<Vec<_>>()
3433 })
3434 })??;
3435
3436 let buffer_diff_base_changes = cx
3437 .background_spawn(async move {
3438 let mut changes = Vec::new();
3439 for (buffer, repo_path, current_index_text, current_head_text) in
3440 &repo_diff_state_updates
3441 {
3442 let index_text = if current_index_text.is_some() {
3443 backend.load_index_text(repo_path.clone()).await
3444 } else {
3445 None
3446 };
3447 let head_text = if current_head_text.is_some() {
3448 backend.load_committed_text(repo_path.clone()).await
3449 } else {
3450 None
3451 };
3452
3453 let change =
3454 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3455 (Some(current_index), Some(current_head)) => {
3456 let index_changed =
3457 index_text.as_ref() != current_index.as_deref();
3458 let head_changed =
3459 head_text.as_ref() != current_head.as_deref();
3460 if index_changed && head_changed {
3461 if index_text == head_text {
3462 Some(DiffBasesChange::SetBoth(head_text))
3463 } else {
3464 Some(DiffBasesChange::SetEach {
3465 index: index_text,
3466 head: head_text,
3467 })
3468 }
3469 } else if index_changed {
3470 Some(DiffBasesChange::SetIndex(index_text))
3471 } else if head_changed {
3472 Some(DiffBasesChange::SetHead(head_text))
3473 } else {
3474 None
3475 }
3476 }
3477 (Some(current_index), None) => {
3478 let index_changed =
3479 index_text.as_ref() != current_index.as_deref();
3480 index_changed
3481 .then_some(DiffBasesChange::SetIndex(index_text))
3482 }
3483 (None, Some(current_head)) => {
3484 let head_changed =
3485 head_text.as_ref() != current_head.as_deref();
3486 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3487 }
3488 (None, None) => None,
3489 };
3490
3491 changes.push((buffer.clone(), change))
3492 }
3493 changes
3494 })
3495 .await;
3496
3497 git_store.update(&mut cx, |git_store, cx| {
3498 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3499 let buffer_snapshot = buffer.read(cx).text_snapshot();
3500 let buffer_id = buffer_snapshot.remote_id();
3501 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3502 continue;
3503 };
3504
3505 let downstream_client = git_store.downstream_client();
3506 diff_state.update(cx, |diff_state, cx| {
3507 use proto::update_diff_bases::Mode;
3508
3509 if let Some((diff_bases_change, (client, project_id))) =
3510 diff_bases_change.clone().zip(downstream_client)
3511 {
3512 let (staged_text, committed_text, mode) = match diff_bases_change {
3513 DiffBasesChange::SetIndex(index) => {
3514 (index, None, Mode::IndexOnly)
3515 }
3516 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3517 DiffBasesChange::SetEach { index, head } => {
3518 (index, head, Mode::IndexAndHead)
3519 }
3520 DiffBasesChange::SetBoth(text) => {
3521 (None, text, Mode::IndexMatchesHead)
3522 }
3523 };
3524 client
3525 .send(proto::UpdateDiffBases {
3526 project_id: project_id.to_proto(),
3527 buffer_id: buffer_id.to_proto(),
3528 staged_text,
3529 committed_text,
3530 mode: mode as i32,
3531 })
3532 .log_err();
3533 }
3534
3535 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3536 });
3537 }
3538 })
3539 },
3540 );
3541 }
3542
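    /// Enqueues a job on this repository's background git worker and returns a receiver
    /// for its result. When `status` is provided, the job is shown in `active_jobs`
    /// (with that message) while it runs.
    ///
    /// A minimal sketch of a caller (hypothetical job body; the real jobs live in the
    /// methods below):
    ///
    /// ```ignore
    /// let rx = repository.send_job(Some("git show".into()), |state, _cx| async move {
    ///     match state {
    ///         RepositoryState::Local { backend, .. } => backend.show("HEAD".to_string()).await,
    ///         RepositoryState::Remote { .. } => anyhow::bail!("handled elsewhere"),
    ///     }
    /// });
    /// ```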
3543 pub fn send_job<F, Fut, R>(
3544 &mut self,
3545 status: Option<SharedString>,
3546 job: F,
3547 ) -> oneshot::Receiver<R>
3548 where
3549 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3550 Fut: Future<Output = R> + 'static,
3551 R: Send + 'static,
3552 {
3553 self.send_keyed_job(None, status, job)
3554 }
3555
3556 fn send_keyed_job<F, Fut, R>(
3557 &mut self,
3558 key: Option<GitJobKey>,
3559 status: Option<SharedString>,
3560 job: F,
3561 ) -> oneshot::Receiver<R>
3562 where
3563 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3564 Fut: Future<Output = R> + 'static,
3565 R: Send + 'static,
3566 {
3567 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3568 let job_id = post_inc(&mut self.job_id);
3569 let this = self.this.clone();
3570 self.job_sender
3571 .unbounded_send(GitJob {
3572 key,
3573 job: Box::new(move |state, cx: &mut AsyncApp| {
3574 let job = job(state, cx.clone());
3575 cx.spawn(async move |cx| {
3576 if let Some(s) = status.clone() {
3577 this.update(cx, |this, cx| {
3578 this.active_jobs.insert(
3579 job_id,
3580 JobInfo {
3581 start: Instant::now(),
3582 message: s.clone(),
3583 },
3584 );
3585
3586 cx.notify();
3587 })
3588 .ok();
3589 }
3590 let result = job.await;
3591
3592 this.update(cx, |this, cx| {
3593 this.active_jobs.remove(&job_id);
3594 cx.notify();
3595 })
3596 .ok();
3597
3598 result_tx.send(result).ok();
3599 })
3600 }),
3601 })
3602 .ok();
3603 result_rx
3604 }
3605
3606 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3607 let Some(git_store) = self.git_store.upgrade() else {
3608 return;
3609 };
3610 let entity = cx.entity();
3611 git_store.update(cx, |git_store, cx| {
3612 let Some((&id, _)) = git_store
3613 .repositories
3614 .iter()
3615 .find(|(_, handle)| *handle == &entity)
3616 else {
3617 return;
3618 };
3619 git_store.active_repo_id = Some(id);
3620 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3621 });
3622 }
3623
3624 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3625 self.snapshot.status()
3626 }
3627
3628 pub fn cached_stash(&self) -> GitStash {
3629 self.snapshot.stash_entries.clone()
3630 }
3631
3632 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3633 let git_store = self.git_store.upgrade()?;
3634 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3635 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3636 let abs_path = SanitizedPath::new(&abs_path);
3637 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3638 Some(ProjectPath {
3639 worktree_id: worktree.read(cx).id(),
3640 path: relative_path,
3641 })
3642 }
3643
3644 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3645 let git_store = self.git_store.upgrade()?;
3646 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3647 let abs_path = worktree_store.absolutize(path, cx)?;
3648 self.snapshot.abs_path_to_repo_path(&abs_path)
3649 }
3650
3651 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3652 other
3653 .read(cx)
3654 .snapshot
3655 .work_directory_abs_path
3656 .starts_with(&self.snapshot.work_directory_abs_path)
3657 }
3658
3659 pub fn open_commit_buffer(
3660 &mut self,
3661 languages: Option<Arc<LanguageRegistry>>,
3662 buffer_store: Entity<BufferStore>,
3663 cx: &mut Context<Self>,
3664 ) -> Task<Result<Entity<Buffer>>> {
3665 let id = self.id;
3666 if let Some(buffer) = self.commit_message_buffer.clone() {
3667 return Task::ready(Ok(buffer));
3668 }
3669 let this = cx.weak_entity();
3670
3671 let rx = self.send_job(None, move |state, mut cx| async move {
3672 let Some(this) = this.upgrade() else {
3673 bail!("git store was dropped");
3674 };
3675 match state {
3676 RepositoryState::Local { .. } => {
3677 this.update(&mut cx, |_, cx| {
3678 Self::open_local_commit_buffer(languages, buffer_store, cx)
3679 })?
3680 .await
3681 }
3682 RepositoryState::Remote { project_id, client } => {
3683 let request = client.request(proto::OpenCommitMessageBuffer {
3684 project_id: project_id.0,
3685 repository_id: id.to_proto(),
3686 });
3687 let response = request.await.context("requesting to open commit buffer")?;
3688 let buffer_id = BufferId::new(response.buffer_id)?;
3689 let buffer = buffer_store
3690 .update(&mut cx, |buffer_store, cx| {
3691 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3692 })?
3693 .await?;
3694 if let Some(language_registry) = languages {
3695 let git_commit_language =
3696 language_registry.language_for_name("Git Commit").await?;
3697 buffer.update(&mut cx, |buffer, cx| {
3698 buffer.set_language(Some(git_commit_language), cx);
3699 })?;
3700 }
3701 this.update(&mut cx, |this, _| {
3702 this.commit_message_buffer = Some(buffer.clone());
3703 })?;
3704 Ok(buffer)
3705 }
3706 }
3707 });
3708
3709 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3710 }
3711
3712 fn open_local_commit_buffer(
3713 language_registry: Option<Arc<LanguageRegistry>>,
3714 buffer_store: Entity<BufferStore>,
3715 cx: &mut Context<Self>,
3716 ) -> Task<Result<Entity<Buffer>>> {
3717 cx.spawn(async move |repository, cx| {
3718 let buffer = buffer_store
3719 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3720 .await?;
3721
3722 if let Some(language_registry) = language_registry {
3723 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3724 buffer.update(cx, |buffer, cx| {
3725 buffer.set_language(Some(git_commit_language), cx);
3726 })?;
3727 }
3728
3729 repository.update(cx, |repository, _| {
3730 repository.commit_message_buffer = Some(buffer.clone());
3731 })?;
3732 Ok(buffer)
3733 })
3734 }
3735
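    /// Restores the given paths to their contents at `commit`, tracking them as
    /// pending reverts while the job runs.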
3736 pub fn checkout_files(
3737 &mut self,
3738 commit: &str,
3739 paths: Vec<RepoPath>,
3740 cx: &mut Context<Self>,
3741 ) -> Task<Result<()>> {
3742 let commit = commit.to_string();
3743 let id = self.id;
3744
3745 self.spawn_job_with_tracking(
3746 paths.clone(),
3747 pending_op::GitStatus::Reverted,
3748 cx,
3749 async move |this, cx| {
3750 this.update(cx, |this, _cx| {
3751 this.send_job(
3752 Some(format!("git checkout {}", commit).into()),
3753 move |git_repo, _| async move {
3754 match git_repo {
3755 RepositoryState::Local {
3756 backend,
3757 environment,
3758 ..
3759 } => {
3760 backend
3761 .checkout_files(commit, paths, environment.clone())
3762 .await
3763 }
3764 RepositoryState::Remote { project_id, client } => {
3765 client
3766 .request(proto::GitCheckoutFiles {
3767 project_id: project_id.0,
3768 repository_id: id.to_proto(),
3769 commit,
3770 paths: paths
3771 .into_iter()
3772 .map(|p| p.to_proto())
3773 .collect(),
3774 })
3775 .await?;
3776
3777 Ok(())
3778 }
3779 }
3780 },
3781 )
3782 })?
3783 .await?
3784 },
3785 )
3786 }
3787
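    /// Resets `HEAD` to the given commit using the provided reset mode.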
3788 pub fn reset(
3789 &mut self,
3790 commit: String,
3791 reset_mode: ResetMode,
3792 _cx: &mut App,
3793 ) -> oneshot::Receiver<Result<()>> {
3794 let id = self.id;
3795
3796 self.send_job(None, move |git_repo, _| async move {
3797 match git_repo {
3798 RepositoryState::Local {
3799 backend,
3800 environment,
3801 ..
3802 } => backend.reset(commit, reset_mode, environment).await,
3803 RepositoryState::Remote { project_id, client } => {
3804 client
3805 .request(proto::GitReset {
3806 project_id: project_id.0,
3807 repository_id: id.to_proto(),
3808 commit,
3809 mode: match reset_mode {
3810 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3811 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3812 },
3813 })
3814 .await?;
3815
3816 Ok(())
3817 }
3818 }
3819 })
3820 }
3821
3822 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3823 let id = self.id;
3824 self.send_job(None, move |git_repo, _cx| async move {
3825 match git_repo {
3826 RepositoryState::Local { backend, .. } => backend.show(commit).await,
3827 RepositoryState::Remote { project_id, client } => {
3828 let resp = client
3829 .request(proto::GitShow {
3830 project_id: project_id.0,
3831 repository_id: id.to_proto(),
3832 commit,
3833 })
3834 .await?;
3835
3836 Ok(CommitDetails {
3837 sha: resp.sha.into(),
3838 message: resp.message.into(),
3839 commit_timestamp: resp.commit_timestamp,
3840 author_email: resp.author_email.into(),
3841 author_name: resp.author_name.into(),
3842 })
3843 }
3844 }
3845 })
3846 }
3847
3848 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3849 let id = self.id;
3850 self.send_job(None, move |git_repo, cx| async move {
3851 match git_repo {
3852 RepositoryState::Local { backend, .. } => backend.load_commit(commit, cx).await,
3853 RepositoryState::Remote {
3854 client, project_id, ..
3855 } => {
3856 let response = client
3857 .request(proto::LoadCommitDiff {
3858 project_id: project_id.0,
3859 repository_id: id.to_proto(),
3860 commit,
3861 })
3862 .await?;
3863 Ok(CommitDiff {
3864 files: response
3865 .files
3866 .into_iter()
3867 .map(|file| {
3868 Ok(CommitFile {
3869 path: RepoPath::from_proto(&file.path)?,
3870 old_text: file.old_text,
3871 new_text: file.new_text,
3872 })
3873 })
3874 .collect::<Result<Vec<_>>>()?,
3875 })
3876 }
3877 }
3878 })
3879 }
3880
3881 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3882 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3883 }
3884
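    /// Collects save tasks for any open buffers with unsaved edits that correspond
    /// to the given repository paths and still exist on disk.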
3885 fn save_buffers<'a>(
3886 &self,
3887 entries: impl IntoIterator<Item = &'a RepoPath>,
3888 cx: &mut Context<Self>,
3889 ) -> Vec<Task<anyhow::Result<()>>> {
3890 let mut save_futures = Vec::new();
3891 if let Some(buffer_store) = self.buffer_store(cx) {
3892 buffer_store.update(cx, |buffer_store, cx| {
3893 for path in entries {
3894 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
3895 continue;
3896 };
3897 if let Some(buffer) = buffer_store.get_by_path(&project_path)
3898 && buffer
3899 .read(cx)
3900 .file()
3901 .is_some_and(|file| file.disk_state().exists())
3902 && buffer.read(cx).has_unsaved_edits()
3903 {
3904 save_futures.push(buffer_store.save_buffer(buffer, cx));
3905 }
3906 }
3907 })
3908 }
3909 save_futures
3910 }
3911
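    /// Stages the given paths, saving any dirty open buffers for them first.
    /// The work is recorded as a pending staging operation and coalesced with
    /// other index writes for the same paths.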
3912 pub fn stage_entries(
3913 &mut self,
3914 entries: Vec<RepoPath>,
3915 cx: &mut Context<Self>,
3916 ) -> Task<anyhow::Result<()>> {
3917 if entries.is_empty() {
3918 return Task::ready(Ok(()));
3919 }
3920 let id = self.id;
3921 let save_tasks = self.save_buffers(&entries, cx);
3922 let paths = entries
3923 .iter()
3924 .map(|p| p.as_unix_str())
3925 .collect::<Vec<_>>()
3926 .join(" ");
3927 let status = format!("git add {paths}");
3928 let job_key = GitJobKey::WriteIndex(entries.clone());
3929
3930 self.spawn_job_with_tracking(
3931 entries.clone(),
3932 pending_op::GitStatus::Staged,
3933 cx,
3934 async move |this, cx| {
3935 for save_task in save_tasks {
3936 save_task.await?;
3937 }
3938
3939 this.update(cx, |this, _| {
3940 this.send_keyed_job(
3941 Some(job_key),
3942 Some(status.into()),
3943 move |git_repo, _cx| async move {
3944 match git_repo {
3945 RepositoryState::Local {
3946 backend,
3947 environment,
3948 ..
3949 } => backend.stage_paths(entries, environment.clone()).await,
3950 RepositoryState::Remote { project_id, client } => {
3951 client
3952 .request(proto::Stage {
3953 project_id: project_id.0,
3954 repository_id: id.to_proto(),
3955 paths: entries
3956 .into_iter()
3957 .map(|repo_path| repo_path.to_proto())
3958 .collect(),
3959 })
3960 .await
3961 .context("sending stage request")?;
3962
3963 Ok(())
3964 }
3965 }
3966 },
3967 )
3968 })?
3969 .await?
3970 },
3971 )
3972 }
3973
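    /// Unstages the given paths, saving any dirty open buffers for them first.
    /// The work is recorded as a pending unstaging operation and coalesced with
    /// other index writes for the same paths.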
3974 pub fn unstage_entries(
3975 &mut self,
3976 entries: Vec<RepoPath>,
3977 cx: &mut Context<Self>,
3978 ) -> Task<anyhow::Result<()>> {
3979 if entries.is_empty() {
3980 return Task::ready(Ok(()));
3981 }
3982 let id = self.id;
3983 let save_tasks = self.save_buffers(&entries, cx);
3984 let paths = entries
3985 .iter()
3986 .map(|p| p.as_unix_str())
3987 .collect::<Vec<_>>()
3988 .join(" ");
3989 let status = format!("git reset {paths}");
3990 let job_key = GitJobKey::WriteIndex(entries.clone());
3991
3992 self.spawn_job_with_tracking(
3993 entries.clone(),
3994 pending_op::GitStatus::Unstaged,
3995 cx,
3996 async move |this, cx| {
3997 for save_task in save_tasks {
3998 save_task.await?;
3999 }
4000
4001 this.update(cx, |this, _| {
4002 this.send_keyed_job(
4003 Some(job_key),
4004 Some(status.into()),
4005 move |git_repo, _cx| async move {
4006 match git_repo {
4007 RepositoryState::Local {
4008 backend,
4009 environment,
4010 ..
4011 } => backend.unstage_paths(entries, environment).await,
4012 RepositoryState::Remote { project_id, client } => {
4013 client
4014 .request(proto::Unstage {
4015 project_id: project_id.0,
4016 repository_id: id.to_proto(),
4017 paths: entries
4018 .into_iter()
4019 .map(|repo_path| repo_path.to_proto())
4020 .collect(),
4021 })
4022 .await
4023 .context("sending unstage request")?;
4024
4025 Ok(())
4026 }
4027 }
4028 },
4029 )
4030 })?
4031 .await?
4032 },
4033 )
4034 }
4035
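    /// Stages every entry in the cached status that doesn't already have staged
    /// changes or a pending staging operation.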
4036 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4037 let to_stage = self
4038 .cached_status()
4039 .filter_map(|entry| {
4040 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4041 if ops.staging() || ops.staged() {
4042 None
4043 } else {
4044 Some(entry.repo_path)
4045 }
4046 } else if entry.status.staging().has_staged() {
4047 None
4048 } else {
4049 Some(entry.repo_path)
4050 }
4051 })
4052 .collect();
4053 self.stage_entries(to_stage, cx)
4054 }
4055
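    /// Unstages every entry in the cached status that is fully staged or has a
    /// pending staging operation.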
4056 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4057 let to_unstage = self
4058 .cached_status()
4059 .filter_map(|entry| {
4060 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4061 if !ops.staging() && !ops.staged() {
4062 None
4063 } else {
4064 Some(entry.repo_path)
4065 }
4066 } else if entry.status.staging().has_unstaged() {
4067 None
4068 } else {
4069 Some(entry.repo_path)
4070 }
4071 })
4072 .collect();
4073 self.unstage_entries(to_unstage, cx)
4074 }
4075
4076 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4077 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4078
4079 self.stash_entries(to_stash, cx)
4080 }
4081
4082 pub fn stash_entries(
4083 &mut self,
4084 entries: Vec<RepoPath>,
4085 cx: &mut Context<Self>,
4086 ) -> Task<anyhow::Result<()>> {
4087 let id = self.id;
4088
4089 cx.spawn(async move |this, cx| {
4090 this.update(cx, |this, _| {
4091 this.send_job(None, move |git_repo, _cx| async move {
4092 match git_repo {
4093 RepositoryState::Local {
4094 backend,
4095 environment,
4096 ..
4097 } => backend.stash_paths(entries, environment).await,
4098 RepositoryState::Remote { project_id, client } => {
4099 client
4100 .request(proto::Stash {
4101 project_id: project_id.0,
4102 repository_id: id.to_proto(),
4103 paths: entries
4104 .into_iter()
4105 .map(|repo_path| repo_path.to_proto())
4106 .collect(),
4107 })
4108 .await
4109 .context("sending stash request")?;
4110 Ok(())
4111 }
4112 }
4113 })
4114 })?
4115 .await??;
4116 Ok(())
4117 })
4118 }
4119
4120 pub fn stash_pop(
4121 &mut self,
4122 index: Option<usize>,
4123 cx: &mut Context<Self>,
4124 ) -> Task<anyhow::Result<()>> {
4125 let id = self.id;
4126 cx.spawn(async move |this, cx| {
4127 this.update(cx, |this, _| {
4128 this.send_job(None, move |git_repo, _cx| async move {
4129 match git_repo {
4130 RepositoryState::Local {
4131 backend,
4132 environment,
4133 ..
4134 } => backend.stash_pop(index, environment).await,
4135 RepositoryState::Remote { project_id, client } => {
4136 client
4137 .request(proto::StashPop {
4138 project_id: project_id.0,
4139 repository_id: id.to_proto(),
4140 stash_index: index.map(|i| i as u64),
4141 })
4142 .await
4143 .context("sending stash pop request")?;
4144 Ok(())
4145 }
4146 }
4147 })
4148 })?
4149 .await??;
4150 Ok(())
4151 })
4152 }
4153
4154 pub fn stash_apply(
4155 &mut self,
4156 index: Option<usize>,
4157 cx: &mut Context<Self>,
4158 ) -> Task<anyhow::Result<()>> {
4159 let id = self.id;
4160 cx.spawn(async move |this, cx| {
4161 this.update(cx, |this, _| {
4162 this.send_job(None, move |git_repo, _cx| async move {
4163 match git_repo {
4164 RepositoryState::Local {
4165 backend,
4166 environment,
4167 ..
4168 } => backend.stash_apply(index, environment).await,
4169 RepositoryState::Remote { project_id, client } => {
4170 client
4171 .request(proto::StashApply {
4172 project_id: project_id.0,
4173 repository_id: id.to_proto(),
4174 stash_index: index.map(|i| i as u64),
4175 })
4176 .await
4177 .context("sending stash apply request")?;
4178 Ok(())
4179 }
4180 }
4181 })
4182 })?
4183 .await??;
4184 Ok(())
4185 })
4186 }
4187
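    /// Drops the stash entry at `index` (the latest when `None`). For local
    /// repositories the cached stash list is refreshed afterwards and the updated
    /// snapshot is forwarded to any downstream clients.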
4188 pub fn stash_drop(
4189 &mut self,
4190 index: Option<usize>,
4191 cx: &mut Context<Self>,
4192 ) -> oneshot::Receiver<anyhow::Result<()>> {
4193 let id = self.id;
4194 let updates_tx = self
4195 .git_store()
4196 .and_then(|git_store| match &git_store.read(cx).state {
4197 GitStoreState::Local { downstream, .. } => downstream
4198 .as_ref()
4199 .map(|downstream| downstream.updates_tx.clone()),
4200 _ => None,
4201 });
4202 let this = cx.weak_entity();
4203 self.send_job(None, move |git_repo, mut cx| async move {
4204 match git_repo {
4205 RepositoryState::Local {
4206 backend,
4207 environment,
4208 ..
4209 } => {
4210 // TODO would be nice to not have to do this manually
4211 let result = backend.stash_drop(index, environment).await;
4212 if result.is_ok()
4213 && let Ok(stash_entries) = backend.stash_entries().await
4214 {
4215 let snapshot = this.update(&mut cx, |this, cx| {
4216 this.snapshot.stash_entries = stash_entries;
4217 cx.emit(RepositoryEvent::StashEntriesChanged);
4218 this.snapshot.clone()
4219 })?;
4220 if let Some(updates_tx) = updates_tx {
4221 updates_tx
4222 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4223 .ok();
4224 }
4225 }
4226
4227 result
4228 }
4229 RepositoryState::Remote { project_id, client } => {
4230 client
4231 .request(proto::StashDrop {
4232 project_id: project_id.0,
4233 repository_id: id.to_proto(),
4234 stash_index: index.map(|i| i as u64),
4235 })
4236 .await
4237 .context("sending stash pop request")?;
4238 Ok(())
4239 }
4240 }
4241 })
4242 }
4243
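    /// Commits the currently staged changes with the given message, optionally
    /// overriding the author, and uses `askpass` to answer any prompts.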
4244 pub fn commit(
4245 &mut self,
4246 message: SharedString,
4247 name_and_email: Option<(SharedString, SharedString)>,
4248 options: CommitOptions,
4249 askpass: AskPassDelegate,
4250 _cx: &mut App,
4251 ) -> oneshot::Receiver<Result<()>> {
4252 let id = self.id;
4253 let askpass_delegates = self.askpass_delegates.clone();
4254 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4255
4256 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4257 match git_repo {
4258 RepositoryState::Local {
4259 backend,
4260 environment,
4261 ..
4262 } => {
4263 backend
4264 .commit(message, name_and_email, options, askpass, environment)
4265 .await
4266 }
4267 RepositoryState::Remote { project_id, client } => {
4268 askpass_delegates.lock().insert(askpass_id, askpass);
4269 let _defer = util::defer(|| {
4270 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4271 debug_assert!(askpass_delegate.is_some());
4272 });
4273 let (name, email) = name_and_email.unzip();
4274 client
4275 .request(proto::Commit {
4276 project_id: project_id.0,
4277 repository_id: id.to_proto(),
4278 message: String::from(message),
4279 name: name.map(String::from),
4280 email: email.map(String::from),
4281 options: Some(proto::commit::CommitOptions {
4282 amend: options.amend,
4283 signoff: options.signoff,
4284 }),
4285 askpass_id,
4286 })
4287 .await
4288 .context("sending commit request")?;
4289
4290 Ok(())
4291 }
4292 }
4293 })
4294 }
4295
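    /// Runs `git fetch` with the given options, using `askpass` to answer any
    /// credential prompts.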
4296 pub fn fetch(
4297 &mut self,
4298 fetch_options: FetchOptions,
4299 askpass: AskPassDelegate,
4300 _cx: &mut App,
4301 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4302 let askpass_delegates = self.askpass_delegates.clone();
4303 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4304 let id = self.id;
4305
4306 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4307 match git_repo {
4308 RepositoryState::Local {
4309 backend,
4310 environment,
4311 ..
4312 } => backend.fetch(fetch_options, askpass, environment, cx).await,
4313 RepositoryState::Remote { project_id, client } => {
4314 askpass_delegates.lock().insert(askpass_id, askpass);
4315 let _defer = util::defer(|| {
4316 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4317 debug_assert!(askpass_delegate.is_some());
4318 });
4319
4320 let response = client
4321 .request(proto::Fetch {
4322 project_id: project_id.0,
4323 repository_id: id.to_proto(),
4324 askpass_id,
4325 remote: fetch_options.to_proto(),
4326 })
4327 .await
4328 .context("sending fetch request")?;
4329
4330 Ok(RemoteCommandOutput {
4331 stdout: response.stdout,
4332 stderr: response.stderr,
4333 })
4334 }
4335 }
4336 })
4337 }
4338
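    /// Pushes `branch` to `remote`. For local repositories the cached branch state
    /// is refreshed on success and forwarded to any downstream clients.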
4339 pub fn push(
4340 &mut self,
4341 branch: SharedString,
4342 remote: SharedString,
4343 options: Option<PushOptions>,
4344 askpass: AskPassDelegate,
4345 cx: &mut Context<Self>,
4346 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4347 let askpass_delegates = self.askpass_delegates.clone();
4348 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4349 let id = self.id;
4350
4351 let args = options
4352 .map(|option| match option {
4353 PushOptions::SetUpstream => " --set-upstream",
4354 PushOptions::Force => " --force-with-lease",
4355 })
4356 .unwrap_or("");
4357
4358 let updates_tx = self
4359 .git_store()
4360 .and_then(|git_store| match &git_store.read(cx).state {
4361 GitStoreState::Local { downstream, .. } => downstream
4362 .as_ref()
4363 .map(|downstream| downstream.updates_tx.clone()),
4364 _ => None,
4365 });
4366
4367 let this = cx.weak_entity();
4368 self.send_job(
4369 Some(format!("git push {} {} {}", args, remote, branch).into()),
4370 move |git_repo, mut cx| async move {
4371 match git_repo {
4372 RepositoryState::Local {
4373 backend,
4374 environment,
4375 ..
4376 } => {
4377 let result = backend
4378 .push(
4379 branch.to_string(),
4380 remote.to_string(),
4381 options,
4382 askpass,
4383 environment.clone(),
4384 cx.clone(),
4385 )
4386 .await;
4387 // TODO would be nice to not have to do this manually
4388 if result.is_ok() {
4389 let branches = backend.branches().await?;
4390 let branch = branches.into_iter().find(|branch| branch.is_head);
4391 log::info!("head branch after scan is {branch:?}");
4392 let snapshot = this.update(&mut cx, |this, cx| {
4393 this.snapshot.branch = branch;
4394 cx.emit(RepositoryEvent::BranchChanged);
4395 this.snapshot.clone()
4396 })?;
4397 if let Some(updates_tx) = updates_tx {
4398 updates_tx
4399 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4400 .ok();
4401 }
4402 }
4403 result
4404 }
4405 RepositoryState::Remote { project_id, client } => {
4406 askpass_delegates.lock().insert(askpass_id, askpass);
4407 let _defer = util::defer(|| {
4408 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4409 debug_assert!(askpass_delegate.is_some());
4410 });
4411 let response = client
4412 .request(proto::Push {
4413 project_id: project_id.0,
4414 repository_id: id.to_proto(),
4415 askpass_id,
4416 branch_name: branch.to_string(),
4417 remote_name: remote.to_string(),
4418 options: options.map(|options| match options {
4419 PushOptions::Force => proto::push::PushOptions::Force,
4420 PushOptions::SetUpstream => {
4421 proto::push::PushOptions::SetUpstream
4422 }
4423 }
4424 as i32),
4425 })
4426 .await
4427 .context("sending push request")?;
4428
4429 Ok(RemoteCommandOutput {
4430 stdout: response.stdout,
4431 stderr: response.stderr,
4432 })
4433 }
4434 }
4435 },
4436 )
4437 }
4438
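    /// Pulls from `remote`, optionally rebasing and restricting to `branch`.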
4439 pub fn pull(
4440 &mut self,
4441 branch: Option<SharedString>,
4442 remote: SharedString,
4443 rebase: bool,
4444 askpass: AskPassDelegate,
4445 _cx: &mut App,
4446 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4447 let askpass_delegates = self.askpass_delegates.clone();
4448 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4449 let id = self.id;
4450
4451 let mut status = "git pull".to_string();
4452 if rebase {
4453 status.push_str(" --rebase");
4454 }
4455 status.push_str(&format!(" {}", remote));
4456 if let Some(b) = &branch {
4457 status.push_str(&format!(" {}", b));
4458 }
4459
4460 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4461 match git_repo {
4462 RepositoryState::Local {
4463 backend,
4464 environment,
4465 ..
4466 } => {
4467 backend
4468 .pull(
4469 branch.as_ref().map(|b| b.to_string()),
4470 remote.to_string(),
4471 rebase,
4472 askpass,
4473 environment.clone(),
4474 cx,
4475 )
4476 .await
4477 }
4478 RepositoryState::Remote { project_id, client } => {
4479 askpass_delegates.lock().insert(askpass_id, askpass);
4480 let _defer = util::defer(|| {
4481 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4482 debug_assert!(askpass_delegate.is_some());
4483 });
4484 let response = client
4485 .request(proto::Pull {
4486 project_id: project_id.0,
4487 repository_id: id.to_proto(),
4488 askpass_id,
4489 rebase,
4490 branch_name: branch.as_ref().map(|b| b.to_string()),
4491 remote_name: remote.to_string(),
4492 })
4493 .await
4494 .context("sending pull request")?;
4495
4496 Ok(RemoteCommandOutput {
4497 stdout: response.stdout,
4498 stderr: response.stderr,
4499 })
4500 }
4501 }
4502 })
4503 }
4504
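    /// Queues a keyed job that writes `content` as the index text for `path`,
    /// coalescing with other pending writes to the same path, and optionally records
    /// the hunk staging operation count once the write has landed.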
4505 fn spawn_set_index_text_job(
4506 &mut self,
4507 path: RepoPath,
4508 content: Option<String>,
4509 hunk_staging_operation_count: Option<usize>,
4510 cx: &mut Context<Self>,
4511 ) -> oneshot::Receiver<anyhow::Result<()>> {
4512 let id = self.id;
4513 let this = cx.weak_entity();
4514 let git_store = self.git_store.clone();
4515 self.send_keyed_job(
4516 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4517 None,
4518 move |git_repo, mut cx| async move {
4519 log::debug!(
4520 "start updating index text for buffer {}",
4521 path.as_unix_str()
4522 );
4523 match git_repo {
4524 RepositoryState::Local {
4525 backend,
4526 environment,
4527 ..
4528 } => {
4529 backend
4530 .set_index_text(path.clone(), content, environment.clone())
4531 .await?;
4532 }
4533 RepositoryState::Remote { project_id, client } => {
4534 client
4535 .request(proto::SetIndexText {
4536 project_id: project_id.0,
4537 repository_id: id.to_proto(),
4538 path: path.to_proto(),
4539 text: content,
4540 })
4541 .await?;
4542 }
4543 }
4544 log::debug!(
4545 "finish updating index text for buffer {}",
4546 path.as_unix_str()
4547 );
4548
4549 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4550 let project_path = this
4551 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4552 .ok()
4553 .flatten();
4554 git_store.update(&mut cx, |git_store, cx| {
4555 let buffer_id = git_store
4556 .buffer_store
4557 .read(cx)
4558 .get_by_path(&project_path?)?
4559 .read(cx)
4560 .remote_id();
4561 let diff_state = git_store.diffs.get(&buffer_id)?;
4562 diff_state.update(cx, |diff_state, _| {
4563 diff_state.hunk_staging_operation_count_as_of_write =
4564 hunk_staging_operation_count;
4565 });
4566 Some(())
4567 })?;
4568 }
4569 Ok(())
4570 },
4571 )
4572 }
4573
4574 pub fn get_remotes(
4575 &mut self,
4576 branch_name: Option<String>,
4577 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4578 let id = self.id;
4579 self.send_job(None, move |repo, _cx| async move {
4580 match repo {
4581 RepositoryState::Local { backend, .. } => backend.get_remotes(branch_name).await,
4582 RepositoryState::Remote { project_id, client } => {
4583 let response = client
4584 .request(proto::GetRemotes {
4585 project_id: project_id.0,
4586 repository_id: id.to_proto(),
4587 branch_name,
4588 })
4589 .await?;
4590
4591 let remotes = response
4592 .remotes
4593 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
4597 .collect();
4598
4599 Ok(remotes)
4600 }
4601 }
4602 })
4603 }
4604
4605 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4606 let id = self.id;
4607 self.send_job(None, move |repo, _| async move {
4608 match repo {
4609 RepositoryState::Local { backend, .. } => backend.branches().await,
4610 RepositoryState::Remote { project_id, client } => {
4611 let response = client
4612 .request(proto::GitGetBranches {
4613 project_id: project_id.0,
4614 repository_id: id.to_proto(),
4615 })
4616 .await?;
4617
4618 let branches = response
4619 .branches
4620 .into_iter()
4621 .map(|branch| proto_to_branch(&branch))
4622 .collect();
4623
4624 Ok(branches)
4625 }
4626 }
4627 })
4628 }
4629
4630 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4631 let id = self.id;
4632 self.send_job(None, move |repo, _| async move {
4633 match repo {
4634 RepositoryState::Local { backend, .. } => backend.worktrees().await,
4635 RepositoryState::Remote { project_id, client } => {
4636 let response = client
4637 .request(proto::GitGetWorktrees {
4638 project_id: project_id.0,
4639 repository_id: id.to_proto(),
4640 })
4641 .await?;
4642
4643 let worktrees = response
4644 .worktrees
4645 .into_iter()
4646 .map(|worktree| proto_to_worktree(&worktree))
4647 .collect();
4648
4649 Ok(worktrees)
4650 }
4651 }
4652 })
4653 }
4654
4655 pub fn create_worktree(
4656 &mut self,
4657 name: String,
4658 path: PathBuf,
4659 commit: Option<String>,
4660 ) -> oneshot::Receiver<Result<()>> {
4661 let id = self.id;
4662 self.send_job(
4663 Some("git worktree add".into()),
4664 move |repo, _cx| async move {
4665 match repo {
4666 RepositoryState::Local { backend, .. } => {
4667 backend.create_worktree(name, path, commit).await
4668 }
4669 RepositoryState::Remote { project_id, client } => {
4670 client
4671 .request(proto::GitCreateWorktree {
4672 project_id: project_id.0,
4673 repository_id: id.to_proto(),
4674 name,
4675 directory: path.to_string_lossy().to_string(),
4676 commit,
4677 })
4678 .await?;
4679
4680 Ok(())
4681 }
4682 }
4683 },
4684 )
4685 }
4686
4687 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4688 let id = self.id;
4689 self.send_job(None, move |repo, _| async move {
4690 match repo {
4691 RepositoryState::Local { backend, .. } => backend.default_branch().await,
4692 RepositoryState::Remote { project_id, client } => {
4693 let response = client
4694 .request(proto::GetDefaultBranch {
4695 project_id: project_id.0,
4696 repository_id: id.to_proto(),
4697 })
4698 .await?;
4699
4700 anyhow::Ok(response.branch.map(SharedString::from))
4701 }
4702 }
4703 })
4704 }
4705
4706 pub fn diff_tree(
4707 &mut self,
4708 diff_type: DiffTreeType,
4709 _cx: &App,
4710 ) -> oneshot::Receiver<Result<TreeDiff>> {
4711 let repository_id = self.snapshot.id;
4712 self.send_job(None, move |repo, _cx| async move {
4713 match repo {
4714 RepositoryState::Local { backend, .. } => backend.diff_tree(diff_type).await,
4715 RepositoryState::Remote { client, project_id } => {
4716 let response = client
4717 .request(proto::GetTreeDiff {
4718 project_id: project_id.0,
4719 repository_id: repository_id.0,
4720 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4721 base: diff_type.base().to_string(),
4722 head: diff_type.head().to_string(),
4723 })
4724 .await?;
4725
4726 let entries = response
4727 .entries
4728 .into_iter()
4729 .filter_map(|entry| {
4730 let status = match entry.status() {
4731 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4732 proto::tree_diff_status::Status::Modified => {
4733 TreeDiffStatus::Modified {
4734 old: git::Oid::from_str(
4735 &entry.oid.context("missing oid").log_err()?,
4736 )
4737 .log_err()?,
4738 }
4739 }
4740 proto::tree_diff_status::Status::Deleted => {
4741 TreeDiffStatus::Deleted {
4742 old: git::Oid::from_str(
4743 &entry.oid.context("missing oid").log_err()?,
4744 )
4745 .log_err()?,
4746 }
4747 }
4748 };
4749 Some((
4750 RepoPath::from_rel_path(
4751 &RelPath::from_proto(&entry.path).log_err()?,
4752 ),
4753 status,
4754 ))
4755 })
4756 .collect();
4757
4758 Ok(TreeDiff { entries })
4759 }
4760 }
4761 })
4762 }
4763
4764 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4765 let id = self.id;
4766 self.send_job(None, move |repo, _cx| async move {
4767 match repo {
4768 RepositoryState::Local { backend, .. } => backend.diff(diff_type).await,
4769 RepositoryState::Remote { project_id, client } => {
4770 let response = client
4771 .request(proto::GitDiff {
4772 project_id: project_id.0,
4773 repository_id: id.to_proto(),
4774 diff_type: match diff_type {
4775 DiffType::HeadToIndex => {
4776 proto::git_diff::DiffType::HeadToIndex.into()
4777 }
4778 DiffType::HeadToWorktree => {
4779 proto::git_diff::DiffType::HeadToWorktree.into()
4780 }
4781 },
4782 })
4783 .await?;
4784
4785 Ok(response.diff)
4786 }
4787 }
4788 })
4789 }
4790
4791 pub fn create_branch(
4792 &mut self,
4793 branch_name: String,
4794 base_branch: Option<String>,
4795 ) -> oneshot::Receiver<Result<()>> {
4796 let id = self.id;
4797 let status_msg = if let Some(ref base) = base_branch {
4798 format!("git switch -c {branch_name} {base}").into()
4799 } else {
4800 format!("git switch -c {branch_name}").into()
4801 };
4802 self.send_job(Some(status_msg), move |repo, _cx| async move {
4803 match repo {
4804 RepositoryState::Local { backend, .. } => {
4805 backend.create_branch(branch_name, base_branch).await
4806 }
4807 RepositoryState::Remote { project_id, client } => {
4808 client
4809 .request(proto::GitCreateBranch {
4810 project_id: project_id.0,
4811 repository_id: id.to_proto(),
4812 branch_name,
4813 })
4814 .await?;
4815
4816 Ok(())
4817 }
4818 }
4819 })
4820 }
4821
4822 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4823 let id = self.id;
4824 self.send_job(
4825 Some(format!("git switch {branch_name}").into()),
4826 move |repo, _cx| async move {
4827 match repo {
4828 RepositoryState::Local { backend, .. } => {
4829 backend.change_branch(branch_name).await
4830 }
4831 RepositoryState::Remote { project_id, client } => {
4832 client
4833 .request(proto::GitChangeBranch {
4834 project_id: project_id.0,
4835 repository_id: id.to_proto(),
4836 branch_name,
4837 })
4838 .await?;
4839
4840 Ok(())
4841 }
4842 }
4843 },
4844 )
4845 }
4846
4847 pub fn rename_branch(
4848 &mut self,
4849 branch: String,
4850 new_name: String,
4851 ) -> oneshot::Receiver<Result<()>> {
4852 let id = self.id;
4853 self.send_job(
4854 Some(format!("git branch -m {branch} {new_name}").into()),
4855 move |repo, _cx| async move {
4856 match repo {
4857 RepositoryState::Local { backend, .. } => {
4858 backend.rename_branch(branch, new_name).await
4859 }
4860 RepositoryState::Remote { project_id, client } => {
4861 client
4862 .request(proto::GitRenameBranch {
4863 project_id: project_id.0,
4864 repository_id: id.to_proto(),
4865 branch,
4866 new_name,
4867 })
4868 .await?;
4869
4870 Ok(())
4871 }
4872 }
4873 },
4874 )
4875 }
4876
4877 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
4878 let id = self.id;
4879 self.send_job(None, move |repo, _cx| async move {
4880 match repo {
4881 RepositoryState::Local { backend, .. } => backend.check_for_pushed_commit().await,
4882 RepositoryState::Remote { project_id, client } => {
4883 let response = client
4884 .request(proto::CheckForPushedCommits {
4885 project_id: project_id.0,
4886 repository_id: id.to_proto(),
4887 })
4888 .await?;
4889
4890 let branches = response.pushed_to.into_iter().map(Into::into).collect();
4891
4892 Ok(branches)
4893 }
4894 }
4895 })
4896 }
4897
4898 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
4899 self.send_job(None, |repo, _cx| async move {
4900 match repo {
4901 RepositoryState::Local { backend, .. } => backend.checkpoint().await,
4902 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4903 }
4904 })
4905 }
4906
4907 pub fn restore_checkpoint(
4908 &mut self,
4909 checkpoint: GitRepositoryCheckpoint,
4910 ) -> oneshot::Receiver<Result<()>> {
4911 self.send_job(None, move |repo, _cx| async move {
4912 match repo {
4913 RepositoryState::Local { backend, .. } => {
4914 backend.restore_checkpoint(checkpoint).await
4915 }
4916 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4917 }
4918 })
4919 }
4920
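    /// Applies a repository update received from the remote host, updating the
    /// branch, head commit, merge state, stash, and statuses, and emitting the
    /// corresponding events.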
4921 pub(crate) fn apply_remote_update(
4922 &mut self,
4923 update: proto::UpdateRepository,
4924 cx: &mut Context<Self>,
4925 ) -> Result<()> {
4926 let conflicted_paths = TreeSet::from_ordered_entries(
4927 update
4928 .current_merge_conflicts
4929 .into_iter()
4930 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
4931 );
4932 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
4933 let new_head_commit = update
4934 .head_commit_details
4935 .as_ref()
4936 .map(proto_to_commit_details);
4937 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
4938 cx.emit(RepositoryEvent::BranchChanged)
4939 }
4940 self.snapshot.branch = new_branch;
4941 self.snapshot.head_commit = new_head_commit;
4942
4943 self.snapshot.merge.conflicted_paths = conflicted_paths;
4944 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
4945 let new_stash_entries = GitStash {
4946 entries: update
4947 .stash_entries
4948 .iter()
4949 .filter_map(|entry| proto_to_stash(entry).ok())
4950 .collect(),
4951 };
4952 if self.snapshot.stash_entries != new_stash_entries {
4953 cx.emit(RepositoryEvent::StashEntriesChanged)
4954 }
4955 self.snapshot.stash_entries = new_stash_entries;
4956
4957 let edits = update
4958 .removed_statuses
4959 .into_iter()
4960 .filter_map(|path| {
4961 Some(sum_tree::Edit::Remove(PathKey(
4962 RelPath::from_proto(&path).log_err()?,
4963 )))
4964 })
4965 .chain(
4966 update
4967 .updated_statuses
4968 .into_iter()
4969 .filter_map(|updated_status| {
4970 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
4971 }),
4972 )
4973 .collect::<Vec<_>>();
4974 if !edits.is_empty() {
4975 cx.emit(RepositoryEvent::StatusesChanged { full_scan: true });
4976 }
4977 self.snapshot.statuses_by_path.edit(edits, ());
4978 if update.is_last_update {
4979 self.snapshot.scan_id = update.scan_id;
4980 }
4981 Ok(())
4982 }
4983
4984 pub fn compare_checkpoints(
4985 &mut self,
4986 left: GitRepositoryCheckpoint,
4987 right: GitRepositoryCheckpoint,
4988 ) -> oneshot::Receiver<Result<bool>> {
4989 self.send_job(None, move |repo, _cx| async move {
4990 match repo {
4991 RepositoryState::Local { backend, .. } => {
4992 backend.compare_checkpoints(left, right).await
4993 }
4994 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
4995 }
4996 })
4997 }
4998
4999 pub fn diff_checkpoints(
5000 &mut self,
5001 base_checkpoint: GitRepositoryCheckpoint,
5002 target_checkpoint: GitRepositoryCheckpoint,
5003 ) -> oneshot::Receiver<Result<String>> {
5004 self.send_job(None, move |repo, _cx| async move {
5005 match repo {
5006 RepositoryState::Local { backend, .. } => {
5007 backend
5008 .diff_checkpoints(base_checkpoint, target_checkpoint)
5009 .await
5010 }
5011 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5012 }
5013 })
5014 }
5015
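    /// Schedules a full rescan of this repository's git state. The job is keyed, so
    /// a queued rescan that has been superseded by a newer one is skipped.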
5016 fn schedule_scan(
5017 &mut self,
5018 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5019 cx: &mut Context<Self>,
5020 ) {
5021 let this = cx.weak_entity();
5022 let _ = self.send_keyed_job(
5023 Some(GitJobKey::ReloadGitState),
5024 None,
5025 |state, mut cx| async move {
5026 log::debug!("run scheduled git status scan");
5027
5028 let Some(this) = this.upgrade() else {
5029 return Ok(());
5030 };
5031 let RepositoryState::Local { backend, .. } = state else {
5032 bail!("not a local repository")
5033 };
5034 let (snapshot, events) = this
5035 .update(&mut cx, |this, _| {
5036 this.paths_needing_status_update.clear();
5037 compute_snapshot(
5038 this.id,
5039 this.work_directory_abs_path.clone(),
5040 this.snapshot.clone(),
5041 backend.clone(),
5042 )
5043 })?
5044 .await?;
5045 this.update(&mut cx, |this, cx| {
5046 this.snapshot = snapshot.clone();
5047 for event in events {
5048 cx.emit(event);
5049 }
5050 })?;
5051 if let Some(updates_tx) = updates_tx {
5052 updates_tx
5053 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5054 .ok();
5055 }
5056 Ok(())
5057 },
5058 );
5059 }
5060
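    /// Spawns the worker loop for a local repository: resolves the working-directory
    /// environment, locates a git binary, opens the repository backend, and then runs
    /// queued git jobs, skipping keyed jobs that have been superseded.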
5061 fn spawn_local_git_worker(
5062 work_directory_abs_path: Arc<Path>,
5063 dot_git_abs_path: Arc<Path>,
5064 _repository_dir_abs_path: Arc<Path>,
5065 _common_dir_abs_path: Arc<Path>,
5066 project_environment: WeakEntity<ProjectEnvironment>,
5067 fs: Arc<dyn Fs>,
5068 cx: &mut Context<Self>,
5069 ) -> mpsc::UnboundedSender<GitJob> {
5070 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5071
5072 cx.spawn(async move |_, cx| {
5073 let environment = project_environment
5074 .upgrade()
5075 .context("missing project environment")?
5076 .update(cx, |project_environment, cx| {
                    project_environment.local_directory_environment(
                        &Shell::System,
                        work_directory_abs_path.clone(),
                        cx,
                    )
5078 })?
5079 .await
5080 .unwrap_or_else(|| {
5081 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
5082 HashMap::default()
5083 });
5084 let search_paths = environment.get("PATH").map(|val| val.to_owned());
5085 let backend = cx
5086 .background_spawn(async move {
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
5089 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
5090 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
5091 })
5092 .await?;
5093
5094 if let Some(git_hosting_provider_registry) =
5095 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5096 {
5097 git_hosting_providers::register_additional_providers(
5098 git_hosting_provider_registry,
5099 backend.clone(),
5100 );
5101 }
5102
5103 let state = RepositoryState::Local {
5104 backend,
5105 environment: Arc::new(environment),
5106 };
5107 let mut jobs = VecDeque::new();
5108 loop {
5109 while let Ok(Some(next_job)) = job_rx.try_next() {
5110 jobs.push_back(next_job);
5111 }
5112
5113 if let Some(job) = jobs.pop_front() {
5114 if let Some(current_key) = &job.key
5115 && jobs
5116 .iter()
5117 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5118 {
5119 continue;
5120 }
5121 (job.job)(state.clone(), cx).await;
5122 } else if let Some(job) = job_rx.next().await {
5123 jobs.push_back(job);
5124 } else {
5125 break;
5126 }
5127 }
5128 anyhow::Ok(())
5129 })
5130 .detach_and_log_err(cx);
5131
5132 job_tx
5133 }
5134
5135 fn spawn_remote_git_worker(
5136 project_id: ProjectId,
5137 client: AnyProtoClient,
5138 cx: &mut Context<Self>,
5139 ) -> mpsc::UnboundedSender<GitJob> {
5140 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5141
5142 cx.spawn(async move |_, cx| {
5143 let state = RepositoryState::Remote { project_id, client };
5144 let mut jobs = VecDeque::new();
5145 loop {
5146 while let Ok(Some(next_job)) = job_rx.try_next() {
5147 jobs.push_back(next_job);
5148 }
5149
5150 if let Some(job) = jobs.pop_front() {
5151 if let Some(current_key) = &job.key
5152 && jobs
5153 .iter()
5154 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5155 {
5156 continue;
5157 }
5158 (job.job)(state.clone(), cx).await;
5159 } else if let Some(job) = job_rx.next().await {
5160 jobs.push_back(job);
5161 } else {
5162 break;
5163 }
5164 }
5165 anyhow::Ok(())
5166 })
5167 .detach_and_log_err(cx);
5168
5169 job_tx
5170 }
5171
5172 fn load_staged_text(
5173 &mut self,
5174 buffer_id: BufferId,
5175 repo_path: RepoPath,
5176 cx: &App,
5177 ) -> Task<Result<Option<String>>> {
5178 let rx = self.send_job(None, move |state, _| async move {
5179 match state {
5180 RepositoryState::Local { backend, .. } => {
5181 anyhow::Ok(backend.load_index_text(repo_path).await)
5182 }
5183 RepositoryState::Remote { project_id, client } => {
5184 let response = client
5185 .request(proto::OpenUnstagedDiff {
5186 project_id: project_id.to_proto(),
5187 buffer_id: buffer_id.to_proto(),
5188 })
5189 .await?;
5190 Ok(response.staged_text)
5191 }
5192 }
5193 });
5194 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5195 }
5196
5197 fn load_committed_text(
5198 &mut self,
5199 buffer_id: BufferId,
5200 repo_path: RepoPath,
5201 cx: &App,
5202 ) -> Task<Result<DiffBasesChange>> {
5203 let rx = self.send_job(None, move |state, _| async move {
5204 match state {
5205 RepositoryState::Local { backend, .. } => {
5206 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5207 let staged_text = backend.load_index_text(repo_path).await;
5208 let diff_bases_change = if committed_text == staged_text {
5209 DiffBasesChange::SetBoth(committed_text)
5210 } else {
5211 DiffBasesChange::SetEach {
5212 index: staged_text,
5213 head: committed_text,
5214 }
5215 };
5216 anyhow::Ok(diff_bases_change)
5217 }
5218 RepositoryState::Remote { project_id, client } => {
5219 use proto::open_uncommitted_diff_response::Mode;
5220
5221 let response = client
5222 .request(proto::OpenUncommittedDiff {
5223 project_id: project_id.to_proto(),
5224 buffer_id: buffer_id.to_proto(),
5225 })
5226 .await?;
5227 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5228 let bases = match mode {
5229 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5230 Mode::IndexAndHead => DiffBasesChange::SetEach {
5231 head: response.committed_text,
5232 index: response.staged_text,
5233 },
5234 };
5235 Ok(bases)
5236 }
5237 }
5238 });
5239
5240 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5241 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5243 let repository_id = self.snapshot.id;
5244 let rx = self.send_job(None, move |state, _| async move {
5245 match state {
5246 RepositoryState::Local { backend, .. } => backend.load_blob_content(oid).await,
5247 RepositoryState::Remote { client, project_id } => {
5248 let response = client
5249 .request(proto::GetBlobContent {
5250 project_id: project_id.to_proto(),
5251 repository_id: repository_id.0,
5252 oid: oid.to_string(),
5253 })
5254 .await?;
5255 Ok(response.content)
5256 }
5257 }
5258 });
5259 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5260 }
5261
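    /// Records that the given paths need their git status refreshed and queues a
    /// keyed job that recomputes statuses for them, updating the snapshot and
    /// notifying downstream clients when anything changed.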
5262 fn paths_changed(
5263 &mut self,
5264 paths: Vec<RepoPath>,
5265 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5266 cx: &mut Context<Self>,
5267 ) {
5268 self.paths_needing_status_update.extend(paths);
5269
5270 let this = cx.weak_entity();
5271 let _ = self.send_keyed_job(
5272 Some(GitJobKey::RefreshStatuses),
5273 None,
5274 |state, mut cx| async move {
5275 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5276 (
5277 this.snapshot.clone(),
5278 mem::take(&mut this.paths_needing_status_update),
5279 )
5280 })?;
5281 let RepositoryState::Local { backend, .. } = state else {
5282 bail!("not a local repository")
5283 };
5284
5285 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5286 if paths.is_empty() {
5287 return Ok(());
5288 }
5289 let statuses = backend.status(&paths).await?;
5290 let stash_entries = backend.stash_entries().await?;
5291
5292 let changed_path_statuses = cx
5293 .background_spawn(async move {
5294 let mut changed_path_statuses = Vec::new();
5295 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5296 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5297
5298 for (repo_path, status) in &*statuses.entries {
5299 changed_paths.remove(repo_path);
5300 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5301 && cursor.item().is_some_and(|entry| entry.status == *status)
5302 {
5303 continue;
5304 }
5305
5306 changed_path_statuses.push(Edit::Insert(StatusEntry {
5307 repo_path: repo_path.clone(),
5308 status: *status,
5309 }));
5310 }
5311 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5312 for path in changed_paths.into_iter() {
5313 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5314 changed_path_statuses
5315 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5316 }
5317 }
5318 changed_path_statuses
5319 })
5320 .await;
5321
5322 this.update(&mut cx, |this, cx| {
5323 if this.snapshot.stash_entries != stash_entries {
5324 cx.emit(RepositoryEvent::StashEntriesChanged);
5325 this.snapshot.stash_entries = stash_entries;
5326 }
5327
5328 if !changed_path_statuses.is_empty() {
5329 cx.emit(RepositoryEvent::StatusesChanged { full_scan: false });
5330 this.snapshot
5331 .statuses_by_path
5332 .edit(changed_path_statuses, ());
5333 this.snapshot.scan_id += 1;
5334 }
5335
5336 if let Some(updates_tx) = updates_tx {
5337 updates_tx
5338 .unbounded_send(DownstreamUpdate::UpdateRepository(
5339 this.snapshot.clone(),
5340 ))
5341 .ok();
5342 }
5343 })
5344 },
5345 );
5346 }
5347
    /// Returns the currently running git command, if any, along with when it started.
5349 pub fn current_job(&self) -> Option<JobInfo> {
5350 self.active_jobs.values().next().cloned()
5351 }
5352
5353 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5354 self.send_job(None, |_, _| async {})
5355 }
5356
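    /// Records a pending operation with the given target status for each path, runs
    /// `f`, and then marks those operations as finished, skipped (if canceled), or
    /// errored.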
5357 fn spawn_job_with_tracking<AsyncFn>(
5358 &mut self,
5359 paths: Vec<RepoPath>,
5360 git_status: pending_op::GitStatus,
5361 cx: &mut Context<Self>,
5362 f: AsyncFn,
5363 ) -> Task<Result<()>>
5364 where
5365 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5366 {
5367 let ids = self.new_pending_ops_for_paths(paths, git_status);
5368
5369 cx.spawn(async move |this, cx| {
5370 let (job_status, result) = match f(this.clone(), cx).await {
5371 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5372 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5373 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5374 };
5375
5376 this.update(cx, |this, _| {
5377 let mut edits = Vec::with_capacity(ids.len());
5378 for (id, entry) in ids {
5379 if let Some(mut ops) = this.snapshot.pending_ops_for_path(&entry) {
5380 if let Some(op) = ops.op_by_id_mut(id) {
5381 op.job_status = job_status;
5382 }
5383 edits.push(sum_tree::Edit::Insert(ops));
5384 }
5385 }
5386 this.snapshot.pending_ops_by_path.edit(edits, ());
5387 })?;
5388
5389 result
5390 })
5391 }
5392
5393 fn new_pending_ops_for_paths(
5394 &mut self,
5395 paths: Vec<RepoPath>,
5396 git_status: pending_op::GitStatus,
5397 ) -> Vec<(PendingOpId, RepoPath)> {
5398 let mut edits = Vec::with_capacity(paths.len());
5399 let mut ids = Vec::with_capacity(paths.len());
5400 for path in paths {
5401 let mut ops = self
5402 .snapshot
5403 .pending_ops_for_path(&path)
5404 .unwrap_or_else(|| PendingOps::new(&path));
5405 let id = ops.max_id() + 1;
5406 ops.ops.push(PendingOp {
5407 id,
5408 git_status,
5409 job_status: pending_op::JobStatus::Running,
5410 });
5411 edits.push(sum_tree::Edit::Insert(ops));
5412 ids.push((id, path));
5413 }
5414 self.snapshot.pending_ops_by_path.edit(edits, ());
5415 ids
5416 }
5417}
5418
5419fn get_permalink_in_rust_registry_src(
5420 provider_registry: Arc<GitHostingProviderRegistry>,
5421 path: PathBuf,
5422 selection: Range<u32>,
5423) -> Result<url::Url> {
5424 #[derive(Deserialize)]
5425 struct CargoVcsGit {
5426 sha1: String,
5427 }
5428
5429 #[derive(Deserialize)]
5430 struct CargoVcsInfo {
5431 git: CargoVcsGit,
5432 path_in_vcs: String,
5433 }
5434
5435 #[derive(Deserialize)]
5436 struct CargoPackage {
5437 repository: String,
5438 }
5439
5440 #[derive(Deserialize)]
5441 struct CargoToml {
5442 package: CargoPackage,
5443 }
5444
5445 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5446 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5447 Some((dir, json))
5448 }) else {
5449 bail!("No .cargo_vcs_info.json found in parent directories")
5450 };
5451 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5452 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5453 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5454 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5455 .context("parsing package.repository field of manifest")?;
5456 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5457 let permalink = provider.build_permalink(
5458 remote,
5459 BuildPermalinkParams::new(
5460 &cargo_vcs_info.git.sha1,
5461 &RepoPath::from_rel_path(
5462 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5463 ),
5464 Some(selection),
5465 ),
5466 );
5467 Ok(permalink)
5468}
5469
5470fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5471 let Some(blame) = blame else {
5472 return proto::BlameBufferResponse {
5473 blame_response: None,
5474 };
5475 };
5476
5477 let entries = blame
5478 .entries
5479 .into_iter()
5480 .map(|entry| proto::BlameEntry {
5481 sha: entry.sha.as_bytes().into(),
5482 start_line: entry.range.start,
5483 end_line: entry.range.end,
5484 original_line_number: entry.original_line_number,
5485 author: entry.author,
5486 author_mail: entry.author_mail,
5487 author_time: entry.author_time,
5488 author_tz: entry.author_tz,
5489 committer: entry.committer_name,
5490 committer_mail: entry.committer_email,
5491 committer_time: entry.committer_time,
5492 committer_tz: entry.committer_tz,
5493 summary: entry.summary,
5494 previous: entry.previous,
5495 filename: entry.filename,
5496 })
5497 .collect::<Vec<_>>();
5498
5499 let messages = blame
5500 .messages
5501 .into_iter()
5502 .map(|(oid, message)| proto::CommitMessage {
5503 oid: oid.as_bytes().into(),
5504 message,
5505 })
5506 .collect::<Vec<_>>();
5507
5508 proto::BlameBufferResponse {
5509 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5510 entries,
5511 messages,
5512 remote_url: blame.remote_url,
5513 }),
5514 }
5515}
5516
5517fn deserialize_blame_buffer_response(
5518 response: proto::BlameBufferResponse,
5519) -> Option<git::blame::Blame> {
5520 let response = response.blame_response?;
5521 let entries = response
5522 .entries
5523 .into_iter()
5524 .filter_map(|entry| {
5525 Some(git::blame::BlameEntry {
5526 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5527 range: entry.start_line..entry.end_line,
5528 original_line_number: entry.original_line_number,
5529 committer_name: entry.committer,
5530 committer_time: entry.committer_time,
5531 committer_tz: entry.committer_tz,
5532 committer_email: entry.committer_mail,
5533 author: entry.author,
5534 author_mail: entry.author_mail,
5535 author_time: entry.author_time,
5536 author_tz: entry.author_tz,
5537 summary: entry.summary,
5538 previous: entry.previous,
5539 filename: entry.filename,
5540 })
5541 })
5542 .collect::<Vec<_>>();
5543
5544 let messages = response
5545 .messages
5546 .into_iter()
5547 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5548 .collect::<HashMap<_, _>>();
5549
5550 Some(Blame {
5551 entries,
5552 messages,
5553 remote_url: response.remote_url,
5554 })
5555}
5556
5557fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5558 proto::Branch {
5559 is_head: branch.is_head,
5560 ref_name: branch.ref_name.to_string(),
5561 unix_timestamp: branch
5562 .most_recent_commit
5563 .as_ref()
5564 .map(|commit| commit.commit_timestamp as u64),
5565 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5566 ref_name: upstream.ref_name.to_string(),
5567 tracking: upstream
5568 .tracking
5569 .status()
5570 .map(|upstream| proto::UpstreamTracking {
5571 ahead: upstream.ahead as u64,
5572 behind: upstream.behind as u64,
5573 }),
5574 }),
5575 most_recent_commit: branch
5576 .most_recent_commit
5577 .as_ref()
5578 .map(|commit| proto::CommitSummary {
5579 sha: commit.sha.to_string(),
5580 subject: commit.subject.to_string(),
5581 commit_timestamp: commit.commit_timestamp,
5582 author_name: commit.author_name.to_string(),
5583 }),
5584 }
5585}
5586
5587fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5588 proto::Worktree {
5589 path: worktree.path.to_string_lossy().to_string(),
5590 ref_name: worktree.ref_name.to_string(),
5591 sha: worktree.sha.to_string(),
5592 }
5593}
5594
5595fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5596 git::repository::Worktree {
5597 path: PathBuf::from(proto.path.clone()),
5598 ref_name: proto.ref_name.clone().into(),
5599 sha: proto.sha.clone().into(),
5600 }
5601}
5602
5603fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5604 git::repository::Branch {
5605 is_head: proto.is_head,
5606 ref_name: proto.ref_name.clone().into(),
5607 upstream: proto
5608 .upstream
5609 .as_ref()
5610 .map(|upstream| git::repository::Upstream {
5611 ref_name: upstream.ref_name.to_string().into(),
5612 tracking: upstream
5613 .tracking
5614 .as_ref()
5615 .map(|tracking| {
5616 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5617 ahead: tracking.ahead as u32,
5618 behind: tracking.behind as u32,
5619 })
5620 })
5621 .unwrap_or(git::repository::UpstreamTracking::Gone),
5622 }),
5623 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5624 git::repository::CommitSummary {
5625 sha: commit.sha.to_string().into(),
5626 subject: commit.subject.to_string().into(),
5627 commit_timestamp: commit.commit_timestamp,
5628 author_name: commit.author_name.to_string().into(),
5629 has_parent: true,
5630 }
5631 }),
5632 }
5633}
5634
5635fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5636 proto::GitCommitDetails {
5637 sha: commit.sha.to_string(),
5638 message: commit.message.to_string(),
5639 commit_timestamp: commit.commit_timestamp,
5640 author_email: commit.author_email.to_string(),
5641 author_name: commit.author_name.to_string(),
5642 }
5643}
5644
5645fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5646 CommitDetails {
5647 sha: proto.sha.clone().into(),
5648 message: proto.message.clone().into(),
5649 commit_timestamp: proto.commit_timestamp,
5650 author_email: proto.author_email.clone().into(),
5651 author_name: proto.author_name.clone().into(),
5652 }
5653}
5654
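/// Builds a fresh `RepositorySnapshot` by querying the backend for branches, statuses,
/// stash, and merge state, carrying over still-running pending operations, and returns
/// it along with events describing how it differs from `prev_snapshot`.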
5655async fn compute_snapshot(
5656 id: RepositoryId,
5657 work_directory_abs_path: Arc<Path>,
5658 prev_snapshot: RepositorySnapshot,
5659 backend: Arc<dyn GitRepository>,
5660) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
5661 let mut events = Vec::new();
5662 let branches = backend.branches().await?;
5663 let branch = branches.into_iter().find(|branch| branch.is_head);
5664 let statuses = backend
5665 .status(&[RepoPath::from_rel_path(
5666 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
5667 )])
5668 .await?;
5669 let stash_entries = backend.stash_entries().await?;
5670 let statuses_by_path = SumTree::from_iter(
5671 statuses
5672 .entries
5673 .iter()
5674 .map(|(repo_path, status)| StatusEntry {
5675 repo_path: repo_path.clone(),
5676 status: *status,
5677 }),
5678 (),
5679 );
5680 let (merge_details, merge_heads_changed) =
5681 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
5682 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
5683
5684 let pending_ops_by_path = SumTree::from_iter(
5685 prev_snapshot.pending_ops_by_path.iter().filter_map(|ops| {
5686 let inner_ops: Vec<PendingOp> =
5687 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5688 if inner_ops.is_empty() {
5689 None
5690 } else {
5691 Some(PendingOps {
5692 repo_path: ops.repo_path.clone(),
5693 ops: inner_ops,
5694 })
5695 }
5696 }),
5697 (),
5698 );
5699
5700 if pending_ops_by_path != prev_snapshot.pending_ops_by_path {
5701 events.push(RepositoryEvent::PendingOpsChanged {
5702 pending_ops: prev_snapshot.pending_ops_by_path.clone(),
5703 })
5704 }
5705
5706 if merge_heads_changed {
5707 events.push(RepositoryEvent::MergeHeadsChanged);
5708 }
5709
5710 if statuses_by_path != prev_snapshot.statuses_by_path {
5711 events.push(RepositoryEvent::StatusesChanged { full_scan: true })
5712 }
5713
    // Load the head commit directly; `branch` can be `None` when HEAD is detached.
5715 let head_commit = match backend.head_sha().await {
5716 Some(head_sha) => backend.show(head_sha).await.log_err(),
5717 None => None,
5718 };
5719
5720 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
5721 events.push(RepositoryEvent::BranchChanged);
5722 }
5723
5724 // Used by edit prediction data collection
5725 let remote_origin_url = backend.remote_url("origin");
5726 let remote_upstream_url = backend.remote_url("upstream");
5727
5728 let snapshot = RepositorySnapshot {
5729 id,
5730 statuses_by_path,
5731 pending_ops_by_path,
5732 work_directory_abs_path,
5733 path_style: prev_snapshot.path_style,
5734 scan_id: prev_snapshot.scan_id + 1,
5735 branch,
5736 head_commit,
5737 merge: merge_details,
5738 remote_origin_url,
5739 remote_upstream_url,
5740 stash_entries,
5741 };
5742
5743 Ok((snapshot, events))
5744}
5745
5746fn status_from_proto(
5747 simple_status: i32,
5748 status: Option<proto::GitFileStatus>,
5749) -> anyhow::Result<FileStatus> {
5750 use proto::git_file_status::Variant;
5751
5752 let Some(variant) = status.and_then(|status| status.variant) else {
5753 let code = proto::GitStatus::from_i32(simple_status)
5754 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
5755 let result = match code {
5756 proto::GitStatus::Added => TrackedStatus {
5757 worktree_status: StatusCode::Added,
5758 index_status: StatusCode::Unmodified,
5759 }
5760 .into(),
5761 proto::GitStatus::Modified => TrackedStatus {
5762 worktree_status: StatusCode::Modified,
5763 index_status: StatusCode::Unmodified,
5764 }
5765 .into(),
5766 proto::GitStatus::Conflict => UnmergedStatus {
5767 first_head: UnmergedStatusCode::Updated,
5768 second_head: UnmergedStatusCode::Updated,
5769 }
5770 .into(),
5771 proto::GitStatus::Deleted => TrackedStatus {
5772 worktree_status: StatusCode::Deleted,
5773 index_status: StatusCode::Unmodified,
5774 }
5775 .into(),
5776 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
5777 };
5778 return Ok(result);
5779 };
5780
5781 let result = match variant {
5782 Variant::Untracked(_) => FileStatus::Untracked,
5783 Variant::Ignored(_) => FileStatus::Ignored,
5784 Variant::Unmerged(unmerged) => {
5785 let [first_head, second_head] =
5786 [unmerged.first_head, unmerged.second_head].map(|head| {
5787 let code = proto::GitStatus::from_i32(head)
5788 .with_context(|| format!("Invalid git status code: {head}"))?;
5789 let result = match code {
5790 proto::GitStatus::Added => UnmergedStatusCode::Added,
5791 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
5792 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
5793 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
5794 };
5795 Ok(result)
5796 });
5797 let [first_head, second_head] = [first_head?, second_head?];
5798 UnmergedStatus {
5799 first_head,
5800 second_head,
5801 }
5802 .into()
5803 }
5804 Variant::Tracked(tracked) => {
5805 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
5806 .map(|status| {
5807 let code = proto::GitStatus::from_i32(status)
5808 .with_context(|| format!("Invalid git status code: {status}"))?;
5809 let result = match code {
5810 proto::GitStatus::Modified => StatusCode::Modified,
5811 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
5812 proto::GitStatus::Added => StatusCode::Added,
5813 proto::GitStatus::Deleted => StatusCode::Deleted,
5814 proto::GitStatus::Renamed => StatusCode::Renamed,
5815 proto::GitStatus::Copied => StatusCode::Copied,
5816 proto::GitStatus::Unmodified => StatusCode::Unmodified,
5817 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
5818 };
5819 Ok(result)
5820 });
5821 let [index_status, worktree_status] = [index_status?, worktree_status?];
5822 TrackedStatus {
5823 index_status,
5824 worktree_status,
5825 }
5826 .into()
5827 }
5828 };
5829 Ok(result)
5830}
5831
5832fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
5833 use proto::git_file_status::{Tracked, Unmerged, Variant};
5834
5835 let variant = match status {
5836 FileStatus::Untracked => Variant::Untracked(Default::default()),
5837 FileStatus::Ignored => Variant::Ignored(Default::default()),
5838 FileStatus::Unmerged(UnmergedStatus {
5839 first_head,
5840 second_head,
5841 }) => Variant::Unmerged(Unmerged {
5842 first_head: unmerged_status_to_proto(first_head),
5843 second_head: unmerged_status_to_proto(second_head),
5844 }),
5845 FileStatus::Tracked(TrackedStatus {
5846 index_status,
5847 worktree_status,
5848 }) => Variant::Tracked(Tracked {
5849 index_status: tracked_status_to_proto(index_status),
5850 worktree_status: tracked_status_to_proto(worktree_status),
5851 }),
5852 };
5853 proto::GitFileStatus {
5854 variant: Some(variant),
5855 }
5856}
5857
5858fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
5859 match code {
5860 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
5861 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
5862 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
5863 }
5864}
5865
5866fn tracked_status_to_proto(code: StatusCode) -> i32 {
5867 match code {
5868 StatusCode::Added => proto::GitStatus::Added as _,
5869 StatusCode::Deleted => proto::GitStatus::Deleted as _,
5870 StatusCode::Modified => proto::GitStatus::Modified as _,
5871 StatusCode::Renamed => proto::GitStatus::Renamed as _,
5872 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
5873 StatusCode::Copied => proto::GitStatus::Copied as _,
5874 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
5875 }
5876}