1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use std::{
60 cmp::Ordering,
61 collections::{BTreeSet, HashSet, VecDeque},
62 future::Future,
63 mem,
64 ops::Range,
65 path::{Path, PathBuf},
66 str::FromStr,
67 sync::{
68 Arc,
69 atomic::{self, AtomicU64},
70 },
71 time::Instant,
72};
73use sum_tree::{Edit, SumTree, TreeSet};
74use task::Shell;
75use text::{Bias, BufferId};
76use util::{
77 ResultExt, debug_panic,
78 paths::{PathStyle, SanitizedPath},
79 post_inc,
80 rel_path::RelPath,
81};
82use worktree::{
83 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
84 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
85};
86use zeroize::Zeroize;
87
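/// Central store for a project's git state: tracks the repositories discovered across
/// worktrees, per-buffer diff and conflict state, and, when the project is shared,
/// forwards repository updates to downstream collaborators.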
88pub struct GitStore {
89 state: GitStoreState,
90 buffer_store: Entity<BufferStore>,
91 worktree_store: Entity<WorktreeStore>,
92 repositories: HashMap<RepositoryId, Entity<Repository>>,
93 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
94 active_repo_id: Option<RepositoryId>,
95 #[allow(clippy::type_complexity)]
96 loading_diffs:
97 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
98 diffs: HashMap<BufferId, Entity<BufferGitState>>,
99 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
100 _subscriptions: Vec<Subscription>,
101}
102
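/// Diffs shared with a remote peer, held strongly so they stay alive for as long as the
/// peer keeps the corresponding buffer open.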
103#[derive(Default)]
104struct SharedDiffs {
105 unstaged: Option<Entity<BufferDiff>>,
106 uncommitted: Option<Entity<BufferDiff>>,
107}
108
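/// Git-related state for a single buffer: weak handles to its unstaged and uncommitted
/// diffs and its conflict set, plus the bookkeeping used to recalculate them when the
/// buffer, index, or HEAD changes.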
109struct BufferGitState {
110 unstaged_diff: Option<WeakEntity<BufferDiff>>,
111 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
112 conflict_set: Option<WeakEntity<ConflictSet>>,
113 recalculate_diff_task: Option<Task<Result<()>>>,
114 reparse_conflict_markers_task: Option<Task<Result<()>>>,
115 language: Option<Arc<Language>>,
116 language_registry: Option<Arc<LanguageRegistry>>,
117 conflict_updated_futures: Vec<oneshot::Sender<()>>,
118 recalculating_tx: postage::watch::Sender<bool>,
119
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
128 hunk_staging_operation_count: usize,
129 hunk_staging_operation_count_as_of_write: usize,
130
131 head_text: Option<Arc<String>>,
132 index_text: Option<Arc<String>>,
133 head_changed: bool,
134 index_changed: bool,
135 language_changed: bool,
136}
137
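/// Describes which of a buffer's diff base texts (index and/or HEAD) have changed, and
/// their new contents.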
138#[derive(Clone, Debug)]
139enum DiffBasesChange {
140 SetIndex(Option<String>),
141 SetHead(Option<String>),
142 SetEach {
143 index: Option<String>,
144 head: Option<String>,
145 },
146 SetBoth(Option<String>),
147}
148
149#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
150enum DiffKind {
151 Unstaged,
152 Uncommitted,
153}
154
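/// Whether this store backs a local project or mirrors an upstream project over RPC,
/// along with the downstream client (if any) that updates are forwarded to.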
155enum GitStoreState {
156 Local {
157 next_repository_id: Arc<AtomicU64>,
158 downstream: Option<LocalDownstreamState>,
159 project_environment: Entity<ProjectEnvironment>,
160 fs: Arc<dyn Fs>,
161 },
162 Remote {
163 upstream_client: AnyProtoClient,
164 upstream_project_id: u64,
165 downstream: Option<(AnyProtoClient, ProjectId)>,
166 },
167}
168
169enum DownstreamUpdate {
170 UpdateRepository(RepositorySnapshot),
171 RemoveRepository(RepositoryId),
172}
173
174struct LocalDownstreamState {
175 client: AnyProtoClient,
176 project_id: ProjectId,
177 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
178 _task: Task<Result<()>>,
179}
180
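/// A checkpoint of every repository in the store, keyed by each repository's working
/// directory. Produced by [`GitStore::checkpoint`] and consumed by
/// [`GitStore::restore_checkpoint`] and [`GitStore::compare_checkpoints`].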
181#[derive(Clone, Debug)]
182pub struct GitStoreCheckpoint {
183 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
184}
185
186#[derive(Clone, Debug, PartialEq, Eq)]
187pub struct StatusEntry {
188 pub repo_path: RepoPath,
189 pub status: FileStatus,
190}
191
192impl StatusEntry {
193 fn to_proto(&self) -> proto::StatusEntry {
194 let simple_status = match self.status {
195 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
196 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
197 FileStatus::Tracked(TrackedStatus {
198 index_status,
199 worktree_status,
200 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
201 worktree_status
202 } else {
203 index_status
204 }),
205 };
206
207 proto::StatusEntry {
208 repo_path: self.repo_path.to_proto(),
209 simple_status,
210 status: Some(status_to_proto(self.status)),
211 }
212 }
213}
214
215impl TryFrom<proto::StatusEntry> for StatusEntry {
216 type Error = anyhow::Error;
217
218 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
219 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
220 let status = status_from_proto(value.simple_status, value.status)?;
221 Ok(Self { repo_path, status })
222 }
223}
224
225impl sum_tree::Item for StatusEntry {
226 type Summary = PathSummary<GitSummary>;
227
228 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
229 PathSummary {
230 max_path: self.repo_path.as_ref().clone(),
231 item_summary: self.status.summary(),
232 }
233 }
234}
235
236impl sum_tree::KeyedItem for StatusEntry {
237 type Key = PathKey;
238
239 fn key(&self) -> Self::Key {
240 PathKey(self.repo_path.as_ref().clone())
241 }
242}
243
244#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
245pub struct RepositoryId(pub u64);
246
247#[derive(Clone, Debug, Default, PartialEq, Eq)]
248pub struct MergeDetails {
249 pub conflicted_paths: TreeSet<RepoPath>,
250 pub message: Option<SharedString>,
251 pub heads: Vec<Option<SharedString>>,
252}
253
254#[derive(Clone, Debug, PartialEq, Eq)]
255pub struct RepositorySnapshot {
256 pub id: RepositoryId,
257 pub statuses_by_path: SumTree<StatusEntry>,
258 pub work_directory_abs_path: Arc<Path>,
259 pub path_style: PathStyle,
260 pub branch: Option<Branch>,
261 pub head_commit: Option<CommitDetails>,
262 pub scan_id: u64,
263 pub merge: MergeDetails,
264 pub remote_origin_url: Option<String>,
265 pub remote_upstream_url: Option<String>,
266 pub stash_entries: GitStash,
267}
268
269type JobId = u64;
270
271#[derive(Clone, Debug, PartialEq, Eq)]
272pub struct JobInfo {
273 pub start: Instant,
274 pub message: SharedString,
275}
276
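/// A single git repository tracked by the [`GitStore`]. Wraps a [`RepositorySnapshot`]
/// and runs git operations by sending [`GitJob`]s down a job queue.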
277pub struct Repository {
278 this: WeakEntity<Self>,
279 snapshot: RepositorySnapshot,
280 commit_message_buffer: Option<Entity<Buffer>>,
281 git_store: WeakEntity<GitStore>,
282 // For a local repository, holds paths that have had worktree events since the last status scan completed,
283 // and that should be examined during the next status scan.
284 paths_needing_status_update: BTreeSet<RepoPath>,
285 job_sender: mpsc::UnboundedSender<GitJob>,
286 active_jobs: HashMap<JobId, JobInfo>,
287 pending_ops: SumTree<PendingOps>,
288 job_id: JobId,
289 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
290 latest_askpass_id: u64,
291 repository_state: Shared<Task<Result<RepositoryState, String>>>,
292}
293
294impl std::ops::Deref for Repository {
295 type Target = RepositorySnapshot;
296
297 fn deref(&self) -> &Self::Target {
298 &self.snapshot
299 }
300}
301
302#[derive(Clone)]
303pub struct LocalRepositoryState {
304 pub fs: Arc<dyn Fs>,
305 pub backend: Arc<dyn GitRepository>,
306 pub environment: Arc<HashMap<String, String>>,
307}
308
309impl LocalRepositoryState {
310 async fn new(
311 work_directory_abs_path: Arc<Path>,
312 dot_git_abs_path: Arc<Path>,
313 project_environment: WeakEntity<ProjectEnvironment>,
314 fs: Arc<dyn Fs>,
315 cx: &mut AsyncApp,
316 ) -> anyhow::Result<Self> {
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(
                    &Shell::System,
                    work_directory_abs_path.clone(),
                    cx,
                )
            })?
            .await
            .unwrap_or_else(|| {
                log::error!(
                    "failed to get working directory environment for repository {work_directory_abs_path:?}"
                );
                HashMap::default()
            });
326 let search_paths = environment.get("PATH").map(|val| val.to_owned());
327 let backend = cx
328 .background_spawn({
329 let fs = fs.clone();
330 async move {
331 let system_git_binary_path = search_paths
332 .and_then(|search_paths| {
333 which::which_in("git", Some(search_paths), &work_directory_abs_path)
334 .ok()
335 })
336 .or_else(|| which::which("git").ok());
337 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
338 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
339 }
340 })
341 .await?;
342 Ok(LocalRepositoryState {
343 backend,
344 environment: Arc::new(environment),
345 fs,
346 })
347 }
348}
349
350#[derive(Clone)]
351pub struct RemoteRepositoryState {
352 pub project_id: ProjectId,
353 pub client: AnyProtoClient,
354}
355
356#[derive(Clone)]
357pub enum RepositoryState {
358 Local(LocalRepositoryState),
359 Remote(RemoteRepositoryState),
360}
361
362#[derive(Clone, Debug, PartialEq, Eq)]
363pub enum RepositoryEvent {
364 StatusesChanged,
365 MergeHeadsChanged,
366 BranchChanged,
367 StashEntriesChanged,
368 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
369}
370
371#[derive(Clone, Debug)]
372pub struct JobsUpdated;
373
374#[derive(Debug)]
375pub enum GitStoreEvent {
376 ActiveRepositoryChanged(Option<RepositoryId>),
377 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
378 RepositoryAdded,
379 RepositoryRemoved(RepositoryId),
380 IndexWriteError(anyhow::Error),
381 JobsUpdated,
382 ConflictsUpdated,
383}
384
385impl EventEmitter<RepositoryEvent> for Repository {}
386impl EventEmitter<JobsUpdated> for Repository {}
387impl EventEmitter<GitStoreEvent> for GitStore {}
388
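/// A unit of work to run against a repository's backing state. Jobs may carry a
/// [`GitJobKey`] identifying redundant work (such as repeated index writes for the same
/// paths) so that it can be coalesced.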
389pub struct GitJob {
390 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
391 key: Option<GitJobKey>,
392}
393
394#[derive(PartialEq, Eq)]
395enum GitJobKey {
396 WriteIndex(Vec<RepoPath>),
397 ReloadBufferDiffBases,
398 RefreshStatuses,
399 ReloadGitState,
400}
401
402impl GitStore {
403 pub fn local(
404 worktree_store: &Entity<WorktreeStore>,
405 buffer_store: Entity<BufferStore>,
406 environment: Entity<ProjectEnvironment>,
407 fs: Arc<dyn Fs>,
408 cx: &mut Context<Self>,
409 ) -> Self {
410 Self::new(
411 worktree_store.clone(),
412 buffer_store,
413 GitStoreState::Local {
414 next_repository_id: Arc::new(AtomicU64::new(1)),
415 downstream: None,
416 project_environment: environment,
417 fs,
418 },
419 cx,
420 )
421 }
422
423 pub fn remote(
424 worktree_store: &Entity<WorktreeStore>,
425 buffer_store: Entity<BufferStore>,
426 upstream_client: AnyProtoClient,
427 project_id: u64,
428 cx: &mut Context<Self>,
429 ) -> Self {
430 Self::new(
431 worktree_store.clone(),
432 buffer_store,
433 GitStoreState::Remote {
434 upstream_client,
435 upstream_project_id: project_id,
436 downstream: None,
437 },
438 cx,
439 )
440 }
441
442 fn new(
443 worktree_store: Entity<WorktreeStore>,
444 buffer_store: Entity<BufferStore>,
445 state: GitStoreState,
446 cx: &mut Context<Self>,
447 ) -> Self {
448 let _subscriptions = vec![
449 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
450 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
451 ];
452
453 GitStore {
454 state,
455 buffer_store,
456 worktree_store,
457 repositories: HashMap::default(),
458 worktree_ids: HashMap::default(),
459 active_repo_id: None,
460 _subscriptions,
461 loading_diffs: HashMap::default(),
462 shared_diffs: HashMap::default(),
463 diffs: HashMap::default(),
464 }
465 }
466
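    /// Registers handlers for git-related RPC messages on the given client.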
467 pub fn init(client: &AnyProtoClient) {
468 client.add_entity_request_handler(Self::handle_get_remotes);
469 client.add_entity_request_handler(Self::handle_get_branches);
470 client.add_entity_request_handler(Self::handle_get_default_branch);
471 client.add_entity_request_handler(Self::handle_change_branch);
472 client.add_entity_request_handler(Self::handle_create_branch);
473 client.add_entity_request_handler(Self::handle_rename_branch);
474 client.add_entity_request_handler(Self::handle_git_init);
475 client.add_entity_request_handler(Self::handle_push);
476 client.add_entity_request_handler(Self::handle_pull);
477 client.add_entity_request_handler(Self::handle_fetch);
478 client.add_entity_request_handler(Self::handle_stage);
479 client.add_entity_request_handler(Self::handle_unstage);
480 client.add_entity_request_handler(Self::handle_stash);
481 client.add_entity_request_handler(Self::handle_stash_pop);
482 client.add_entity_request_handler(Self::handle_stash_apply);
483 client.add_entity_request_handler(Self::handle_stash_drop);
484 client.add_entity_request_handler(Self::handle_commit);
485 client.add_entity_request_handler(Self::handle_run_hook);
486 client.add_entity_request_handler(Self::handle_reset);
487 client.add_entity_request_handler(Self::handle_show);
488 client.add_entity_request_handler(Self::handle_load_commit_diff);
489 client.add_entity_request_handler(Self::handle_checkout_files);
490 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
491 client.add_entity_request_handler(Self::handle_set_index_text);
492 client.add_entity_request_handler(Self::handle_askpass);
493 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
494 client.add_entity_request_handler(Self::handle_git_diff);
495 client.add_entity_request_handler(Self::handle_tree_diff);
496 client.add_entity_request_handler(Self::handle_get_blob_content);
497 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
498 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
499 client.add_entity_message_handler(Self::handle_update_diff_bases);
500 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
501 client.add_entity_request_handler(Self::handle_blame_buffer);
502 client.add_entity_message_handler(Self::handle_update_repository);
503 client.add_entity_message_handler(Self::handle_remove_repository);
504 client.add_entity_request_handler(Self::handle_git_clone);
505 client.add_entity_request_handler(Self::handle_get_worktrees);
506 client.add_entity_request_handler(Self::handle_create_worktree);
507 }
508
509 pub fn is_local(&self) -> bool {
510 matches!(self.state, GitStoreState::Local { .. })
511 }

    pub fn set_active_repo_for_path(
        &mut self,
        project_path: &ProjectPath,
        cx: &mut Context<Self>,
    ) {
513 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
514 let id = repo.read(cx).id;
515 if self.active_repo_id != Some(id) {
516 self.active_repo_id = Some(id);
517 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
518 }
519 }
520 }
521
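    /// Starts sharing repository state with a downstream client, sending an initial
    /// snapshot of each repository and then streaming incremental updates.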
522 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
523 match &mut self.state {
524 GitStoreState::Remote {
525 downstream: downstream_client,
526 ..
527 } => {
528 for repo in self.repositories.values() {
529 let update = repo.read(cx).snapshot.initial_update(project_id);
530 for update in split_repository_update(update) {
531 client.send(update).log_err();
532 }
533 }
534 *downstream_client = Some((client, ProjectId(project_id)));
535 }
536 GitStoreState::Local {
537 downstream: downstream_client,
538 ..
539 } => {
540 let mut snapshots = HashMap::default();
541 let (updates_tx, mut updates_rx) = mpsc::unbounded();
542 for repo in self.repositories.values() {
543 updates_tx
544 .unbounded_send(DownstreamUpdate::UpdateRepository(
545 repo.read(cx).snapshot.clone(),
546 ))
547 .ok();
548 }
549 *downstream_client = Some(LocalDownstreamState {
550 client: client.clone(),
551 project_id: ProjectId(project_id),
552 updates_tx,
553 _task: cx.spawn(async move |this, cx| {
554 cx.background_spawn(async move {
555 while let Some(update) = updates_rx.next().await {
556 match update {
557 DownstreamUpdate::UpdateRepository(snapshot) => {
558 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
559 {
560 let update =
561 snapshot.build_update(old_snapshot, project_id);
562 *old_snapshot = snapshot;
563 for update in split_repository_update(update) {
564 client.send(update)?;
565 }
566 } else {
567 let update = snapshot.initial_update(project_id);
568 for update in split_repository_update(update) {
569 client.send(update)?;
570 }
571 snapshots.insert(snapshot.id, snapshot);
572 }
573 }
574 DownstreamUpdate::RemoveRepository(id) => {
575 client.send(proto::RemoveRepository {
576 project_id,
577 id: id.to_proto(),
578 })?;
579 }
580 }
581 }
582 anyhow::Ok(())
583 })
584 .await
585 .ok();
586 this.update(cx, |this, _| {
587 if let GitStoreState::Local {
588 downstream: downstream_client,
589 ..
590 } = &mut this.state
591 {
592 downstream_client.take();
593 } else {
594 unreachable!("unshared called on remote store");
595 }
596 })
597 }),
598 });
599 }
600 }
601 }
602
603 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
604 match &mut self.state {
605 GitStoreState::Local {
606 downstream: downstream_client,
607 ..
608 } => {
609 downstream_client.take();
610 }
611 GitStoreState::Remote {
612 downstream: downstream_client,
613 ..
614 } => {
615 downstream_client.take();
616 }
617 }
618 self.shared_diffs.clear();
619 }
620
621 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
622 self.shared_diffs.remove(peer_id);
623 }
624
625 pub fn active_repository(&self) -> Option<Entity<Repository>> {
626 self.active_repo_id
627 .as_ref()
628 .map(|id| self.repositories[id].clone())
629 }
630
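    /// Returns a [`BufferDiff`] comparing the buffer's contents against the index,
    /// creating (and caching) it on first use.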
631 pub fn open_unstaged_diff(
632 &mut self,
633 buffer: Entity<Buffer>,
634 cx: &mut Context<Self>,
635 ) -> Task<Result<Entity<BufferDiff>>> {
636 let buffer_id = buffer.read(cx).remote_id();
637 if let Some(diff_state) = self.diffs.get(&buffer_id)
638 && let Some(unstaged_diff) = diff_state
639 .read(cx)
640 .unstaged_diff
641 .as_ref()
642 .and_then(|weak| weak.upgrade())
643 {
644 if let Some(task) =
645 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
646 {
647 return cx.background_executor().spawn(async move {
648 task.await;
649 Ok(unstaged_diff)
650 });
651 }
652 return Task::ready(Ok(unstaged_diff));
653 }
654
655 let Some((repo, repo_path)) =
656 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
657 else {
658 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
659 };
660
661 let task = self
662 .loading_diffs
663 .entry((buffer_id, DiffKind::Unstaged))
664 .or_insert_with(|| {
665 let staged_text = repo.update(cx, |repo, cx| {
666 repo.load_staged_text(buffer_id, repo_path, cx)
667 });
668 cx.spawn(async move |this, cx| {
669 Self::open_diff_internal(
670 this,
671 DiffKind::Unstaged,
672 staged_text.await.map(DiffBasesChange::SetIndex),
673 buffer,
674 cx,
675 )
676 .await
677 .map_err(Arc::new)
678 })
679 .shared()
680 })
681 .clone();
682
683 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
684 }
685
686 pub fn open_diff_since(
687 &mut self,
688 oid: Option<git::Oid>,
689 buffer: Entity<Buffer>,
690 repo: Entity<Repository>,
691 languages: Arc<LanguageRegistry>,
692 cx: &mut Context<Self>,
693 ) -> Task<Result<Entity<BufferDiff>>> {
694 cx.spawn(async move |this, cx| {
695 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
696 let content = match oid {
697 None => None,
698 Some(oid) => Some(
699 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
700 .await?,
701 ),
702 };
703 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
704
705 buffer_diff
706 .update(cx, |buffer_diff, cx| {
707 buffer_diff.set_base_text(
708 content.map(Arc::new),
709 buffer_snapshot.language().cloned(),
710 Some(languages.clone()),
711 buffer_snapshot.text,
712 cx,
713 )
714 })?
715 .await?;
716 let unstaged_diff = this
717 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
718 .await?;
719 buffer_diff.update(cx, |buffer_diff, _| {
720 buffer_diff.set_secondary_diff(unstaged_diff);
721 })?;
722
723 this.update(cx, |_, cx| {
724 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
725 .detach();
726 })?;
727
728 Ok(buffer_diff)
729 })
730 }
731
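    /// Returns a [`BufferDiff`] comparing the buffer's contents against HEAD, with the
    /// unstaged diff attached as its secondary diff. Created and cached on first use.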
732 pub fn open_uncommitted_diff(
733 &mut self,
734 buffer: Entity<Buffer>,
735 cx: &mut Context<Self>,
736 ) -> Task<Result<Entity<BufferDiff>>> {
737 let buffer_id = buffer.read(cx).remote_id();
738
739 if let Some(diff_state) = self.diffs.get(&buffer_id)
740 && let Some(uncommitted_diff) = diff_state
741 .read(cx)
742 .uncommitted_diff
743 .as_ref()
744 .and_then(|weak| weak.upgrade())
745 {
746 if let Some(task) =
747 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
748 {
749 return cx.background_executor().spawn(async move {
750 task.await;
751 Ok(uncommitted_diff)
752 });
753 }
754 return Task::ready(Ok(uncommitted_diff));
755 }
756
757 let Some((repo, repo_path)) =
758 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
759 else {
760 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
761 };
762
763 let task = self
764 .loading_diffs
765 .entry((buffer_id, DiffKind::Uncommitted))
766 .or_insert_with(|| {
767 let changes = repo.update(cx, |repo, cx| {
768 repo.load_committed_text(buffer_id, repo_path, cx)
769 });
770
771 // todo(lw): hot foreground spawn
772 cx.spawn(async move |this, cx| {
773 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
774 .await
775 .map_err(Arc::new)
776 })
777 .shared()
778 })
779 .clone();
780
781 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
782 }
783
784 async fn open_diff_internal(
785 this: WeakEntity<Self>,
786 kind: DiffKind,
787 texts: Result<DiffBasesChange>,
788 buffer_entity: Entity<Buffer>,
789 cx: &mut AsyncApp,
790 ) -> Result<Entity<BufferDiff>> {
791 let diff_bases_change = match texts {
792 Err(e) => {
793 this.update(cx, |this, cx| {
794 let buffer = buffer_entity.read(cx);
795 let buffer_id = buffer.remote_id();
796 this.loading_diffs.remove(&(buffer_id, kind));
797 })?;
798 return Err(e);
799 }
800 Ok(change) => change,
801 };
802
803 this.update(cx, |this, cx| {
804 let buffer = buffer_entity.read(cx);
805 let buffer_id = buffer.remote_id();
806 let language = buffer.language().cloned();
807 let language_registry = buffer.language_registry();
808 let text_snapshot = buffer.text_snapshot();
809 this.loading_diffs.remove(&(buffer_id, kind));
810
811 let git_store = cx.weak_entity();
812 let diff_state = this
813 .diffs
814 .entry(buffer_id)
815 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
816
817 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
818
819 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
820 diff_state.update(cx, |diff_state, cx| {
821 diff_state.language = language;
822 diff_state.language_registry = language_registry;
823
824 match kind {
825 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
826 DiffKind::Uncommitted => {
827 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
828 diff
829 } else {
830 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
831 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
832 unstaged_diff
833 };
834
835 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
836 diff_state.uncommitted_diff = Some(diff.downgrade())
837 }
838 }
839
840 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
841 let rx = diff_state.wait_for_recalculation();
842
843 anyhow::Ok(async move {
844 if let Some(rx) = rx {
845 rx.await;
846 }
847 Ok(diff)
848 })
849 })
850 })??
851 .await
852 }
853
854 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
855 let diff_state = self.diffs.get(&buffer_id)?;
856 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
857 }
858
859 pub fn get_uncommitted_diff(
860 &self,
861 buffer_id: BufferId,
862 cx: &App,
863 ) -> Option<Entity<BufferDiff>> {
864 let diff_state = self.diffs.get(&buffer_id)?;
865 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
866 }
867
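    /// Returns the [`ConflictSet`] for the given buffer, creating it if necessary and
    /// scheduling a reparse of the buffer's conflict markers.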
868 pub fn open_conflict_set(
869 &mut self,
870 buffer: Entity<Buffer>,
871 cx: &mut Context<Self>,
872 ) -> Entity<ConflictSet> {
873 log::debug!("open conflict set");
874 let buffer_id = buffer.read(cx).remote_id();
875
876 if let Some(git_state) = self.diffs.get(&buffer_id)
877 && let Some(conflict_set) = git_state
878 .read(cx)
879 .conflict_set
880 .as_ref()
881 .and_then(|weak| weak.upgrade())
882 {
884 let buffer_snapshot = buffer.read(cx).text_snapshot();
885
886 git_state.update(cx, |state, cx| {
887 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
888 });
889
890 return conflict_set;
891 }
892
893 let is_unmerged = self
894 .repository_and_path_for_buffer_id(buffer_id, cx)
895 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
896 let git_store = cx.weak_entity();
897 let buffer_git_state = self
898 .diffs
899 .entry(buffer_id)
900 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
901 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
902
903 self._subscriptions
904 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
905 cx.emit(GitStoreEvent::ConflictsUpdated);
906 }));
907
908 buffer_git_state.update(cx, |state, cx| {
909 state.conflict_set = Some(conflict_set.downgrade());
910 let buffer_snapshot = buffer.read(cx).text_snapshot();
911 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
912 });
913
914 conflict_set
915 }
916
917 pub fn project_path_git_status(
918 &self,
919 project_path: &ProjectPath,
920 cx: &App,
921 ) -> Option<FileStatus> {
922 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
923 Some(repo.read(cx).status_for_path(&repo_path)?.status)
924 }
925
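    /// Captures a checkpoint of the current state of every repository in the store.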
926 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
927 let mut work_directory_abs_paths = Vec::new();
928 let mut checkpoints = Vec::new();
929 for repository in self.repositories.values() {
930 repository.update(cx, |repository, _| {
931 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
932 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
933 });
934 }
935
936 cx.background_executor().spawn(async move {
937 let checkpoints = future::try_join_all(checkpoints).await?;
938 Ok(GitStoreCheckpoint {
939 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
940 .into_iter()
941 .zip(checkpoints)
942 .collect(),
943 })
944 })
945 }
946
947 pub fn restore_checkpoint(
948 &self,
949 checkpoint: GitStoreCheckpoint,
950 cx: &mut App,
951 ) -> Task<Result<()>> {
952 let repositories_by_work_dir_abs_path = self
953 .repositories
954 .values()
955 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
956 .collect::<HashMap<_, _>>();
957
958 let mut tasks = Vec::new();
959 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
960 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
961 let restore = repository.update(cx, |repository, _| {
962 repository.restore_checkpoint(checkpoint)
963 });
964 tasks.push(async move { restore.await? });
965 }
966 }
967 cx.background_spawn(async move {
968 future::try_join_all(tasks).await?;
969 Ok(())
970 })
971 }
972
973 /// Compares two checkpoints, returning true if they are equal.
974 pub fn compare_checkpoints(
975 &self,
976 left: GitStoreCheckpoint,
977 mut right: GitStoreCheckpoint,
978 cx: &mut App,
979 ) -> Task<Result<bool>> {
980 let repositories_by_work_dir_abs_path = self
981 .repositories
982 .values()
983 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
984 .collect::<HashMap<_, _>>();
985
986 let mut tasks = Vec::new();
987 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
988 if let Some(right_checkpoint) = right
989 .checkpoints_by_work_dir_abs_path
990 .remove(&work_dir_abs_path)
991 {
992 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
993 {
994 let compare = repository.update(cx, |repository, _| {
995 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
996 });
997
998 tasks.push(async move { compare.await? });
999 }
1000 } else {
1001 return Task::ready(Ok(false));
1002 }
1003 }
1004 cx.background_spawn(async move {
1005 Ok(future::try_join_all(tasks)
1006 .await?
1007 .into_iter()
1008 .all(|result| result))
1009 })
1010 }
1011
1012 /// Blames a buffer.
1013 pub fn blame_buffer(
1014 &self,
1015 buffer: &Entity<Buffer>,
1016 version: Option<clock::Global>,
1017 cx: &mut Context<Self>,
1018 ) -> Task<Result<Option<Blame>>> {
1019 let buffer = buffer.read(cx);
1020 let Some((repo, repo_path)) =
1021 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1022 else {
1023 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1024 };
1025 let content = match &version {
1026 Some(version) => buffer.rope_for_version(version),
1027 None => buffer.as_rope().clone(),
1028 };
1029 let version = version.unwrap_or(buffer.version());
1030 let buffer_id = buffer.remote_id();
1031
1032 let repo = repo.downgrade();
1033 cx.spawn(async move |_, cx| {
1034 let repository_state = repo
1035 .update(cx, |repo, _| repo.repository_state.clone())?
1036 .await
1037 .map_err(|err| anyhow::anyhow!(err))?;
1038 match repository_state {
1039 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1040 .blame(repo_path.clone(), content)
1041 .await
1042 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1043 .map(Some),
1044 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1045 let response = client
1046 .request(proto::BlameBuffer {
1047 project_id: project_id.to_proto(),
1048 buffer_id: buffer_id.into(),
1049 version: serialize_version(&version),
1050 })
1051 .await?;
1052 Ok(deserialize_blame_buffer_response(response))
1053 }
1054 }
1055 })
1056 }
1057
1058 pub fn get_permalink_to_line(
1059 &self,
1060 buffer: &Entity<Buffer>,
1061 selection: Range<u32>,
1062 cx: &mut App,
1063 ) -> Task<Result<url::Url>> {
1064 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1065 return Task::ready(Err(anyhow!("buffer has no file")));
1066 };
1067
1068 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1069 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1070 cx,
1071 ) else {
1072 // If we're not in a Git repo, check whether this is a Rust source
1073 // file in the Cargo registry (presumably opened with go-to-definition
1074 // from a normal Rust file). If so, we can put together a permalink
1075 // using crate metadata.
1076 if buffer
1077 .read(cx)
1078 .language()
1079 .is_none_or(|lang| lang.name() != "Rust".into())
1080 {
1081 return Task::ready(Err(anyhow!("no permalink available")));
1082 }
1083 let file_path = file.worktree.read(cx).absolutize(&file.path);
1084 return cx.spawn(async move |cx| {
1085 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1086 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1087 .context("no permalink available")
1088 });
1089 };
1090
1091 let buffer_id = buffer.read(cx).remote_id();
1092 let branch = repo.read(cx).branch.clone();
1093 let remote = branch
1094 .as_ref()
1095 .and_then(|b| b.upstream.as_ref())
1096 .and_then(|b| b.remote_name())
1097 .unwrap_or("origin")
1098 .to_string();
1099
1100 let rx = repo.update(cx, |repo, _| {
1101 repo.send_job(None, move |state, cx| async move {
1102 match state {
1103 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1104 let origin_url = backend
1105 .remote_url(&remote)
1106 .with_context(|| format!("remote \"{remote}\" not found"))?;
1107
1108 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1109
1110 let provider_registry =
1111 cx.update(GitHostingProviderRegistry::default_global)?;
1112
1113 let (provider, remote) =
1114 parse_git_remote_url(provider_registry, &origin_url)
1115 .context("parsing Git remote URL")?;
1116
1117 Ok(provider.build_permalink(
1118 remote,
1119 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1120 ))
1121 }
1122 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1123 let response = client
1124 .request(proto::GetPermalinkToLine {
1125 project_id: project_id.to_proto(),
1126 buffer_id: buffer_id.into(),
1127 selection: Some(proto::Range {
1128 start: selection.start as u64,
1129 end: selection.end as u64,
1130 }),
1131 })
1132 .await?;
1133
1134 url::Url::parse(&response.permalink).context("failed to parse permalink")
1135 }
1136 }
1137 })
1138 });
1139 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1140 }
1141
1142 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1143 match &self.state {
1144 GitStoreState::Local {
1145 downstream: downstream_client,
1146 ..
1147 } => downstream_client
1148 .as_ref()
1149 .map(|state| (state.client.clone(), state.project_id)),
1150 GitStoreState::Remote {
1151 downstream: downstream_client,
1152 ..
1153 } => downstream_client.clone(),
1154 }
1155 }
1156
1157 fn upstream_client(&self) -> Option<AnyProtoClient> {
1158 match &self.state {
1159 GitStoreState::Local { .. } => None,
1160 GitStoreState::Remote {
1161 upstream_client, ..
1162 } => Some(upstream_client.clone()),
1163 }
1164 }
1165
1166 fn on_worktree_store_event(
1167 &mut self,
1168 worktree_store: Entity<WorktreeStore>,
1169 event: &WorktreeStoreEvent,
1170 cx: &mut Context<Self>,
1171 ) {
1172 let GitStoreState::Local {
1173 project_environment,
1174 downstream,
1175 next_repository_id,
1176 fs,
1177 } = &self.state
1178 else {
1179 return;
1180 };
1181
1182 match event {
1183 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1184 if let Some(worktree) = self
1185 .worktree_store
1186 .read(cx)
1187 .worktree_for_id(*worktree_id, cx)
1188 {
1189 let paths_by_git_repo =
1190 self.process_updated_entries(&worktree, updated_entries, cx);
1191 let downstream = downstream
1192 .as_ref()
1193 .map(|downstream| downstream.updates_tx.clone());
1194 cx.spawn(async move |_, cx| {
1195 let paths_by_git_repo = paths_by_git_repo.await;
1196 for (repo, paths) in paths_by_git_repo {
1197 repo.update(cx, |repo, cx| {
1198 repo.paths_changed(paths, downstream.clone(), cx);
1199 })
1200 .ok();
1201 }
1202 })
1203 .detach();
1204 }
1205 }
1206 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1207 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1208 else {
1209 return;
1210 };
1211 if !worktree.read(cx).is_visible() {
1212 log::debug!(
1213 "not adding repositories for local worktree {:?} because it's not visible",
1214 worktree.read(cx).abs_path()
1215 );
1216 return;
1217 }
1218 self.update_repositories_from_worktree(
1219 *worktree_id,
1220 project_environment.clone(),
1221 next_repository_id.clone(),
1222 downstream
1223 .as_ref()
1224 .map(|downstream| downstream.updates_tx.clone()),
1225 changed_repos.clone(),
1226 fs.clone(),
1227 cx,
1228 );
1229 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1230 }
1231 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1232 let repos_without_worktree: Vec<RepositoryId> = self
1233 .worktree_ids
1234 .iter_mut()
1235 .filter_map(|(repo_id, worktree_ids)| {
1236 worktree_ids.remove(worktree_id);
1237 if worktree_ids.is_empty() {
1238 Some(*repo_id)
1239 } else {
1240 None
1241 }
1242 })
1243 .collect();
1244 let is_active_repo_removed = repos_without_worktree
1245 .iter()
1246 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1247
1248 for repo_id in repos_without_worktree {
1249 self.repositories.remove(&repo_id);
1250 self.worktree_ids.remove(&repo_id);
1251 if let Some(updates_tx) =
1252 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1253 {
1254 updates_tx
1255 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1256 .ok();
1257 }
1258 }
1259
1260 if is_active_repo_removed {
1261 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1262 self.active_repo_id = Some(repo_id);
1263 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1264 } else {
1265 self.active_repo_id = None;
1266 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1267 }
1268 }
1269 }
1270 _ => {}
1271 }
1272 }

    fn on_repository_event(
1274 &mut self,
1275 repo: Entity<Repository>,
1276 event: &RepositoryEvent,
1277 cx: &mut Context<Self>,
1278 ) {
1279 let id = repo.read(cx).id;
1280 let repo_snapshot = repo.read(cx).snapshot.clone();
1281 for (buffer_id, diff) in self.diffs.iter() {
1282 if let Some((buffer_repo, repo_path)) =
1283 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1284 && buffer_repo == repo
1285 {
1286 diff.update(cx, |diff, cx| {
1287 if let Some(conflict_set) = &diff.conflict_set {
1288 let conflict_status_changed =
1289 conflict_set.update(cx, |conflict_set, cx| {
1290 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1291 conflict_set.set_has_conflict(has_conflict, cx)
1292 })?;
1293 if conflict_status_changed {
1294 let buffer_store = self.buffer_store.read(cx);
1295 if let Some(buffer) = buffer_store.get(*buffer_id) {
1296 let _ = diff
1297 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1298 }
1299 }
1300 }
1301 anyhow::Ok(())
1302 })
1303 .ok();
1304 }
1305 }
1306 cx.emit(GitStoreEvent::RepositoryUpdated(
1307 id,
1308 event.clone(),
1309 self.active_repo_id == Some(id),
1310 ))
1311 }
1312
1313 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1314 cx.emit(GitStoreEvent::JobsUpdated)
1315 }
1316
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1318 fn update_repositories_from_worktree(
1319 &mut self,
1320 worktree_id: WorktreeId,
1321 project_environment: Entity<ProjectEnvironment>,
1322 next_repository_id: Arc<AtomicU64>,
1323 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1324 updated_git_repositories: UpdatedGitRepositoriesSet,
1325 fs: Arc<dyn Fs>,
1326 cx: &mut Context<Self>,
1327 ) {
1328 let mut removed_ids = Vec::new();
1329 for update in updated_git_repositories.iter() {
1330 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1331 let existing_work_directory_abs_path =
1332 repo.read(cx).work_directory_abs_path.clone();
1333 Some(&existing_work_directory_abs_path)
1334 == update.old_work_directory_abs_path.as_ref()
1335 || Some(&existing_work_directory_abs_path)
1336 == update.new_work_directory_abs_path.as_ref()
1337 }) {
1338 let repo_id = *id;
1339 if let Some(new_work_directory_abs_path) =
1340 update.new_work_directory_abs_path.clone()
1341 {
1342 self.worktree_ids
1343 .entry(repo_id)
1344 .or_insert_with(HashSet::new)
1345 .insert(worktree_id);
1346 existing.update(cx, |existing, cx| {
1347 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1348 existing.schedule_scan(updates_tx.clone(), cx);
1349 });
                } else if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                    worktree_ids.remove(&worktree_id);
                    if worktree_ids.is_empty() {
                        removed_ids.push(repo_id);
                    }
                }
1358 } else if let UpdatedGitRepository {
1359 new_work_directory_abs_path: Some(work_directory_abs_path),
1360 dot_git_abs_path: Some(dot_git_abs_path),
1361 repository_dir_abs_path: Some(_repository_dir_abs_path),
1362 common_dir_abs_path: Some(_common_dir_abs_path),
1363 ..
1364 } = update
1365 {
1366 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1367 let git_store = cx.weak_entity();
1368 let repo = cx.new(|cx| {
1369 let mut repo = Repository::local(
1370 id,
1371 work_directory_abs_path.clone(),
1372 dot_git_abs_path.clone(),
1373 project_environment.downgrade(),
1374 fs.clone(),
1375 git_store,
1376 cx,
1377 );
1378 if let Some(updates_tx) = updates_tx.as_ref() {
1379 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1380 updates_tx
1381 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1382 .ok();
1383 }
1384 repo.schedule_scan(updates_tx.clone(), cx);
1385 repo
1386 });
1387 self._subscriptions
1388 .push(cx.subscribe(&repo, Self::on_repository_event));
1389 self._subscriptions
1390 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1391 self.repositories.insert(id, repo);
1392 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1393 cx.emit(GitStoreEvent::RepositoryAdded);
1394 self.active_repo_id.get_or_insert_with(|| {
1395 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1396 id
1397 });
1398 }
1399 }
1400
1401 for id in removed_ids {
1402 if self.active_repo_id == Some(id) {
1403 self.active_repo_id = None;
1404 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1405 }
1406 self.repositories.remove(&id);
1407 if let Some(updates_tx) = updates_tx.as_ref() {
1408 updates_tx
1409 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1410 .ok();
1411 }
1412 }
1413 }
1414
1415 fn on_buffer_store_event(
1416 &mut self,
1417 _: Entity<BufferStore>,
1418 event: &BufferStoreEvent,
1419 cx: &mut Context<Self>,
1420 ) {
1421 match event {
1422 BufferStoreEvent::BufferAdded(buffer) => {
1423 cx.subscribe(buffer, |this, buffer, event, cx| {
1424 if let BufferEvent::LanguageChanged = event {
1425 let buffer_id = buffer.read(cx).remote_id();
1426 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1427 diff_state.update(cx, |diff_state, cx| {
1428 diff_state.buffer_language_changed(buffer, cx);
1429 });
1430 }
1431 }
1432 })
1433 .detach();
1434 }
1435 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1436 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1437 diffs.remove(buffer_id);
1438 }
1439 }
1440 BufferStoreEvent::BufferDropped(buffer_id) => {
1441 self.diffs.remove(buffer_id);
1442 for diffs in self.shared_diffs.values_mut() {
1443 diffs.remove(buffer_id);
1444 }
1445 }
1446 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1447 // Whenever a buffer's file path changes, it's possible that the
1448 // new path is actually a path that is being tracked by a git
1449 // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1451 let buffer_id = buffer.read(cx).remote_id();
1452 let diff_state = self.diffs.get(&buffer_id);
1453 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1454
1455 if let Some(diff_state) = diff_state
1456 && let Some((repo, repo_path)) = repo
1457 {
1458 let buffer = buffer.clone();
1459 let diff_state = diff_state.clone();
1460
1461 cx.spawn(async move |_git_store, cx| {
1462 async {
1463 let diff_bases_change = repo
1464 .update(cx, |repo, cx| {
1465 repo.load_committed_text(buffer_id, repo_path, cx)
1466 })?
1467 .await?;
1468
1469 diff_state.update(cx, |diff_state, cx| {
1470 let buffer_snapshot = buffer.read(cx).text_snapshot();
1471 diff_state.diff_bases_changed(
1472 buffer_snapshot,
1473 Some(diff_bases_change),
1474 cx,
1475 );
1476 })
1477 }
1478 .await
1479 .log_err();
1480 })
1481 .detach();
1482 }
1483 }
1484 _ => {}
1485 }
1486 }
1487
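    /// Recalculates diffs and reparses conflict markers for the given buffers, returning
    /// a future that resolves once all of the recalculations have completed.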
1488 pub fn recalculate_buffer_diffs(
1489 &mut self,
1490 buffers: Vec<Entity<Buffer>>,
1491 cx: &mut Context<Self>,
1492 ) -> impl Future<Output = ()> + use<> {
1493 let mut futures = Vec::new();
1494 for buffer in buffers {
1495 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1496 let buffer = buffer.read(cx).text_snapshot();
1497 diff_state.update(cx, |diff_state, cx| {
1498 diff_state.recalculate_diffs(buffer.clone(), cx);
1499 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1500 });
1501 futures.push(diff_state.update(cx, |diff_state, cx| {
1502 diff_state
1503 .reparse_conflict_markers(buffer, cx)
1504 .map(|_| {})
1505 .boxed()
1506 }));
1507 }
1508 }
1509 async move {
1510 futures::future::join_all(futures).await;
1511 }
1512 }
1513
1514 fn on_buffer_diff_event(
1515 &mut self,
1516 diff: Entity<buffer_diff::BufferDiff>,
1517 event: &BufferDiffEvent,
1518 cx: &mut Context<Self>,
1519 ) {
1520 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1521 let buffer_id = diff.read(cx).buffer_id;
1522 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1523 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1524 diff_state.hunk_staging_operation_count += 1;
1525 diff_state.hunk_staging_operation_count
1526 });
1527 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1528 let recv = repo.update(cx, |repo, cx| {
1529 log::debug!("hunks changed for {}", path.as_unix_str());
1530 repo.spawn_set_index_text_job(
1531 path,
1532 new_index_text.as_ref().map(|rope| rope.to_string()),
1533 Some(hunk_staging_operation_count),
1534 cx,
1535 )
1536 });
1537 let diff = diff.downgrade();
1538 cx.spawn(async move |this, cx| {
1539 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1540 diff.update(cx, |diff, cx| {
1541 diff.clear_pending_hunks(cx);
1542 })
1543 .ok();
1544 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1545 .ok();
1546 }
1547 })
1548 .detach();
1549 }
1550 }
1551 }
1552 }
1553
1554 fn local_worktree_git_repos_changed(
1555 &mut self,
1556 worktree: Entity<Worktree>,
1557 changed_repos: &UpdatedGitRepositoriesSet,
1558 cx: &mut Context<Self>,
1559 ) {
1560 log::debug!("local worktree repos changed");
1561 debug_assert!(worktree.read(cx).is_local());
1562
1563 for repository in self.repositories.values() {
1564 repository.update(cx, |repository, cx| {
1565 let repo_abs_path = &repository.work_directory_abs_path;
1566 if changed_repos.iter().any(|update| {
1567 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1568 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1569 }) {
1570 repository.reload_buffer_diff_bases(cx);
1571 }
1572 });
1573 }
1574 }
1575
1576 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1577 &self.repositories
1578 }
1579
1580 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1581 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1582 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1583 Some(status.status)
1584 }
1585
1586 pub fn repository_and_path_for_buffer_id(
1587 &self,
1588 buffer_id: BufferId,
1589 cx: &App,
1590 ) -> Option<(Entity<Repository>, RepoPath)> {
1591 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1592 let project_path = buffer.read(cx).project_path(cx)?;
1593 self.repository_and_path_for_project_path(&project_path, cx)
1594 }
1595
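    /// Returns the repository whose work directory most closely contains the given
    /// project path, along with the corresponding repository-relative path.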
1596 pub fn repository_and_path_for_project_path(
1597 &self,
1598 path: &ProjectPath,
1599 cx: &App,
1600 ) -> Option<(Entity<Repository>, RepoPath)> {
1601 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1602 self.repositories
1603 .values()
1604 .filter_map(|repo| {
1605 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1606 Some((repo.clone(), repo_path))
1607 })
1608 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1609 }
1610
1611 pub fn git_init(
1612 &self,
1613 path: Arc<Path>,
1614 fallback_branch_name: String,
1615 cx: &App,
1616 ) -> Task<Result<()>> {
1617 match &self.state {
1618 GitStoreState::Local { fs, .. } => {
1619 let fs = fs.clone();
1620 cx.background_executor()
1621 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1622 }
1623 GitStoreState::Remote {
1624 upstream_client,
1625 upstream_project_id: project_id,
1626 ..
1627 } => {
1628 let client = upstream_client.clone();
1629 let project_id = *project_id;
1630 cx.background_executor().spawn(async move {
1631 client
1632 .request(proto::GitInit {
                            project_id,
1634 abs_path: path.to_string_lossy().into_owned(),
1635 fallback_branch_name,
1636 })
1637 .await?;
1638 Ok(())
1639 })
1640 }
1641 }
1642 }
1643
1644 pub fn git_clone(
1645 &self,
1646 repo: String,
1647 path: impl Into<Arc<std::path::Path>>,
1648 cx: &App,
1649 ) -> Task<Result<()>> {
1650 let path = path.into();
1651 match &self.state {
1652 GitStoreState::Local { fs, .. } => {
1653 let fs = fs.clone();
1654 cx.background_executor()
1655 .spawn(async move { fs.git_clone(&repo, &path).await })
1656 }
1657 GitStoreState::Remote {
1658 upstream_client,
1659 upstream_project_id,
1660 ..
1661 } => {
1662 if upstream_client.is_via_collab() {
1663 return Task::ready(Err(anyhow!(
1664 "Git Clone isn't supported for project guests"
1665 )));
1666 }
1667 let request = upstream_client.request(proto::GitClone {
1668 project_id: *upstream_project_id,
1669 abs_path: path.to_string_lossy().into_owned(),
1670 remote_repo: repo,
1671 });
1672
1673 cx.background_spawn(async move {
1674 let result = request.await?;
1675
1676 match result.success {
1677 true => Ok(()),
1678 false => Err(anyhow!("Git Clone failed")),
1679 }
1680 })
1681 }
1682 }
1683 }
1684
1685 async fn handle_update_repository(
1686 this: Entity<Self>,
1687 envelope: TypedEnvelope<proto::UpdateRepository>,
1688 mut cx: AsyncApp,
1689 ) -> Result<()> {
1690 this.update(&mut cx, |this, cx| {
1691 let path_style = this.worktree_store.read(cx).path_style();
1692 let mut update = envelope.payload;
1693
1694 let id = RepositoryId::from_proto(update.id);
1695 let client = this.upstream_client().context("no upstream client")?;
1696
1697 let mut repo_subscription = None;
1698 let repo = this.repositories.entry(id).or_insert_with(|| {
1699 let git_store = cx.weak_entity();
1700 let repo = cx.new(|cx| {
1701 Repository::remote(
1702 id,
1703 Path::new(&update.abs_path).into(),
1704 path_style,
1705 ProjectId(update.project_id),
1706 client,
1707 git_store,
1708 cx,
1709 )
1710 });
1711 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1712 cx.emit(GitStoreEvent::RepositoryAdded);
1713 repo
1714 });
1715 this._subscriptions.extend(repo_subscription);
1716
1717 repo.update(cx, {
1718 let update = update.clone();
1719 |repo, cx| repo.apply_remote_update(update, cx)
1720 })?;
1721
1722 this.active_repo_id.get_or_insert_with(|| {
1723 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1724 id
1725 });
1726
1727 if let Some((client, project_id)) = this.downstream_client() {
1728 update.project_id = project_id.to_proto();
1729 client.send(update).log_err();
1730 }
1731 Ok(())
1732 })?
1733 }
1734
1735 async fn handle_remove_repository(
1736 this: Entity<Self>,
1737 envelope: TypedEnvelope<proto::RemoveRepository>,
1738 mut cx: AsyncApp,
1739 ) -> Result<()> {
1740 this.update(&mut cx, |this, cx| {
1741 let mut update = envelope.payload;
1742 let id = RepositoryId::from_proto(update.id);
1743 this.repositories.remove(&id);
1744 if let Some((client, project_id)) = this.downstream_client() {
1745 update.project_id = project_id.to_proto();
1746 client.send(update).log_err();
1747 }
1748 if this.active_repo_id == Some(id) {
1749 this.active_repo_id = None;
1750 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1751 }
1752 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1753 })
1754 }
1755
1756 async fn handle_git_init(
1757 this: Entity<Self>,
1758 envelope: TypedEnvelope<proto::GitInit>,
1759 cx: AsyncApp,
1760 ) -> Result<proto::Ack> {
1761 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1762 let name = envelope.payload.fallback_branch_name;
1763 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1764 .await?;
1765
1766 Ok(proto::Ack {})
1767 }
1768
1769 async fn handle_git_clone(
1770 this: Entity<Self>,
1771 envelope: TypedEnvelope<proto::GitClone>,
1772 cx: AsyncApp,
1773 ) -> Result<proto::GitCloneResponse> {
1774 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1775 let repo_name = envelope.payload.remote_repo;
1776 let result = cx
1777 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1778 .await;
1779
1780 Ok(proto::GitCloneResponse {
1781 success: result.is_ok(),
1782 })
1783 }
1784
1785 async fn handle_fetch(
1786 this: Entity<Self>,
1787 envelope: TypedEnvelope<proto::Fetch>,
1788 mut cx: AsyncApp,
1789 ) -> Result<proto::RemoteMessageResponse> {
1790 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1791 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1792 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1793 let askpass_id = envelope.payload.askpass_id;
1794
1795 let askpass = make_remote_delegate(
1796 this,
1797 envelope.payload.project_id,
1798 repository_id,
1799 askpass_id,
1800 &mut cx,
1801 );
1802
1803 let remote_output = repository_handle
1804 .update(&mut cx, |repository_handle, cx| {
1805 repository_handle.fetch(fetch_options, askpass, cx)
1806 })?
1807 .await??;
1808
1809 Ok(proto::RemoteMessageResponse {
1810 stdout: remote_output.stdout,
1811 stderr: remote_output.stderr,
1812 })
1813 }
1814
1815 async fn handle_push(
1816 this: Entity<Self>,
1817 envelope: TypedEnvelope<proto::Push>,
1818 mut cx: AsyncApp,
1819 ) -> Result<proto::RemoteMessageResponse> {
1820 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1821 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1822
1823 let askpass_id = envelope.payload.askpass_id;
1824 let askpass = make_remote_delegate(
1825 this,
1826 envelope.payload.project_id,
1827 repository_id,
1828 askpass_id,
1829 &mut cx,
1830 );
1831
1832 let options = envelope
1833 .payload
1834 .options
1835 .as_ref()
1836 .map(|_| match envelope.payload.options() {
1837 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1838 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1839 });
1840
1841 let branch_name = envelope.payload.branch_name.into();
1842 let remote_name = envelope.payload.remote_name.into();
1843
1844 let remote_output = repository_handle
1845 .update(&mut cx, |repository_handle, cx| {
1846 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1847 })?
1848 .await??;
1849 Ok(proto::RemoteMessageResponse {
1850 stdout: remote_output.stdout,
1851 stderr: remote_output.stderr,
1852 })
1853 }
1854
1855 async fn handle_pull(
1856 this: Entity<Self>,
1857 envelope: TypedEnvelope<proto::Pull>,
1858 mut cx: AsyncApp,
1859 ) -> Result<proto::RemoteMessageResponse> {
1860 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1861 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1862 let askpass_id = envelope.payload.askpass_id;
1863 let askpass = make_remote_delegate(
1864 this,
1865 envelope.payload.project_id,
1866 repository_id,
1867 askpass_id,
1868 &mut cx,
1869 );
1870
1871 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1872 let remote_name = envelope.payload.remote_name.into();
1873 let rebase = envelope.payload.rebase;
1874
1875 let remote_message = repository_handle
1876 .update(&mut cx, |repository_handle, cx| {
1877 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1878 })?
1879 .await??;
1880
1881 Ok(proto::RemoteMessageResponse {
1882 stdout: remote_message.stdout,
1883 stderr: remote_message.stderr,
1884 })
1885 }
1886
1887 async fn handle_stage(
1888 this: Entity<Self>,
1889 envelope: TypedEnvelope<proto::Stage>,
1890 mut cx: AsyncApp,
1891 ) -> Result<proto::Ack> {
1892 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1893 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1894
1895 let entries = envelope
1896 .payload
1897 .paths
1898 .into_iter()
1899 .map(|path| RepoPath::new(&path))
1900 .collect::<Result<Vec<_>>>()?;
1901
1902 repository_handle
1903 .update(&mut cx, |repository_handle, cx| {
1904 repository_handle.stage_entries(entries, cx)
1905 })?
1906 .await?;
1907 Ok(proto::Ack {})
1908 }
1909
1910 async fn handle_unstage(
1911 this: Entity<Self>,
1912 envelope: TypedEnvelope<proto::Unstage>,
1913 mut cx: AsyncApp,
1914 ) -> Result<proto::Ack> {
1915 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1916 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1917
1918 let entries = envelope
1919 .payload
1920 .paths
1921 .into_iter()
1922 .map(|path| RepoPath::new(&path))
1923 .collect::<Result<Vec<_>>>()?;
1924
1925 repository_handle
1926 .update(&mut cx, |repository_handle, cx| {
1927 repository_handle.unstage_entries(entries, cx)
1928 })?
1929 .await?;
1930
1931 Ok(proto::Ack {})
1932 }
1933
1934 async fn handle_stash(
1935 this: Entity<Self>,
1936 envelope: TypedEnvelope<proto::Stash>,
1937 mut cx: AsyncApp,
1938 ) -> Result<proto::Ack> {
1939 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1940 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1941
1942 let entries = envelope
1943 .payload
1944 .paths
1945 .into_iter()
1946 .map(|path| RepoPath::new(&path))
1947 .collect::<Result<Vec<_>>>()?;
1948
1949 repository_handle
1950 .update(&mut cx, |repository_handle, cx| {
1951 repository_handle.stash_entries(entries, cx)
1952 })?
1953 .await?;
1954
1955 Ok(proto::Ack {})
1956 }
1957
1958 async fn handle_stash_pop(
1959 this: Entity<Self>,
1960 envelope: TypedEnvelope<proto::StashPop>,
1961 mut cx: AsyncApp,
1962 ) -> Result<proto::Ack> {
1963 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1964 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1965 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1966
1967 repository_handle
1968 .update(&mut cx, |repository_handle, cx| {
1969 repository_handle.stash_pop(stash_index, cx)
1970 })?
1971 .await?;
1972
1973 Ok(proto::Ack {})
1974 }
1975
1976 async fn handle_stash_apply(
1977 this: Entity<Self>,
1978 envelope: TypedEnvelope<proto::StashApply>,
1979 mut cx: AsyncApp,
1980 ) -> Result<proto::Ack> {
1981 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1982 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1983 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1984
1985 repository_handle
1986 .update(&mut cx, |repository_handle, cx| {
1987 repository_handle.stash_apply(stash_index, cx)
1988 })?
1989 .await?;
1990
1991 Ok(proto::Ack {})
1992 }
1993
1994 async fn handle_stash_drop(
1995 this: Entity<Self>,
1996 envelope: TypedEnvelope<proto::StashDrop>,
1997 mut cx: AsyncApp,
1998 ) -> Result<proto::Ack> {
1999 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2000 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2001 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2002
2003 repository_handle
2004 .update(&mut cx, |repository_handle, cx| {
2005 repository_handle.stash_drop(stash_index, cx)
2006 })?
2007 .await??;
2008
2009 Ok(proto::Ack {})
2010 }
2011
2012 async fn handle_set_index_text(
2013 this: Entity<Self>,
2014 envelope: TypedEnvelope<proto::SetIndexText>,
2015 mut cx: AsyncApp,
2016 ) -> Result<proto::Ack> {
2017 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2018 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2019 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2020
2021 repository_handle
2022 .update(&mut cx, |repository_handle, cx| {
2023 repository_handle.spawn_set_index_text_job(
2024 repo_path,
2025 envelope.payload.text,
2026 None,
2027 cx,
2028 )
2029 })?
2030 .await??;
2031 Ok(proto::Ack {})
2032 }
2033
2034 async fn handle_run_hook(
2035 this: Entity<Self>,
2036 envelope: TypedEnvelope<proto::RunGitHook>,
2037 mut cx: AsyncApp,
2038 ) -> Result<proto::Ack> {
2039 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2040 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2041 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2042 repository_handle
2043 .update(&mut cx, |repository_handle, cx| {
2044 repository_handle.run_hook(hook, cx)
2045 })?
2046 .await??;
2047 Ok(proto::Ack {})
2048 }
2049
2050 async fn handle_commit(
2051 this: Entity<Self>,
2052 envelope: TypedEnvelope<proto::Commit>,
2053 mut cx: AsyncApp,
2054 ) -> Result<proto::Ack> {
2055 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2056 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2057 let askpass_id = envelope.payload.askpass_id;
2058
2059 let askpass = make_remote_delegate(
2060 this,
2061 envelope.payload.project_id,
2062 repository_id,
2063 askpass_id,
2064 &mut cx,
2065 );
2066
2067 let message = SharedString::from(envelope.payload.message);
2068 let name = envelope.payload.name.map(SharedString::from);
2069 let email = envelope.payload.email.map(SharedString::from);
2070 let options = envelope.payload.options.unwrap_or_default();
2071
2072 repository_handle
2073 .update(&mut cx, |repository_handle, cx| {
2074 repository_handle.commit(
2075 message,
2076 name.zip(email),
2077 CommitOptions {
2078 amend: options.amend,
2079 signoff: options.signoff,
2080 },
2081 askpass,
2082 cx,
2083 )
2084 })?
2085 .await??;
2086 Ok(proto::Ack {})
2087 }
2088
2089 async fn handle_get_remotes(
2090 this: Entity<Self>,
2091 envelope: TypedEnvelope<proto::GetRemotes>,
2092 mut cx: AsyncApp,
2093 ) -> Result<proto::GetRemotesResponse> {
2094 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2095 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2096
2097 let branch_name = envelope.payload.branch_name;
2098
2099 let remotes = repository_handle
2100 .update(&mut cx, |repository_handle, _| {
2101 repository_handle.get_remotes(branch_name)
2102 })?
2103 .await??;
2104
2105 Ok(proto::GetRemotesResponse {
2106 remotes: remotes
2107 .into_iter()
2108                .map(|remote| proto::get_remotes_response::Remote {
2109                    name: remote.name.to_string(),
2110 })
2111 .collect::<Vec<_>>(),
2112 })
2113 }
2114
2115 async fn handle_get_worktrees(
2116 this: Entity<Self>,
2117 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2118 mut cx: AsyncApp,
2119 ) -> Result<proto::GitWorktreesResponse> {
2120 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2121 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2122
2123 let worktrees = repository_handle
2124 .update(&mut cx, |repository_handle, _| {
2125 repository_handle.worktrees()
2126 })?
2127 .await??;
2128
2129 Ok(proto::GitWorktreesResponse {
2130 worktrees: worktrees
2131 .into_iter()
2132 .map(|worktree| worktree_to_proto(&worktree))
2133 .collect::<Vec<_>>(),
2134 })
2135 }
2136
2137 async fn handle_create_worktree(
2138 this: Entity<Self>,
2139 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2140 mut cx: AsyncApp,
2141 ) -> Result<proto::Ack> {
2142 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2143 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2144 let directory = PathBuf::from(envelope.payload.directory);
2145 let name = envelope.payload.name;
2146 let commit = envelope.payload.commit;
2147
2148 repository_handle
2149 .update(&mut cx, |repository_handle, _| {
2150 repository_handle.create_worktree(name, directory, commit)
2151 })?
2152 .await??;
2153
2154 Ok(proto::Ack {})
2155 }
2156
2157 async fn handle_get_branches(
2158 this: Entity<Self>,
2159 envelope: TypedEnvelope<proto::GitGetBranches>,
2160 mut cx: AsyncApp,
2161 ) -> Result<proto::GitBranchesResponse> {
2162 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2163 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2164
2165 let branches = repository_handle
2166 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2167 .await??;
2168
2169 Ok(proto::GitBranchesResponse {
2170 branches: branches
2171 .into_iter()
2172 .map(|branch| branch_to_proto(&branch))
2173 .collect::<Vec<_>>(),
2174 })
2175 }
2176 async fn handle_get_default_branch(
2177 this: Entity<Self>,
2178 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2179 mut cx: AsyncApp,
2180 ) -> Result<proto::GetDefaultBranchResponse> {
2181 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2182 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2183
2184 let branch = repository_handle
2185 .update(&mut cx, |repository_handle, _| {
2186 repository_handle.default_branch()
2187 })?
2188 .await??
2189 .map(Into::into);
2190
2191 Ok(proto::GetDefaultBranchResponse { branch })
2192 }
2193 async fn handle_create_branch(
2194 this: Entity<Self>,
2195 envelope: TypedEnvelope<proto::GitCreateBranch>,
2196 mut cx: AsyncApp,
2197 ) -> Result<proto::Ack> {
2198 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2199 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2200 let branch_name = envelope.payload.branch_name;
2201
2202 repository_handle
2203 .update(&mut cx, |repository_handle, _| {
2204 repository_handle.create_branch(branch_name, None)
2205 })?
2206 .await??;
2207
2208 Ok(proto::Ack {})
2209 }
2210
2211 async fn handle_change_branch(
2212 this: Entity<Self>,
2213 envelope: TypedEnvelope<proto::GitChangeBranch>,
2214 mut cx: AsyncApp,
2215 ) -> Result<proto::Ack> {
2216 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2217 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2218 let branch_name = envelope.payload.branch_name;
2219
2220 repository_handle
2221 .update(&mut cx, |repository_handle, _| {
2222 repository_handle.change_branch(branch_name)
2223 })?
2224 .await??;
2225
2226 Ok(proto::Ack {})
2227 }
2228
2229 async fn handle_rename_branch(
2230 this: Entity<Self>,
2231 envelope: TypedEnvelope<proto::GitRenameBranch>,
2232 mut cx: AsyncApp,
2233 ) -> Result<proto::Ack> {
2234 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2235 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2236 let branch = envelope.payload.branch;
2237 let new_name = envelope.payload.new_name;
2238
2239 repository_handle
2240 .update(&mut cx, |repository_handle, _| {
2241 repository_handle.rename_branch(branch, new_name)
2242 })?
2243 .await??;
2244
2245 Ok(proto::Ack {})
2246 }
2247
2248 async fn handle_show(
2249 this: Entity<Self>,
2250 envelope: TypedEnvelope<proto::GitShow>,
2251 mut cx: AsyncApp,
2252 ) -> Result<proto::GitCommitDetails> {
2253 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2254 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2255
2256 let commit = repository_handle
2257 .update(&mut cx, |repository_handle, _| {
2258 repository_handle.show(envelope.payload.commit)
2259 })?
2260 .await??;
2261 Ok(proto::GitCommitDetails {
2262 sha: commit.sha.into(),
2263 message: commit.message.into(),
2264 commit_timestamp: commit.commit_timestamp,
2265 author_email: commit.author_email.into(),
2266 author_name: commit.author_name.into(),
2267 })
2268 }
2269
2270 async fn handle_load_commit_diff(
2271 this: Entity<Self>,
2272 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2273 mut cx: AsyncApp,
2274 ) -> Result<proto::LoadCommitDiffResponse> {
2275 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2276 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2277
2278 let commit_diff = repository_handle
2279 .update(&mut cx, |repository_handle, _| {
2280 repository_handle.load_commit_diff(envelope.payload.commit)
2281 })?
2282 .await??;
2283 Ok(proto::LoadCommitDiffResponse {
2284 files: commit_diff
2285 .files
2286 .into_iter()
2287 .map(|file| proto::CommitFile {
2288 path: file.path.to_proto(),
2289 old_text: file.old_text,
2290 new_text: file.new_text,
2291 })
2292 .collect(),
2293 })
2294 }
2295
2296 async fn handle_reset(
2297 this: Entity<Self>,
2298 envelope: TypedEnvelope<proto::GitReset>,
2299 mut cx: AsyncApp,
2300 ) -> Result<proto::Ack> {
2301 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2302 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2303
2304 let mode = match envelope.payload.mode() {
2305 git_reset::ResetMode::Soft => ResetMode::Soft,
2306 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2307 };
2308
2309 repository_handle
2310 .update(&mut cx, |repository_handle, cx| {
2311 repository_handle.reset(envelope.payload.commit, mode, cx)
2312 })?
2313 .await??;
2314 Ok(proto::Ack {})
2315 }
2316
2317 async fn handle_checkout_files(
2318 this: Entity<Self>,
2319 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2320 mut cx: AsyncApp,
2321 ) -> Result<proto::Ack> {
2322 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2323 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2324 let paths = envelope
2325 .payload
2326 .paths
2327 .iter()
2328 .map(|s| RepoPath::from_proto(s))
2329 .collect::<Result<Vec<_>>>()?;
2330
2331 repository_handle
2332 .update(&mut cx, |repository_handle, cx| {
2333 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2334 })?
2335 .await?;
2336 Ok(proto::Ack {})
2337 }
2338
2339 async fn handle_open_commit_message_buffer(
2340 this: Entity<Self>,
2341 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2342 mut cx: AsyncApp,
2343 ) -> Result<proto::OpenBufferResponse> {
2344 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2345 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2346 let buffer = repository
2347 .update(&mut cx, |repository, cx| {
2348 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2349 })?
2350 .await?;
2351
2352 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2353 this.update(&mut cx, |this, cx| {
2354 this.buffer_store.update(cx, |buffer_store, cx| {
2355 buffer_store
2356 .create_buffer_for_peer(
2357 &buffer,
2358 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2359 cx,
2360 )
2361 .detach_and_log_err(cx);
2362 })
2363 })?;
2364
2365 Ok(proto::OpenBufferResponse {
2366 buffer_id: buffer_id.to_proto(),
2367 })
2368 }
2369
2370 async fn handle_askpass(
2371 this: Entity<Self>,
2372 envelope: TypedEnvelope<proto::AskPassRequest>,
2373 mut cx: AsyncApp,
2374 ) -> Result<proto::AskPassResponse> {
2375 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2376 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2377
2378 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2379 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2380 debug_panic!("no askpass found");
2381 anyhow::bail!("no askpass found");
2382 };
2383
2384 let response = askpass
2385 .ask_password(envelope.payload.prompt)
2386 .await
2387 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2388
2389 delegates
2390 .lock()
2391 .insert(envelope.payload.askpass_id, askpass);
2392
2393        // Note: despite the marker type below, the askpass password is sent back to the requester unencrypted here.
2394 Ok(proto::AskPassResponse {
2395 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2396 })
2397 }
2398
2399 async fn handle_check_for_pushed_commits(
2400 this: Entity<Self>,
2401 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2402 mut cx: AsyncApp,
2403 ) -> Result<proto::CheckForPushedCommitsResponse> {
2404 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2405 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2406
2407 let branches = repository_handle
2408 .update(&mut cx, |repository_handle, _| {
2409 repository_handle.check_for_pushed_commits()
2410 })?
2411 .await??;
2412 Ok(proto::CheckForPushedCommitsResponse {
2413 pushed_to: branches
2414 .into_iter()
2415                .map(|branch| branch.to_string())
2416 .collect(),
2417 })
2418 }
2419
2420 async fn handle_git_diff(
2421 this: Entity<Self>,
2422 envelope: TypedEnvelope<proto::GitDiff>,
2423 mut cx: AsyncApp,
2424 ) -> Result<proto::GitDiffResponse> {
2425 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2426 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2427 let diff_type = match envelope.payload.diff_type() {
2428 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2429 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2430 };
2431
2432 let mut diff = repository_handle
2433 .update(&mut cx, |repository_handle, cx| {
2434 repository_handle.diff(diff_type, cx)
2435 })?
2436 .await??;
2437 const ONE_MB: usize = 1_000_000;
2438 if diff.len() > ONE_MB {
2439 diff = diff.chars().take(ONE_MB).collect()
2440 }
2441
2442 Ok(proto::GitDiffResponse { diff })
2443 }
2444
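    /// Serves a tree-diff request by comparing `base` and `head`, either directly or
    /// via their merge base, and converting the resulting statuses to proto.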
2445 async fn handle_tree_diff(
2446 this: Entity<Self>,
2447 request: TypedEnvelope<proto::GetTreeDiff>,
2448 mut cx: AsyncApp,
2449 ) -> Result<proto::GetTreeDiffResponse> {
2450 let repository_id = RepositoryId(request.payload.repository_id);
2451 let diff_type = if request.payload.is_merge {
2452 DiffTreeType::MergeBase {
2453 base: request.payload.base.into(),
2454 head: request.payload.head.into(),
2455 }
2456 } else {
2457 DiffTreeType::Since {
2458 base: request.payload.base.into(),
2459 head: request.payload.head.into(),
2460 }
2461 };
2462
2463 let diff = this
2464 .update(&mut cx, |this, cx| {
2465 let repository = this.repositories().get(&repository_id)?;
2466 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2467 })?
2468 .context("missing repository")?
2469 .await??;
2470
2471 Ok(proto::GetTreeDiffResponse {
2472 entries: diff
2473 .entries
2474 .into_iter()
2475 .map(|(path, status)| proto::TreeDiffStatus {
2476 path: path.as_ref().to_proto(),
2477 status: match status {
2478 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2479 TreeDiffStatus::Modified { .. } => {
2480 proto::tree_diff_status::Status::Modified.into()
2481 }
2482 TreeDiffStatus::Deleted { .. } => {
2483 proto::tree_diff_status::Status::Deleted.into()
2484 }
2485 },
2486 oid: match status {
2487 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2488 Some(old.to_string())
2489 }
2490 TreeDiffStatus::Added => None,
2491 },
2492 })
2493 .collect(),
2494 })
2495 }
2496
2497 async fn handle_get_blob_content(
2498 this: Entity<Self>,
2499 request: TypedEnvelope<proto::GetBlobContent>,
2500 mut cx: AsyncApp,
2501 ) -> Result<proto::GetBlobContentResponse> {
2502 let oid = git::Oid::from_str(&request.payload.oid)?;
2503 let repository_id = RepositoryId(request.payload.repository_id);
2504 let content = this
2505 .update(&mut cx, |this, cx| {
2506 let repository = this.repositories().get(&repository_id)?;
2507 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2508 })?
2509 .context("missing repository")?
2510 .await?;
2511 Ok(proto::GetBlobContentResponse { content })
2512 }
2513
2514 async fn handle_open_unstaged_diff(
2515 this: Entity<Self>,
2516 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2517 mut cx: AsyncApp,
2518 ) -> Result<proto::OpenUnstagedDiffResponse> {
2519 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2520 let diff = this
2521 .update(&mut cx, |this, cx| {
2522 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2523 Some(this.open_unstaged_diff(buffer, cx))
2524 })?
2525 .context("missing buffer")?
2526 .await?;
2527 this.update(&mut cx, |this, _| {
2528 let shared_diffs = this
2529 .shared_diffs
2530 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2531 .or_default();
2532 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2533 })?;
2534 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2535 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2536 }
2537
2538 async fn handle_open_uncommitted_diff(
2539 this: Entity<Self>,
2540 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2541 mut cx: AsyncApp,
2542 ) -> Result<proto::OpenUncommittedDiffResponse> {
2543 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2544 let diff = this
2545 .update(&mut cx, |this, cx| {
2546 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2547 Some(this.open_uncommitted_diff(buffer, cx))
2548 })?
2549 .context("missing buffer")?
2550 .await?;
2551 this.update(&mut cx, |this, _| {
2552 let shared_diffs = this
2553 .shared_diffs
2554 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2555 .or_default();
2556 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2557 })?;
2558 diff.read_with(&cx, |diff, cx| {
2559 use proto::open_uncommitted_diff_response::Mode;
2560
2561 let unstaged_diff = diff.secondary_diff();
2562 let index_snapshot = unstaged_diff.and_then(|diff| {
2563 let diff = diff.read(cx);
2564 diff.base_text_exists().then(|| diff.base_text())
2565 });
2566
2567 let mode;
2568 let staged_text;
2569 let committed_text;
2570 if diff.base_text_exists() {
2571 let committed_snapshot = diff.base_text();
2572 committed_text = Some(committed_snapshot.text());
2573 if let Some(index_text) = index_snapshot {
2574 if index_text.remote_id() == committed_snapshot.remote_id() {
2575 mode = Mode::IndexMatchesHead;
2576 staged_text = None;
2577 } else {
2578 mode = Mode::IndexAndHead;
2579 staged_text = Some(index_text.text());
2580 }
2581 } else {
2582 mode = Mode::IndexAndHead;
2583 staged_text = None;
2584 }
2585 } else {
2586 mode = Mode::IndexAndHead;
2587 committed_text = None;
2588 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2589 }
2590
2591 proto::OpenUncommittedDiffResponse {
2592 committed_text,
2593 staged_text,
2594 mode: mode.into(),
2595 }
2596 })
2597 }
2598
2599 async fn handle_update_diff_bases(
2600 this: Entity<Self>,
2601 request: TypedEnvelope<proto::UpdateDiffBases>,
2602 mut cx: AsyncApp,
2603 ) -> Result<()> {
2604 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2605 this.update(&mut cx, |this, cx| {
2606 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2607 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2608 {
2609 let buffer = buffer.read(cx).text_snapshot();
2610 diff_state.update(cx, |diff_state, cx| {
2611 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2612 })
2613 }
2614 })
2615 }
2616
2617 async fn handle_blame_buffer(
2618 this: Entity<Self>,
2619 envelope: TypedEnvelope<proto::BlameBuffer>,
2620 mut cx: AsyncApp,
2621 ) -> Result<proto::BlameBufferResponse> {
2622 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2623 let version = deserialize_version(&envelope.payload.version);
2624 let buffer = this.read_with(&cx, |this, cx| {
2625 this.buffer_store.read(cx).get_existing(buffer_id)
2626 })??;
2627 buffer
2628 .update(&mut cx, |buffer, _| {
2629 buffer.wait_for_version(version.clone())
2630 })?
2631 .await?;
2632 let blame = this
2633 .update(&mut cx, |this, cx| {
2634 this.blame_buffer(&buffer, Some(version), cx)
2635 })?
2636 .await?;
2637 Ok(serialize_blame_buffer_response(blame))
2638 }
2639
2640 async fn handle_get_permalink_to_line(
2641 this: Entity<Self>,
2642 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2643 mut cx: AsyncApp,
2644 ) -> Result<proto::GetPermalinkToLineResponse> {
2645 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2647 let selection = {
2648 let proto_selection = envelope
2649 .payload
2650 .selection
2651 .context("no selection to get permalink for defined")?;
2652 proto_selection.start as u32..proto_selection.end as u32
2653 };
2654 let buffer = this.read_with(&cx, |this, cx| {
2655 this.buffer_store.read(cx).get_existing(buffer_id)
2656 })??;
2657 let permalink = this
2658 .update(&mut cx, |this, cx| {
2659 this.get_permalink_to_line(&buffer, selection, cx)
2660 })?
2661 .await?;
2662 Ok(proto::GetPermalinkToLineResponse {
2663 permalink: permalink.to_string(),
2664 })
2665 }
2666
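    /// Looks up the repository entity referenced by an incoming request, returning an
    /// error if the repository is no longer tracked by this store.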
2667 fn repository_for_request(
2668 this: &Entity<Self>,
2669 id: RepositoryId,
2670 cx: &mut AsyncApp,
2671 ) -> Result<Entity<Repository>> {
2672 this.read_with(cx, |this, _| {
2673 this.repositories
2674 .get(&id)
2675 .context("missing repository handle")
2676 .cloned()
2677 })?
2678 }
2679
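    /// Returns the current snapshot of every tracked repository, keyed by repository id.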
2680 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2681 self.repositories
2682 .iter()
2683 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2684 .collect()
2685 }
2686
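    /// Maps updated worktree entries to repository-relative paths, grouping them by the
    /// innermost repository containing each path; the matching runs on background tasks.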
2687 fn process_updated_entries(
2688 &self,
2689 worktree: &Entity<Worktree>,
2690 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2691 cx: &mut App,
2692 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2693 let path_style = worktree.read(cx).path_style();
2694 let mut repo_paths = self
2695 .repositories
2696 .values()
2697 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2698 .collect::<Vec<_>>();
2699 let mut entries: Vec<_> = updated_entries
2700 .iter()
2701 .map(|(path, _, _)| path.clone())
2702 .collect();
2703 entries.sort();
2704 let worktree = worktree.read(cx);
2705
2706 let entries = entries
2707 .into_iter()
2708 .map(|path| worktree.absolutize(&path))
2709 .collect::<Arc<[_]>>();
2710
2711 let executor = cx.background_executor().clone();
2712 cx.background_executor().spawn(async move {
2713 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2714 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2715 let mut tasks = FuturesOrdered::new();
2716 for (repo_path, repo) in repo_paths.into_iter().rev() {
2717 let entries = entries.clone();
2718 let task = executor.spawn(async move {
2719 // Find all repository paths that belong to this repo
2720 let mut ix = entries.partition_point(|path| path < &*repo_path);
2721 if ix == entries.len() {
2722 return None;
2723 };
2724
2725 let mut paths = Vec::new();
2726                // All paths prefixed by a given repo will constitute a contiguous range.
2727 while let Some(path) = entries.get(ix)
2728 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2729 &repo_path, path, path_style,
2730 )
2731 {
2732 paths.push((repo_path, ix));
2733 ix += 1;
2734 }
2735 if paths.is_empty() {
2736 None
2737 } else {
2738 Some((repo, paths))
2739 }
2740 });
2741 tasks.push_back(task);
2742 }
2743
2744 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2745 let mut path_was_used = vec![false; entries.len()];
2746 let tasks = tasks.collect::<Vec<_>>().await;
2747            // The tasks were created from repositories in reverse path order, so more deeply nested repositories come first.
2748            // We always want to assign a path to its innermost repository.
2749 for t in tasks {
2750 let Some((repo, paths)) = t else {
2751 continue;
2752 };
2753 let entry = paths_by_git_repo.entry(repo).or_default();
2754 for (repo_path, ix) in paths {
2755 if path_was_used[ix] {
2756 continue;
2757 }
2758 path_was_used[ix] = true;
2759 entry.push(repo_path);
2760 }
2761 }
2762
2763 paths_by_git_repo
2764 })
2765 }
2766}
2767
2768impl BufferGitState {
2769 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2770 Self {
2771 unstaged_diff: Default::default(),
2772 uncommitted_diff: Default::default(),
2773 recalculate_diff_task: Default::default(),
2774 language: Default::default(),
2775 language_registry: Default::default(),
2776 recalculating_tx: postage::watch::channel_with(false).0,
2777 hunk_staging_operation_count: 0,
2778 hunk_staging_operation_count_as_of_write: 0,
2779 head_text: Default::default(),
2780 index_text: Default::default(),
2781 head_changed: Default::default(),
2782 index_changed: Default::default(),
2783 language_changed: Default::default(),
2784 conflict_updated_futures: Default::default(),
2785 conflict_set: Default::default(),
2786 reparse_conflict_markers_task: Default::default(),
2787 }
2788 }
2789
2790 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2791 self.language = buffer.read(cx).language().cloned();
2792 self.language_changed = true;
2793 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2794 }
2795
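    /// Schedules a background reparse of conflict markers in `buffer`. The returned
    /// receiver fires once the conflict set snapshot has been updated, and is simply
    /// dropped if there is nothing to reparse.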
2796 fn reparse_conflict_markers(
2797 &mut self,
2798 buffer: text::BufferSnapshot,
2799 cx: &mut Context<Self>,
2800 ) -> oneshot::Receiver<()> {
2801 let (tx, rx) = oneshot::channel();
2802
2803 let Some(conflict_set) = self
2804 .conflict_set
2805 .as_ref()
2806 .and_then(|conflict_set| conflict_set.upgrade())
2807 else {
2808 return rx;
2809 };
2810
2811 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2812 if conflict_set.has_conflict {
2813 Some(conflict_set.snapshot())
2814 } else {
2815 None
2816 }
2817 });
2818
2819 if let Some(old_snapshot) = old_snapshot {
2820 self.conflict_updated_futures.push(tx);
2821 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2822 let (snapshot, changed_range) = cx
2823 .background_spawn(async move {
2824 let new_snapshot = ConflictSet::parse(&buffer);
2825 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2826 (new_snapshot, changed_range)
2827 })
2828 .await;
2829 this.update(cx, |this, cx| {
2830 if let Some(conflict_set) = &this.conflict_set {
2831 conflict_set
2832 .update(cx, |conflict_set, cx| {
2833 conflict_set.set_snapshot(snapshot, changed_range, cx);
2834 })
2835 .ok();
2836 }
2837 let futures = std::mem::take(&mut this.conflict_updated_futures);
2838 for tx in futures {
2839 tx.send(()).ok();
2840 }
2841 })
2842 }))
2843 }
2844
2845 rx
2846 }
2847
2848 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2849 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2850 }
2851
2852 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2853 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2854 }
2855
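    /// Applies base texts received from the remote host by translating the proto
    /// message mode into a `DiffBasesChange`.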
2856 fn handle_base_texts_updated(
2857 &mut self,
2858 buffer: text::BufferSnapshot,
2859 message: proto::UpdateDiffBases,
2860 cx: &mut Context<Self>,
2861 ) {
2862 use proto::update_diff_bases::Mode;
2863
2864 let Some(mode) = Mode::from_i32(message.mode) else {
2865 return;
2866 };
2867
2868 let diff_bases_change = match mode {
2869 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2870 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2871 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2872 Mode::IndexAndHead => DiffBasesChange::SetEach {
2873 index: message.staged_text,
2874 head: message.committed_text,
2875 },
2876 };
2877
2878 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2879 }
2880
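    /// Returns a future that resolves once the in-flight diff recalculation completes,
    /// or `None` if no recalculation is currently running.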
2881 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2882 if *self.recalculating_tx.borrow() {
2883 let mut rx = self.recalculating_tx.subscribe();
2884 Some(async move {
2885 loop {
2886 let is_recalculating = rx.recv().await;
2887 if is_recalculating != Some(true) {
2888 break;
2889 }
2890 }
2891 })
2892 } else {
2893 None
2894 }
2895 }
2896
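    /// Stores the new index and/or HEAD base texts, normalizing line endings, and then
    /// recalculates the diffs.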
2897 fn diff_bases_changed(
2898 &mut self,
2899 buffer: text::BufferSnapshot,
2900 diff_bases_change: Option<DiffBasesChange>,
2901 cx: &mut Context<Self>,
2902 ) {
2903 match diff_bases_change {
2904 Some(DiffBasesChange::SetIndex(index)) => {
2905 self.index_text = index.map(|mut index| {
2906 text::LineEnding::normalize(&mut index);
2907 Arc::new(index)
2908 });
2909 self.index_changed = true;
2910 }
2911 Some(DiffBasesChange::SetHead(head)) => {
2912 self.head_text = head.map(|mut head| {
2913 text::LineEnding::normalize(&mut head);
2914 Arc::new(head)
2915 });
2916 self.head_changed = true;
2917 }
2918 Some(DiffBasesChange::SetBoth(text)) => {
2919 let text = text.map(|mut text| {
2920 text::LineEnding::normalize(&mut text);
2921 Arc::new(text)
2922 });
2923 self.head_text = text.clone();
2924 self.index_text = text;
2925 self.head_changed = true;
2926 self.index_changed = true;
2927 }
2928 Some(DiffBasesChange::SetEach { index, head }) => {
2929 self.index_text = index.map(|mut index| {
2930 text::LineEnding::normalize(&mut index);
2931 Arc::new(index)
2932 });
2933 self.index_changed = true;
2934 self.head_text = head.map(|mut head| {
2935 text::LineEnding::normalize(&mut head);
2936 Arc::new(head)
2937 });
2938 self.head_changed = true;
2939 }
2940 None => {}
2941 }
2942
2943 self.recalculate_diffs(buffer, cx)
2944 }
2945
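    /// Recomputes the unstaged and uncommitted diffs against the current base texts on a
    /// background task, aborting if newer hunk staging operations occur in the meantime.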
2946 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2947 *self.recalculating_tx.borrow_mut() = true;
2948
2949 let language = self.language.clone();
2950 let language_registry = self.language_registry.clone();
2951 let unstaged_diff = self.unstaged_diff();
2952 let uncommitted_diff = self.uncommitted_diff();
2953 let head = self.head_text.clone();
2954 let index = self.index_text.clone();
2955 let index_changed = self.index_changed;
2956 let head_changed = self.head_changed;
2957 let language_changed = self.language_changed;
2958 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
2959 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2960 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2961 (None, None) => true,
2962 _ => false,
2963 };
2964 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2965 log::debug!(
2966 "start recalculating diffs for buffer {}",
2967 buffer.remote_id()
2968 );
2969
2970 let mut new_unstaged_diff = None;
2971 if let Some(unstaged_diff) = &unstaged_diff {
2972 new_unstaged_diff = Some(
2973 BufferDiff::update_diff(
2974 unstaged_diff.clone(),
2975 buffer.clone(),
2976 index,
2977 index_changed,
2978 language_changed,
2979 language.clone(),
2980 language_registry.clone(),
2981 cx,
2982 )
2983 .await?,
2984 );
2985 }
2986
2987 let mut new_uncommitted_diff = None;
2988 if let Some(uncommitted_diff) = &uncommitted_diff {
2989 new_uncommitted_diff = if index_matches_head {
2990 new_unstaged_diff.clone()
2991 } else {
2992 Some(
2993 BufferDiff::update_diff(
2994 uncommitted_diff.clone(),
2995 buffer.clone(),
2996 head,
2997 head_changed,
2998 language_changed,
2999 language.clone(),
3000 language_registry.clone(),
3001 cx,
3002 )
3003 .await?,
3004 )
3005 }
3006 }
3007
3008 let cancel = this.update(cx, |this, _| {
3009 // This checks whether all pending stage/unstage operations
3010 // have quiesced (i.e. both the corresponding write and the
3011 // read of that write have completed). If not, then we cancel
3012 // this recalculation attempt to avoid invalidating pending
3013 // state too quickly; another recalculation will come along
3014 // later and clear the pending state once the state of the index has settled.
3015 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3016 *this.recalculating_tx.borrow_mut() = false;
3017 true
3018 } else {
3019 false
3020 }
3021 })?;
3022 if cancel {
3023 log::debug!(
3024 concat!(
3025                    "aborting recalculating diffs for buffer {} ",
3026 "due to subsequent hunk operations",
3027 ),
3028 buffer.remote_id()
3029 );
3030 return Ok(());
3031 }
3032
3033 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3034 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3035 {
3036 unstaged_diff.update(cx, |diff, cx| {
3037 if language_changed {
3038 diff.language_changed(cx);
3039 }
3040 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3041 })?
3042 } else {
3043 None
3044 };
3045
3046 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3047 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3048 {
3049 uncommitted_diff.update(cx, |diff, cx| {
3050 if language_changed {
3051 diff.language_changed(cx);
3052 }
3053 diff.set_snapshot_with_secondary(
3054 new_uncommitted_diff,
3055 &buffer,
3056 unstaged_changed_range,
3057 true,
3058 cx,
3059 );
3060 })?;
3061 }
3062
3063 log::debug!(
3064 "finished recalculating diffs for buffer {}",
3065 buffer.remote_id()
3066 );
3067
3068 if let Some(this) = this.upgrade() {
3069 this.update(cx, |this, _| {
3070 this.index_changed = false;
3071 this.head_changed = false;
3072 this.language_changed = false;
3073 *this.recalculating_tx.borrow_mut() = false;
3074 })?;
3075 }
3076
3077 Ok(())
3078 }));
3079 }
3080}
3081
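/// Creates an askpass delegate that forwards credential prompts to the downstream client
/// via an `AskPassRequest` and relays the response back to the caller.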
3082fn make_remote_delegate(
3083 this: Entity<GitStore>,
3084 project_id: u64,
3085 repository_id: RepositoryId,
3086 askpass_id: u64,
3087 cx: &mut AsyncApp,
3088) -> AskPassDelegate {
3089 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3090 this.update(cx, |this, cx| {
3091 let Some((client, _)) = this.downstream_client() else {
3092 return;
3093 };
3094 let response = client.request(proto::AskPassRequest {
3095 project_id,
3096 repository_id: repository_id.to_proto(),
3097 askpass_id,
3098 prompt,
3099 });
3100 cx.spawn(async move |_, _| {
3101 let mut response = response.await?.response;
3102 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3103 .ok();
3104 response.zeroize();
3105 anyhow::Ok(())
3106 })
3107 .detach_and_log_err(cx);
3108 })
3109 .log_err();
3110 })
3111}
3112
3113impl RepositoryId {
3114 pub fn to_proto(self) -> u64 {
3115 self.0
3116 }
3117
3118 pub fn from_proto(id: u64) -> Self {
3119 RepositoryId(id)
3120 }
3121}
3122
3123impl RepositorySnapshot {
3124 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3125 Self {
3126 id,
3127 statuses_by_path: Default::default(),
3128 work_directory_abs_path,
3129 branch: None,
3130 head_commit: None,
3131 scan_id: 0,
3132 merge: Default::default(),
3133 remote_origin_url: None,
3134 remote_upstream_url: None,
3135 stash_entries: Default::default(),
3136 path_style,
3137 }
3138 }
3139
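    /// Builds the full `UpdateRepository` message describing this snapshot, used when the
    /// repository is first shared.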
3140 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3141 proto::UpdateRepository {
3142 branch_summary: self.branch.as_ref().map(branch_to_proto),
3143 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3144 updated_statuses: self
3145 .statuses_by_path
3146 .iter()
3147 .map(|entry| entry.to_proto())
3148 .collect(),
3149 removed_statuses: Default::default(),
3150 current_merge_conflicts: self
3151 .merge
3152 .conflicted_paths
3153 .iter()
3154 .map(|repo_path| repo_path.to_proto())
3155 .collect(),
3156 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3157 project_id,
3158 id: self.id.to_proto(),
3159 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3160 entry_ids: vec![self.id.to_proto()],
3161 scan_id: self.scan_id,
3162 is_last_update: true,
3163 stash_entries: self
3164 .stash_entries
3165 .entries
3166 .iter()
3167 .map(stash_to_proto)
3168 .collect(),
3169 }
3170 }
3171
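    /// Builds an incremental `UpdateRepository` message by comparing this snapshot with
    /// `old`, reporting only statuses that were added, changed, or removed.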
3172 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3173 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3174 let mut removed_statuses: Vec<String> = Vec::new();
3175
3176 let mut new_statuses = self.statuses_by_path.iter().peekable();
3177 let mut old_statuses = old.statuses_by_path.iter().peekable();
3178
3179 let mut current_new_entry = new_statuses.next();
3180 let mut current_old_entry = old_statuses.next();
3181 loop {
3182 match (current_new_entry, current_old_entry) {
3183 (Some(new_entry), Some(old_entry)) => {
3184 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3185 Ordering::Less => {
3186 updated_statuses.push(new_entry.to_proto());
3187 current_new_entry = new_statuses.next();
3188 }
3189 Ordering::Equal => {
3190 if new_entry.status != old_entry.status {
3191 updated_statuses.push(new_entry.to_proto());
3192 }
3193 current_old_entry = old_statuses.next();
3194 current_new_entry = new_statuses.next();
3195 }
3196 Ordering::Greater => {
3197 removed_statuses.push(old_entry.repo_path.to_proto());
3198 current_old_entry = old_statuses.next();
3199 }
3200 }
3201 }
3202 (None, Some(old_entry)) => {
3203 removed_statuses.push(old_entry.repo_path.to_proto());
3204 current_old_entry = old_statuses.next();
3205 }
3206 (Some(new_entry), None) => {
3207 updated_statuses.push(new_entry.to_proto());
3208 current_new_entry = new_statuses.next();
3209 }
3210 (None, None) => break,
3211 }
3212 }
3213
3214 proto::UpdateRepository {
3215 branch_summary: self.branch.as_ref().map(branch_to_proto),
3216 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3217 updated_statuses,
3218 removed_statuses,
3219 current_merge_conflicts: self
3220 .merge
3221 .conflicted_paths
3222 .iter()
3223 .map(|path| path.to_proto())
3224 .collect(),
3225 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3226 project_id,
3227 id: self.id.to_proto(),
3228 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3229 entry_ids: vec![],
3230 scan_id: self.scan_id,
3231 is_last_update: true,
3232 stash_entries: self
3233 .stash_entries
3234 .entries
3235 .iter()
3236 .map(stash_to_proto)
3237 .collect(),
3238 }
3239 }
3240
3241 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3242 self.statuses_by_path.iter().cloned()
3243 }
3244
3245 pub fn status_summary(&self) -> GitSummary {
3246 self.statuses_by_path.summary().item_summary
3247 }
3248
3249 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3250 self.statuses_by_path
3251 .get(&PathKey(path.as_ref().clone()), ())
3252 .cloned()
3253 }
3254
3255 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3256 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3257 }
3258
3259 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3260 self.path_style
3261 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3262 .unwrap()
3263 .into()
3264 }
3265
3266 #[inline]
3267 fn abs_path_to_repo_path_inner(
3268 work_directory_abs_path: &Path,
3269 abs_path: &Path,
3270 path_style: PathStyle,
3271 ) -> Option<RepoPath> {
3272 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3273 Some(RepoPath::from_rel_path(&rel_path))
3274 }
3275
3276 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3277 self.merge.conflicted_paths.contains(repo_path)
3278 }
3279
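    /// Returns true if the path was conflicted when the merge heads last changed, or is
    /// currently reported as conflicted.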
3280 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3281 let had_conflict_on_last_merge_head_change =
3282 self.merge.conflicted_paths.contains(repo_path);
3283 let has_conflict_currently = self
3284 .status_for_path(repo_path)
3285 .is_some_and(|entry| entry.status.is_conflicted());
3286 had_conflict_on_last_merge_head_change || has_conflict_currently
3287 }
3288
3289 /// This is the name that will be displayed in the repository selector for this repository.
3290 pub fn display_name(&self) -> SharedString {
3291 self.work_directory_abs_path
3292 .file_name()
3293 .unwrap_or_default()
3294 .to_string_lossy()
3295 .to_string()
3296 .into()
3297 }
3298}
3299
3300pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3301 proto::StashEntry {
3302 oid: entry.oid.as_bytes().to_vec(),
3303 message: entry.message.clone(),
3304 branch: entry.branch.clone(),
3305 index: entry.index as u64,
3306 timestamp: entry.timestamp,
3307 }
3308}
3309
3310pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3311 Ok(StashEntry {
3312 oid: Oid::from_bytes(&entry.oid)?,
3313 message: entry.message.clone(),
3314 index: entry.index as usize,
3315 branch: entry.branch.clone(),
3316 timestamp: entry.timestamp,
3317 })
3318}
3319
3320impl MergeDetails {
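    /// Loads the current merge state (merge heads, merge message, and conflicted paths),
    /// returning it along with a flag indicating whether the merge heads should be
    /// treated as changed relative to `prev_snapshot`.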
3321 async fn load(
3322 backend: &Arc<dyn GitRepository>,
3323 status: &SumTree<StatusEntry>,
3324 prev_snapshot: &RepositorySnapshot,
3325 ) -> Result<(MergeDetails, bool)> {
3326 log::debug!("load merge details");
3327 let message = backend.merge_message().await;
3328 let heads = backend
3329 .revparse_batch(vec![
3330 "MERGE_HEAD".into(),
3331 "CHERRY_PICK_HEAD".into(),
3332 "REBASE_HEAD".into(),
3333 "REVERT_HEAD".into(),
3334 "APPLY_HEAD".into(),
3335 ])
3336 .await
3337 .log_err()
3338 .unwrap_or_default()
3339 .into_iter()
3340 .map(|opt| opt.map(SharedString::from))
3341 .collect::<Vec<_>>();
3342 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3343 let conflicted_paths = if merge_heads_changed {
3344 let current_conflicted_paths = TreeSet::from_ordered_entries(
3345 status
3346 .iter()
3347 .filter(|entry| entry.status.is_conflicted())
3348 .map(|entry| entry.repo_path.clone()),
3349 );
3350
3351            // A scan can run while a lengthy merge is in progress that will
3352            // eventually result in conflicts, but before `git status` has begun
3353            // to report those conflicts. Since for the moment we only use the
3354            // merge heads state to track conflicts, don't update it until we
3355            // actually see some conflicts.
3356 if heads.iter().any(Option::is_some)
3357 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3358 && current_conflicted_paths.is_empty()
3359 {
3360 log::debug!("not updating merge heads because no conflicts found");
3361 return Ok((
3362 MergeDetails {
3363 message: message.map(SharedString::from),
3364 ..prev_snapshot.merge.clone()
3365 },
3366 false,
3367 ));
3368 }
3369
3370 current_conflicted_paths
3371 } else {
3372 prev_snapshot.merge.conflicted_paths.clone()
3373 };
3374 let details = MergeDetails {
3375 conflicted_paths,
3376 message: message.map(SharedString::from),
3377 heads,
3378 };
3379 Ok((details, merge_heads_changed))
3380 }
3381}
3382
3383impl Repository {
3384 pub fn snapshot(&self) -> RepositorySnapshot {
3385 self.snapshot.clone()
3386 }
3387
3388 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3389 self.pending_ops.iter().cloned()
3390 }
3391
3392 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3393 self.pending_ops.summary().clone()
3394 }
3395
3396 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3397 self.pending_ops
3398 .get(&PathKey(path.as_ref().clone()), ())
3399 .cloned()
3400 }
3401
3402 fn local(
3403 id: RepositoryId,
3404 work_directory_abs_path: Arc<Path>,
3405 dot_git_abs_path: Arc<Path>,
3406 project_environment: WeakEntity<ProjectEnvironment>,
3407 fs: Arc<dyn Fs>,
3408 git_store: WeakEntity<GitStore>,
3409 cx: &mut Context<Self>,
3410 ) -> Self {
3411 let snapshot =
3412 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3413 let state = cx
3414 .spawn(async move |_, cx| {
3415 LocalRepositoryState::new(
3416 work_directory_abs_path,
3417 dot_git_abs_path,
3418 project_environment,
3419 fs,
3420 cx,
3421 )
3422 .await
3423 .map_err(|err| err.to_string())
3424 })
3425 .shared();
3426 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3427 let state = cx
3428 .spawn(async move |_, _| {
3429 let state = state.await?;
3430 Ok(RepositoryState::Local(state))
3431 })
3432 .shared();
3433
3434 Repository {
3435 this: cx.weak_entity(),
3436 git_store,
3437 snapshot,
3438 pending_ops: Default::default(),
3439 repository_state: state,
3440 commit_message_buffer: None,
3441 askpass_delegates: Default::default(),
3442 paths_needing_status_update: Default::default(),
3443 latest_askpass_id: 0,
3444 job_sender,
3445 job_id: 0,
3446 active_jobs: Default::default(),
3447 }
3448 }
3449
3450 fn remote(
3451 id: RepositoryId,
3452 work_directory_abs_path: Arc<Path>,
3453 path_style: PathStyle,
3454 project_id: ProjectId,
3455 client: AnyProtoClient,
3456 git_store: WeakEntity<GitStore>,
3457 cx: &mut Context<Self>,
3458 ) -> Self {
3459 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3460 let repository_state = RemoteRepositoryState { project_id, client };
3461 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3462 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3463 Self {
3464 this: cx.weak_entity(),
3465 snapshot,
3466 commit_message_buffer: None,
3467 git_store,
3468 pending_ops: Default::default(),
3469 paths_needing_status_update: Default::default(),
3470 job_sender,
3471 repository_state,
3472 askpass_delegates: Default::default(),
3473 latest_askpass_id: 0,
3474 active_jobs: Default::default(),
3475 job_id: 0,
3476 }
3477 }
3478
3479 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3480 self.git_store.upgrade()
3481 }
3482
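    /// Reloads the index and HEAD base texts for every open buffer in this repository,
    /// propagating any changes to the buffers' diff state and to downstream clients.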
3483 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3484 let this = cx.weak_entity();
3485 let git_store = self.git_store.clone();
3486 let _ = self.send_keyed_job(
3487 Some(GitJobKey::ReloadBufferDiffBases),
3488 None,
3489 |state, mut cx| async move {
3490 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3491 log::error!("tried to recompute diffs for a non-local repository");
3492 return Ok(());
3493 };
3494
3495 let Some(this) = this.upgrade() else {
3496 return Ok(());
3497 };
3498
3499 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3500 git_store.update(cx, |git_store, cx| {
3501 git_store
3502 .diffs
3503 .iter()
3504 .filter_map(|(buffer_id, diff_state)| {
3505 let buffer_store = git_store.buffer_store.read(cx);
3506 let buffer = buffer_store.get(*buffer_id)?;
3507 let file = File::from_dyn(buffer.read(cx).file())?;
3508 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3509 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3510 log::debug!(
3511 "start reload diff bases for repo path {}",
3512 repo_path.as_unix_str()
3513 );
3514 diff_state.update(cx, |diff_state, _| {
3515 let has_unstaged_diff = diff_state
3516 .unstaged_diff
3517 .as_ref()
3518 .is_some_and(|diff| diff.is_upgradable());
3519 let has_uncommitted_diff = diff_state
3520 .uncommitted_diff
3521 .as_ref()
3522 .is_some_and(|set| set.is_upgradable());
3523
3524 Some((
3525 buffer,
3526 repo_path,
3527 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3528 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3529 ))
3530 })
3531 })
3532 .collect::<Vec<_>>()
3533 })
3534 })??;
3535
3536 let buffer_diff_base_changes = cx
3537 .background_spawn(async move {
3538 let mut changes = Vec::new();
3539 for (buffer, repo_path, current_index_text, current_head_text) in
3540 &repo_diff_state_updates
3541 {
3542 let index_text = if current_index_text.is_some() {
3543 backend.load_index_text(repo_path.clone()).await
3544 } else {
3545 None
3546 };
3547 let head_text = if current_head_text.is_some() {
3548 backend.load_committed_text(repo_path.clone()).await
3549 } else {
3550 None
3551 };
3552
3553 let change =
3554 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3555 (Some(current_index), Some(current_head)) => {
3556 let index_changed =
3557 index_text.as_ref() != current_index.as_deref();
3558 let head_changed =
3559 head_text.as_ref() != current_head.as_deref();
3560 if index_changed && head_changed {
3561 if index_text == head_text {
3562 Some(DiffBasesChange::SetBoth(head_text))
3563 } else {
3564 Some(DiffBasesChange::SetEach {
3565 index: index_text,
3566 head: head_text,
3567 })
3568 }
3569 } else if index_changed {
3570 Some(DiffBasesChange::SetIndex(index_text))
3571 } else if head_changed {
3572 Some(DiffBasesChange::SetHead(head_text))
3573 } else {
3574 None
3575 }
3576 }
3577 (Some(current_index), None) => {
3578 let index_changed =
3579 index_text.as_ref() != current_index.as_deref();
3580 index_changed
3581 .then_some(DiffBasesChange::SetIndex(index_text))
3582 }
3583 (None, Some(current_head)) => {
3584 let head_changed =
3585 head_text.as_ref() != current_head.as_deref();
3586 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3587 }
3588 (None, None) => None,
3589 };
3590
3591 changes.push((buffer.clone(), change))
3592 }
3593 changes
3594 })
3595 .await;
3596
3597 git_store.update(&mut cx, |git_store, cx| {
3598 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3599 let buffer_snapshot = buffer.read(cx).text_snapshot();
3600 let buffer_id = buffer_snapshot.remote_id();
3601 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3602 continue;
3603 };
3604
3605 let downstream_client = git_store.downstream_client();
3606 diff_state.update(cx, |diff_state, cx| {
3607 use proto::update_diff_bases::Mode;
3608
3609 if let Some((diff_bases_change, (client, project_id))) =
3610 diff_bases_change.clone().zip(downstream_client)
3611 {
3612 let (staged_text, committed_text, mode) = match diff_bases_change {
3613 DiffBasesChange::SetIndex(index) => {
3614 (index, None, Mode::IndexOnly)
3615 }
3616 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3617 DiffBasesChange::SetEach { index, head } => {
3618 (index, head, Mode::IndexAndHead)
3619 }
3620 DiffBasesChange::SetBoth(text) => {
3621 (None, text, Mode::IndexMatchesHead)
3622 }
3623 };
3624 client
3625 .send(proto::UpdateDiffBases {
3626 project_id: project_id.to_proto(),
3627 buffer_id: buffer_id.to_proto(),
3628 staged_text,
3629 committed_text,
3630 mode: mode as i32,
3631 })
3632 .log_err();
3633 }
3634
3635 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3636 });
3637 }
3638 })
3639 },
3640 );
3641 }
3642
3643 pub fn send_job<F, Fut, R>(
3644 &mut self,
3645 status: Option<SharedString>,
3646 job: F,
3647 ) -> oneshot::Receiver<R>
3648 where
3649 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3650 Fut: Future<Output = R> + 'static,
3651 R: Send + 'static,
3652 {
3653 self.send_keyed_job(None, status, job)
3654 }
3655
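    /// Enqueues a job on the repository's worker, optionally tagged with a key and with a
    /// status message that is tracked in `active_jobs` while the job runs.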
3656 fn send_keyed_job<F, Fut, R>(
3657 &mut self,
3658 key: Option<GitJobKey>,
3659 status: Option<SharedString>,
3660 job: F,
3661 ) -> oneshot::Receiver<R>
3662 where
3663 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3664 Fut: Future<Output = R> + 'static,
3665 R: Send + 'static,
3666 {
3667 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3668 let job_id = post_inc(&mut self.job_id);
3669 let this = self.this.clone();
3670 self.job_sender
3671 .unbounded_send(GitJob {
3672 key,
3673 job: Box::new(move |state, cx: &mut AsyncApp| {
3674 let job = job(state, cx.clone());
3675 cx.spawn(async move |cx| {
3676 if let Some(s) = status.clone() {
3677 this.update(cx, |this, cx| {
3678 this.active_jobs.insert(
3679 job_id,
3680 JobInfo {
3681 start: Instant::now(),
3682 message: s.clone(),
3683 },
3684 );
3685
3686 cx.notify();
3687 })
3688 .ok();
3689 }
3690 let result = job.await;
3691
3692 this.update(cx, |this, cx| {
3693 this.active_jobs.remove(&job_id);
3694 cx.notify();
3695 })
3696 .ok();
3697
3698 result_tx.send(result).ok();
3699 })
3700 }),
3701 })
3702 .ok();
3703 result_rx
3704 }
3705
3706 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3707 let Some(git_store) = self.git_store.upgrade() else {
3708 return;
3709 };
3710 let entity = cx.entity();
3711 git_store.update(cx, |git_store, cx| {
3712 let Some((&id, _)) = git_store
3713 .repositories
3714 .iter()
3715 .find(|(_, handle)| *handle == &entity)
3716 else {
3717 return;
3718 };
3719 git_store.active_repo_id = Some(id);
3720 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3721 });
3722 }
3723
3724 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3725 self.snapshot.status()
3726 }
3727
3728 pub fn cached_stash(&self) -> GitStash {
3729 self.snapshot.stash_entries.clone()
3730 }
3731
3732 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3733 let git_store = self.git_store.upgrade()?;
3734 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3735 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3736 let abs_path = SanitizedPath::new(&abs_path);
3737 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3738 Some(ProjectPath {
3739 worktree_id: worktree.read(cx).id(),
3740 path: relative_path,
3741 })
3742 }
3743
3744 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3745 let git_store = self.git_store.upgrade()?;
3746 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3747 let abs_path = worktree_store.absolutize(path, cx)?;
3748 self.snapshot.abs_path_to_repo_path(&abs_path)
3749 }
3750
3751 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3752 other
3753 .read(cx)
3754 .snapshot
3755 .work_directory_abs_path
3756 .starts_with(&self.snapshot.work_directory_abs_path)
3757 }
3758
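    /// Returns the buffer used for editing this repository's commit message,
    /// creating it on first use (locally, or by requesting it from the remote
    /// host) and assigning it the "Git Commit" language when available. The
    /// buffer is cached on the repository for subsequent calls.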
3759 pub fn open_commit_buffer(
3760 &mut self,
3761 languages: Option<Arc<LanguageRegistry>>,
3762 buffer_store: Entity<BufferStore>,
3763 cx: &mut Context<Self>,
3764 ) -> Task<Result<Entity<Buffer>>> {
3765 let id = self.id;
3766 if let Some(buffer) = self.commit_message_buffer.clone() {
3767 return Task::ready(Ok(buffer));
3768 }
3769 let this = cx.weak_entity();
3770
3771 let rx = self.send_job(None, move |state, mut cx| async move {
3772 let Some(this) = this.upgrade() else {
3773 bail!("repository was dropped");
3774 };
3775 match state {
3776 RepositoryState::Local(..) => {
3777 this.update(&mut cx, |_, cx| {
3778 Self::open_local_commit_buffer(languages, buffer_store, cx)
3779 })?
3780 .await
3781 }
3782 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3783 let request = client.request(proto::OpenCommitMessageBuffer {
3784 project_id: project_id.0,
3785 repository_id: id.to_proto(),
3786 });
3787 let response = request.await.context("requesting to open commit buffer")?;
3788 let buffer_id = BufferId::new(response.buffer_id)?;
3789 let buffer = buffer_store
3790 .update(&mut cx, |buffer_store, cx| {
3791 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3792 })?
3793 .await?;
3794 if let Some(language_registry) = languages {
3795 let git_commit_language =
3796 language_registry.language_for_name("Git Commit").await?;
3797 buffer.update(&mut cx, |buffer, cx| {
3798 buffer.set_language(Some(git_commit_language), cx);
3799 })?;
3800 }
3801 this.update(&mut cx, |this, _| {
3802 this.commit_message_buffer = Some(buffer.clone());
3803 })?;
3804 Ok(buffer)
3805 }
3806 }
3807 });
3808
3809 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3810 }
3811
3812 fn open_local_commit_buffer(
3813 language_registry: Option<Arc<LanguageRegistry>>,
3814 buffer_store: Entity<BufferStore>,
3815 cx: &mut Context<Self>,
3816 ) -> Task<Result<Entity<Buffer>>> {
3817 cx.spawn(async move |repository, cx| {
3818 let buffer = buffer_store
3819 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3820 .await?;
3821
3822 if let Some(language_registry) = language_registry {
3823 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3824 buffer.update(cx, |buffer, cx| {
3825 buffer.set_language(Some(git_commit_language), cx);
3826 })?;
3827 }
3828
3829 repository.update(cx, |repository, _| {
3830 repository.commit_message_buffer = Some(buffer.clone());
3831 })?;
3832 Ok(buffer)
3833 })
3834 }
3835
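    /// Restores the given paths to their contents at `commit`, tracking the
    /// affected paths as a pending `Reverted` operation while the job runs.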
3836 pub fn checkout_files(
3837 &mut self,
3838 commit: &str,
3839 paths: Vec<RepoPath>,
3840 cx: &mut Context<Self>,
3841 ) -> Task<Result<()>> {
3842 let commit = commit.to_string();
3843 let id = self.id;
3844
3845 self.spawn_job_with_tracking(
3846 paths.clone(),
3847 pending_op::GitStatus::Reverted,
3848 cx,
3849 async move |this, cx| {
3850 this.update(cx, |this, _cx| {
3851 this.send_job(
3852 Some(format!("git checkout {}", commit).into()),
3853 move |git_repo, _| async move {
3854 match git_repo {
3855 RepositoryState::Local(LocalRepositoryState {
3856 backend,
3857 environment,
3858 ..
3859 }) => {
3860 backend
3861 .checkout_files(commit, paths, environment.clone())
3862 .await
3863 }
3864 RepositoryState::Remote(RemoteRepositoryState {
3865 project_id,
3866 client,
3867 }) => {
3868 client
3869 .request(proto::GitCheckoutFiles {
3870 project_id: project_id.0,
3871 repository_id: id.to_proto(),
3872 commit,
3873 paths: paths
3874 .into_iter()
3875 .map(|p| p.to_proto())
3876 .collect(),
3877 })
3878 .await?;
3879
3880 Ok(())
3881 }
3882 }
3883 },
3884 )
3885 })?
3886 .await?
3887 },
3888 )
3889 }
3890
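    /// Resets the repository to `commit` with the given `ResetMode`, either
    /// via the local backend or by forwarding the request to the remote host.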
3891 pub fn reset(
3892 &mut self,
3893 commit: String,
3894 reset_mode: ResetMode,
3895 _cx: &mut App,
3896 ) -> oneshot::Receiver<Result<()>> {
3897 let id = self.id;
3898
3899 self.send_job(None, move |git_repo, _| async move {
3900 match git_repo {
3901 RepositoryState::Local(LocalRepositoryState {
3902 backend,
3903 environment,
3904 ..
3905 }) => backend.reset(commit, reset_mode, environment).await,
3906 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3907 client
3908 .request(proto::GitReset {
3909 project_id: project_id.0,
3910 repository_id: id.to_proto(),
3911 commit,
3912 mode: match reset_mode {
3913 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3914 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3915 },
3916 })
3917 .await?;
3918
3919 Ok(())
3920 }
3921 }
3922 })
3923 }
3924
3925 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3926 let id = self.id;
3927 self.send_job(None, move |git_repo, _cx| async move {
3928 match git_repo {
3929 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3930 backend.show(commit).await
3931 }
3932 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3933 let resp = client
3934 .request(proto::GitShow {
3935 project_id: project_id.0,
3936 repository_id: id.to_proto(),
3937 commit,
3938 })
3939 .await?;
3940
3941 Ok(CommitDetails {
3942 sha: resp.sha.into(),
3943 message: resp.message.into(),
3944 commit_timestamp: resp.commit_timestamp,
3945 author_email: resp.author_email.into(),
3946 author_name: resp.author_name.into(),
3947 })
3948 }
3949 }
3950 })
3951 }
3952
3953 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3954 let id = self.id;
3955 self.send_job(None, move |git_repo, cx| async move {
3956 match git_repo {
3957 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3958 backend.load_commit(commit, cx).await
3959 }
3960 RepositoryState::Remote(RemoteRepositoryState {
3961 client, project_id, ..
3962 }) => {
3963 let response = client
3964 .request(proto::LoadCommitDiff {
3965 project_id: project_id.0,
3966 repository_id: id.to_proto(),
3967 commit,
3968 })
3969 .await?;
3970 Ok(CommitDiff {
3971 files: response
3972 .files
3973 .into_iter()
3974 .map(|file| {
3975 Ok(CommitFile {
3976 path: RepoPath::from_proto(&file.path)?,
3977 old_text: file.old_text,
3978 new_text: file.new_text,
3979 })
3980 })
3981 .collect::<Result<Vec<_>>>()?,
3982 })
3983 }
3984 }
3985 })
3986 }
3987
3988 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
3989 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
3990 }
3991
3992 fn save_buffers<'a>(
3993 &self,
3994 entries: impl IntoIterator<Item = &'a RepoPath>,
3995 cx: &mut Context<Self>,
3996 ) -> Vec<Task<anyhow::Result<()>>> {
3997 let mut save_futures = Vec::new();
3998 if let Some(buffer_store) = self.buffer_store(cx) {
3999 buffer_store.update(cx, |buffer_store, cx| {
4000 for path in entries {
4001 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4002 continue;
4003 };
4004 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4005 && buffer
4006 .read(cx)
4007 .file()
4008 .is_some_and(|file| file.disk_state().exists())
4009 && buffer.read(cx).has_unsaved_edits()
4010 {
4011 save_futures.push(buffer_store.save_buffer(buffer, cx));
4012 }
4013 }
4014 })
4015 }
4016 save_futures
4017 }
4018
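    /// Stages the given paths, first saving any open buffers for them that
    /// have unsaved edits. The work is tracked as a pending "staged" operation
    /// and queued under a `GitJobKey::WriteIndex` key covering the same paths.
    ///
    /// Illustrative use (assumes a `Repository` entity `repo`, a gpui context
    /// `cx`, and a `RepoPath` value `path`):
    ///
    /// ```ignore
    /// let task = repo.update(cx, |repo, cx| repo.stage_entries(vec![path], cx));
    /// ```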
4019 pub fn stage_entries(
4020 &mut self,
4021 entries: Vec<RepoPath>,
4022 cx: &mut Context<Self>,
4023 ) -> Task<anyhow::Result<()>> {
4024 if entries.is_empty() {
4025 return Task::ready(Ok(()));
4026 }
4027 let id = self.id;
4028 let save_tasks = self.save_buffers(&entries, cx);
4029 let paths = entries
4030 .iter()
4031 .map(|p| p.as_unix_str())
4032 .collect::<Vec<_>>()
4033 .join(" ");
4034 let status = format!("git add {paths}");
4035 let job_key = GitJobKey::WriteIndex(entries.clone());
4036
4037 self.spawn_job_with_tracking(
4038 entries.clone(),
4039 pending_op::GitStatus::Staged,
4040 cx,
4041 async move |this, cx| {
4042 for save_task in save_tasks {
4043 save_task.await?;
4044 }
4045
4046 this.update(cx, |this, _| {
4047 this.send_keyed_job(
4048 Some(job_key),
4049 Some(status.into()),
4050 move |git_repo, _cx| async move {
4051 match git_repo {
4052 RepositoryState::Local(LocalRepositoryState {
4053 backend,
4054 environment,
4055 ..
4056 }) => backend.stage_paths(entries, environment.clone()).await,
4057 RepositoryState::Remote(RemoteRepositoryState {
4058 project_id,
4059 client,
4060 }) => {
4061 client
4062 .request(proto::Stage {
4063 project_id: project_id.0,
4064 repository_id: id.to_proto(),
4065 paths: entries
4066 .into_iter()
4067 .map(|repo_path| repo_path.to_proto())
4068 .collect(),
4069 })
4070 .await
4071 .context("sending stage request")?;
4072
4073 Ok(())
4074 }
4075 }
4076 },
4077 )
4078 })?
4079 .await?
4080 },
4081 )
4082 }
4083
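    /// Unstages the given paths, first saving any open buffers for them that
    /// have unsaved edits. Mirrors `stage_entries`, but tracks the work as a
    /// pending "unstaged" operation.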
4084 pub fn unstage_entries(
4085 &mut self,
4086 entries: Vec<RepoPath>,
4087 cx: &mut Context<Self>,
4088 ) -> Task<anyhow::Result<()>> {
4089 if entries.is_empty() {
4090 return Task::ready(Ok(()));
4091 }
4092 let id = self.id;
4093 let save_tasks = self.save_buffers(&entries, cx);
4094 let paths = entries
4095 .iter()
4096 .map(|p| p.as_unix_str())
4097 .collect::<Vec<_>>()
4098 .join(" ");
4099 let status = format!("git reset {paths}");
4100 let job_key = GitJobKey::WriteIndex(entries.clone());
4101
4102 self.spawn_job_with_tracking(
4103 entries.clone(),
4104 pending_op::GitStatus::Unstaged,
4105 cx,
4106 async move |this, cx| {
4107 for save_task in save_tasks {
4108 save_task.await?;
4109 }
4110
4111 this.update(cx, |this, _| {
4112 this.send_keyed_job(
4113 Some(job_key),
4114 Some(status.into()),
4115 move |git_repo, _cx| async move {
4116 match git_repo {
4117 RepositoryState::Local(LocalRepositoryState {
4118 backend,
4119 environment,
4120 ..
4121 }) => backend.unstage_paths(entries, environment).await,
4122 RepositoryState::Remote(RemoteRepositoryState {
4123 project_id,
4124 client,
4125 }) => {
4126 client
4127 .request(proto::Unstage {
4128 project_id: project_id.0,
4129 repository_id: id.to_proto(),
4130 paths: entries
4131 .into_iter()
4132 .map(|repo_path| repo_path.to_proto())
4133 .collect(),
4134 })
4135 .await
4136 .context("sending unstage request")?;
4137
4138 Ok(())
4139 }
4140 }
4141 },
4142 )
4143 })?
4144 .await?
4145 },
4146 )
4147 }
4148
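    /// Stages every status entry that isn't already staged (or already being
    /// staged), consulting pending operations when present and the cached git
    /// status otherwise.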
4149 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4150 let to_stage = self
4151 .cached_status()
4152 .filter_map(|entry| {
4153 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4154 if ops.staging() || ops.staged() {
4155 None
4156 } else {
4157 Some(entry.repo_path)
4158 }
4159 } else if entry.status.staging().is_fully_staged() {
4160 None
4161 } else {
4162 Some(entry.repo_path)
4163 }
4164 })
4165 .collect();
4166 self.stage_entries(to_stage, cx)
4167 }
4168
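    /// Unstages every status entry that is currently staged (or being
    /// staged), consulting pending operations when present and the cached git
    /// status otherwise.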
4169 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4170 let to_unstage = self
4171 .cached_status()
4172 .filter_map(|entry| {
4173 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4174 if !ops.staging() && !ops.staged() {
4175 None
4176 } else {
4177 Some(entry.repo_path)
4178 }
4179 } else if entry.status.staging().is_fully_unstaged() {
4180 None
4181 } else {
4182 Some(entry.repo_path)
4183 }
4184 })
4185 .collect();
4186 self.unstage_entries(to_unstage, cx)
4187 }
4188
4189 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4190 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4191
4192 self.stash_entries(to_stash, cx)
4193 }
4194
4195 pub fn stash_entries(
4196 &mut self,
4197 entries: Vec<RepoPath>,
4198 cx: &mut Context<Self>,
4199 ) -> Task<anyhow::Result<()>> {
4200 let id = self.id;
4201
4202 cx.spawn(async move |this, cx| {
4203 this.update(cx, |this, _| {
4204 this.send_job(None, move |git_repo, _cx| async move {
4205 match git_repo {
4206 RepositoryState::Local(LocalRepositoryState {
4207 backend,
4208 environment,
4209 ..
4210 }) => backend.stash_paths(entries, environment).await,
4211 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4212 client
4213 .request(proto::Stash {
4214 project_id: project_id.0,
4215 repository_id: id.to_proto(),
4216 paths: entries
4217 .into_iter()
4218 .map(|repo_path| repo_path.to_proto())
4219 .collect(),
4220 })
4221 .await
4222 .context("sending stash request")?;
4223 Ok(())
4224 }
4225 }
4226 })
4227 })?
4228 .await??;
4229 Ok(())
4230 })
4231 }
4232
4233 pub fn stash_pop(
4234 &mut self,
4235 index: Option<usize>,
4236 cx: &mut Context<Self>,
4237 ) -> Task<anyhow::Result<()>> {
4238 let id = self.id;
4239 cx.spawn(async move |this, cx| {
4240 this.update(cx, |this, _| {
4241 this.send_job(None, move |git_repo, _cx| async move {
4242 match git_repo {
4243 RepositoryState::Local(LocalRepositoryState {
4244 backend,
4245 environment,
4246 ..
4247 }) => backend.stash_pop(index, environment).await,
4248 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4249 client
4250 .request(proto::StashPop {
4251 project_id: project_id.0,
4252 repository_id: id.to_proto(),
4253 stash_index: index.map(|i| i as u64),
4254 })
4255 .await
4256 .context("sending stash pop request")?;
4257 Ok(())
4258 }
4259 }
4260 })
4261 })?
4262 .await??;
4263 Ok(())
4264 })
4265 }
4266
4267 pub fn stash_apply(
4268 &mut self,
4269 index: Option<usize>,
4270 cx: &mut Context<Self>,
4271 ) -> Task<anyhow::Result<()>> {
4272 let id = self.id;
4273 cx.spawn(async move |this, cx| {
4274 this.update(cx, |this, _| {
4275 this.send_job(None, move |git_repo, _cx| async move {
4276 match git_repo {
4277 RepositoryState::Local(LocalRepositoryState {
4278 backend,
4279 environment,
4280 ..
4281 }) => backend.stash_apply(index, environment).await,
4282 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4283 client
4284 .request(proto::StashApply {
4285 project_id: project_id.0,
4286 repository_id: id.to_proto(),
4287 stash_index: index.map(|i| i as u64),
4288 })
4289 .await
4290 .context("sending stash apply request")?;
4291 Ok(())
4292 }
4293 }
4294 })
4295 })?
4296 .await??;
4297 Ok(())
4298 })
4299 }
4300
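    /// Drops the stash entry at `index` (or the latest entry when `None`). On
    /// a local repository the stash list is re-read afterwards, the snapshot
    /// is updated, and the change is forwarded to any downstream client.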
4301 pub fn stash_drop(
4302 &mut self,
4303 index: Option<usize>,
4304 cx: &mut Context<Self>,
4305 ) -> oneshot::Receiver<anyhow::Result<()>> {
4306 let id = self.id;
4307 let updates_tx = self
4308 .git_store()
4309 .and_then(|git_store| match &git_store.read(cx).state {
4310 GitStoreState::Local { downstream, .. } => downstream
4311 .as_ref()
4312 .map(|downstream| downstream.updates_tx.clone()),
4313 _ => None,
4314 });
4315 let this = cx.weak_entity();
4316 self.send_job(None, move |git_repo, mut cx| async move {
4317 match git_repo {
4318 RepositoryState::Local(LocalRepositoryState {
4319 backend,
4320 environment,
4321 ..
4322 }) => {
4323 // TODO would be nice to not have to do this manually
4324 let result = backend.stash_drop(index, environment).await;
4325 if result.is_ok()
4326 && let Ok(stash_entries) = backend.stash_entries().await
4327 {
4328 let snapshot = this.update(&mut cx, |this, cx| {
4329 this.snapshot.stash_entries = stash_entries;
4330 cx.emit(RepositoryEvent::StashEntriesChanged);
4331 this.snapshot.clone()
4332 })?;
4333 if let Some(updates_tx) = updates_tx {
4334 updates_tx
4335 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4336 .ok();
4337 }
4338 }
4339
4340 result
4341 }
4342 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4343 client
4344 .request(proto::StashDrop {
4345 project_id: project_id.0,
4346 repository_id: id.to_proto(),
4347 stash_index: index.map(|i| i as u64),
4348 })
4349 .await
4350 .context("sending stash drop request")?;
4351 Ok(())
4352 }
4353 }
4354 })
4355 }
4356
4357 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4358 let id = self.id;
4359 self.send_job(
4360 Some(format!("git hook {}", hook.as_str()).into()),
4361 move |git_repo, _cx| async move {
4362 match git_repo {
4363 RepositoryState::Local(LocalRepositoryState {
4364 backend,
4365 environment,
4366 ..
4367 }) => backend.run_hook(hook, environment.clone()).await,
4368 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4369 client
4370 .request(proto::RunGitHook {
4371 project_id: project_id.0,
4372 repository_id: id.to_proto(),
4373 hook: hook.to_proto(),
4374 })
4375 .await?;
4376
4377 Ok(())
4378 }
4379 }
4380 },
4381 )
4382 }
4383
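    /// Creates a commit with the given message, optional author override, and
    /// options. The pre-commit hook is run first; the commit job only
    /// proceeds once the hook has completed successfully.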
4384 pub fn commit(
4385 &mut self,
4386 message: SharedString,
4387 name_and_email: Option<(SharedString, SharedString)>,
4388 options: CommitOptions,
4389 askpass: AskPassDelegate,
4390 cx: &mut App,
4391 ) -> oneshot::Receiver<Result<()>> {
4392 let id = self.id;
4393 let askpass_delegates = self.askpass_delegates.clone();
4394 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4395
4396 let rx = self.run_hook(RunHook::PreCommit, cx);
4397
4398 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4399 rx.await??;
4400
4401 match git_repo {
4402 RepositoryState::Local(LocalRepositoryState {
4403 backend,
4404 environment,
4405 ..
4406 }) => {
4407 backend
4408 .commit(message, name_and_email, options, askpass, environment)
4409 .await
4410 }
4411 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4412 askpass_delegates.lock().insert(askpass_id, askpass);
4413 let _defer = util::defer(|| {
4414 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4415 debug_assert!(askpass_delegate.is_some());
4416 });
4417 let (name, email) = name_and_email.unzip();
4418 client
4419 .request(proto::Commit {
4420 project_id: project_id.0,
4421 repository_id: id.to_proto(),
4422 message: String::from(message),
4423 name: name.map(String::from),
4424 email: email.map(String::from),
4425 options: Some(proto::commit::CommitOptions {
4426 amend: options.amend,
4427 signoff: options.signoff,
4428 }),
4429 askpass_id,
4430 })
4431 .await
4432 .context("sending commit request")?;
4433
4434 Ok(())
4435 }
4436 }
4437 })
4438 }
4439
4440 pub fn fetch(
4441 &mut self,
4442 fetch_options: FetchOptions,
4443 askpass: AskPassDelegate,
4444 _cx: &mut App,
4445 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4446 let askpass_delegates = self.askpass_delegates.clone();
4447 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4448 let id = self.id;
4449
4450 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4451 match git_repo {
4452 RepositoryState::Local(LocalRepositoryState {
4453 backend,
4454 environment,
4455 ..
4456 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4457 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4458 askpass_delegates.lock().insert(askpass_id, askpass);
4459 let _defer = util::defer(|| {
4460 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4461 debug_assert!(askpass_delegate.is_some());
4462 });
4463
4464 let response = client
4465 .request(proto::Fetch {
4466 project_id: project_id.0,
4467 repository_id: id.to_proto(),
4468 askpass_id,
4469 remote: fetch_options.to_proto(),
4470 })
4471 .await
4472 .context("sending fetch request")?;
4473
4474 Ok(RemoteCommandOutput {
4475 stdout: response.stdout,
4476 stderr: response.stderr,
4477 })
4478 }
4479 }
4480 })
4481 }
4482
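    /// Pushes `branch` to `remote`, optionally with `--set-upstream` or
    /// `--force-with-lease`. On success against a local repository, the head
    /// branch is re-read so upstream tracking information in the snapshot
    /// stays current, and the updated snapshot is forwarded to any downstream
    /// client.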
4483 pub fn push(
4484 &mut self,
4485 branch: SharedString,
4486 remote: SharedString,
4487 options: Option<PushOptions>,
4488 askpass: AskPassDelegate,
4489 cx: &mut Context<Self>,
4490 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4491 let askpass_delegates = self.askpass_delegates.clone();
4492 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4493 let id = self.id;
4494
4495 let args = options
4496 .map(|option| match option {
4497 PushOptions::SetUpstream => " --set-upstream",
4498 PushOptions::Force => " --force-with-lease",
4499 })
4500 .unwrap_or("");
4501
4502 let updates_tx = self
4503 .git_store()
4504 .and_then(|git_store| match &git_store.read(cx).state {
4505 GitStoreState::Local { downstream, .. } => downstream
4506 .as_ref()
4507 .map(|downstream| downstream.updates_tx.clone()),
4508 _ => None,
4509 });
4510
4511 let this = cx.weak_entity();
4512 self.send_job(
4513 Some(format!("git push{} {} {}", args, remote, branch).into()),
4514 move |git_repo, mut cx| async move {
4515 match git_repo {
4516 RepositoryState::Local(LocalRepositoryState {
4517 backend,
4518 environment,
4519 ..
4520 }) => {
4521 let result = backend
4522 .push(
4523 branch.to_string(),
4524 remote.to_string(),
4525 options,
4526 askpass,
4527 environment.clone(),
4528 cx.clone(),
4529 )
4530 .await;
4531 // TODO would be nice to not have to do this manually
4532 if result.is_ok() {
4533 let branches = backend.branches().await?;
4534 let branch = branches.into_iter().find(|branch| branch.is_head);
4535 log::info!("head branch after scan is {branch:?}");
4536 let snapshot = this.update(&mut cx, |this, cx| {
4537 this.snapshot.branch = branch;
4538 cx.emit(RepositoryEvent::BranchChanged);
4539 this.snapshot.clone()
4540 })?;
4541 if let Some(updates_tx) = updates_tx {
4542 updates_tx
4543 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4544 .ok();
4545 }
4546 }
4547 result
4548 }
4549 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4550 askpass_delegates.lock().insert(askpass_id, askpass);
4551 let _defer = util::defer(|| {
4552 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4553 debug_assert!(askpass_delegate.is_some());
4554 });
4555 let response = client
4556 .request(proto::Push {
4557 project_id: project_id.0,
4558 repository_id: id.to_proto(),
4559 askpass_id,
4560 branch_name: branch.to_string(),
4561 remote_name: remote.to_string(),
4562 options: options.map(|options| match options {
4563 PushOptions::Force => proto::push::PushOptions::Force,
4564 PushOptions::SetUpstream => {
4565 proto::push::PushOptions::SetUpstream
4566 }
4567 }
4568 as i32),
4569 })
4570 .await
4571 .context("sending push request")?;
4572
4573 Ok(RemoteCommandOutput {
4574 stdout: response.stdout,
4575 stderr: response.stderr,
4576 })
4577 }
4578 }
4579 },
4580 )
4581 }
4582
4583 pub fn pull(
4584 &mut self,
4585 branch: Option<SharedString>,
4586 remote: SharedString,
4587 rebase: bool,
4588 askpass: AskPassDelegate,
4589 _cx: &mut App,
4590 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4591 let askpass_delegates = self.askpass_delegates.clone();
4592 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4593 let id = self.id;
4594
4595 let mut status = "git pull".to_string();
4596 if rebase {
4597 status.push_str(" --rebase");
4598 }
4599 status.push_str(&format!(" {}", remote));
4600 if let Some(b) = &branch {
4601 status.push_str(&format!(" {}", b));
4602 }
4603
4604 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4605 match git_repo {
4606 RepositoryState::Local(LocalRepositoryState {
4607 backend,
4608 environment,
4609 ..
4610 }) => {
4611 backend
4612 .pull(
4613 branch.as_ref().map(|b| b.to_string()),
4614 remote.to_string(),
4615 rebase,
4616 askpass,
4617 environment.clone(),
4618 cx,
4619 )
4620 .await
4621 }
4622 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4623 askpass_delegates.lock().insert(askpass_id, askpass);
4624 let _defer = util::defer(|| {
4625 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4626 debug_assert!(askpass_delegate.is_some());
4627 });
4628 let response = client
4629 .request(proto::Pull {
4630 project_id: project_id.0,
4631 repository_id: id.to_proto(),
4632 askpass_id,
4633 rebase,
4634 branch_name: branch.as_ref().map(|b| b.to_string()),
4635 remote_name: remote.to_string(),
4636 })
4637 .await
4638 .context("sending pull request")?;
4639
4640 Ok(RemoteCommandOutput {
4641 stdout: response.stdout,
4642 stderr: response.stderr,
4643 })
4644 }
4645 }
4646 })
4647 }
4648
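    /// Queues an update of the index text for `path` to `content`. Jobs are
    /// keyed by the path, so a newer write to the same path supersedes an
    /// older queued one. After the write, the buffer's diff state records the
    /// hunk-staging operation count the write corresponds to, if one was
    /// provided.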
4649 fn spawn_set_index_text_job(
4650 &mut self,
4651 path: RepoPath,
4652 content: Option<String>,
4653 hunk_staging_operation_count: Option<usize>,
4654 cx: &mut Context<Self>,
4655 ) -> oneshot::Receiver<anyhow::Result<()>> {
4656 let id = self.id;
4657 let this = cx.weak_entity();
4658 let git_store = self.git_store.clone();
4659 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4660 self.send_keyed_job(
4661 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4662 None,
4663 move |git_repo, mut cx| async move {
4664 log::debug!(
4665 "start updating index text for buffer {}",
4666 path.as_unix_str()
4667 );
4668
4669 match git_repo {
4670 RepositoryState::Local(LocalRepositoryState {
4671 fs,
4672 backend,
4673 environment,
4674 ..
4675 }) => {
4676 let executable = match fs.metadata(&abs_path).await {
4677 Ok(Some(meta)) => meta.is_executable,
4678 Ok(None) => false,
4679 Err(_err) => false,
4680 };
4681 backend
4682 .set_index_text(path.clone(), content, environment.clone(), executable)
4683 .await?;
4684 }
4685 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4686 client
4687 .request(proto::SetIndexText {
4688 project_id: project_id.0,
4689 repository_id: id.to_proto(),
4690 path: path.to_proto(),
4691 text: content,
4692 })
4693 .await?;
4694 }
4695 }
4696 log::debug!(
4697 "finish updating index text for buffer {}",
4698 path.as_unix_str()
4699 );
4700
4701 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4702 let project_path = this
4703 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4704 .ok()
4705 .flatten();
4706 git_store.update(&mut cx, |git_store, cx| {
4707 let buffer_id = git_store
4708 .buffer_store
4709 .read(cx)
4710 .get_by_path(&project_path?)?
4711 .read(cx)
4712 .remote_id();
4713 let diff_state = git_store.diffs.get(&buffer_id)?;
4714 diff_state.update(cx, |diff_state, _| {
4715 diff_state.hunk_staging_operation_count_as_of_write =
4716 hunk_staging_operation_count;
4717 });
4718 Some(())
4719 })?;
4720 }
4721 Ok(())
4722 },
4723 )
4724 }
4725
4726 pub fn get_remotes(
4727 &mut self,
4728 branch_name: Option<String>,
4729 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4730 let id = self.id;
4731 self.send_job(None, move |repo, _cx| async move {
4732 match repo {
4733 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4734 backend.get_remotes(branch_name).await
4735 }
4736 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4737 let response = client
4738 .request(proto::GetRemotes {
4739 project_id: project_id.0,
4740 repository_id: id.to_proto(),
4741 branch_name,
4742 })
4743 .await?;
4744
4745 let remotes = response
4746 .remotes
4747 .into_iter()
4748 .map(|remote| git::repository::Remote {
4749 name: remote.name.into(),
4750 })
4751 .collect();
4752
4753 Ok(remotes)
4754 }
4755 }
4756 })
4757 }
4758
4759 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4760 let id = self.id;
4761 self.send_job(None, move |repo, _| async move {
4762 match repo {
4763 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4764 backend.branches().await
4765 }
4766 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4767 let response = client
4768 .request(proto::GitGetBranches {
4769 project_id: project_id.0,
4770 repository_id: id.to_proto(),
4771 })
4772 .await?;
4773
4774 let branches = response
4775 .branches
4776 .into_iter()
4777 .map(|branch| proto_to_branch(&branch))
4778 .collect();
4779
4780 Ok(branches)
4781 }
4782 }
4783 })
4784 }
4785
4786 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4787 let id = self.id;
4788 self.send_job(None, move |repo, _| async move {
4789 match repo {
4790 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4791 backend.worktrees().await
4792 }
4793 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4794 let response = client
4795 .request(proto::GitGetWorktrees {
4796 project_id: project_id.0,
4797 repository_id: id.to_proto(),
4798 })
4799 .await?;
4800
4801 let worktrees = response
4802 .worktrees
4803 .into_iter()
4804 .map(|worktree| proto_to_worktree(&worktree))
4805 .collect();
4806
4807 Ok(worktrees)
4808 }
4809 }
4810 })
4811 }
4812
4813 pub fn create_worktree(
4814 &mut self,
4815 name: String,
4816 path: PathBuf,
4817 commit: Option<String>,
4818 ) -> oneshot::Receiver<Result<()>> {
4819 let id = self.id;
4820 self.send_job(
4821 Some("git worktree add".into()),
4822 move |repo, _cx| async move {
4823 match repo {
4824 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4825 backend.create_worktree(name, path, commit).await
4826 }
4827 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4828 client
4829 .request(proto::GitCreateWorktree {
4830 project_id: project_id.0,
4831 repository_id: id.to_proto(),
4832 name,
4833 directory: path.to_string_lossy().to_string(),
4834 commit,
4835 })
4836 .await?;
4837
4838 Ok(())
4839 }
4840 }
4841 },
4842 )
4843 }
4844
4845 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4846 let id = self.id;
4847 self.send_job(None, move |repo, _| async move {
4848 match repo {
4849 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4850 backend.default_branch().await
4851 }
4852 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4853 let response = client
4854 .request(proto::GetDefaultBranch {
4855 project_id: project_id.0,
4856 repository_id: id.to_proto(),
4857 })
4858 .await?;
4859
4860 anyhow::Ok(response.branch.map(SharedString::from))
4861 }
4862 }
4863 })
4864 }
4865
4866 pub fn diff_tree(
4867 &mut self,
4868 diff_type: DiffTreeType,
4869 _cx: &App,
4870 ) -> oneshot::Receiver<Result<TreeDiff>> {
4871 let repository_id = self.snapshot.id;
4872 self.send_job(None, move |repo, _cx| async move {
4873 match repo {
4874 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4875 backend.diff_tree(diff_type).await
4876 }
4877 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4878 let response = client
4879 .request(proto::GetTreeDiff {
4880 project_id: project_id.0,
4881 repository_id: repository_id.0,
4882 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4883 base: diff_type.base().to_string(),
4884 head: diff_type.head().to_string(),
4885 })
4886 .await?;
4887
4888 let entries = response
4889 .entries
4890 .into_iter()
4891 .filter_map(|entry| {
4892 let status = match entry.status() {
4893 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4894 proto::tree_diff_status::Status::Modified => {
4895 TreeDiffStatus::Modified {
4896 old: git::Oid::from_str(
4897 &entry.oid.context("missing oid").log_err()?,
4898 )
4899 .log_err()?,
4900 }
4901 }
4902 proto::tree_diff_status::Status::Deleted => {
4903 TreeDiffStatus::Deleted {
4904 old: git::Oid::from_str(
4905 &entry.oid.context("missing oid").log_err()?,
4906 )
4907 .log_err()?,
4908 }
4909 }
4910 };
4911 Some((
4912 RepoPath::from_rel_path(
4913 &RelPath::from_proto(&entry.path).log_err()?,
4914 ),
4915 status,
4916 ))
4917 })
4918 .collect();
4919
4920 Ok(TreeDiff { entries })
4921 }
4922 }
4923 })
4924 }
4925
4926 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4927 let id = self.id;
4928 self.send_job(None, move |repo, _cx| async move {
4929 match repo {
4930 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4931 backend.diff(diff_type).await
4932 }
4933 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4934 let response = client
4935 .request(proto::GitDiff {
4936 project_id: project_id.0,
4937 repository_id: id.to_proto(),
4938 diff_type: match diff_type {
4939 DiffType::HeadToIndex => {
4940 proto::git_diff::DiffType::HeadToIndex.into()
4941 }
4942 DiffType::HeadToWorktree => {
4943 proto::git_diff::DiffType::HeadToWorktree.into()
4944 }
4945 },
4946 })
4947 .await?;
4948
4949 Ok(response.diff)
4950 }
4951 }
4952 })
4953 }
4954
4955 pub fn create_branch(
4956 &mut self,
4957 branch_name: String,
4958 base_branch: Option<String>,
4959 ) -> oneshot::Receiver<Result<()>> {
4960 let id = self.id;
4961 let status_msg = if let Some(ref base) = base_branch {
4962 format!("git switch -c {branch_name} {base}").into()
4963 } else {
4964 format!("git switch -c {branch_name}").into()
4965 };
4966 self.send_job(Some(status_msg), move |repo, _cx| async move {
4967 match repo {
4968 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4969 backend.create_branch(branch_name, base_branch).await
4970 }
4971 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4972 client
4973 .request(proto::GitCreateBranch {
4974 project_id: project_id.0,
4975 repository_id: id.to_proto(),
4976 branch_name,
4977 })
4978 .await?;
4979
4980 Ok(())
4981 }
4982 }
4983 })
4984 }
4985
4986 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4987 let id = self.id;
4988 self.send_job(
4989 Some(format!("git switch {branch_name}").into()),
4990 move |repo, _cx| async move {
4991 match repo {
4992 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4993 backend.change_branch(branch_name).await
4994 }
4995 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4996 client
4997 .request(proto::GitChangeBranch {
4998 project_id: project_id.0,
4999 repository_id: id.to_proto(),
5000 branch_name,
5001 })
5002 .await?;
5003
5004 Ok(())
5005 }
5006 }
5007 },
5008 )
5009 }
5010
5011 pub fn rename_branch(
5012 &mut self,
5013 branch: String,
5014 new_name: String,
5015 ) -> oneshot::Receiver<Result<()>> {
5016 let id = self.id;
5017 self.send_job(
5018 Some(format!("git branch -m {branch} {new_name}").into()),
5019 move |repo, _cx| async move {
5020 match repo {
5021 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5022 backend.rename_branch(branch, new_name).await
5023 }
5024 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5025 client
5026 .request(proto::GitRenameBranch {
5027 project_id: project_id.0,
5028 repository_id: id.to_proto(),
5029 branch,
5030 new_name,
5031 })
5032 .await?;
5033
5034 Ok(())
5035 }
5036 }
5037 },
5038 )
5039 }
5040
5041 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5042 let id = self.id;
5043 self.send_job(None, move |repo, _cx| async move {
5044 match repo {
5045 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5046 backend.check_for_pushed_commit().await
5047 }
5048 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5049 let response = client
5050 .request(proto::CheckForPushedCommits {
5051 project_id: project_id.0,
5052 repository_id: id.to_proto(),
5053 })
5054 .await?;
5055
5056 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5057
5058 Ok(branches)
5059 }
5060 }
5061 })
5062 }
5063
5064 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5065 self.send_job(None, |repo, _cx| async move {
5066 match repo {
5067 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5068 backend.checkpoint().await
5069 }
5070 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5071 }
5072 })
5073 }
5074
5075 pub fn restore_checkpoint(
5076 &mut self,
5077 checkpoint: GitRepositoryCheckpoint,
5078 ) -> oneshot::Receiver<Result<()>> {
5079 self.send_job(None, move |repo, _cx| async move {
5080 match repo {
5081 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5082 backend.restore_checkpoint(checkpoint).await
5083 }
5084 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5085 }
5086 })
5087 }
5088
5089 pub(crate) fn apply_remote_update(
5090 &mut self,
5091 update: proto::UpdateRepository,
5092 cx: &mut Context<Self>,
5093 ) -> Result<()> {
5094 let conflicted_paths = TreeSet::from_ordered_entries(
5095 update
5096 .current_merge_conflicts
5097 .into_iter()
5098 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5099 );
5100 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5101 let new_head_commit = update
5102 .head_commit_details
5103 .as_ref()
5104 .map(proto_to_commit_details);
5105 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5106 cx.emit(RepositoryEvent::BranchChanged)
5107 }
5108 self.snapshot.branch = new_branch;
5109 self.snapshot.head_commit = new_head_commit;
5110
5111 self.snapshot.merge.conflicted_paths = conflicted_paths;
5112 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5113 let new_stash_entries = GitStash {
5114 entries: update
5115 .stash_entries
5116 .iter()
5117 .filter_map(|entry| proto_to_stash(entry).ok())
5118 .collect(),
5119 };
5120 if self.snapshot.stash_entries != new_stash_entries {
5121 cx.emit(RepositoryEvent::StashEntriesChanged)
5122 }
5123 self.snapshot.stash_entries = new_stash_entries;
5124
5125 let edits = update
5126 .removed_statuses
5127 .into_iter()
5128 .filter_map(|path| {
5129 Some(sum_tree::Edit::Remove(PathKey(
5130 RelPath::from_proto(&path).log_err()?,
5131 )))
5132 })
5133 .chain(
5134 update
5135 .updated_statuses
5136 .into_iter()
5137 .filter_map(|updated_status| {
5138 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5139 }),
5140 )
5141 .collect::<Vec<_>>();
5142 if !edits.is_empty() {
5143 cx.emit(RepositoryEvent::StatusesChanged);
5144 }
5145 self.snapshot.statuses_by_path.edit(edits, ());
5146 if update.is_last_update {
5147 self.snapshot.scan_id = update.scan_id;
5148 }
5149 self.clear_pending_ops(cx);
5150 Ok(())
5151 }
5152
5153 pub fn compare_checkpoints(
5154 &mut self,
5155 left: GitRepositoryCheckpoint,
5156 right: GitRepositoryCheckpoint,
5157 ) -> oneshot::Receiver<Result<bool>> {
5158 self.send_job(None, move |repo, _cx| async move {
5159 match repo {
5160 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5161 backend.compare_checkpoints(left, right).await
5162 }
5163 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5164 }
5165 })
5166 }
5167
5168 pub fn diff_checkpoints(
5169 &mut self,
5170 base_checkpoint: GitRepositoryCheckpoint,
5171 target_checkpoint: GitRepositoryCheckpoint,
5172 ) -> oneshot::Receiver<Result<String>> {
5173 self.send_job(None, move |repo, _cx| async move {
5174 match repo {
5175 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5176 backend
5177 .diff_checkpoints(base_checkpoint, target_checkpoint)
5178 .await
5179 }
5180 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5181 }
5182 })
5183 }
5184
5185 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5186 let updated = SumTree::from_iter(
5187 self.pending_ops.iter().filter_map(|ops| {
5188 let inner_ops: Vec<PendingOp> =
5189 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5190 if inner_ops.is_empty() {
5191 None
5192 } else {
5193 Some(PendingOps {
5194 repo_path: ops.repo_path.clone(),
5195 ops: inner_ops,
5196 })
5197 }
5198 }),
5199 (),
5200 );
5201
5202 if updated != self.pending_ops {
5203 cx.emit(RepositoryEvent::PendingOpsChanged {
5204 pending_ops: self.pending_ops.clone(),
5205 })
5206 }
5207
5208 self.pending_ops = updated;
5209 }
5210
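    /// Schedules a full git status rescan on the worker, keyed by
    /// `GitJobKey::ReloadGitState` so redundant queued scans collapse into
    /// one. When the scan completes, the snapshot is replaced, completed
    /// pending operations are cleared, the resulting events are emitted, and
    /// the new snapshot is forwarded to any downstream client.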
5211 fn schedule_scan(
5212 &mut self,
5213 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5214 cx: &mut Context<Self>,
5215 ) {
5216 let this = cx.weak_entity();
5217 let _ = self.send_keyed_job(
5218 Some(GitJobKey::ReloadGitState),
5219 None,
5220 |state, mut cx| async move {
5221 log::debug!("run scheduled git status scan");
5222
5223 let Some(this) = this.upgrade() else {
5224 return Ok(());
5225 };
5226 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5227 bail!("not a local repository")
5228 };
5229 let (snapshot, events) = this
5230 .update(&mut cx, |this, _| {
5231 this.paths_needing_status_update.clear();
5232 compute_snapshot(
5233 this.id,
5234 this.work_directory_abs_path.clone(),
5235 this.snapshot.clone(),
5236 backend.clone(),
5237 )
5238 })?
5239 .await?;
5240 this.update(&mut cx, |this, cx| {
5241 this.snapshot = snapshot.clone();
5242 this.clear_pending_ops(cx);
5243 for event in events {
5244 cx.emit(event);
5245 }
5246 })?;
5247 if let Some(updates_tx) = updates_tx {
5248 updates_tx
5249 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5250 .ok();
5251 }
5252 Ok(())
5253 },
5254 );
5255 }
5256
5257 fn spawn_local_git_worker(
5258 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5259 cx: &mut Context<Self>,
5260 ) -> mpsc::UnboundedSender<GitJob> {
5261 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5262
5263 cx.spawn(async move |_, cx| {
5264 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5265 if let Some(git_hosting_provider_registry) =
5266 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5267 {
5268 git_hosting_providers::register_additional_providers(
5269 git_hosting_provider_registry,
5270 state.backend.clone(),
5271 );
5272 }
5273 let state = RepositoryState::Local(state);
5274 let mut jobs = VecDeque::new();
5275 loop {
5276 while let Ok(Some(next_job)) = job_rx.try_next() {
5277 jobs.push_back(next_job);
5278 }
5279
5280 if let Some(job) = jobs.pop_front() {
5281 if let Some(current_key) = &job.key
5282 && jobs
5283 .iter()
5284 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5285 {
5286 continue;
5287 }
5288 (job.job)(state.clone(), cx).await;
5289 } else if let Some(job) = job_rx.next().await {
5290 jobs.push_back(job);
5291 } else {
5292 break;
5293 }
5294 }
5295 anyhow::Ok(())
5296 })
5297 .detach_and_log_err(cx);
5298
5299 job_tx
5300 }
5301
5302 fn spawn_remote_git_worker(
5303 state: RemoteRepositoryState,
5304 cx: &mut Context<Self>,
5305 ) -> mpsc::UnboundedSender<GitJob> {
5306 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5307
5308 cx.spawn(async move |_, cx| {
5309 let state = RepositoryState::Remote(state);
5310 let mut jobs = VecDeque::new();
5311 loop {
5312 while let Ok(Some(next_job)) = job_rx.try_next() {
5313 jobs.push_back(next_job);
5314 }
5315
5316 if let Some(job) = jobs.pop_front() {
5317 if let Some(current_key) = &job.key
5318 && jobs
5319 .iter()
5320 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5321 {
5322 continue;
5323 }
5324 (job.job)(state.clone(), cx).await;
5325 } else if let Some(job) = job_rx.next().await {
5326 jobs.push_back(job);
5327 } else {
5328 break;
5329 }
5330 }
5331 anyhow::Ok(())
5332 })
5333 .detach_and_log_err(cx);
5334
5335 job_tx
5336 }
5337
5338 fn load_staged_text(
5339 &mut self,
5340 buffer_id: BufferId,
5341 repo_path: RepoPath,
5342 cx: &App,
5343 ) -> Task<Result<Option<String>>> {
5344 let rx = self.send_job(None, move |state, _| async move {
5345 match state {
5346 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5347 anyhow::Ok(backend.load_index_text(repo_path).await)
5348 }
5349 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5350 let response = client
5351 .request(proto::OpenUnstagedDiff {
5352 project_id: project_id.to_proto(),
5353 buffer_id: buffer_id.to_proto(),
5354 })
5355 .await?;
5356 Ok(response.staged_text)
5357 }
5358 }
5359 });
5360 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5361 }
5362
5363 fn load_committed_text(
5364 &mut self,
5365 buffer_id: BufferId,
5366 repo_path: RepoPath,
5367 cx: &App,
5368 ) -> Task<Result<DiffBasesChange>> {
5369 let rx = self.send_job(None, move |state, _| async move {
5370 match state {
5371 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5372 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5373 let staged_text = backend.load_index_text(repo_path).await;
5374 let diff_bases_change = if committed_text == staged_text {
5375 DiffBasesChange::SetBoth(committed_text)
5376 } else {
5377 DiffBasesChange::SetEach {
5378 index: staged_text,
5379 head: committed_text,
5380 }
5381 };
5382 anyhow::Ok(diff_bases_change)
5383 }
5384 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5385 use proto::open_uncommitted_diff_response::Mode;
5386
5387 let response = client
5388 .request(proto::OpenUncommittedDiff {
5389 project_id: project_id.to_proto(),
5390 buffer_id: buffer_id.to_proto(),
5391 })
5392 .await?;
5393 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5394 let bases = match mode {
5395 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5396 Mode::IndexAndHead => DiffBasesChange::SetEach {
5397 head: response.committed_text,
5398 index: response.staged_text,
5399 },
5400 };
5401 Ok(bases)
5402 }
5403 }
5404 });
5405
5406 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5407 }

5408 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5409 let repository_id = self.snapshot.id;
5410 let rx = self.send_job(None, move |state, _| async move {
5411 match state {
5412 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5413 backend.load_blob_content(oid).await
5414 }
5415 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5416 let response = client
5417 .request(proto::GetBlobContent {
5418 project_id: project_id.to_proto(),
5419 repository_id: repository_id.0,
5420 oid: oid.to_string(),
5421 })
5422 .await?;
5423 Ok(response.content)
5424 }
5425 }
5426 });
5427 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5428 }
5429
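    /// Records that the given paths may have changed on disk and schedules an
    /// incremental status refresh for them, keyed by
    /// `GitJobKey::RefreshStatuses`. Only entries whose status actually
    /// differs from the previous snapshot are edited, and removals are
    /// applied for paths that no longer have a status.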
5430 fn paths_changed(
5431 &mut self,
5432 paths: Vec<RepoPath>,
5433 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5434 cx: &mut Context<Self>,
5435 ) {
5436 self.paths_needing_status_update.extend(paths);
5437
5438 let this = cx.weak_entity();
5439 let _ = self.send_keyed_job(
5440 Some(GitJobKey::RefreshStatuses),
5441 None,
5442 |state, mut cx| async move {
5443 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5444 (
5445 this.snapshot.clone(),
5446 mem::take(&mut this.paths_needing_status_update),
5447 )
5448 })?;
5449 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5450 bail!("not a local repository")
5451 };
5452
5453 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5454 if paths.is_empty() {
5455 return Ok(());
5456 }
5457 let statuses = backend.status(&paths).await?;
5458 let stash_entries = backend.stash_entries().await?;
5459
5460 let changed_path_statuses = cx
5461 .background_spawn(async move {
5462 let mut changed_path_statuses = Vec::new();
5463 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5464 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5465
5466 for (repo_path, status) in &*statuses.entries {
5467 changed_paths.remove(repo_path);
5468 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5469 && cursor.item().is_some_and(|entry| entry.status == *status)
5470 {
5471 continue;
5472 }
5473
5474 changed_path_statuses.push(Edit::Insert(StatusEntry {
5475 repo_path: repo_path.clone(),
5476 status: *status,
5477 }));
5478 }
5479 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5480 for path in changed_paths.into_iter() {
5481 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5482 changed_path_statuses
5483 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5484 }
5485 }
5486 changed_path_statuses
5487 })
5488 .await;
5489
5490 this.update(&mut cx, |this, cx| {
5491 if this.snapshot.stash_entries != stash_entries {
5492 cx.emit(RepositoryEvent::StashEntriesChanged);
5493 this.snapshot.stash_entries = stash_entries;
5494 }
5495
5496 if !changed_path_statuses.is_empty() {
5497 cx.emit(RepositoryEvent::StatusesChanged);
5498 this.snapshot
5499 .statuses_by_path
5500 .edit(changed_path_statuses, ());
5501 this.snapshot.scan_id += 1;
5502 }
5503
5504 if let Some(updates_tx) = updates_tx {
5505 updates_tx
5506 .unbounded_send(DownstreamUpdate::UpdateRepository(
5507 this.snapshot.clone(),
5508 ))
5509 .ok();
5510 }
5511 })
5512 },
5513 );
5514 }
5515
5516 /// The currently running git command (its status message and start time), if any.
5517 pub fn current_job(&self) -> Option<JobInfo> {
5518 self.active_jobs.values().next().cloned()
5519 }
5520
5521 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5522 self.send_job(None, |_, _| async {})
5523 }
5524
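    /// Runs `f` while tracking a pending operation with the given
    /// `git_status` for each of `paths`. When the future resolves, the
    /// corresponding pending ops are marked finished, skipped (if the job was
    /// canceled), or errored.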
5525 fn spawn_job_with_tracking<AsyncFn>(
5526 &mut self,
5527 paths: Vec<RepoPath>,
5528 git_status: pending_op::GitStatus,
5529 cx: &mut Context<Self>,
5530 f: AsyncFn,
5531 ) -> Task<Result<()>>
5532 where
5533 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5534 {
5535 let ids = self.new_pending_ops_for_paths(paths, git_status);
5536
5537 cx.spawn(async move |this, cx| {
5538 let (job_status, result) = match f(this.clone(), cx).await {
5539 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5540 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5541 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5542 };
5543
5544 this.update(cx, |this, _| {
5545 let mut edits = Vec::with_capacity(ids.len());
5546 for (id, entry) in ids {
5547 if let Some(mut ops) = this
5548 .pending_ops
5549 .get(&PathKey(entry.as_ref().clone()), ())
5550 .cloned()
5551 {
5552 if let Some(op) = ops.op_by_id_mut(id) {
5553 op.job_status = job_status;
5554 }
5555 edits.push(sum_tree::Edit::Insert(ops));
5556 }
5557 }
5558 this.pending_ops.edit(edits, ());
5559 })?;
5560
5561 result
5562 })
5563 }
5564
5565 fn new_pending_ops_for_paths(
5566 &mut self,
5567 paths: Vec<RepoPath>,
5568 git_status: pending_op::GitStatus,
5569 ) -> Vec<(PendingOpId, RepoPath)> {
5570 let mut edits = Vec::with_capacity(paths.len());
5571 let mut ids = Vec::with_capacity(paths.len());
5572 for path in paths {
5573 let mut ops = self
5574 .pending_ops
5575 .get(&PathKey(path.as_ref().clone()), ())
5576 .cloned()
5577 .unwrap_or_else(|| PendingOps::new(&path));
5578 let id = ops.max_id() + 1;
5579 ops.ops.push(PendingOp {
5580 id,
5581 git_status,
5582 job_status: pending_op::JobStatus::Running,
5583 });
5584 edits.push(sum_tree::Edit::Insert(ops));
5585 ids.push((id, path));
5586 }
5587 self.pending_ops.edit(edits, ());
5588 ids
5589 }
5590}
5591
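/// Builds a permalink for a file inside a crates.io registry checkout by
/// locating the crate's `.cargo_vcs_info.json` and `Cargo.toml` in a parent
/// directory, resolving the upstream repository and commit SHA, and mapping
/// the local path back into the repository.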
5592fn get_permalink_in_rust_registry_src(
5593 provider_registry: Arc<GitHostingProviderRegistry>,
5594 path: PathBuf,
5595 selection: Range<u32>,
5596) -> Result<url::Url> {
5597 #[derive(Deserialize)]
5598 struct CargoVcsGit {
5599 sha1: String,
5600 }
5601
5602 #[derive(Deserialize)]
5603 struct CargoVcsInfo {
5604 git: CargoVcsGit,
5605 path_in_vcs: String,
5606 }
5607
5608 #[derive(Deserialize)]
5609 struct CargoPackage {
5610 repository: String,
5611 }
5612
5613 #[derive(Deserialize)]
5614 struct CargoToml {
5615 package: CargoPackage,
5616 }
5617
5618 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5619 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5620 Some((dir, json))
5621 }) else {
5622 bail!("No .cargo_vcs_info.json found in parent directories")
5623 };
5624 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5625 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5626 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5627 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5628 .context("parsing package.repository field of manifest")?;
5629 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5630 let permalink = provider.build_permalink(
5631 remote,
5632 BuildPermalinkParams::new(
5633 &cargo_vcs_info.git.sha1,
5634 &RepoPath::from_rel_path(
5635 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5636 ),
5637 Some(selection),
5638 ),
5639 );
5640 Ok(permalink)
5641}
5642
5643fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5644 let Some(blame) = blame else {
5645 return proto::BlameBufferResponse {
5646 blame_response: None,
5647 };
5648 };
5649
5650 let entries = blame
5651 .entries
5652 .into_iter()
5653 .map(|entry| proto::BlameEntry {
5654 sha: entry.sha.as_bytes().into(),
5655 start_line: entry.range.start,
5656 end_line: entry.range.end,
5657 original_line_number: entry.original_line_number,
5658 author: entry.author,
5659 author_mail: entry.author_mail,
5660 author_time: entry.author_time,
5661 author_tz: entry.author_tz,
5662 committer: entry.committer_name,
5663 committer_mail: entry.committer_email,
5664 committer_time: entry.committer_time,
5665 committer_tz: entry.committer_tz,
5666 summary: entry.summary,
5667 previous: entry.previous,
5668 filename: entry.filename,
5669 })
5670 .collect::<Vec<_>>();
5671
5672 let messages = blame
5673 .messages
5674 .into_iter()
5675 .map(|(oid, message)| proto::CommitMessage {
5676 oid: oid.as_bytes().into(),
5677 message,
5678 })
5679 .collect::<Vec<_>>();
5680
5681 proto::BlameBufferResponse {
5682 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5683 entries,
5684 messages,
5685 remote_url: blame.remote_url,
5686 }),
5687 }
5688}
5689
5690fn deserialize_blame_buffer_response(
5691 response: proto::BlameBufferResponse,
5692) -> Option<git::blame::Blame> {
5693 let response = response.blame_response?;
5694 let entries = response
5695 .entries
5696 .into_iter()
5697 .filter_map(|entry| {
5698 Some(git::blame::BlameEntry {
5699 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5700 range: entry.start_line..entry.end_line,
5701 original_line_number: entry.original_line_number,
5702 committer_name: entry.committer,
5703 committer_time: entry.committer_time,
5704 committer_tz: entry.committer_tz,
5705 committer_email: entry.committer_mail,
5706 author: entry.author,
5707 author_mail: entry.author_mail,
5708 author_time: entry.author_time,
5709 author_tz: entry.author_tz,
5710 summary: entry.summary,
5711 previous: entry.previous,
5712 filename: entry.filename,
5713 })
5714 })
5715 .collect::<Vec<_>>();
5716
5717 let messages = response
5718 .messages
5719 .into_iter()
5720 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5721 .collect::<HashMap<_, _>>();
5722
5723 Some(Blame {
5724 entries,
5725 messages,
5726 remote_url: response.remote_url,
5727 })
5728}
5729
5730fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5731 proto::Branch {
5732 is_head: branch.is_head,
5733 ref_name: branch.ref_name.to_string(),
5734 unix_timestamp: branch
5735 .most_recent_commit
5736 .as_ref()
5737 .map(|commit| commit.commit_timestamp as u64),
5738 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5739 ref_name: upstream.ref_name.to_string(),
5740 tracking: upstream
5741 .tracking
5742 .status()
5743 .map(|upstream| proto::UpstreamTracking {
5744 ahead: upstream.ahead as u64,
5745 behind: upstream.behind as u64,
5746 }),
5747 }),
5748 most_recent_commit: branch
5749 .most_recent_commit
5750 .as_ref()
5751 .map(|commit| proto::CommitSummary {
5752 sha: commit.sha.to_string(),
5753 subject: commit.subject.to_string(),
5754 commit_timestamp: commit.commit_timestamp,
5755 author_name: commit.author_name.to_string(),
5756 }),
5757 }
5758}
5759
5760fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5761 proto::Worktree {
5762 path: worktree.path.to_string_lossy().to_string(),
5763 ref_name: worktree.ref_name.to_string(),
5764 sha: worktree.sha.to_string(),
5765 }
5766}
5767
fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

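/// Reconstructs a [`git::repository::Branch`] from its protobuf representation;
/// the inverse of [`branch_to_proto`]. An absent upstream tracking status is
/// decoded as [`git::repository::UpstreamTracking::Gone`].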
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                // `proto::CommitSummary` does not carry this flag, so assume the
                // commit has a parent.
                has_parent: true,
            }
        }),
    }
}

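/// Converts a [`CommitDetails`] value into its protobuf representation;
/// [`proto_to_commit_details`] performs the reverse conversion.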
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

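/// Builds an updated [`RepositorySnapshot`] by querying the git backend for the
/// current branch, HEAD commit, file statuses, stash entries, merge state, and
/// remote URLs, and returns it together with the [`RepositoryEvent`]s describing
/// what changed relative to `prev_snapshot`.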
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // Query the status of every path in the work directory.
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged);
    }

    // Resolve the HEAD commit directly; this is needed when `branch` is `None`,
    // e.g. in a detached HEAD state.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    // Used by edit prediction data collection.
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

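/// Decodes a [`FileStatus`] from its protobuf representation. The detailed
/// `status` variant takes precedence when present; otherwise the coarse
/// `simple_status` code is mapped onto an equivalent tracked or unmerged status.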
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

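/// Encodes a [`FileStatus`] using the detailed, variant-based protobuf
/// representation decoded by [`status_from_proto`].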
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

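/// Maps an [`UnmergedStatusCode`] onto the wire value of the corresponding
/// `proto::GitStatus` variant.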
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}
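
// A minimal round-trip sketch (not part of the original module) illustrating how
// `status_to_proto` and `status_from_proto` are intended to compose for the
// detailed, variant-carrying encoding. It assumes `FileStatus` is `Copy` and
// derives `Debug` + `PartialEq` (as its use in snapshot comparisons suggests).
// The `simple_status` argument is ignored whenever a variant is present, so `0`
// is passed as a placeholder.
#[cfg(test)]
mod status_proto_roundtrip_sketch {
    use super::*;

    #[test]
    fn tracked_and_unmerged_statuses_roundtrip() {
        let statuses: [FileStatus; 3] = [
            TrackedStatus {
                index_status: StatusCode::Added,
                worktree_status: StatusCode::Modified,
            }
            .into(),
            UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Deleted,
            }
            .into(),
            FileStatus::Untracked,
        ];
        for status in statuses {
            // Encode with the detailed variant, then decode and expect the
            // original status back.
            let proto = status_to_proto(status);
            let decoded = status_from_proto(0, Some(proto)).unwrap();
            assert_eq!(decoded, status);
        }
    }
}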