1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
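/// Central store for the project's git state: the repositories discovered in
/// its worktrees (or mirrored from a remote host), per-buffer diff and
/// conflict state, and the connections used to share that state with
/// collaborators.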
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
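/// Git-related state tracked for a single open buffer: weak handles to its
/// unstaged and uncommitted diffs and its conflict set, plus the tasks and
/// bookkeeping used to keep them up to date as the buffer and repository
/// change.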
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
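/// Describes which diff base texts changed for a buffer: the index text, the
/// HEAD text, each independently, or both to the same value.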
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
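/// Whether the store operates on local repositories directly or forwards
/// requests to an upstream project, along with any downstream client that
/// updates are mirrored to.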
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
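/// A checkpoint of every repository in the store, keyed by the repository's
/// working directory, used to snapshot repository state and later restore or
/// compare it.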
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
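/// A point-in-time view of a repository's state: statuses by path, current
/// branch and head commit, merge details, remote URLs, and stash entries.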
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
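/// A single git repository known to the `GitStore`: the latest snapshot of
/// its state plus the job queue used to serialize git operations against it.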
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(
                    &Shell::System,
                    work_directory_abs_path.clone(),
                    cx,
                )
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
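                    // Prefer a `git` binary found on the project's
                    // environment PATH, falling back to whatever `git` is on
                    // the default PATH.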
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
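/// A unit of work to run against a repository. Jobs are executed one at a
/// time off a queue; an optional `GitJobKey` identifies a job so that
/// redundant queued work (e.g. repeated status refreshes) can be coalesced.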
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_git_init);
476 client.add_entity_request_handler(Self::handle_push);
477 client.add_entity_request_handler(Self::handle_pull);
478 client.add_entity_request_handler(Self::handle_fetch);
479 client.add_entity_request_handler(Self::handle_stage);
480 client.add_entity_request_handler(Self::handle_unstage);
481 client.add_entity_request_handler(Self::handle_stash);
482 client.add_entity_request_handler(Self::handle_stash_pop);
483 client.add_entity_request_handler(Self::handle_stash_apply);
484 client.add_entity_request_handler(Self::handle_stash_drop);
485 client.add_entity_request_handler(Self::handle_commit);
486 client.add_entity_request_handler(Self::handle_run_hook);
487 client.add_entity_request_handler(Self::handle_reset);
488 client.add_entity_request_handler(Self::handle_show);
489 client.add_entity_request_handler(Self::handle_load_commit_diff);
490 client.add_entity_request_handler(Self::handle_checkout_files);
491 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
492 client.add_entity_request_handler(Self::handle_set_index_text);
493 client.add_entity_request_handler(Self::handle_askpass);
494 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
495 client.add_entity_request_handler(Self::handle_git_diff);
496 client.add_entity_request_handler(Self::handle_tree_diff);
497 client.add_entity_request_handler(Self::handle_get_blob_content);
498 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
499 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
500 client.add_entity_message_handler(Self::handle_update_diff_bases);
501 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
502 client.add_entity_request_handler(Self::handle_blame_buffer);
503 client.add_entity_message_handler(Self::handle_update_repository);
504 client.add_entity_message_handler(Self::handle_remove_repository);
505 client.add_entity_request_handler(Self::handle_git_clone);
506 client.add_entity_request_handler(Self::handle_get_worktrees);
507 client.add_entity_request_handler(Self::handle_create_worktree);
508 }
509
510 pub fn is_local(&self) -> bool {
511 matches!(self.state, GitStoreState::Local { .. })
512 }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
514 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
515 let id = repo.read(cx).id;
516 if self.active_repo_id != Some(id) {
517 self.active_repo_id = Some(id);
518 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
519 }
520 }
521 }
522
523 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
524 match &mut self.state {
525 GitStoreState::Remote {
526 downstream: downstream_client,
527 ..
528 } => {
529 for repo in self.repositories.values() {
530 let update = repo.read(cx).snapshot.initial_update(project_id);
531 for update in split_repository_update(update) {
532 client.send(update).log_err();
533 }
534 }
535 *downstream_client = Some((client, ProjectId(project_id)));
536 }
537 GitStoreState::Local {
538 downstream: downstream_client,
539 ..
540 } => {
541 let mut snapshots = HashMap::default();
542 let (updates_tx, mut updates_rx) = mpsc::unbounded();
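                // Seed the update stream with the current snapshot of every
                // repository; later updates are sent as deltas against the
                // last snapshot forwarded to the downstream client.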
543 for repo in self.repositories.values() {
544 updates_tx
545 .unbounded_send(DownstreamUpdate::UpdateRepository(
546 repo.read(cx).snapshot.clone(),
547 ))
548 .ok();
549 }
550 *downstream_client = Some(LocalDownstreamState {
551 client: client.clone(),
552 project_id: ProjectId(project_id),
553 updates_tx,
554 _task: cx.spawn(async move |this, cx| {
555 cx.background_spawn(async move {
556 while let Some(update) = updates_rx.next().await {
557 match update {
558 DownstreamUpdate::UpdateRepository(snapshot) => {
559 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
560 {
561 let update =
562 snapshot.build_update(old_snapshot, project_id);
563 *old_snapshot = snapshot;
564 for update in split_repository_update(update) {
565 client.send(update)?;
566 }
567 } else {
568 let update = snapshot.initial_update(project_id);
569 for update in split_repository_update(update) {
570 client.send(update)?;
571 }
572 snapshots.insert(snapshot.id, snapshot);
573 }
574 }
575 DownstreamUpdate::RemoveRepository(id) => {
576 client.send(proto::RemoveRepository {
577 project_id,
578 id: id.to_proto(),
579 })?;
580 }
581 }
582 }
583 anyhow::Ok(())
584 })
585 .await
586 .ok();
587 this.update(cx, |this, _| {
588 if let GitStoreState::Local {
589 downstream: downstream_client,
590 ..
591 } = &mut this.state
592 {
593 downstream_client.take();
594 } else {
595 unreachable!("unshared called on remote store");
596 }
597 })
598 }),
599 });
600 }
601 }
602 }
603
604 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
605 match &mut self.state {
606 GitStoreState::Local {
607 downstream: downstream_client,
608 ..
609 } => {
610 downstream_client.take();
611 }
612 GitStoreState::Remote {
613 downstream: downstream_client,
614 ..
615 } => {
616 downstream_client.take();
617 }
618 }
619 self.shared_diffs.clear();
620 }
621
622 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
623 self.shared_diffs.remove(peer_id);
624 }
625
626 pub fn active_repository(&self) -> Option<Entity<Repository>> {
627 self.active_repo_id
628 .as_ref()
629 .map(|id| self.repositories[id].clone())
630 }
631
632 pub fn open_unstaged_diff(
633 &mut self,
634 buffer: Entity<Buffer>,
635 cx: &mut Context<Self>,
636 ) -> Task<Result<Entity<BufferDiff>>> {
637 let buffer_id = buffer.read(cx).remote_id();
638 if let Some(diff_state) = self.diffs.get(&buffer_id)
639 && let Some(unstaged_diff) = diff_state
640 .read(cx)
641 .unstaged_diff
642 .as_ref()
643 .and_then(|weak| weak.upgrade())
644 {
645 if let Some(task) =
646 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
647 {
648 return cx.background_executor().spawn(async move {
649 task.await;
650 Ok(unstaged_diff)
651 });
652 }
653 return Task::ready(Ok(unstaged_diff));
654 }
655
656 let Some((repo, repo_path)) =
657 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
658 else {
659 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
660 };
661
662 let task = self
663 .loading_diffs
664 .entry((buffer_id, DiffKind::Unstaged))
665 .or_insert_with(|| {
666 let staged_text = repo.update(cx, |repo, cx| {
667 repo.load_staged_text(buffer_id, repo_path, cx)
668 });
669 cx.spawn(async move |this, cx| {
670 Self::open_diff_internal(
671 this,
672 DiffKind::Unstaged,
673 staged_text.await.map(DiffBasesChange::SetIndex),
674 buffer,
675 cx,
676 )
677 .await
678 .map_err(Arc::new)
679 })
680 .shared()
681 })
682 .clone();
683
684 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
685 }
686
687 pub fn open_diff_since(
688 &mut self,
689 oid: Option<git::Oid>,
690 buffer: Entity<Buffer>,
691 repo: Entity<Repository>,
692 languages: Arc<LanguageRegistry>,
693 cx: &mut Context<Self>,
694 ) -> Task<Result<Entity<BufferDiff>>> {
695 cx.spawn(async move |this, cx| {
696 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
697 let content = match oid {
698 None => None,
699 Some(oid) => Some(
700 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
701 .await?,
702 ),
703 };
704 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
705
706 buffer_diff
707 .update(cx, |buffer_diff, cx| {
708 buffer_diff.set_base_text(
709 content.map(Arc::new),
710 buffer_snapshot.language().cloned(),
711 Some(languages.clone()),
712 buffer_snapshot.text,
713 cx,
714 )
715 })?
716 .await?;
717 let unstaged_diff = this
718 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
719 .await?;
720 buffer_diff.update(cx, |buffer_diff, _| {
721 buffer_diff.set_secondary_diff(unstaged_diff);
722 })?;
723
724 this.update(cx, |_, cx| {
725 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
726 .detach();
727 })?;
728
729 Ok(buffer_diff)
730 })
731 }
732
733 pub fn open_uncommitted_diff(
734 &mut self,
735 buffer: Entity<Buffer>,
736 cx: &mut Context<Self>,
737 ) -> Task<Result<Entity<BufferDiff>>> {
738 let buffer_id = buffer.read(cx).remote_id();
739
740 if let Some(diff_state) = self.diffs.get(&buffer_id)
741 && let Some(uncommitted_diff) = diff_state
742 .read(cx)
743 .uncommitted_diff
744 .as_ref()
745 .and_then(|weak| weak.upgrade())
746 {
747 if let Some(task) =
748 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
749 {
750 return cx.background_executor().spawn(async move {
751 task.await;
752 Ok(uncommitted_diff)
753 });
754 }
755 return Task::ready(Ok(uncommitted_diff));
756 }
757
758 let Some((repo, repo_path)) =
759 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
760 else {
761 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
762 };
763
764 let task = self
765 .loading_diffs
766 .entry((buffer_id, DiffKind::Uncommitted))
767 .or_insert_with(|| {
768 let changes = repo.update(cx, |repo, cx| {
769 repo.load_committed_text(buffer_id, repo_path, cx)
770 });
771
772 // todo(lw): hot foreground spawn
773 cx.spawn(async move |this, cx| {
774 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
775 .await
776 .map_err(Arc::new)
777 })
778 .shared()
779 })
780 .clone();
781
782 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
783 }
784
785 async fn open_diff_internal(
786 this: WeakEntity<Self>,
787 kind: DiffKind,
788 texts: Result<DiffBasesChange>,
789 buffer_entity: Entity<Buffer>,
790 cx: &mut AsyncApp,
791 ) -> Result<Entity<BufferDiff>> {
792 let diff_bases_change = match texts {
793 Err(e) => {
794 this.update(cx, |this, cx| {
795 let buffer = buffer_entity.read(cx);
796 let buffer_id = buffer.remote_id();
797 this.loading_diffs.remove(&(buffer_id, kind));
798 })?;
799 return Err(e);
800 }
801 Ok(change) => change,
802 };
803
804 this.update(cx, |this, cx| {
805 let buffer = buffer_entity.read(cx);
806 let buffer_id = buffer.remote_id();
807 let language = buffer.language().cloned();
808 let language_registry = buffer.language_registry();
809 let text_snapshot = buffer.text_snapshot();
810 this.loading_diffs.remove(&(buffer_id, kind));
811
812 let git_store = cx.weak_entity();
813 let diff_state = this
814 .diffs
815 .entry(buffer_id)
816 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
817
818 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
819
820 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
821 diff_state.update(cx, |diff_state, cx| {
822 diff_state.language = language;
823 diff_state.language_registry = language_registry;
824
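                    // An uncommitted diff is layered on top of the unstaged
                    // diff: reuse the existing unstaged diff (or create an
                    // empty one) and register it as the secondary diff.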
825 match kind {
826 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
827 DiffKind::Uncommitted => {
828 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
829 diff
830 } else {
831 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
832 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
833 unstaged_diff
834 };
835
836 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
837 diff_state.uncommitted_diff = Some(diff.downgrade())
838 }
839 }
840
841 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
842 let rx = diff_state.wait_for_recalculation();
843
844 anyhow::Ok(async move {
845 if let Some(rx) = rx {
846 rx.await;
847 }
848 Ok(diff)
849 })
850 })
851 })??
852 .await
853 }
854
855 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
856 let diff_state = self.diffs.get(&buffer_id)?;
857 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
858 }
859
860 pub fn get_uncommitted_diff(
861 &self,
862 buffer_id: BufferId,
863 cx: &App,
864 ) -> Option<Entity<BufferDiff>> {
865 let diff_state = self.diffs.get(&buffer_id)?;
866 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
867 }
868
869 pub fn open_conflict_set(
870 &mut self,
871 buffer: Entity<Buffer>,
872 cx: &mut Context<Self>,
873 ) -> Entity<ConflictSet> {
874 log::debug!("open conflict set");
875 let buffer_id = buffer.read(cx).remote_id();
876
877 if let Some(git_state) = self.diffs.get(&buffer_id)
878 && let Some(conflict_set) = git_state
879 .read(cx)
880 .conflict_set
881 .as_ref()
882 .and_then(|weak| weak.upgrade())
883 {
884 let conflict_set = conflict_set;
885 let buffer_snapshot = buffer.read(cx).text_snapshot();
886
887 git_state.update(cx, |state, cx| {
888 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
889 });
890
891 return conflict_set;
892 }
893
894 let is_unmerged = self
895 .repository_and_path_for_buffer_id(buffer_id, cx)
896 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
897 let git_store = cx.weak_entity();
898 let buffer_git_state = self
899 .diffs
900 .entry(buffer_id)
901 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
902 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
903
904 self._subscriptions
905 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
906 cx.emit(GitStoreEvent::ConflictsUpdated);
907 }));
908
909 buffer_git_state.update(cx, |state, cx| {
910 state.conflict_set = Some(conflict_set.downgrade());
911 let buffer_snapshot = buffer.read(cx).text_snapshot();
912 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
913 });
914
915 conflict_set
916 }
917
918 pub fn project_path_git_status(
919 &self,
920 project_path: &ProjectPath,
921 cx: &App,
922 ) -> Option<FileStatus> {
923 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
924 Some(repo.read(cx).status_for_path(&repo_path)?.status)
925 }
926
927 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
928 let mut work_directory_abs_paths = Vec::new();
929 let mut checkpoints = Vec::new();
930 for repository in self.repositories.values() {
931 repository.update(cx, |repository, _| {
932 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
933 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
934 });
935 }
936
937 cx.background_executor().spawn(async move {
938 let checkpoints = future::try_join_all(checkpoints).await?;
939 Ok(GitStoreCheckpoint {
940 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
941 .into_iter()
942 .zip(checkpoints)
943 .collect(),
944 })
945 })
946 }
947
948 pub fn restore_checkpoint(
949 &self,
950 checkpoint: GitStoreCheckpoint,
951 cx: &mut App,
952 ) -> Task<Result<()>> {
953 let repositories_by_work_dir_abs_path = self
954 .repositories
955 .values()
956 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
957 .collect::<HashMap<_, _>>();
958
959 let mut tasks = Vec::new();
960 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
961 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
962 let restore = repository.update(cx, |repository, _| {
963 repository.restore_checkpoint(checkpoint)
964 });
965 tasks.push(async move { restore.await? });
966 }
967 }
968 cx.background_spawn(async move {
969 future::try_join_all(tasks).await?;
970 Ok(())
971 })
972 }
973
974 /// Compares two checkpoints, returning true if they are equal.
975 pub fn compare_checkpoints(
976 &self,
977 left: GitStoreCheckpoint,
978 mut right: GitStoreCheckpoint,
979 cx: &mut App,
980 ) -> Task<Result<bool>> {
981 let repositories_by_work_dir_abs_path = self
982 .repositories
983 .values()
984 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
985 .collect::<HashMap<_, _>>();
986
987 let mut tasks = Vec::new();
988 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
989 if let Some(right_checkpoint) = right
990 .checkpoints_by_work_dir_abs_path
991 .remove(&work_dir_abs_path)
992 {
993 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
994 {
995 let compare = repository.update(cx, |repository, _| {
996 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
997 });
998
999 tasks.push(async move { compare.await? });
1000 }
1001 } else {
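                // A repository present in one checkpoint but not the other
                // means the checkpoints cannot be equal.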
1002 return Task::ready(Ok(false));
1003 }
1004 }
1005 cx.background_spawn(async move {
1006 Ok(future::try_join_all(tasks)
1007 .await?
1008 .into_iter()
1009 .all(|result| result))
1010 })
1011 }
1012
1013 /// Blames a buffer.
1014 pub fn blame_buffer(
1015 &self,
1016 buffer: &Entity<Buffer>,
1017 version: Option<clock::Global>,
1018 cx: &mut Context<Self>,
1019 ) -> Task<Result<Option<Blame>>> {
1020 let buffer = buffer.read(cx);
1021 let Some((repo, repo_path)) =
1022 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1023 else {
1024 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1025 };
1026 let content = match &version {
1027 Some(version) => buffer.rope_for_version(version),
1028 None => buffer.as_rope().clone(),
1029 };
1030 let version = version.unwrap_or(buffer.version());
1031 let buffer_id = buffer.remote_id();
1032
1033 let repo = repo.downgrade();
1034 cx.spawn(async move |_, cx| {
1035 let repository_state = repo
1036 .update(cx, |repo, _| repo.repository_state.clone())?
1037 .await
1038 .map_err(|err| anyhow::anyhow!(err))?;
1039 match repository_state {
1040 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1041 .blame(repo_path.clone(), content)
1042 .await
1043 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1044 .map(Some),
1045 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1046 let response = client
1047 .request(proto::BlameBuffer {
1048 project_id: project_id.to_proto(),
1049 buffer_id: buffer_id.into(),
1050 version: serialize_version(&version),
1051 })
1052 .await?;
1053 Ok(deserialize_blame_buffer_response(response))
1054 }
1055 }
1056 })
1057 }
1058
1059 pub fn get_permalink_to_line(
1060 &self,
1061 buffer: &Entity<Buffer>,
1062 selection: Range<u32>,
1063 cx: &mut App,
1064 ) -> Task<Result<url::Url>> {
1065 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1066 return Task::ready(Err(anyhow!("buffer has no file")));
1067 };
1068
1069 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1070 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1071 cx,
1072 ) else {
1073 // If we're not in a Git repo, check whether this is a Rust source
1074 // file in the Cargo registry (presumably opened with go-to-definition
1075 // from a normal Rust file). If so, we can put together a permalink
1076 // using crate metadata.
1077 if buffer
1078 .read(cx)
1079 .language()
1080 .is_none_or(|lang| lang.name() != "Rust".into())
1081 {
1082 return Task::ready(Err(anyhow!("no permalink available")));
1083 }
1084 let file_path = file.worktree.read(cx).absolutize(&file.path);
1085 return cx.spawn(async move |cx| {
1086 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1087 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1088 .context("no permalink available")
1089 });
1090 };
1091
1092 let buffer_id = buffer.read(cx).remote_id();
1093 let branch = repo.read(cx).branch.clone();
1094 let remote = branch
1095 .as_ref()
1096 .and_then(|b| b.upstream.as_ref())
1097 .and_then(|b| b.remote_name())
1098 .unwrap_or("origin")
1099 .to_string();
1100
1101 let rx = repo.update(cx, |repo, _| {
1102 repo.send_job(None, move |state, cx| async move {
1103 match state {
1104 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
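                        // Resolve the remote URL and current HEAD locally,
                        // then let the detected git hosting provider build
                        // the permalink for this line range.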
1105 let origin_url = backend
1106 .remote_url(&remote)
1107 .with_context(|| format!("remote \"{remote}\" not found"))?;
1108
1109 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1110
1111 let provider_registry =
1112 cx.update(GitHostingProviderRegistry::default_global)?;
1113
1114 let (provider, remote) =
1115 parse_git_remote_url(provider_registry, &origin_url)
1116 .context("parsing Git remote URL")?;
1117
1118 Ok(provider.build_permalink(
1119 remote,
1120 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1121 ))
1122 }
1123 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1124 let response = client
1125 .request(proto::GetPermalinkToLine {
1126 project_id: project_id.to_proto(),
1127 buffer_id: buffer_id.into(),
1128 selection: Some(proto::Range {
1129 start: selection.start as u64,
1130 end: selection.end as u64,
1131 }),
1132 })
1133 .await?;
1134
1135 url::Url::parse(&response.permalink).context("failed to parse permalink")
1136 }
1137 }
1138 })
1139 });
1140 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1141 }
1142
1143 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1144 match &self.state {
1145 GitStoreState::Local {
1146 downstream: downstream_client,
1147 ..
1148 } => downstream_client
1149 .as_ref()
1150 .map(|state| (state.client.clone(), state.project_id)),
1151 GitStoreState::Remote {
1152 downstream: downstream_client,
1153 ..
1154 } => downstream_client.clone(),
1155 }
1156 }
1157
1158 fn upstream_client(&self) -> Option<AnyProtoClient> {
1159 match &self.state {
1160 GitStoreState::Local { .. } => None,
1161 GitStoreState::Remote {
1162 upstream_client, ..
1163 } => Some(upstream_client.clone()),
1164 }
1165 }
1166
1167 fn on_worktree_store_event(
1168 &mut self,
1169 worktree_store: Entity<WorktreeStore>,
1170 event: &WorktreeStoreEvent,
1171 cx: &mut Context<Self>,
1172 ) {
1173 let GitStoreState::Local {
1174 project_environment,
1175 downstream,
1176 next_repository_id,
1177 fs,
1178 } = &self.state
1179 else {
1180 return;
1181 };
1182
1183 match event {
1184 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1185 if let Some(worktree) = self
1186 .worktree_store
1187 .read(cx)
1188 .worktree_for_id(*worktree_id, cx)
1189 {
1190 let paths_by_git_repo =
1191 self.process_updated_entries(&worktree, updated_entries, cx);
1192 let downstream = downstream
1193 .as_ref()
1194 .map(|downstream| downstream.updates_tx.clone());
1195 cx.spawn(async move |_, cx| {
1196 let paths_by_git_repo = paths_by_git_repo.await;
1197 for (repo, paths) in paths_by_git_repo {
1198 repo.update(cx, |repo, cx| {
1199 repo.paths_changed(paths, downstream.clone(), cx);
1200 })
1201 .ok();
1202 }
1203 })
1204 .detach();
1205 }
1206 }
1207 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1208 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1209 else {
1210 return;
1211 };
1212 if !worktree.read(cx).is_visible() {
1213 log::debug!(
1214 "not adding repositories for local worktree {:?} because it's not visible",
1215 worktree.read(cx).abs_path()
1216 );
1217 return;
1218 }
1219 self.update_repositories_from_worktree(
1220 *worktree_id,
1221 project_environment.clone(),
1222 next_repository_id.clone(),
1223 downstream
1224 .as_ref()
1225 .map(|downstream| downstream.updates_tx.clone()),
1226 changed_repos.clone(),
1227 fs.clone(),
1228 cx,
1229 );
1230 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1231 }
1232 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
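                // Drop this worktree from every repository's worktree set;
                // repositories left with no worktrees are removed entirely.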
1233 let repos_without_worktree: Vec<RepositoryId> = self
1234 .worktree_ids
1235 .iter_mut()
1236 .filter_map(|(repo_id, worktree_ids)| {
1237 worktree_ids.remove(worktree_id);
1238 if worktree_ids.is_empty() {
1239 Some(*repo_id)
1240 } else {
1241 None
1242 }
1243 })
1244 .collect();
1245 let is_active_repo_removed = repos_without_worktree
1246 .iter()
1247 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1248
1249 for repo_id in repos_without_worktree {
1250 self.repositories.remove(&repo_id);
1251 self.worktree_ids.remove(&repo_id);
1252 if let Some(updates_tx) =
1253 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1254 {
1255 updates_tx
1256 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1257 .ok();
1258 }
1259 }
1260
1261 if is_active_repo_removed {
1262 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1263 self.active_repo_id = Some(repo_id);
1264 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1265 } else {
1266 self.active_repo_id = None;
1267 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1268 }
1269 }
1270 }
1271 _ => {}
1272 }
1273 }

    fn on_repository_event(
1275 &mut self,
1276 repo: Entity<Repository>,
1277 event: &RepositoryEvent,
1278 cx: &mut Context<Self>,
1279 ) {
1280 let id = repo.read(cx).id;
1281 let repo_snapshot = repo.read(cx).snapshot.clone();
1282 for (buffer_id, diff) in self.diffs.iter() {
1283 if let Some((buffer_repo, repo_path)) =
1284 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1285 && buffer_repo == repo
1286 {
1287 diff.update(cx, |diff, cx| {
1288 if let Some(conflict_set) = &diff.conflict_set {
1289 let conflict_status_changed =
1290 conflict_set.update(cx, |conflict_set, cx| {
1291 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1292 conflict_set.set_has_conflict(has_conflict, cx)
1293 })?;
1294 if conflict_status_changed {
1295 let buffer_store = self.buffer_store.read(cx);
1296 if let Some(buffer) = buffer_store.get(*buffer_id) {
1297 let _ = diff
1298 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1299 }
1300 }
1301 }
1302 anyhow::Ok(())
1303 })
1304 .ok();
1305 }
1306 }
1307 cx.emit(GitStoreEvent::RepositoryUpdated(
1308 id,
1309 event.clone(),
1310 self.active_repo_id == Some(id),
1311 ))
1312 }
1313
1314 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1315 cx.emit(GitStoreEvent::JobsUpdated)
1316 }
1317
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1319 fn update_repositories_from_worktree(
1320 &mut self,
1321 worktree_id: WorktreeId,
1322 project_environment: Entity<ProjectEnvironment>,
1323 next_repository_id: Arc<AtomicU64>,
1324 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1325 updated_git_repositories: UpdatedGitRepositoriesSet,
1326 fs: Arc<dyn Fs>,
1327 cx: &mut Context<Self>,
1328 ) {
1329 let mut removed_ids = Vec::new();
1330 for update in updated_git_repositories.iter() {
1331 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1332 let existing_work_directory_abs_path =
1333 repo.read(cx).work_directory_abs_path.clone();
1334 Some(&existing_work_directory_abs_path)
1335 == update.old_work_directory_abs_path.as_ref()
1336 || Some(&existing_work_directory_abs_path)
1337 == update.new_work_directory_abs_path.as_ref()
1338 }) {
1339 let repo_id = *id;
1340 if let Some(new_work_directory_abs_path) =
1341 update.new_work_directory_abs_path.clone()
1342 {
1343 self.worktree_ids
1344 .entry(repo_id)
1345 .or_insert_with(HashSet::new)
1346 .insert(worktree_id);
1347 existing.update(cx, |existing, cx| {
1348 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1349 existing.schedule_scan(updates_tx.clone(), cx);
1350 });
1351 } else {
1352 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1353 worktree_ids.remove(&worktree_id);
1354 if worktree_ids.is_empty() {
1355 removed_ids.push(repo_id);
1356 }
1357 }
1358 }
1359 } else if let UpdatedGitRepository {
1360 new_work_directory_abs_path: Some(work_directory_abs_path),
1361 dot_git_abs_path: Some(dot_git_abs_path),
1362 repository_dir_abs_path: Some(_repository_dir_abs_path),
1363 common_dir_abs_path: Some(_common_dir_abs_path),
1364 ..
1365 } = update
1366 {
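                // This is a repository we haven't seen before: create a local
                // `Repository` entity and schedule its initial status scan.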
1367 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1368 let git_store = cx.weak_entity();
1369 let repo = cx.new(|cx| {
1370 let mut repo = Repository::local(
1371 id,
1372 work_directory_abs_path.clone(),
1373 dot_git_abs_path.clone(),
1374 project_environment.downgrade(),
1375 fs.clone(),
1376 git_store,
1377 cx,
1378 );
1379 if let Some(updates_tx) = updates_tx.as_ref() {
1380 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1381 updates_tx
1382 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1383 .ok();
1384 }
1385 repo.schedule_scan(updates_tx.clone(), cx);
1386 repo
1387 });
1388 self._subscriptions
1389 .push(cx.subscribe(&repo, Self::on_repository_event));
1390 self._subscriptions
1391 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1392 self.repositories.insert(id, repo);
1393 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1394 cx.emit(GitStoreEvent::RepositoryAdded);
1395 self.active_repo_id.get_or_insert_with(|| {
1396 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1397 id
1398 });
1399 }
1400 }
1401
1402 for id in removed_ids {
1403 if self.active_repo_id == Some(id) {
1404 self.active_repo_id = None;
1405 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1406 }
1407 self.repositories.remove(&id);
1408 if let Some(updates_tx) = updates_tx.as_ref() {
1409 updates_tx
1410 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1411 .ok();
1412 }
1413 }
1414 }
1415
1416 fn on_buffer_store_event(
1417 &mut self,
1418 _: Entity<BufferStore>,
1419 event: &BufferStoreEvent,
1420 cx: &mut Context<Self>,
1421 ) {
1422 match event {
1423 BufferStoreEvent::BufferAdded(buffer) => {
1424 cx.subscribe(buffer, |this, buffer, event, cx| {
1425 if let BufferEvent::LanguageChanged = event {
1426 let buffer_id = buffer.read(cx).remote_id();
1427 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1428 diff_state.update(cx, |diff_state, cx| {
1429 diff_state.buffer_language_changed(buffer, cx);
1430 });
1431 }
1432 }
1433 })
1434 .detach();
1435 }
1436 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1437 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1438 diffs.remove(buffer_id);
1439 }
1440 }
1441 BufferStoreEvent::BufferDropped(buffer_id) => {
1442 self.diffs.remove(buffer_id);
1443 for diffs in self.shared_diffs.values_mut() {
1444 diffs.remove(buffer_id);
1445 }
1446 }
1447 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1452 let buffer_id = buffer.read(cx).remote_id();
1453 let diff_state = self.diffs.get(&buffer_id);
1454 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1455
1456 if let Some(diff_state) = diff_state
1457 && let Some((repo, repo_path)) = repo
1458 {
1459 let buffer = buffer.clone();
1460 let diff_state = diff_state.clone();
1461
1462 cx.spawn(async move |_git_store, cx| {
1463 async {
1464 let diff_bases_change = repo
1465 .update(cx, |repo, cx| {
1466 repo.load_committed_text(buffer_id, repo_path, cx)
1467 })?
1468 .await?;
1469
1470 diff_state.update(cx, |diff_state, cx| {
1471 let buffer_snapshot = buffer.read(cx).text_snapshot();
1472 diff_state.diff_bases_changed(
1473 buffer_snapshot,
1474 Some(diff_bases_change),
1475 cx,
1476 );
1477 })
1478 }
1479 .await
1480 .log_err();
1481 })
1482 .detach();
1483 }
1484 }
1485 _ => {}
1486 }
1487 }
1488
1489 pub fn recalculate_buffer_diffs(
1490 &mut self,
1491 buffers: Vec<Entity<Buffer>>,
1492 cx: &mut Context<Self>,
1493 ) -> impl Future<Output = ()> + use<> {
1494 let mut futures = Vec::new();
1495 for buffer in buffers {
1496 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1497 let buffer = buffer.read(cx).text_snapshot();
1498 diff_state.update(cx, |diff_state, cx| {
1499 diff_state.recalculate_diffs(buffer.clone(), cx);
1500 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1501 });
1502 futures.push(diff_state.update(cx, |diff_state, cx| {
1503 diff_state
1504 .reparse_conflict_markers(buffer, cx)
1505 .map(|_| {})
1506 .boxed()
1507 }));
1508 }
1509 }
1510 async move {
1511 futures::future::join_all(futures).await;
1512 }
1513 }
1514
1515 fn on_buffer_diff_event(
1516 &mut self,
1517 diff: Entity<buffer_diff::BufferDiff>,
1518 event: &BufferDiffEvent,
1519 cx: &mut Context<Self>,
1520 ) {
1521 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1522 let buffer_id = diff.read(cx).buffer_id;
1523 if let Some(diff_state) = self.diffs.get(&buffer_id) {
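                // Bump the staging operation count before writing the index,
                // so that index reads can be detected as stale relative to
                // this write (see `hunk_staging_operation_count`).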
1524 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1525 diff_state.hunk_staging_operation_count += 1;
1526 diff_state.hunk_staging_operation_count
1527 });
1528 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1529 let recv = repo.update(cx, |repo, cx| {
1530 log::debug!("hunks changed for {}", path.as_unix_str());
1531 repo.spawn_set_index_text_job(
1532 path,
1533 new_index_text.as_ref().map(|rope| rope.to_string()),
1534 Some(hunk_staging_operation_count),
1535 cx,
1536 )
1537 });
1538 let diff = diff.downgrade();
1539 cx.spawn(async move |this, cx| {
1540 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1541 diff.update(cx, |diff, cx| {
1542 diff.clear_pending_hunks(cx);
1543 })
1544 .ok();
1545 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1546 .ok();
1547 }
1548 })
1549 .detach();
1550 }
1551 }
1552 }
1553 }
1554
1555 fn local_worktree_git_repos_changed(
1556 &mut self,
1557 worktree: Entity<Worktree>,
1558 changed_repos: &UpdatedGitRepositoriesSet,
1559 cx: &mut Context<Self>,
1560 ) {
1561 log::debug!("local worktree repos changed");
1562 debug_assert!(worktree.read(cx).is_local());
1563
1564 for repository in self.repositories.values() {
1565 repository.update(cx, |repository, cx| {
1566 let repo_abs_path = &repository.work_directory_abs_path;
1567 if changed_repos.iter().any(|update| {
1568 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1569 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1570 }) {
1571 repository.reload_buffer_diff_bases(cx);
1572 }
1573 });
1574 }
1575 }
1576
1577 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1578 &self.repositories
1579 }
1580
1581 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1582 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1583 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1584 Some(status.status)
1585 }
1586
1587 pub fn repository_and_path_for_buffer_id(
1588 &self,
1589 buffer_id: BufferId,
1590 cx: &App,
1591 ) -> Option<(Entity<Repository>, RepoPath)> {
1592 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1593 let project_path = buffer.read(cx).project_path(cx)?;
1594 self.repository_and_path_for_project_path(&project_path, cx)
1595 }
1596
1597 pub fn repository_and_path_for_project_path(
1598 &self,
1599 path: &ProjectPath,
1600 cx: &App,
1601 ) -> Option<(Entity<Repository>, RepoPath)> {
1602 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1603 self.repositories
1604 .values()
1605 .filter_map(|repo| {
1606 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1607 Some((repo.clone(), repo_path))
1608 })
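            // If several repositories contain this path (nested repos),
            // prefer the innermost one, i.e. the deepest work directory.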
1609 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1610 }
1611
1612 pub fn git_init(
1613 &self,
1614 path: Arc<Path>,
1615 fallback_branch_name: String,
1616 cx: &App,
1617 ) -> Task<Result<()>> {
1618 match &self.state {
1619 GitStoreState::Local { fs, .. } => {
1620 let fs = fs.clone();
1621 cx.background_executor()
1622 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1623 }
1624 GitStoreState::Remote {
1625 upstream_client,
1626 upstream_project_id: project_id,
1627 ..
1628 } => {
1629 let client = upstream_client.clone();
1630 let project_id = *project_id;
1631 cx.background_executor().spawn(async move {
1632 client
1633 .request(proto::GitInit {
                            project_id,
1635 abs_path: path.to_string_lossy().into_owned(),
1636 fallback_branch_name,
1637 })
1638 .await?;
1639 Ok(())
1640 })
1641 }
1642 }
1643 }
1644
1645 pub fn git_clone(
1646 &self,
1647 repo: String,
1648 path: impl Into<Arc<std::path::Path>>,
1649 cx: &App,
1650 ) -> Task<Result<()>> {
1651 let path = path.into();
1652 match &self.state {
1653 GitStoreState::Local { fs, .. } => {
1654 let fs = fs.clone();
1655 cx.background_executor()
1656 .spawn(async move { fs.git_clone(&repo, &path).await })
1657 }
1658 GitStoreState::Remote {
1659 upstream_client,
1660 upstream_project_id,
1661 ..
1662 } => {
1663 if upstream_client.is_via_collab() {
1664 return Task::ready(Err(anyhow!(
1665 "Git Clone isn't supported for project guests"
1666 )));
1667 }
1668 let request = upstream_client.request(proto::GitClone {
1669 project_id: *upstream_project_id,
1670 abs_path: path.to_string_lossy().into_owned(),
1671 remote_repo: repo,
1672 });
1673
1674 cx.background_spawn(async move {
1675 let result = request.await?;
1676
1677 match result.success {
1678 true => Ok(()),
1679 false => Err(anyhow!("Git Clone failed")),
1680 }
1681 })
1682 }
1683 }
1684 }
1685
1686 async fn handle_update_repository(
1687 this: Entity<Self>,
1688 envelope: TypedEnvelope<proto::UpdateRepository>,
1689 mut cx: AsyncApp,
1690 ) -> Result<()> {
1691 this.update(&mut cx, |this, cx| {
1692 let path_style = this.worktree_store.read(cx).path_style();
1693 let mut update = envelope.payload;
1694
1695 let id = RepositoryId::from_proto(update.id);
1696 let client = this.upstream_client().context("no upstream client")?;
1697
1698 let mut repo_subscription = None;
1699 let repo = this.repositories.entry(id).or_insert_with(|| {
1700 let git_store = cx.weak_entity();
1701 let repo = cx.new(|cx| {
1702 Repository::remote(
1703 id,
1704 Path::new(&update.abs_path).into(),
1705 path_style,
1706 ProjectId(update.project_id),
1707 client,
1708 git_store,
1709 cx,
1710 )
1711 });
1712 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1713 cx.emit(GitStoreEvent::RepositoryAdded);
1714 repo
1715 });
1716 this._subscriptions.extend(repo_subscription);
1717
1718 repo.update(cx, {
1719 let update = update.clone();
1720 |repo, cx| repo.apply_remote_update(update, cx)
1721 })?;
1722
1723 this.active_repo_id.get_or_insert_with(|| {
1724 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1725 id
1726 });
1727
1728 if let Some((client, project_id)) = this.downstream_client() {
1729 update.project_id = project_id.to_proto();
1730 client.send(update).log_err();
1731 }
1732 Ok(())
1733 })?
1734 }
1735
1736 async fn handle_remove_repository(
1737 this: Entity<Self>,
1738 envelope: TypedEnvelope<proto::RemoveRepository>,
1739 mut cx: AsyncApp,
1740 ) -> Result<()> {
1741 this.update(&mut cx, |this, cx| {
1742 let mut update = envelope.payload;
1743 let id = RepositoryId::from_proto(update.id);
1744 this.repositories.remove(&id);
1745 if let Some((client, project_id)) = this.downstream_client() {
1746 update.project_id = project_id.to_proto();
1747 client.send(update).log_err();
1748 }
1749 if this.active_repo_id == Some(id) {
1750 this.active_repo_id = None;
1751 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1752 }
1753 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1754 })
1755 }
1756
1757 async fn handle_git_init(
1758 this: Entity<Self>,
1759 envelope: TypedEnvelope<proto::GitInit>,
1760 cx: AsyncApp,
1761 ) -> Result<proto::Ack> {
1762 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1763 let name = envelope.payload.fallback_branch_name;
1764 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1765 .await?;
1766
1767 Ok(proto::Ack {})
1768 }
1769
1770 async fn handle_git_clone(
1771 this: Entity<Self>,
1772 envelope: TypedEnvelope<proto::GitClone>,
1773 cx: AsyncApp,
1774 ) -> Result<proto::GitCloneResponse> {
1775 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1776 let repo_name = envelope.payload.remote_repo;
1777 let result = cx
1778 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1779 .await;
1780
1781 Ok(proto::GitCloneResponse {
1782 success: result.is_ok(),
1783 })
1784 }
1785
1786 async fn handle_fetch(
1787 this: Entity<Self>,
1788 envelope: TypedEnvelope<proto::Fetch>,
1789 mut cx: AsyncApp,
1790 ) -> Result<proto::RemoteMessageResponse> {
1791 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1792 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1793 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1794 let askpass_id = envelope.payload.askpass_id;
1795
1796 let askpass = make_remote_delegate(
1797 this,
1798 envelope.payload.project_id,
1799 repository_id,
1800 askpass_id,
1801 &mut cx,
1802 );
1803
1804 let remote_output = repository_handle
1805 .update(&mut cx, |repository_handle, cx| {
1806 repository_handle.fetch(fetch_options, askpass, cx)
1807 })?
1808 .await??;
1809
1810 Ok(proto::RemoteMessageResponse {
1811 stdout: remote_output.stdout,
1812 stderr: remote_output.stderr,
1813 })
1814 }
1815
1816 async fn handle_push(
1817 this: Entity<Self>,
1818 envelope: TypedEnvelope<proto::Push>,
1819 mut cx: AsyncApp,
1820 ) -> Result<proto::RemoteMessageResponse> {
1821 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1822 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1823
1824 let askpass_id = envelope.payload.askpass_id;
1825 let askpass = make_remote_delegate(
1826 this,
1827 envelope.payload.project_id,
1828 repository_id,
1829 askpass_id,
1830 &mut cx,
1831 );
1832
1833 let options = envelope
1834 .payload
1835 .options
1836 .as_ref()
1837 .map(|_| match envelope.payload.options() {
1838 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1839 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1840 });
1841
1842 let branch_name = envelope.payload.branch_name.into();
1843 let remote_name = envelope.payload.remote_name.into();
1844
1845 let remote_output = repository_handle
1846 .update(&mut cx, |repository_handle, cx| {
1847 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1848 })?
1849 .await??;
1850 Ok(proto::RemoteMessageResponse {
1851 stdout: remote_output.stdout,
1852 stderr: remote_output.stderr,
1853 })
1854 }
1855
1856 async fn handle_pull(
1857 this: Entity<Self>,
1858 envelope: TypedEnvelope<proto::Pull>,
1859 mut cx: AsyncApp,
1860 ) -> Result<proto::RemoteMessageResponse> {
1861 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1862 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1863 let askpass_id = envelope.payload.askpass_id;
1864 let askpass = make_remote_delegate(
1865 this,
1866 envelope.payload.project_id,
1867 repository_id,
1868 askpass_id,
1869 &mut cx,
1870 );
1871
1872 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1873 let remote_name = envelope.payload.remote_name.into();
1874 let rebase = envelope.payload.rebase;
1875
1876 let remote_message = repository_handle
1877 .update(&mut cx, |repository_handle, cx| {
1878 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1879 })?
1880 .await??;
1881
1882 Ok(proto::RemoteMessageResponse {
1883 stdout: remote_message.stdout,
1884 stderr: remote_message.stderr,
1885 })
1886 }
1887
1888 async fn handle_stage(
1889 this: Entity<Self>,
1890 envelope: TypedEnvelope<proto::Stage>,
1891 mut cx: AsyncApp,
1892 ) -> Result<proto::Ack> {
1893 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1894 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1895
1896 let entries = envelope
1897 .payload
1898 .paths
1899 .into_iter()
1900 .map(|path| RepoPath::new(&path))
1901 .collect::<Result<Vec<_>>>()?;
1902
1903 repository_handle
1904 .update(&mut cx, |repository_handle, cx| {
1905 repository_handle.stage_entries(entries, cx)
1906 })?
1907 .await?;
1908 Ok(proto::Ack {})
1909 }
1910
1911 async fn handle_unstage(
1912 this: Entity<Self>,
1913 envelope: TypedEnvelope<proto::Unstage>,
1914 mut cx: AsyncApp,
1915 ) -> Result<proto::Ack> {
1916 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1917 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1918
1919 let entries = envelope
1920 .payload
1921 .paths
1922 .into_iter()
1923 .map(|path| RepoPath::new(&path))
1924 .collect::<Result<Vec<_>>>()?;
1925
1926 repository_handle
1927 .update(&mut cx, |repository_handle, cx| {
1928 repository_handle.unstage_entries(entries, cx)
1929 })?
1930 .await?;
1931
1932 Ok(proto::Ack {})
1933 }
1934
1935 async fn handle_stash(
1936 this: Entity<Self>,
1937 envelope: TypedEnvelope<proto::Stash>,
1938 mut cx: AsyncApp,
1939 ) -> Result<proto::Ack> {
1940 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1941 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1942
1943 let entries = envelope
1944 .payload
1945 .paths
1946 .into_iter()
1947 .map(|path| RepoPath::new(&path))
1948 .collect::<Result<Vec<_>>>()?;
1949
1950 repository_handle
1951 .update(&mut cx, |repository_handle, cx| {
1952 repository_handle.stash_entries(entries, cx)
1953 })?
1954 .await?;
1955
1956 Ok(proto::Ack {})
1957 }
1958
1959 async fn handle_stash_pop(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::StashPop>,
1962 mut cx: AsyncApp,
1963 ) -> Result<proto::Ack> {
1964 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1965 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1966 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1967
1968 repository_handle
1969 .update(&mut cx, |repository_handle, cx| {
1970 repository_handle.stash_pop(stash_index, cx)
1971 })?
1972 .await?;
1973
1974 Ok(proto::Ack {})
1975 }
1976
1977 async fn handle_stash_apply(
1978 this: Entity<Self>,
1979 envelope: TypedEnvelope<proto::StashApply>,
1980 mut cx: AsyncApp,
1981 ) -> Result<proto::Ack> {
1982 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1983 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1984 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1985
1986 repository_handle
1987 .update(&mut cx, |repository_handle, cx| {
1988 repository_handle.stash_apply(stash_index, cx)
1989 })?
1990 .await?;
1991
1992 Ok(proto::Ack {})
1993 }
1994
1995 async fn handle_stash_drop(
1996 this: Entity<Self>,
1997 envelope: TypedEnvelope<proto::StashDrop>,
1998 mut cx: AsyncApp,
1999 ) -> Result<proto::Ack> {
2000 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2001 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2002 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2003
2004 repository_handle
2005 .update(&mut cx, |repository_handle, cx| {
2006 repository_handle.stash_drop(stash_index, cx)
2007 })?
2008 .await??;
2009
2010 Ok(proto::Ack {})
2011 }
2012
2013 async fn handle_set_index_text(
2014 this: Entity<Self>,
2015 envelope: TypedEnvelope<proto::SetIndexText>,
2016 mut cx: AsyncApp,
2017 ) -> Result<proto::Ack> {
2018 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2019 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2020 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2021
2022 repository_handle
2023 .update(&mut cx, |repository_handle, cx| {
2024 repository_handle.spawn_set_index_text_job(
2025 repo_path,
2026 envelope.payload.text,
2027 None,
2028 cx,
2029 )
2030 })?
2031 .await??;
2032 Ok(proto::Ack {})
2033 }
2034
2035 async fn handle_run_hook(
2036 this: Entity<Self>,
2037 envelope: TypedEnvelope<proto::RunGitHook>,
2038 mut cx: AsyncApp,
2039 ) -> Result<proto::Ack> {
2040 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2041 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2042 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2043 repository_handle
2044 .update(&mut cx, |repository_handle, cx| {
2045 repository_handle.run_hook(hook, cx)
2046 })?
2047 .await??;
2048 Ok(proto::Ack {})
2049 }
2050
2051 async fn handle_commit(
2052 this: Entity<Self>,
2053 envelope: TypedEnvelope<proto::Commit>,
2054 mut cx: AsyncApp,
2055 ) -> Result<proto::Ack> {
2056 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2057 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2058 let askpass_id = envelope.payload.askpass_id;
2059
2060 let askpass = make_remote_delegate(
2061 this,
2062 envelope.payload.project_id,
2063 repository_id,
2064 askpass_id,
2065 &mut cx,
2066 );
2067
2068 let message = SharedString::from(envelope.payload.message);
2069 let name = envelope.payload.name.map(SharedString::from);
2070 let email = envelope.payload.email.map(SharedString::from);
2071 let options = envelope.payload.options.unwrap_or_default();
2072
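// Note: an author override is only applied when both a name and an email were provided.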
2073 repository_handle
2074 .update(&mut cx, |repository_handle, cx| {
2075 repository_handle.commit(
2076 message,
2077 name.zip(email),
2078 CommitOptions {
2079 amend: options.amend,
2080 signoff: options.signoff,
2081 },
2082 askpass,
2083 cx,
2084 )
2085 })?
2086 .await??;
2087 Ok(proto::Ack {})
2088 }
2089
2090 async fn handle_get_remotes(
2091 this: Entity<Self>,
2092 envelope: TypedEnvelope<proto::GetRemotes>,
2093 mut cx: AsyncApp,
2094 ) -> Result<proto::GetRemotesResponse> {
2095 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2096 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2097
2098 let branch_name = envelope.payload.branch_name;
2099
2100 let remotes = repository_handle
2101 .update(&mut cx, |repository_handle, _| {
2102 repository_handle.get_remotes(branch_name)
2103 })?
2104 .await??;
2105
2106 Ok(proto::GetRemotesResponse {
2107 remotes: remotes
2108 .into_iter()
2109 .map(|remote| proto::get_remotes_response::Remote {
2110 name: remote.name.to_string(),
2111 })
2112 .collect::<Vec<_>>(),
2113 })
2114 }
2115
2116 async fn handle_get_worktrees(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::GitWorktreesResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123
2124 let worktrees = repository_handle
2125 .update(&mut cx, |repository_handle, _| {
2126 repository_handle.worktrees()
2127 })?
2128 .await??;
2129
2130 Ok(proto::GitWorktreesResponse {
2131 worktrees: worktrees
2132 .into_iter()
2133 .map(|worktree| worktree_to_proto(&worktree))
2134 .collect::<Vec<_>>(),
2135 })
2136 }
2137
2138 async fn handle_create_worktree(
2139 this: Entity<Self>,
2140 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2141 mut cx: AsyncApp,
2142 ) -> Result<proto::Ack> {
2143 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2144 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2145 let directory = PathBuf::from(envelope.payload.directory);
2146 let name = envelope.payload.name;
2147 let commit = envelope.payload.commit;
2148
2149 repository_handle
2150 .update(&mut cx, |repository_handle, _| {
2151 repository_handle.create_worktree(name, directory, commit)
2152 })?
2153 .await??;
2154
2155 Ok(proto::Ack {})
2156 }
2157
2158 async fn handle_get_branches(
2159 this: Entity<Self>,
2160 envelope: TypedEnvelope<proto::GitGetBranches>,
2161 mut cx: AsyncApp,
2162 ) -> Result<proto::GitBranchesResponse> {
2163 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2164 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2165
2166 let branches = repository_handle
2167 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2168 .await??;
2169
2170 Ok(proto::GitBranchesResponse {
2171 branches: branches
2172 .into_iter()
2173 .map(|branch| branch_to_proto(&branch))
2174 .collect::<Vec<_>>(),
2175 })
2176 }

2177 async fn handle_get_default_branch(
2178 this: Entity<Self>,
2179 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2180 mut cx: AsyncApp,
2181 ) -> Result<proto::GetDefaultBranchResponse> {
2182 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2183 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2184
2185 let branch = repository_handle
2186 .update(&mut cx, |repository_handle, _| {
2187 repository_handle.default_branch()
2188 })?
2189 .await??
2190 .map(Into::into);
2191
2192 Ok(proto::GetDefaultBranchResponse { branch })
2193 }

2194 async fn handle_create_branch(
2195 this: Entity<Self>,
2196 envelope: TypedEnvelope<proto::GitCreateBranch>,
2197 mut cx: AsyncApp,
2198 ) -> Result<proto::Ack> {
2199 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2200 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2201 let branch_name = envelope.payload.branch_name;
2202
2203 repository_handle
2204 .update(&mut cx, |repository_handle, _| {
2205 repository_handle.create_branch(branch_name, None)
2206 })?
2207 .await??;
2208
2209 Ok(proto::Ack {})
2210 }
2211
2212 async fn handle_change_branch(
2213 this: Entity<Self>,
2214 envelope: TypedEnvelope<proto::GitChangeBranch>,
2215 mut cx: AsyncApp,
2216 ) -> Result<proto::Ack> {
2217 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2218 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2219 let branch_name = envelope.payload.branch_name;
2220
2221 repository_handle
2222 .update(&mut cx, |repository_handle, _| {
2223 repository_handle.change_branch(branch_name)
2224 })?
2225 .await??;
2226
2227 Ok(proto::Ack {})
2228 }
2229
2230 async fn handle_rename_branch(
2231 this: Entity<Self>,
2232 envelope: TypedEnvelope<proto::GitRenameBranch>,
2233 mut cx: AsyncApp,
2234 ) -> Result<proto::Ack> {
2235 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2236 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2237 let branch = envelope.payload.branch;
2238 let new_name = envelope.payload.new_name;
2239
2240 repository_handle
2241 .update(&mut cx, |repository_handle, _| {
2242 repository_handle.rename_branch(branch, new_name)
2243 })?
2244 .await??;
2245
2246 Ok(proto::Ack {})
2247 }
2248
2249 async fn handle_show(
2250 this: Entity<Self>,
2251 envelope: TypedEnvelope<proto::GitShow>,
2252 mut cx: AsyncApp,
2253 ) -> Result<proto::GitCommitDetails> {
2254 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2255 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2256
2257 let commit = repository_handle
2258 .update(&mut cx, |repository_handle, _| {
2259 repository_handle.show(envelope.payload.commit)
2260 })?
2261 .await??;
2262 Ok(proto::GitCommitDetails {
2263 sha: commit.sha.into(),
2264 message: commit.message.into(),
2265 commit_timestamp: commit.commit_timestamp,
2266 author_email: commit.author_email.into(),
2267 author_name: commit.author_name.into(),
2268 })
2269 }
2270
2271 async fn handle_load_commit_diff(
2272 this: Entity<Self>,
2273 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2274 mut cx: AsyncApp,
2275 ) -> Result<proto::LoadCommitDiffResponse> {
2276 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2277 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2278
2279 let commit_diff = repository_handle
2280 .update(&mut cx, |repository_handle, _| {
2281 repository_handle.load_commit_diff(envelope.payload.commit)
2282 })?
2283 .await??;
2284 Ok(proto::LoadCommitDiffResponse {
2285 files: commit_diff
2286 .files
2287 .into_iter()
2288 .map(|file| proto::CommitFile {
2289 path: file.path.to_proto(),
2290 old_text: file.old_text,
2291 new_text: file.new_text,
2292 })
2293 .collect(),
2294 })
2295 }
2296
2297 async fn handle_reset(
2298 this: Entity<Self>,
2299 envelope: TypedEnvelope<proto::GitReset>,
2300 mut cx: AsyncApp,
2301 ) -> Result<proto::Ack> {
2302 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2303 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2304
2305 let mode = match envelope.payload.mode() {
2306 git_reset::ResetMode::Soft => ResetMode::Soft,
2307 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2308 };
2309
2310 repository_handle
2311 .update(&mut cx, |repository_handle, cx| {
2312 repository_handle.reset(envelope.payload.commit, mode, cx)
2313 })?
2314 .await??;
2315 Ok(proto::Ack {})
2316 }
2317
2318 async fn handle_checkout_files(
2319 this: Entity<Self>,
2320 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2321 mut cx: AsyncApp,
2322 ) -> Result<proto::Ack> {
2323 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2324 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2325 let paths = envelope
2326 .payload
2327 .paths
2328 .iter()
2329 .map(|s| RepoPath::from_proto(s))
2330 .collect::<Result<Vec<_>>>()?;
2331
2332 repository_handle
2333 .update(&mut cx, |repository_handle, cx| {
2334 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2335 })?
2336 .await?;
2337 Ok(proto::Ack {})
2338 }
2339
2340 async fn handle_open_commit_message_buffer(
2341 this: Entity<Self>,
2342 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2343 mut cx: AsyncApp,
2344 ) -> Result<proto::OpenBufferResponse> {
2345 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2346 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2347 let buffer = repository
2348 .update(&mut cx, |repository, cx| {
2349 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2350 })?
2351 .await?;
2352
2353 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2354 this.update(&mut cx, |this, cx| {
2355 this.buffer_store.update(cx, |buffer_store, cx| {
2356 buffer_store
2357 .create_buffer_for_peer(
2358 &buffer,
2359 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2360 cx,
2361 )
2362 .detach_and_log_err(cx);
2363 })
2364 })?;
2365
2366 Ok(proto::OpenBufferResponse {
2367 buffer_id: buffer_id.to_proto(),
2368 })
2369 }
2370
2371 async fn handle_askpass(
2372 this: Entity<Self>,
2373 envelope: TypedEnvelope<proto::AskPassRequest>,
2374 mut cx: AsyncApp,
2375 ) -> Result<proto::AskPassResponse> {
2376 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2377 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2378
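// Take the delegate out of the map while prompting, then reinsert it below so it can serve
// further prompts for the same git operation.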
2379 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2380 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2381 debug_panic!("no askpass found");
2382 anyhow::bail!("no askpass found");
2383 };
2384
2385 let response = askpass
2386 .ask_password(envelope.payload.prompt)
2387 .await
2388 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2389
2390 delegates
2391 .lock()
2392 .insert(envelope.payload.askpass_id, askpass);
2393
2394 // Note: the askpass response is decrypted here and returned over the wire in plain text; the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker is how we acknowledge that.
2395 Ok(proto::AskPassResponse {
2396 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2397 })
2398 }
2399
2400 async fn handle_check_for_pushed_commits(
2401 this: Entity<Self>,
2402 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2403 mut cx: AsyncApp,
2404 ) -> Result<proto::CheckForPushedCommitsResponse> {
2405 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2406 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2407
2408 let branches = repository_handle
2409 .update(&mut cx, |repository_handle, _| {
2410 repository_handle.check_for_pushed_commits()
2411 })?
2412 .await??;
2413 Ok(proto::CheckForPushedCommitsResponse {
2414 pushed_to: branches
2415 .into_iter()
2416 .map(|commit| commit.to_string())
2417 .collect(),
2418 })
2419 }
2420
2421 async fn handle_git_diff(
2422 this: Entity<Self>,
2423 envelope: TypedEnvelope<proto::GitDiff>,
2424 mut cx: AsyncApp,
2425 ) -> Result<proto::GitDiffResponse> {
2426 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2427 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2428 let diff_type = match envelope.payload.diff_type() {
2429 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2430 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2431 };
2432
2433 let mut diff = repository_handle
2434 .update(&mut cx, |repository_handle, cx| {
2435 repository_handle.diff(diff_type, cx)
2436 })?
2437 .await??;
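// Cap the diff we send back at roughly one megabyte so a huge diff can't blow up the RPC response.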
2438 const ONE_MB: usize = 1_000_000;
2439 if diff.len() > ONE_MB {
2440 diff = diff.chars().take(ONE_MB).collect()
2441 }
2442
2443 Ok(proto::GitDiffResponse { diff })
2444 }
2445
2446 async fn handle_tree_diff(
2447 this: Entity<Self>,
2448 request: TypedEnvelope<proto::GetTreeDiff>,
2449 mut cx: AsyncApp,
2450 ) -> Result<proto::GetTreeDiffResponse> {
2451 let repository_id = RepositoryId(request.payload.repository_id);
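// `MergeBase` diffs against the merge base of `base` and `head`, while `Since` compares `base` and `head` directly.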
2452 let diff_type = if request.payload.is_merge {
2453 DiffTreeType::MergeBase {
2454 base: request.payload.base.into(),
2455 head: request.payload.head.into(),
2456 }
2457 } else {
2458 DiffTreeType::Since {
2459 base: request.payload.base.into(),
2460 head: request.payload.head.into(),
2461 }
2462 };
2463
2464 let diff = this
2465 .update(&mut cx, |this, cx| {
2466 let repository = this.repositories().get(&repository_id)?;
2467 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2468 })?
2469 .context("missing repository")?
2470 .await??;
2471
2472 Ok(proto::GetTreeDiffResponse {
2473 entries: diff
2474 .entries
2475 .into_iter()
2476 .map(|(path, status)| proto::TreeDiffStatus {
2477 path: path.as_ref().to_proto(),
2478 status: match status {
2479 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2480 TreeDiffStatus::Modified { .. } => {
2481 proto::tree_diff_status::Status::Modified.into()
2482 }
2483 TreeDiffStatus::Deleted { .. } => {
2484 proto::tree_diff_status::Status::Deleted.into()
2485 }
2486 },
2487 oid: match status {
2488 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2489 Some(old.to_string())
2490 }
2491 TreeDiffStatus::Added => None,
2492 },
2493 })
2494 .collect(),
2495 })
2496 }
2497
2498 async fn handle_get_blob_content(
2499 this: Entity<Self>,
2500 request: TypedEnvelope<proto::GetBlobContent>,
2501 mut cx: AsyncApp,
2502 ) -> Result<proto::GetBlobContentResponse> {
2503 let oid = git::Oid::from_str(&request.payload.oid)?;
2504 let repository_id = RepositoryId(request.payload.repository_id);
2505 let content = this
2506 .update(&mut cx, |this, cx| {
2507 let repository = this.repositories().get(&repository_id)?;
2508 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2509 })?
2510 .context("missing repository")?
2511 .await?;
2512 Ok(proto::GetBlobContentResponse { content })
2513 }
2514
2515 async fn handle_open_unstaged_diff(
2516 this: Entity<Self>,
2517 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2518 mut cx: AsyncApp,
2519 ) -> Result<proto::OpenUnstagedDiffResponse> {
2520 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2521 let diff = this
2522 .update(&mut cx, |this, cx| {
2523 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2524 Some(this.open_unstaged_diff(buffer, cx))
2525 })?
2526 .context("missing buffer")?
2527 .await?;
2528 this.update(&mut cx, |this, _| {
2529 let shared_diffs = this
2530 .shared_diffs
2531 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2532 .or_default();
2533 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2534 })?;
2535 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2536 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2537 }
2538
2539 async fn handle_open_uncommitted_diff(
2540 this: Entity<Self>,
2541 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2542 mut cx: AsyncApp,
2543 ) -> Result<proto::OpenUncommittedDiffResponse> {
2544 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2545 let diff = this
2546 .update(&mut cx, |this, cx| {
2547 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2548 Some(this.open_uncommitted_diff(buffer, cx))
2549 })?
2550 .context("missing buffer")?
2551 .await?;
2552 this.update(&mut cx, |this, _| {
2553 let shared_diffs = this
2554 .shared_diffs
2555 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2556 .or_default();
2557 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2558 })?;
2559 diff.read_with(&cx, |diff, cx| {
2560 use proto::open_uncommitted_diff_response::Mode;
2561
2562 let unstaged_diff = diff.secondary_diff();
2563 let index_snapshot = unstaged_diff.and_then(|diff| {
2564 let diff = diff.read(cx);
2565 diff.base_text_exists().then(|| diff.base_text())
2566 });
2567
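// Pick a response mode that avoids sending redundant text: when the index snapshot is the
// same as HEAD, only the committed text is sent and the client reuses it for the staged side.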
2568 let mode;
2569 let staged_text;
2570 let committed_text;
2571 if diff.base_text_exists() {
2572 let committed_snapshot = diff.base_text();
2573 committed_text = Some(committed_snapshot.text());
2574 if let Some(index_text) = index_snapshot {
2575 if index_text.remote_id() == committed_snapshot.remote_id() {
2576 mode = Mode::IndexMatchesHead;
2577 staged_text = None;
2578 } else {
2579 mode = Mode::IndexAndHead;
2580 staged_text = Some(index_text.text());
2581 }
2582 } else {
2583 mode = Mode::IndexAndHead;
2584 staged_text = None;
2585 }
2586 } else {
2587 mode = Mode::IndexAndHead;
2588 committed_text = None;
2589 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2590 }
2591
2592 proto::OpenUncommittedDiffResponse {
2593 committed_text,
2594 staged_text,
2595 mode: mode.into(),
2596 }
2597 })
2598 }
2599
2600 async fn handle_update_diff_bases(
2601 this: Entity<Self>,
2602 request: TypedEnvelope<proto::UpdateDiffBases>,
2603 mut cx: AsyncApp,
2604 ) -> Result<()> {
2605 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2606 this.update(&mut cx, |this, cx| {
2607 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2608 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2609 {
2610 let buffer = buffer.read(cx).text_snapshot();
2611 diff_state.update(cx, |diff_state, cx| {
2612 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2613 })
2614 }
2615 })
2616 }
2617
2618 async fn handle_blame_buffer(
2619 this: Entity<Self>,
2620 envelope: TypedEnvelope<proto::BlameBuffer>,
2621 mut cx: AsyncApp,
2622 ) -> Result<proto::BlameBufferResponse> {
2623 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2624 let version = deserialize_version(&envelope.payload.version);
2625 let buffer = this.read_with(&cx, |this, cx| {
2626 this.buffer_store.read(cx).get_existing(buffer_id)
2627 })??;
2628 buffer
2629 .update(&mut cx, |buffer, _| {
2630 buffer.wait_for_version(version.clone())
2631 })?
2632 .await?;
2633 let blame = this
2634 .update(&mut cx, |this, cx| {
2635 this.blame_buffer(&buffer, Some(version), cx)
2636 })?
2637 .await?;
2638 Ok(serialize_blame_buffer_response(blame))
2639 }
2640
2641 async fn handle_get_permalink_to_line(
2642 this: Entity<Self>,
2643 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2644 mut cx: AsyncApp,
2645 ) -> Result<proto::GetPermalinkToLineResponse> {
2646 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2647 // let version = deserialize_version(&envelope.payload.version);
2648 let selection = {
2649 let proto_selection = envelope
2650 .payload
2651 .selection
2652 .context("no selection to get permalink for defined")?;
2653 proto_selection.start as u32..proto_selection.end as u32
2654 };
2655 let buffer = this.read_with(&cx, |this, cx| {
2656 this.buffer_store.read(cx).get_existing(buffer_id)
2657 })??;
2658 let permalink = this
2659 .update(&mut cx, |this, cx| {
2660 this.get_permalink_to_line(&buffer, selection, cx)
2661 })?
2662 .await?;
2663 Ok(proto::GetPermalinkToLineResponse {
2664 permalink: permalink.to_string(),
2665 })
2666 }
2667
2668 fn repository_for_request(
2669 this: &Entity<Self>,
2670 id: RepositoryId,
2671 cx: &mut AsyncApp,
2672 ) -> Result<Entity<Repository>> {
2673 this.read_with(cx, |this, _| {
2674 this.repositories
2675 .get(&id)
2676 .context("missing repository handle")
2677 .cloned()
2678 })?
2679 }
2680
2681 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2682 self.repositories
2683 .iter()
2684 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2685 .collect()
2686 }
2687
2688 fn process_updated_entries(
2689 &self,
2690 worktree: &Entity<Worktree>,
2691 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2692 cx: &mut App,
2693 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2694 let path_style = worktree.read(cx).path_style();
2695 let mut repo_paths = self
2696 .repositories
2697 .values()
2698 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2699 .collect::<Vec<_>>();
2700 let mut entries: Vec<_> = updated_entries
2701 .iter()
2702 .map(|(path, _, _)| path.clone())
2703 .collect();
2704 entries.sort();
2705 let worktree = worktree.read(cx);
2706
2707 let entries = entries
2708 .into_iter()
2709 .map(|path| worktree.absolutize(&path))
2710 .collect::<Arc<[_]>>();
2711
2712 let executor = cx.background_executor().clone();
2713 cx.background_executor().spawn(async move {
2714 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2715 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2716 let mut tasks = FuturesOrdered::new();
2717 for (repo_path, repo) in repo_paths.into_iter().rev() {
2718 let entries = entries.clone();
2719 let task = executor.spawn(async move {
2720 // Find all repository paths that belong to this repo
2721 let mut ix = entries.partition_point(|path| path < &*repo_path);
2722 if ix == entries.len() {
2723 return None;
2724 };
2725
2726 let mut paths = Vec::new();
2727 // All paths prefixed by a given repo form a contiguous range in the sorted list.
2728 while let Some(path) = entries.get(ix)
2729 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2730 &repo_path, path, path_style,
2731 )
2732 {
2733 paths.push((repo_path, ix));
2734 ix += 1;
2735 }
2736 if paths.is_empty() {
2737 None
2738 } else {
2739 Some((repo, paths))
2740 }
2741 });
2742 tasks.push_back(task);
2743 }
2744
2745 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2746 let mut path_was_used = vec![false; entries.len()];
2747 let tasks = tasks.collect::<Vec<_>>().await;
2748 // The tasks were created from the repositories in reverse sorted order, so more deeply
2749 // nested (more specific) repositories come first; each path is assigned to its innermost repository.
2750 for t in tasks {
2751 let Some((repo, paths)) = t else {
2752 continue;
2753 };
2754 let entry = paths_by_git_repo.entry(repo).or_default();
2755 for (repo_path, ix) in paths {
2756 if path_was_used[ix] {
2757 continue;
2758 }
2759 path_was_used[ix] = true;
2760 entry.push(repo_path);
2761 }
2762 }
2763
2764 paths_by_git_repo
2765 })
2766 }
2767}
2768
2769impl BufferGitState {
2770 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2771 Self {
2772 unstaged_diff: Default::default(),
2773 uncommitted_diff: Default::default(),
2774 recalculate_diff_task: Default::default(),
2775 language: Default::default(),
2776 language_registry: Default::default(),
2777 recalculating_tx: postage::watch::channel_with(false).0,
2778 hunk_staging_operation_count: 0,
2779 hunk_staging_operation_count_as_of_write: 0,
2780 head_text: Default::default(),
2781 index_text: Default::default(),
2782 head_changed: Default::default(),
2783 index_changed: Default::default(),
2784 language_changed: Default::default(),
2785 conflict_updated_futures: Default::default(),
2786 conflict_set: Default::default(),
2787 reparse_conflict_markers_task: Default::default(),
2788 }
2789 }
2790
2791 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2792 self.language = buffer.read(cx).language().cloned();
2793 self.language_changed = true;
2794 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2795 }
2796
2797 fn reparse_conflict_markers(
2798 &mut self,
2799 buffer: text::BufferSnapshot,
2800 cx: &mut Context<Self>,
2801 ) -> oneshot::Receiver<()> {
2802 let (tx, rx) = oneshot::channel();
2803
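// If the conflict set has been dropped there is nothing to reparse; dropping the sender
// makes the returned receiver resolve as canceled.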
2804 let Some(conflict_set) = self
2805 .conflict_set
2806 .as_ref()
2807 .and_then(|conflict_set| conflict_set.upgrade())
2808 else {
2809 return rx;
2810 };
2811
2812 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2813 if conflict_set.has_conflict {
2814 Some(conflict_set.snapshot())
2815 } else {
2816 None
2817 }
2818 });
2819
2820 if let Some(old_snapshot) = old_snapshot {
2821 self.conflict_updated_futures.push(tx);
2822 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2823 let (snapshot, changed_range) = cx
2824 .background_spawn(async move {
2825 let new_snapshot = ConflictSet::parse(&buffer);
2826 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2827 (new_snapshot, changed_range)
2828 })
2829 .await;
2830 this.update(cx, |this, cx| {
2831 if let Some(conflict_set) = &this.conflict_set {
2832 conflict_set
2833 .update(cx, |conflict_set, cx| {
2834 conflict_set.set_snapshot(snapshot, changed_range, cx);
2835 })
2836 .ok();
2837 }
2838 let futures = std::mem::take(&mut this.conflict_updated_futures);
2839 for tx in futures {
2840 tx.send(()).ok();
2841 }
2842 })
2843 }))
2844 }
2845
2846 rx
2847 }
2848
2849 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2850 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2851 }
2852
2853 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2854 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2855 }
2856
2857 fn handle_base_texts_updated(
2858 &mut self,
2859 buffer: text::BufferSnapshot,
2860 message: proto::UpdateDiffBases,
2861 cx: &mut Context<Self>,
2862 ) {
2863 use proto::update_diff_bases::Mode;
2864
2865 let Some(mode) = Mode::from_i32(message.mode) else {
2866 return;
2867 };
2868
2869 let diff_bases_change = match mode {
2870 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2871 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2872 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2873 Mode::IndexAndHead => DiffBasesChange::SetEach {
2874 index: message.staged_text,
2875 head: message.committed_text,
2876 },
2877 };
2878
2879 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2880 }
2881
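/// Returns a future that resolves once the in-flight diff recalculation (if any) finishes,
/// or `None` when no recalculation is currently running.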
2882 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2883 if *self.recalculating_tx.borrow() {
2884 let mut rx = self.recalculating_tx.subscribe();
2885 Some(async move {
2886 loop {
2887 let is_recalculating = rx.recv().await;
2888 if is_recalculating != Some(true) {
2889 break;
2890 }
2891 }
2892 })
2893 } else {
2894 None
2895 }
2896 }
2897
2898 fn diff_bases_changed(
2899 &mut self,
2900 buffer: text::BufferSnapshot,
2901 diff_bases_change: Option<DiffBasesChange>,
2902 cx: &mut Context<Self>,
2903 ) {
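// Base texts have their line endings normalized so that CRLF/LF differences alone don't produce diff hunks.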
2904 match diff_bases_change {
2905 Some(DiffBasesChange::SetIndex(index)) => {
2906 self.index_text = index.map(|mut index| {
2907 text::LineEnding::normalize(&mut index);
2908 Arc::new(index)
2909 });
2910 self.index_changed = true;
2911 }
2912 Some(DiffBasesChange::SetHead(head)) => {
2913 self.head_text = head.map(|mut head| {
2914 text::LineEnding::normalize(&mut head);
2915 Arc::new(head)
2916 });
2917 self.head_changed = true;
2918 }
2919 Some(DiffBasesChange::SetBoth(text)) => {
2920 let text = text.map(|mut text| {
2921 text::LineEnding::normalize(&mut text);
2922 Arc::new(text)
2923 });
2924 self.head_text = text.clone();
2925 self.index_text = text;
2926 self.head_changed = true;
2927 self.index_changed = true;
2928 }
2929 Some(DiffBasesChange::SetEach { index, head }) => {
2930 self.index_text = index.map(|mut index| {
2931 text::LineEnding::normalize(&mut index);
2932 Arc::new(index)
2933 });
2934 self.index_changed = true;
2935 self.head_text = head.map(|mut head| {
2936 text::LineEnding::normalize(&mut head);
2937 Arc::new(head)
2938 });
2939 self.head_changed = true;
2940 }
2941 None => {}
2942 }
2943
2944 self.recalculate_diffs(buffer, cx)
2945 }
2946
2947 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2948 *self.recalculating_tx.borrow_mut() = true;
2949
2950 let language = self.language.clone();
2951 let language_registry = self.language_registry.clone();
2952 let unstaged_diff = self.unstaged_diff();
2953 let uncommitted_diff = self.uncommitted_diff();
2954 let head = self.head_text.clone();
2955 let index = self.index_text.clone();
2956 let index_changed = self.index_changed;
2957 let head_changed = self.head_changed;
2958 let language_changed = self.language_changed;
2959 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
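// When the index and HEAD texts share the same Arc, the uncommitted diff is identical to the
// unstaged diff and is reused below instead of being recomputed.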
2960 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2961 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2962 (None, None) => true,
2963 _ => false,
2964 };
2965 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2966 log::debug!(
2967 "start recalculating diffs for buffer {}",
2968 buffer.remote_id()
2969 );
2970
2971 let mut new_unstaged_diff = None;
2972 if let Some(unstaged_diff) = &unstaged_diff {
2973 new_unstaged_diff = Some(
2974 BufferDiff::update_diff(
2975 unstaged_diff.clone(),
2976 buffer.clone(),
2977 index,
2978 index_changed,
2979 language_changed,
2980 language.clone(),
2981 language_registry.clone(),
2982 cx,
2983 )
2984 .await?,
2985 );
2986 }
2987
2988 // Dropping BufferDiff can be expensive, so yield back to the event loop
2989 // for a bit
2990 yield_now().await;
2991
2992 let mut new_uncommitted_diff = None;
2993 if let Some(uncommitted_diff) = &uncommitted_diff {
2994 new_uncommitted_diff = if index_matches_head {
2995 new_unstaged_diff.clone()
2996 } else {
2997 Some(
2998 BufferDiff::update_diff(
2999 uncommitted_diff.clone(),
3000 buffer.clone(),
3001 head,
3002 head_changed,
3003 language_changed,
3004 language.clone(),
3005 language_registry.clone(),
3006 cx,
3007 )
3008 .await?,
3009 )
3010 }
3011 }
3012
3013 // Dropping BufferDiff can be expensive, so yield back to the event loop
3014 // for a bit
3015 yield_now().await;
3016
3017 let cancel = this.update(cx, |this, _| {
3018 // This checks whether all pending stage/unstage operations
3019 // have quiesced (i.e. both the corresponding write and the
3020 // read of that write have completed). If not, then we cancel
3021 // this recalculation attempt to avoid invalidating pending
3022 // state too quickly; another recalculation will come along
3023 // later and clear the pending state once the state of the index has settled.
3024 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3025 *this.recalculating_tx.borrow_mut() = false;
3026 true
3027 } else {
3028 false
3029 }
3030 })?;
3031 if cancel {
3032 log::debug!(
3033 concat!(
3034 "aborting recalculating diffs for buffer {}",
3035 "due to subsequent hunk operations",
3036 ),
3037 buffer.remote_id()
3038 );
3039 return Ok(());
3040 }
3041
3042 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3043 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3044 {
3045 unstaged_diff.update(cx, |diff, cx| {
3046 if language_changed {
3047 diff.language_changed(cx);
3048 }
3049 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3050 })?
3051 } else {
3052 None
3053 };
3054
3055 yield_now().await;
3056
3057 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3058 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3059 {
3060 uncommitted_diff.update(cx, |diff, cx| {
3061 if language_changed {
3062 diff.language_changed(cx);
3063 }
3064 diff.set_snapshot_with_secondary(
3065 new_uncommitted_diff,
3066 &buffer,
3067 unstaged_changed_range,
3068 true,
3069 cx,
3070 );
3071 })?;
3072 }
3073
3074 log::debug!(
3075 "finished recalculating diffs for buffer {}",
3076 buffer.remote_id()
3077 );
3078
3079 if let Some(this) = this.upgrade() {
3080 this.update(cx, |this, _| {
3081 this.index_changed = false;
3082 this.head_changed = false;
3083 this.language_changed = false;
3084 *this.recalculating_tx.borrow_mut() = false;
3085 })?;
3086 }
3087
3088 Ok(())
3089 }));
3090 }
3091}
3092
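/// Builds an askpass delegate that forwards credential prompts to the downstream client via
/// `proto::AskPassRequest` and relays the response back to the local git command, zeroizing
/// the plaintext afterwards.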
3093fn make_remote_delegate(
3094 this: Entity<GitStore>,
3095 project_id: u64,
3096 repository_id: RepositoryId,
3097 askpass_id: u64,
3098 cx: &mut AsyncApp,
3099) -> AskPassDelegate {
3100 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3101 this.update(cx, |this, cx| {
3102 let Some((client, _)) = this.downstream_client() else {
3103 return;
3104 };
3105 let response = client.request(proto::AskPassRequest {
3106 project_id,
3107 repository_id: repository_id.to_proto(),
3108 askpass_id,
3109 prompt,
3110 });
3111 cx.spawn(async move |_, _| {
3112 let mut response = response.await?.response;
3113 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3114 .ok();
3115 response.zeroize();
3116 anyhow::Ok(())
3117 })
3118 .detach_and_log_err(cx);
3119 })
3120 .log_err();
3121 })
3122}
3123
3124impl RepositoryId {
3125 pub fn to_proto(self) -> u64 {
3126 self.0
3127 }
3128
3129 pub fn from_proto(id: u64) -> Self {
3130 RepositoryId(id)
3131 }
3132}
3133
3134impl RepositorySnapshot {
3135 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3136 Self {
3137 id,
3138 statuses_by_path: Default::default(),
3139 work_directory_abs_path,
3140 branch: None,
3141 head_commit: None,
3142 scan_id: 0,
3143 merge: Default::default(),
3144 remote_origin_url: None,
3145 remote_upstream_url: None,
3146 stash_entries: Default::default(),
3147 path_style,
3148 }
3149 }
3150
3151 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3152 proto::UpdateRepository {
3153 branch_summary: self.branch.as_ref().map(branch_to_proto),
3154 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3155 updated_statuses: self
3156 .statuses_by_path
3157 .iter()
3158 .map(|entry| entry.to_proto())
3159 .collect(),
3160 removed_statuses: Default::default(),
3161 current_merge_conflicts: self
3162 .merge
3163 .conflicted_paths
3164 .iter()
3165 .map(|repo_path| repo_path.to_proto())
3166 .collect(),
3167 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3168 project_id,
3169 id: self.id.to_proto(),
3170 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3171 entry_ids: vec![self.id.to_proto()],
3172 scan_id: self.scan_id,
3173 is_last_update: true,
3174 stash_entries: self
3175 .stash_entries
3176 .entries
3177 .iter()
3178 .map(stash_to_proto)
3179 .collect(),
3180 }
3181 }
3182
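/// Computes an incremental update relative to `old` by merge-joining the two status lists,
/// both of which are sorted by repository path.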
3183 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3184 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3185 let mut removed_statuses: Vec<String> = Vec::new();
3186
3187 let mut new_statuses = self.statuses_by_path.iter().peekable();
3188 let mut old_statuses = old.statuses_by_path.iter().peekable();
3189
3190 let mut current_new_entry = new_statuses.next();
3191 let mut current_old_entry = old_statuses.next();
3192 loop {
3193 match (current_new_entry, current_old_entry) {
3194 (Some(new_entry), Some(old_entry)) => {
3195 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3196 Ordering::Less => {
3197 updated_statuses.push(new_entry.to_proto());
3198 current_new_entry = new_statuses.next();
3199 }
3200 Ordering::Equal => {
3201 if new_entry.status != old_entry.status {
3202 updated_statuses.push(new_entry.to_proto());
3203 }
3204 current_old_entry = old_statuses.next();
3205 current_new_entry = new_statuses.next();
3206 }
3207 Ordering::Greater => {
3208 removed_statuses.push(old_entry.repo_path.to_proto());
3209 current_old_entry = old_statuses.next();
3210 }
3211 }
3212 }
3213 (None, Some(old_entry)) => {
3214 removed_statuses.push(old_entry.repo_path.to_proto());
3215 current_old_entry = old_statuses.next();
3216 }
3217 (Some(new_entry), None) => {
3218 updated_statuses.push(new_entry.to_proto());
3219 current_new_entry = new_statuses.next();
3220 }
3221 (None, None) => break,
3222 }
3223 }
3224
3225 proto::UpdateRepository {
3226 branch_summary: self.branch.as_ref().map(branch_to_proto),
3227 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3228 updated_statuses,
3229 removed_statuses,
3230 current_merge_conflicts: self
3231 .merge
3232 .conflicted_paths
3233 .iter()
3234 .map(|path| path.to_proto())
3235 .collect(),
3236 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3237 project_id,
3238 id: self.id.to_proto(),
3239 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3240 entry_ids: vec![],
3241 scan_id: self.scan_id,
3242 is_last_update: true,
3243 stash_entries: self
3244 .stash_entries
3245 .entries
3246 .iter()
3247 .map(stash_to_proto)
3248 .collect(),
3249 }
3250 }
3251
3252 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3253 self.statuses_by_path.iter().cloned()
3254 }
3255
3256 pub fn status_summary(&self) -> GitSummary {
3257 self.statuses_by_path.summary().item_summary
3258 }
3259
3260 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3261 self.statuses_by_path
3262 .get(&PathKey(path.as_ref().clone()), ())
3263 .cloned()
3264 }
3265
3266 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3267 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3268 }
3269
3270 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3271 self.path_style
3272 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3273 .unwrap()
3274 .into()
3275 }
3276
3277 #[inline]
3278 fn abs_path_to_repo_path_inner(
3279 work_directory_abs_path: &Path,
3280 abs_path: &Path,
3281 path_style: PathStyle,
3282 ) -> Option<RepoPath> {
3283 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3284 Some(RepoPath::from_rel_path(&rel_path))
3285 }
3286
3287 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3288 self.merge.conflicted_paths.contains(repo_path)
3289 }
3290
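/// Returns true if the path was conflicted when the merge heads last changed, or if its
/// current status is conflicted.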
3291 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3292 let had_conflict_on_last_merge_head_change =
3293 self.merge.conflicted_paths.contains(repo_path);
3294 let has_conflict_currently = self
3295 .status_for_path(repo_path)
3296 .is_some_and(|entry| entry.status.is_conflicted());
3297 had_conflict_on_last_merge_head_change || has_conflict_currently
3298 }
3299
3300 /// The name displayed for this repository in the repository selector.
3301 pub fn display_name(&self) -> SharedString {
3302 self.work_directory_abs_path
3303 .file_name()
3304 .unwrap_or_default()
3305 .to_string_lossy()
3306 .to_string()
3307 .into()
3308 }
3309}
3310
3311pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3312 proto::StashEntry {
3313 oid: entry.oid.as_bytes().to_vec(),
3314 message: entry.message.clone(),
3315 branch: entry.branch.clone(),
3316 index: entry.index as u64,
3317 timestamp: entry.timestamp,
3318 }
3319}
3320
3321pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3322 Ok(StashEntry {
3323 oid: Oid::from_bytes(&entry.oid)?,
3324 message: entry.message.clone(),
3325 index: entry.index as usize,
3326 branch: entry.branch.clone(),
3327 timestamp: entry.timestamp,
3328 })
3329}
3330
3331impl MergeDetails {
3332 async fn load(
3333 backend: &Arc<dyn GitRepository>,
3334 status: &SumTree<StatusEntry>,
3335 prev_snapshot: &RepositorySnapshot,
3336 ) -> Result<(MergeDetails, bool)> {
3337 log::debug!("load merge details");
3338 let message = backend.merge_message().await;
3339 let heads = backend
3340 .revparse_batch(vec![
3341 "MERGE_HEAD".into(),
3342 "CHERRY_PICK_HEAD".into(),
3343 "REBASE_HEAD".into(),
3344 "REVERT_HEAD".into(),
3345 "APPLY_HEAD".into(),
3346 ])
3347 .await
3348 .log_err()
3349 .unwrap_or_default()
3350 .into_iter()
3351 .map(|opt| opt.map(SharedString::from))
3352 .collect::<Vec<_>>();
3353 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3354 let conflicted_paths = if merge_heads_changed {
3355 let current_conflicted_paths = TreeSet::from_ordered_entries(
3356 status
3357 .iter()
3358 .filter(|entry| entry.status.is_conflicted())
3359 .map(|entry| entry.repo_path.clone()),
3360 );
3361
3362 // It can happen that we run a scan while a lengthy merge is in progress
3363 // that will eventually result in conflicts, but before those conflicts
3364 // are reported by `git status`. Since for the moment we only care about
3365 // the merge heads state for the purposes of tracking conflicts, don't update
3366 // this state until we see some conflicts.
3367 if heads.iter().any(Option::is_some)
3368 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3369 && current_conflicted_paths.is_empty()
3370 {
3371 log::debug!("not updating merge heads because no conflicts found");
3372 return Ok((
3373 MergeDetails {
3374 message: message.map(SharedString::from),
3375 ..prev_snapshot.merge.clone()
3376 },
3377 false,
3378 ));
3379 }
3380
3381 current_conflicted_paths
3382 } else {
3383 prev_snapshot.merge.conflicted_paths.clone()
3384 };
3385 let details = MergeDetails {
3386 conflicted_paths,
3387 message: message.map(SharedString::from),
3388 heads,
3389 };
3390 Ok((details, merge_heads_changed))
3391 }
3392}
3393
3394impl Repository {
3395 pub fn snapshot(&self) -> RepositorySnapshot {
3396 self.snapshot.clone()
3397 }
3398
3399 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3400 self.pending_ops.iter().cloned()
3401 }
3402
3403 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3404 self.pending_ops.summary().clone()
3405 }
3406
3407 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3408 self.pending_ops
3409 .get(&PathKey(path.as_ref().clone()), ())
3410 .cloned()
3411 }
3412
3413 fn local(
3414 id: RepositoryId,
3415 work_directory_abs_path: Arc<Path>,
3416 dot_git_abs_path: Arc<Path>,
3417 project_environment: WeakEntity<ProjectEnvironment>,
3418 fs: Arc<dyn Fs>,
3419 git_store: WeakEntity<GitStore>,
3420 cx: &mut Context<Self>,
3421 ) -> Self {
3422 let snapshot =
3423 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
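// Backend initialization runs once in the background; the shared future lets every queued git job await the same result.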
3424 let state = cx
3425 .spawn(async move |_, cx| {
3426 LocalRepositoryState::new(
3427 work_directory_abs_path,
3428 dot_git_abs_path,
3429 project_environment,
3430 fs,
3431 cx,
3432 )
3433 .await
3434 .map_err(|err| err.to_string())
3435 })
3436 .shared();
3437 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3438 let state = cx
3439 .spawn(async move |_, _| {
3440 let state = state.await?;
3441 Ok(RepositoryState::Local(state))
3442 })
3443 .shared();
3444
3445 Repository {
3446 this: cx.weak_entity(),
3447 git_store,
3448 snapshot,
3449 pending_ops: Default::default(),
3450 repository_state: state,
3451 commit_message_buffer: None,
3452 askpass_delegates: Default::default(),
3453 paths_needing_status_update: Default::default(),
3454 latest_askpass_id: 0,
3455 job_sender,
3456 job_id: 0,
3457 active_jobs: Default::default(),
3458 }
3459 }
3460
3461 fn remote(
3462 id: RepositoryId,
3463 work_directory_abs_path: Arc<Path>,
3464 path_style: PathStyle,
3465 project_id: ProjectId,
3466 client: AnyProtoClient,
3467 git_store: WeakEntity<GitStore>,
3468 cx: &mut Context<Self>,
3469 ) -> Self {
3470 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3471 let repository_state = RemoteRepositoryState { project_id, client };
3472 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3473 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3474 Self {
3475 this: cx.weak_entity(),
3476 snapshot,
3477 commit_message_buffer: None,
3478 git_store,
3479 pending_ops: Default::default(),
3480 paths_needing_status_update: Default::default(),
3481 job_sender,
3482 repository_state,
3483 askpass_delegates: Default::default(),
3484 latest_askpass_id: 0,
3485 active_jobs: Default::default(),
3486 job_id: 0,
3487 }
3488 }
3489
3490 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3491 self.git_store.upgrade()
3492 }
3493
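/// Reloads the index and HEAD texts for every open buffer with a diff in this repository,
/// forwards any changes to downstream clients, and triggers a diff recalculation.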
3494 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3495 let this = cx.weak_entity();
3496 let git_store = self.git_store.clone();
3497 let _ = self.send_keyed_job(
3498 Some(GitJobKey::ReloadBufferDiffBases),
3499 None,
3500 |state, mut cx| async move {
3501 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3502 log::error!("tried to recompute diffs for a non-local repository");
3503 return Ok(());
3504 };
3505
3506 let Some(this) = this.upgrade() else {
3507 return Ok(());
3508 };
3509
3510 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3511 git_store.update(cx, |git_store, cx| {
3512 git_store
3513 .diffs
3514 .iter()
3515 .filter_map(|(buffer_id, diff_state)| {
3516 let buffer_store = git_store.buffer_store.read(cx);
3517 let buffer = buffer_store.get(*buffer_id)?;
3518 let file = File::from_dyn(buffer.read(cx).file())?;
3519 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3520 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3521 log::debug!(
3522 "start reload diff bases for repo path {}",
3523 repo_path.as_unix_str()
3524 );
3525 diff_state.update(cx, |diff_state, _| {
3526 let has_unstaged_diff = diff_state
3527 .unstaged_diff
3528 .as_ref()
3529 .is_some_and(|diff| diff.is_upgradable());
3530 let has_uncommitted_diff = diff_state
3531 .uncommitted_diff
3532 .as_ref()
3533 .is_some_and(|set| set.is_upgradable());
3534
3535 Some((
3536 buffer,
3537 repo_path,
3538 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3539 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3540 ))
3541 })
3542 })
3543 .collect::<Vec<_>>()
3544 })
3545 })??;
3546
3547 let buffer_diff_base_changes = cx
3548 .background_spawn(async move {
3549 let mut changes = Vec::new();
3550 for (buffer, repo_path, current_index_text, current_head_text) in
3551 &repo_diff_state_updates
3552 {
3553 let index_text = if current_index_text.is_some() {
3554 backend.load_index_text(repo_path.clone()).await
3555 } else {
3556 None
3557 };
3558 let head_text = if current_head_text.is_some() {
3559 backend.load_committed_text(repo_path.clone()).await
3560 } else {
3561 None
3562 };
3563
3564 let change =
3565 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3566 (Some(current_index), Some(current_head)) => {
3567 let index_changed =
3568 index_text.as_ref() != current_index.as_deref();
3569 let head_changed =
3570 head_text.as_ref() != current_head.as_deref();
3571 if index_changed && head_changed {
3572 if index_text == head_text {
3573 Some(DiffBasesChange::SetBoth(head_text))
3574 } else {
3575 Some(DiffBasesChange::SetEach {
3576 index: index_text,
3577 head: head_text,
3578 })
3579 }
3580 } else if index_changed {
3581 Some(DiffBasesChange::SetIndex(index_text))
3582 } else if head_changed {
3583 Some(DiffBasesChange::SetHead(head_text))
3584 } else {
3585 None
3586 }
3587 }
3588 (Some(current_index), None) => {
3589 let index_changed =
3590 index_text.as_ref() != current_index.as_deref();
3591 index_changed
3592 .then_some(DiffBasesChange::SetIndex(index_text))
3593 }
3594 (None, Some(current_head)) => {
3595 let head_changed =
3596 head_text.as_ref() != current_head.as_deref();
3597 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3598 }
3599 (None, None) => None,
3600 };
3601
3602 changes.push((buffer.clone(), change))
3603 }
3604 changes
3605 })
3606 .await;
3607
3608 git_store.update(&mut cx, |git_store, cx| {
3609 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3610 let buffer_snapshot = buffer.read(cx).text_snapshot();
3611 let buffer_id = buffer_snapshot.remote_id();
3612 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3613 continue;
3614 };
3615
3616 let downstream_client = git_store.downstream_client();
3617 diff_state.update(cx, |diff_state, cx| {
3618 use proto::update_diff_bases::Mode;
3619
3620 if let Some((diff_bases_change, (client, project_id))) =
3621 diff_bases_change.clone().zip(downstream_client)
3622 {
3623 let (staged_text, committed_text, mode) = match diff_bases_change {
3624 DiffBasesChange::SetIndex(index) => {
3625 (index, None, Mode::IndexOnly)
3626 }
3627 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3628 DiffBasesChange::SetEach { index, head } => {
3629 (index, head, Mode::IndexAndHead)
3630 }
3631 DiffBasesChange::SetBoth(text) => {
3632 (None, text, Mode::IndexMatchesHead)
3633 }
3634 };
3635 client
3636 .send(proto::UpdateDiffBases {
3637 project_id: project_id.to_proto(),
3638 buffer_id: buffer_id.to_proto(),
3639 staged_text,
3640 committed_text,
3641 mode: mode as i32,
3642 })
3643 .log_err();
3644 }
3645
3646 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3647 });
3648 }
3649 })
3650 },
3651 );
3652 }
3653
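/// Enqueues a job on this repository's background git worker. If a status message is
/// provided, it is recorded in `active_jobs` and shown while the job runs.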
3654 pub fn send_job<F, Fut, R>(
3655 &mut self,
3656 status: Option<SharedString>,
3657 job: F,
3658 ) -> oneshot::Receiver<R>
3659 where
3660 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3661 Fut: Future<Output = R> + 'static,
3662 R: Send + 'static,
3663 {
3664 self.send_keyed_job(None, status, job)
3665 }
3666
3667 fn send_keyed_job<F, Fut, R>(
3668 &mut self,
3669 key: Option<GitJobKey>,
3670 status: Option<SharedString>,
3671 job: F,
3672 ) -> oneshot::Receiver<R>
3673 where
3674 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3675 Fut: Future<Output = R> + 'static,
3676 R: Send + 'static,
3677 {
3678 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3679 let job_id = post_inc(&mut self.job_id);
3680 let this = self.this.clone();
3681 self.job_sender
3682 .unbounded_send(GitJob {
3683 key,
3684 job: Box::new(move |state, cx: &mut AsyncApp| {
3685 let job = job(state, cx.clone());
3686 cx.spawn(async move |cx| {
3687 if let Some(s) = status.clone() {
3688 this.update(cx, |this, cx| {
3689 this.active_jobs.insert(
3690 job_id,
3691 JobInfo {
3692 start: Instant::now(),
3693 message: s.clone(),
3694 },
3695 );
3696
3697 cx.notify();
3698 })
3699 .ok();
3700 }
3701 let result = job.await;
3702
3703 this.update(cx, |this, cx| {
3704 this.active_jobs.remove(&job_id);
3705 cx.notify();
3706 })
3707 .ok();
3708
3709 result_tx.send(result).ok();
3710 })
3711 }),
3712 })
3713 .ok();
3714 result_rx
3715 }
3716
3717 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3718 let Some(git_store) = self.git_store.upgrade() else {
3719 return;
3720 };
3721 let entity = cx.entity();
3722 git_store.update(cx, |git_store, cx| {
3723 let Some((&id, _)) = git_store
3724 .repositories
3725 .iter()
3726 .find(|(_, handle)| *handle == &entity)
3727 else {
3728 return;
3729 };
3730 git_store.active_repo_id = Some(id);
3731 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3732 });
3733 }
3734
3735 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3736 self.snapshot.status()
3737 }
3738
3739 pub fn cached_stash(&self) -> GitStash {
3740 self.snapshot.stash_entries.clone()
3741 }
3742
3743 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3744 let git_store = self.git_store.upgrade()?;
3745 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3746 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3747 let abs_path = SanitizedPath::new(&abs_path);
3748 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3749 Some(ProjectPath {
3750 worktree_id: worktree.read(cx).id(),
3751 path: relative_path,
3752 })
3753 }
3754
3755 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3756 let git_store = self.git_store.upgrade()?;
3757 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3758 let abs_path = worktree_store.absolutize(path, cx)?;
3759 self.snapshot.abs_path_to_repo_path(&abs_path)
3760 }
3761
3762 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3763 other
3764 .read(cx)
3765 .snapshot
3766 .work_directory_abs_path
3767 .starts_with(&self.snapshot.work_directory_abs_path)
3768 }
3769
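/// Returns the buffer used for editing this repository's commit message, creating it on first use (locally, or via the remote peer for shared projects).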
3770 pub fn open_commit_buffer(
3771 &mut self,
3772 languages: Option<Arc<LanguageRegistry>>,
3773 buffer_store: Entity<BufferStore>,
3774 cx: &mut Context<Self>,
3775 ) -> Task<Result<Entity<Buffer>>> {
3776 let id = self.id;
3777 if let Some(buffer) = self.commit_message_buffer.clone() {
3778 return Task::ready(Ok(buffer));
3779 }
3780 let this = cx.weak_entity();
3781
3782 let rx = self.send_job(None, move |state, mut cx| async move {
3783 let Some(this) = this.upgrade() else {
3784 bail!("repository was dropped");
3785 };
3786 match state {
3787 RepositoryState::Local(..) => {
3788 this.update(&mut cx, |_, cx| {
3789 Self::open_local_commit_buffer(languages, buffer_store, cx)
3790 })?
3791 .await
3792 }
3793 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3794 let request = client.request(proto::OpenCommitMessageBuffer {
3795 project_id: project_id.0,
3796 repository_id: id.to_proto(),
3797 });
3798 let response = request.await.context("requesting to open commit buffer")?;
3799 let buffer_id = BufferId::new(response.buffer_id)?;
3800 let buffer = buffer_store
3801 .update(&mut cx, |buffer_store, cx| {
3802 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3803 })?
3804 .await?;
3805 if let Some(language_registry) = languages {
3806 let git_commit_language =
3807 language_registry.language_for_name("Git Commit").await?;
3808 buffer.update(&mut cx, |buffer, cx| {
3809 buffer.set_language(Some(git_commit_language), cx);
3810 })?;
3811 }
3812 this.update(&mut cx, |this, _| {
3813 this.commit_message_buffer = Some(buffer.clone());
3814 })?;
3815 Ok(buffer)
3816 }
3817 }
3818 });
3819
3820 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3821 }
3822
3823 fn open_local_commit_buffer(
3824 language_registry: Option<Arc<LanguageRegistry>>,
3825 buffer_store: Entity<BufferStore>,
3826 cx: &mut Context<Self>,
3827 ) -> Task<Result<Entity<Buffer>>> {
3828 cx.spawn(async move |repository, cx| {
3829 let buffer = buffer_store
3830 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3831 .await?;
3832
3833 if let Some(language_registry) = language_registry {
3834 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3835 buffer.update(cx, |buffer, cx| {
3836 buffer.set_language(Some(git_commit_language), cx);
3837 })?;
3838 }
3839
3840 repository.update(cx, |repository, _| {
3841 repository.commit_message_buffer = Some(buffer.clone());
3842 })?;
3843 Ok(buffer)
3844 })
3845 }
3846
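/// Restores the given paths to their contents at `commit`, tracking them as pending revert operations.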
3847 pub fn checkout_files(
3848 &mut self,
3849 commit: &str,
3850 paths: Vec<RepoPath>,
3851 cx: &mut Context<Self>,
3852 ) -> Task<Result<()>> {
3853 let commit = commit.to_string();
3854 let id = self.id;
3855
3856 self.spawn_job_with_tracking(
3857 paths.clone(),
3858 pending_op::GitStatus::Reverted,
3859 cx,
3860 async move |this, cx| {
3861 this.update(cx, |this, _cx| {
3862 this.send_job(
3863 Some(format!("git checkout {}", commit).into()),
3864 move |git_repo, _| async move {
3865 match git_repo {
3866 RepositoryState::Local(LocalRepositoryState {
3867 backend,
3868 environment,
3869 ..
3870 }) => {
3871 backend
3872 .checkout_files(commit, paths, environment.clone())
3873 .await
3874 }
3875 RepositoryState::Remote(RemoteRepositoryState {
3876 project_id,
3877 client,
3878 }) => {
3879 client
3880 .request(proto::GitCheckoutFiles {
3881 project_id: project_id.0,
3882 repository_id: id.to_proto(),
3883 commit,
3884 paths: paths
3885 .into_iter()
3886 .map(|p| p.to_proto())
3887 .collect(),
3888 })
3889 .await?;
3890
3891 Ok(())
3892 }
3893 }
3894 },
3895 )
3896 })?
3897 .await?
3898 },
3899 )
3900 }
3901
3902 pub fn reset(
3903 &mut self,
3904 commit: String,
3905 reset_mode: ResetMode,
3906 _cx: &mut App,
3907 ) -> oneshot::Receiver<Result<()>> {
3908 let id = self.id;
3909
3910 self.send_job(None, move |git_repo, _| async move {
3911 match git_repo {
3912 RepositoryState::Local(LocalRepositoryState {
3913 backend,
3914 environment,
3915 ..
3916 }) => backend.reset(commit, reset_mode, environment).await,
3917 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3918 client
3919 .request(proto::GitReset {
3920 project_id: project_id.0,
3921 repository_id: id.to_proto(),
3922 commit,
3923 mode: match reset_mode {
3924 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3925 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3926 },
3927 })
3928 .await?;
3929
3930 Ok(())
3931 }
3932 }
3933 })
3934 }
3935
3936 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3937 let id = self.id;
3938 self.send_job(None, move |git_repo, _cx| async move {
3939 match git_repo {
3940 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3941 backend.show(commit).await
3942 }
3943 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3944 let resp = client
3945 .request(proto::GitShow {
3946 project_id: project_id.0,
3947 repository_id: id.to_proto(),
3948 commit,
3949 })
3950 .await?;
3951
3952 Ok(CommitDetails {
3953 sha: resp.sha.into(),
3954 message: resp.message.into(),
3955 commit_timestamp: resp.commit_timestamp,
3956 author_email: resp.author_email.into(),
3957 author_name: resp.author_name.into(),
3958 })
3959 }
3960 }
3961 })
3962 }
3963
3964 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3965 let id = self.id;
3966 self.send_job(None, move |git_repo, cx| async move {
3967 match git_repo {
3968 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3969 backend.load_commit(commit, cx).await
3970 }
3971 RepositoryState::Remote(RemoteRepositoryState {
3972 client, project_id, ..
3973 }) => {
3974 let response = client
3975 .request(proto::LoadCommitDiff {
3976 project_id: project_id.0,
3977 repository_id: id.to_proto(),
3978 commit,
3979 })
3980 .await?;
3981 Ok(CommitDiff {
3982 files: response
3983 .files
3984 .into_iter()
3985 .map(|file| {
3986 Ok(CommitFile {
3987 path: RepoPath::from_proto(&file.path)?,
3988 old_text: file.old_text,
3989 new_text: file.new_text,
3990 })
3991 })
3992 .collect::<Result<Vec<_>>>()?,
3993 })
3994 }
3995 }
3996 })
3997 }
3998
3999 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4000 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4001 }
4002
4003 fn save_buffers<'a>(
4004 &self,
4005 entries: impl IntoIterator<Item = &'a RepoPath>,
4006 cx: &mut Context<Self>,
4007 ) -> Vec<Task<anyhow::Result<()>>> {
4008 let mut save_futures = Vec::new();
4009 if let Some(buffer_store) = self.buffer_store(cx) {
4010 buffer_store.update(cx, |buffer_store, cx| {
4011 for path in entries {
4012 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4013 continue;
4014 };
4015 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4016 && buffer
4017 .read(cx)
4018 .file()
4019 .is_some_and(|file| file.disk_state().exists())
4020 && buffer.read(cx).has_unsaved_edits()
4021 {
4022 save_futures.push(buffer_store.save_buffer(buffer, cx));
4023 }
4024 }
4025 })
4026 }
4027 save_futures
4028 }
4029
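/// Stages the given paths, saving any unsaved buffers for them first.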
4030 pub fn stage_entries(
4031 &mut self,
4032 entries: Vec<RepoPath>,
4033 cx: &mut Context<Self>,
4034 ) -> Task<anyhow::Result<()>> {
4035 if entries.is_empty() {
4036 return Task::ready(Ok(()));
4037 }
4038 let id = self.id;
4039 let save_tasks = self.save_buffers(&entries, cx);
4040 let paths = entries
4041 .iter()
4042 .map(|p| p.as_unix_str())
4043 .collect::<Vec<_>>()
4044 .join(" ");
4045 let status = format!("git add {paths}");
4046 let job_key = GitJobKey::WriteIndex(entries.clone());
4047
4048 self.spawn_job_with_tracking(
4049 entries.clone(),
4050 pending_op::GitStatus::Staged,
4051 cx,
4052 async move |this, cx| {
4053 for save_task in save_tasks {
4054 save_task.await?;
4055 }
4056
4057 this.update(cx, |this, _| {
4058 this.send_keyed_job(
4059 Some(job_key),
4060 Some(status.into()),
4061 move |git_repo, _cx| async move {
4062 match git_repo {
4063 RepositoryState::Local(LocalRepositoryState {
4064 backend,
4065 environment,
4066 ..
4067 }) => backend.stage_paths(entries, environment.clone()).await,
4068 RepositoryState::Remote(RemoteRepositoryState {
4069 project_id,
4070 client,
4071 }) => {
4072 client
4073 .request(proto::Stage {
4074 project_id: project_id.0,
4075 repository_id: id.to_proto(),
4076 paths: entries
4077 .into_iter()
4078 .map(|repo_path| repo_path.to_proto())
4079 .collect(),
4080 })
4081 .await
4082 .context("sending stage request")?;
4083
4084 Ok(())
4085 }
4086 }
4087 },
4088 )
4089 })?
4090 .await?
4091 },
4092 )
4093 }
4094
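/// Unstages the given paths, saving any unsaved buffers for them first.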
4095 pub fn unstage_entries(
4096 &mut self,
4097 entries: Vec<RepoPath>,
4098 cx: &mut Context<Self>,
4099 ) -> Task<anyhow::Result<()>> {
4100 if entries.is_empty() {
4101 return Task::ready(Ok(()));
4102 }
4103 let id = self.id;
4104 let save_tasks = self.save_buffers(&entries, cx);
4105 let paths = entries
4106 .iter()
4107 .map(|p| p.as_unix_str())
4108 .collect::<Vec<_>>()
4109 .join(" ");
4110 let status = format!("git reset {paths}");
4111 let job_key = GitJobKey::WriteIndex(entries.clone());
4112
4113 self.spawn_job_with_tracking(
4114 entries.clone(),
4115 pending_op::GitStatus::Unstaged,
4116 cx,
4117 async move |this, cx| {
4118 for save_task in save_tasks {
4119 save_task.await?;
4120 }
4121
4122 this.update(cx, |this, _| {
4123 this.send_keyed_job(
4124 Some(job_key),
4125 Some(status.into()),
4126 move |git_repo, _cx| async move {
4127 match git_repo {
4128 RepositoryState::Local(LocalRepositoryState {
4129 backend,
4130 environment,
4131 ..
4132 }) => backend.unstage_paths(entries, environment).await,
4133 RepositoryState::Remote(RemoteRepositoryState {
4134 project_id,
4135 client,
4136 }) => {
4137 client
4138 .request(proto::Unstage {
4139 project_id: project_id.0,
4140 repository_id: id.to_proto(),
4141 paths: entries
4142 .into_iter()
4143 .map(|repo_path| repo_path.to_proto())
4144 .collect(),
4145 })
4146 .await
4147 .context("sending unstage request")?;
4148
4149 Ok(())
4150 }
4151 }
4152 },
4153 )
4154 })?
4155 .await?
4156 },
4157 )
4158 }
4159
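/// Stages every entry in the cached status that is not already staged or in the process of being staged.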
4160 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4161 let to_stage = self
4162 .cached_status()
4163 .filter_map(|entry| {
4164 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4165 if ops.staging() || ops.staged() {
4166 None
4167 } else {
4168 Some(entry.repo_path)
4169 }
4170 } else if entry.status.staging().is_fully_staged() {
4171 None
4172 } else {
4173 Some(entry.repo_path)
4174 }
4175 })
4176 .collect();
4177 self.stage_entries(to_stage, cx)
4178 }
4179
4180 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4181 let to_unstage = self
4182 .cached_status()
4183 .filter_map(|entry| {
4184 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4185 if !ops.staging() && !ops.staged() {
4186 None
4187 } else {
4188 Some(entry.repo_path)
4189 }
4190 } else if entry.status.staging().is_fully_unstaged() {
4191 None
4192 } else {
4193 Some(entry.repo_path)
4194 }
4195 })
4196 .collect();
4197 self.unstage_entries(to_unstage, cx)
4198 }
4199
4200 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4201 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4202
4203 self.stash_entries(to_stash, cx)
4204 }
4205
4206 pub fn stash_entries(
4207 &mut self,
4208 entries: Vec<RepoPath>,
4209 cx: &mut Context<Self>,
4210 ) -> Task<anyhow::Result<()>> {
4211 let id = self.id;
4212
4213 cx.spawn(async move |this, cx| {
4214 this.update(cx, |this, _| {
4215 this.send_job(None, move |git_repo, _cx| async move {
4216 match git_repo {
4217 RepositoryState::Local(LocalRepositoryState {
4218 backend,
4219 environment,
4220 ..
4221 }) => backend.stash_paths(entries, environment).await,
4222 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4223 client
4224 .request(proto::Stash {
4225 project_id: project_id.0,
4226 repository_id: id.to_proto(),
4227 paths: entries
4228 .into_iter()
4229 .map(|repo_path| repo_path.to_proto())
4230 .collect(),
4231 })
4232 .await
4233 .context("sending stash request")?;
4234 Ok(())
4235 }
4236 }
4237 })
4238 })?
4239 .await??;
4240 Ok(())
4241 })
4242 }
4243
4244 pub fn stash_pop(
4245 &mut self,
4246 index: Option<usize>,
4247 cx: &mut Context<Self>,
4248 ) -> Task<anyhow::Result<()>> {
4249 let id = self.id;
4250 cx.spawn(async move |this, cx| {
4251 this.update(cx, |this, _| {
4252 this.send_job(None, move |git_repo, _cx| async move {
4253 match git_repo {
4254 RepositoryState::Local(LocalRepositoryState {
4255 backend,
4256 environment,
4257 ..
4258 }) => backend.stash_pop(index, environment).await,
4259 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4260 client
4261 .request(proto::StashPop {
4262 project_id: project_id.0,
4263 repository_id: id.to_proto(),
4264 stash_index: index.map(|i| i as u64),
4265 })
4266 .await
4267 .context("sending stash pop request")?;
4268 Ok(())
4269 }
4270 }
4271 })
4272 })?
4273 .await??;
4274 Ok(())
4275 })
4276 }
4277
4278 pub fn stash_apply(
4279 &mut self,
4280 index: Option<usize>,
4281 cx: &mut Context<Self>,
4282 ) -> Task<anyhow::Result<()>> {
4283 let id = self.id;
4284 cx.spawn(async move |this, cx| {
4285 this.update(cx, |this, _| {
4286 this.send_job(None, move |git_repo, _cx| async move {
4287 match git_repo {
4288 RepositoryState::Local(LocalRepositoryState {
4289 backend,
4290 environment,
4291 ..
4292 }) => backend.stash_apply(index, environment).await,
4293 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4294 client
4295 .request(proto::StashApply {
4296 project_id: project_id.0,
4297 repository_id: id.to_proto(),
4298 stash_index: index.map(|i| i as u64),
4299 })
4300 .await
4301 .context("sending stash apply request")?;
4302 Ok(())
4303 }
4304 }
4305 })
4306 })?
4307 .await??;
4308 Ok(())
4309 })
4310 }
4311
4312 pub fn stash_drop(
4313 &mut self,
4314 index: Option<usize>,
4315 cx: &mut Context<Self>,
4316 ) -> oneshot::Receiver<anyhow::Result<()>> {
4317 let id = self.id;
4318 let updates_tx = self
4319 .git_store()
4320 .and_then(|git_store| match &git_store.read(cx).state {
4321 GitStoreState::Local { downstream, .. } => downstream
4322 .as_ref()
4323 .map(|downstream| downstream.updates_tx.clone()),
4324 _ => None,
4325 });
4326 let this = cx.weak_entity();
4327 self.send_job(None, move |git_repo, mut cx| async move {
4328 match git_repo {
4329 RepositoryState::Local(LocalRepositoryState {
4330 backend,
4331 environment,
4332 ..
4333 }) => {
4334 // TODO: it would be nice not to have to refresh the stash entries here manually.
4335 let result = backend.stash_drop(index, environment).await;
4336 if result.is_ok()
4337 && let Ok(stash_entries) = backend.stash_entries().await
4338 {
4339 let snapshot = this.update(&mut cx, |this, cx| {
4340 this.snapshot.stash_entries = stash_entries;
4341 cx.emit(RepositoryEvent::StashEntriesChanged);
4342 this.snapshot.clone()
4343 })?;
4344 if let Some(updates_tx) = updates_tx {
4345 updates_tx
4346 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4347 .ok();
4348 }
4349 }
4350
4351 result
4352 }
4353 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4354 client
4355 .request(proto::StashDrop {
4356 project_id: project_id.0,
4357 repository_id: id.to_proto(),
4358 stash_index: index.map(|i| i as u64),
4359 })
4360 .await
4361 .context("sending stash drop request")?;
4362 Ok(())
4363 }
4364 }
4365 })
4366 }
4367
4368 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4369 let id = self.id;
4370 self.send_job(
4371 Some(format!("git hook {}", hook.as_str()).into()),
4372 move |git_repo, _cx| async move {
4373 match git_repo {
4374 RepositoryState::Local(LocalRepositoryState {
4375 backend,
4376 environment,
4377 ..
4378 }) => backend.run_hook(hook, environment.clone()).await,
4379 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4380 client
4381 .request(proto::RunGitHook {
4382 project_id: project_id.0,
4383 repository_id: id.to_proto(),
4384 hook: hook.to_proto(),
4385 })
4386 .await?;
4387
4388 Ok(())
4389 }
4390 }
4391 },
4392 )
4393 }
4394
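/// Runs the pre-commit hook, then commits the staged changes with the given message, optional author, and options.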
4395 pub fn commit(
4396 &mut self,
4397 message: SharedString,
4398 name_and_email: Option<(SharedString, SharedString)>,
4399 options: CommitOptions,
4400 askpass: AskPassDelegate,
4401 cx: &mut App,
4402 ) -> oneshot::Receiver<Result<()>> {
4403 let id = self.id;
4404 let askpass_delegates = self.askpass_delegates.clone();
4405 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4406
4407 let rx = self.run_hook(RunHook::PreCommit, cx);
4408
4409 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4410 rx.await??;
4411
4412 match git_repo {
4413 RepositoryState::Local(LocalRepositoryState {
4414 backend,
4415 environment,
4416 ..
4417 }) => {
4418 backend
4419 .commit(message, name_and_email, options, askpass, environment)
4420 .await
4421 }
4422 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4423 askpass_delegates.lock().insert(askpass_id, askpass);
4424 let _defer = util::defer(|| {
4425 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4426 debug_assert!(askpass_delegate.is_some());
4427 });
4428 let (name, email) = name_and_email.unzip();
4429 client
4430 .request(proto::Commit {
4431 project_id: project_id.0,
4432 repository_id: id.to_proto(),
4433 message: String::from(message),
4434 name: name.map(String::from),
4435 email: email.map(String::from),
4436 options: Some(proto::commit::CommitOptions {
4437 amend: options.amend,
4438 signoff: options.signoff,
4439 }),
4440 askpass_id,
4441 })
4442 .await
4443 .context("sending commit request")?;
4444
4445 Ok(())
4446 }
4447 }
4448 })
4449 }
4450
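/// Runs `git fetch` with the given options, using the askpass delegate to answer any credential prompts.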
4451 pub fn fetch(
4452 &mut self,
4453 fetch_options: FetchOptions,
4454 askpass: AskPassDelegate,
4455 _cx: &mut App,
4456 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4457 let askpass_delegates = self.askpass_delegates.clone();
4458 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4459 let id = self.id;
4460
4461 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4462 match git_repo {
4463 RepositoryState::Local(LocalRepositoryState {
4464 backend,
4465 environment,
4466 ..
4467 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4468 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4469 askpass_delegates.lock().insert(askpass_id, askpass);
4470 let _defer = util::defer(|| {
4471 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4472 debug_assert!(askpass_delegate.is_some());
4473 });
4474
4475 let response = client
4476 .request(proto::Fetch {
4477 project_id: project_id.0,
4478 repository_id: id.to_proto(),
4479 askpass_id,
4480 remote: fetch_options.to_proto(),
4481 })
4482 .await
4483 .context("sending fetch request")?;
4484
4485 Ok(RemoteCommandOutput {
4486 stdout: response.stdout,
4487 stderr: response.stderr,
4488 })
4489 }
4490 }
4491 })
4492 }
4493
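/// Pushes `branch` to `remote`, then reloads the head branch so upstream tracking info in the snapshot stays current.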
4494 pub fn push(
4495 &mut self,
4496 branch: SharedString,
4497 remote: SharedString,
4498 options: Option<PushOptions>,
4499 askpass: AskPassDelegate,
4500 cx: &mut Context<Self>,
4501 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4502 let askpass_delegates = self.askpass_delegates.clone();
4503 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4504 let id = self.id;
4505
4506 let args = options
4507 .map(|option| match option {
4508 PushOptions::SetUpstream => " --set-upstream",
4509 PushOptions::Force => " --force-with-lease",
4510 })
4511 .unwrap_or("");
4512
4513 let updates_tx = self
4514 .git_store()
4515 .and_then(|git_store| match &git_store.read(cx).state {
4516 GitStoreState::Local { downstream, .. } => downstream
4517 .as_ref()
4518 .map(|downstream| downstream.updates_tx.clone()),
4519 _ => None,
4520 });
4521
4522 let this = cx.weak_entity();
4523 self.send_job(
4524 Some(format!("git push{} {} {}", args, remote, branch).into()),
4525 move |git_repo, mut cx| async move {
4526 match git_repo {
4527 RepositoryState::Local(LocalRepositoryState {
4528 backend,
4529 environment,
4530 ..
4531 }) => {
4532 let result = backend
4533 .push(
4534 branch.to_string(),
4535 remote.to_string(),
4536 options,
4537 askpass,
4538 environment.clone(),
4539 cx.clone(),
4540 )
4541 .await;
4542 // TODO: it would be nice not to have to refresh the head branch here manually.
4543 if result.is_ok() {
4544 let branches = backend.branches().await?;
4545 let branch = branches.into_iter().find(|branch| branch.is_head);
4546 log::info!("head branch after scan is {branch:?}");
4547 let snapshot = this.update(&mut cx, |this, cx| {
4548 this.snapshot.branch = branch;
4549 cx.emit(RepositoryEvent::BranchChanged);
4550 this.snapshot.clone()
4551 })?;
4552 if let Some(updates_tx) = updates_tx {
4553 updates_tx
4554 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4555 .ok();
4556 }
4557 }
4558 result
4559 }
4560 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4561 askpass_delegates.lock().insert(askpass_id, askpass);
4562 let _defer = util::defer(|| {
4563 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4564 debug_assert!(askpass_delegate.is_some());
4565 });
4566 let response = client
4567 .request(proto::Push {
4568 project_id: project_id.0,
4569 repository_id: id.to_proto(),
4570 askpass_id,
4571 branch_name: branch.to_string(),
4572 remote_name: remote.to_string(),
4573 options: options.map(|options| match options {
4574 PushOptions::Force => proto::push::PushOptions::Force,
4575 PushOptions::SetUpstream => {
4576 proto::push::PushOptions::SetUpstream
4577 }
4578 }
4579 as i32),
4580 })
4581 .await
4582 .context("sending push request")?;
4583
4584 Ok(RemoteCommandOutput {
4585 stdout: response.stdout,
4586 stderr: response.stderr,
4587 })
4588 }
4589 }
4590 },
4591 )
4592 }
4593
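/// Pulls from `remote` (optionally a specific branch), with or without `--rebase`, using the askpass delegate for credential prompts.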
4594 pub fn pull(
4595 &mut self,
4596 branch: Option<SharedString>,
4597 remote: SharedString,
4598 rebase: bool,
4599 askpass: AskPassDelegate,
4600 _cx: &mut App,
4601 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4602 let askpass_delegates = self.askpass_delegates.clone();
4603 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4604 let id = self.id;
4605
4606 let mut status = "git pull".to_string();
4607 if rebase {
4608 status.push_str(" --rebase");
4609 }
4610 status.push_str(&format!(" {}", remote));
4611 if let Some(b) = &branch {
4612 status.push_str(&format!(" {}", b));
4613 }
4614
4615 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4616 match git_repo {
4617 RepositoryState::Local(LocalRepositoryState {
4618 backend,
4619 environment,
4620 ..
4621 }) => {
4622 backend
4623 .pull(
4624 branch.as_ref().map(|b| b.to_string()),
4625 remote.to_string(),
4626 rebase,
4627 askpass,
4628 environment.clone(),
4629 cx,
4630 )
4631 .await
4632 }
4633 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4634 askpass_delegates.lock().insert(askpass_id, askpass);
4635 let _defer = util::defer(|| {
4636 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4637 debug_assert!(askpass_delegate.is_some());
4638 });
4639 let response = client
4640 .request(proto::Pull {
4641 project_id: project_id.0,
4642 repository_id: id.to_proto(),
4643 askpass_id,
4644 rebase,
4645 branch_name: branch.as_ref().map(|b| b.to_string()),
4646 remote_name: remote.to_string(),
4647 })
4648 .await
4649 .context("sending pull request")?;
4650
4651 Ok(RemoteCommandOutput {
4652 stdout: response.stdout,
4653 stderr: response.stderr,
4654 })
4655 }
4656 }
4657 })
4658 }
4659
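/// Queues a write of `content` to the index entry for `path`; a queued write is skipped if a newer write for the same path is already waiting.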
4660 fn spawn_set_index_text_job(
4661 &mut self,
4662 path: RepoPath,
4663 content: Option<String>,
4664 hunk_staging_operation_count: Option<usize>,
4665 cx: &mut Context<Self>,
4666 ) -> oneshot::Receiver<anyhow::Result<()>> {
4667 let id = self.id;
4668 let this = cx.weak_entity();
4669 let git_store = self.git_store.clone();
4670 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4671 self.send_keyed_job(
4672 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4673 None,
4674 move |git_repo, mut cx| async move {
4675 log::debug!(
4676 "start updating index text for buffer {}",
4677 path.as_unix_str()
4678 );
4679
4680 match git_repo {
4681 RepositoryState::Local(LocalRepositoryState {
4682 fs,
4683 backend,
4684 environment,
4685 ..
4686 }) => {
4687 let executable = match fs.metadata(&abs_path).await {
4688 Ok(Some(meta)) => meta.is_executable,
4689 Ok(None) => false,
4690 Err(_err) => false,
4691 };
4692 backend
4693 .set_index_text(path.clone(), content, environment.clone(), executable)
4694 .await?;
4695 }
4696 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4697 client
4698 .request(proto::SetIndexText {
4699 project_id: project_id.0,
4700 repository_id: id.to_proto(),
4701 path: path.to_proto(),
4702 text: content,
4703 })
4704 .await?;
4705 }
4706 }
4707 log::debug!(
4708 "finish updating index text for buffer {}",
4709 path.as_unix_str()
4710 );
4711
4712 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4713 let project_path = this
4714 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4715 .ok()
4716 .flatten();
4717 git_store.update(&mut cx, |git_store, cx| {
4718 let buffer_id = git_store
4719 .buffer_store
4720 .read(cx)
4721 .get_by_path(&project_path?)?
4722 .read(cx)
4723 .remote_id();
4724 let diff_state = git_store.diffs.get(&buffer_id)?;
4725 diff_state.update(cx, |diff_state, _| {
4726 diff_state.hunk_staging_operation_count_as_of_write =
4727 hunk_staging_operation_count;
4728 });
4729 Some(())
4730 })?;
4731 }
4732 Ok(())
4733 },
4734 )
4735 }
4736
4737 pub fn get_remotes(
4738 &mut self,
4739 branch_name: Option<String>,
4740 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4741 let id = self.id;
4742 self.send_job(None, move |repo, _cx| async move {
4743 match repo {
4744 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4745 backend.get_remotes(branch_name).await
4746 }
4747 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4748 let response = client
4749 .request(proto::GetRemotes {
4750 project_id: project_id.0,
4751 repository_id: id.to_proto(),
4752 branch_name,
4753 })
4754 .await?;
4755
4756 let remotes = response
4757 .remotes
4758 .into_iter()
4759 .map(|remote| git::repository::Remote {
4760 name: remote.name.into(),
4761 })
4762 .collect();
4763
4764 Ok(remotes)
4765 }
4766 }
4767 })
4768 }
4769
4770 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4771 let id = self.id;
4772 self.send_job(None, move |repo, _| async move {
4773 match repo {
4774 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4775 backend.branches().await
4776 }
4777 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4778 let response = client
4779 .request(proto::GitGetBranches {
4780 project_id: project_id.0,
4781 repository_id: id.to_proto(),
4782 })
4783 .await?;
4784
4785 let branches = response
4786 .branches
4787 .into_iter()
4788 .map(|branch| proto_to_branch(&branch))
4789 .collect();
4790
4791 Ok(branches)
4792 }
4793 }
4794 })
4795 }
4796
4797 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4798 let id = self.id;
4799 self.send_job(None, move |repo, _| async move {
4800 match repo {
4801 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4802 backend.worktrees().await
4803 }
4804 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4805 let response = client
4806 .request(proto::GitGetWorktrees {
4807 project_id: project_id.0,
4808 repository_id: id.to_proto(),
4809 })
4810 .await?;
4811
4812 let worktrees = response
4813 .worktrees
4814 .into_iter()
4815 .map(|worktree| proto_to_worktree(&worktree))
4816 .collect();
4817
4818 Ok(worktrees)
4819 }
4820 }
4821 })
4822 }
4823
4824 pub fn create_worktree(
4825 &mut self,
4826 name: String,
4827 path: PathBuf,
4828 commit: Option<String>,
4829 ) -> oneshot::Receiver<Result<()>> {
4830 let id = self.id;
4831 self.send_job(
4832 Some("git worktree add".into()),
4833 move |repo, _cx| async move {
4834 match repo {
4835 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4836 backend.create_worktree(name, path, commit).await
4837 }
4838 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4839 client
4840 .request(proto::GitCreateWorktree {
4841 project_id: project_id.0,
4842 repository_id: id.to_proto(),
4843 name,
4844 directory: path.to_string_lossy().to_string(),
4845 commit,
4846 })
4847 .await?;
4848
4849 Ok(())
4850 }
4851 }
4852 },
4853 )
4854 }
4855
4856 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4857 let id = self.id;
4858 self.send_job(None, move |repo, _| async move {
4859 match repo {
4860 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4861 backend.default_branch().await
4862 }
4863 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4864 let response = client
4865 .request(proto::GetDefaultBranch {
4866 project_id: project_id.0,
4867 repository_id: id.to_proto(),
4868 })
4869 .await?;
4870
4871 anyhow::Ok(response.branch.map(SharedString::from))
4872 }
4873 }
4874 })
4875 }
4876
4877 pub fn diff_tree(
4878 &mut self,
4879 diff_type: DiffTreeType,
4880 _cx: &App,
4881 ) -> oneshot::Receiver<Result<TreeDiff>> {
4882 let repository_id = self.snapshot.id;
4883 self.send_job(None, move |repo, _cx| async move {
4884 match repo {
4885 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4886 backend.diff_tree(diff_type).await
4887 }
4888 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4889 let response = client
4890 .request(proto::GetTreeDiff {
4891 project_id: project_id.0,
4892 repository_id: repository_id.0,
4893 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4894 base: diff_type.base().to_string(),
4895 head: diff_type.head().to_string(),
4896 })
4897 .await?;
4898
4899 let entries = response
4900 .entries
4901 .into_iter()
4902 .filter_map(|entry| {
4903 let status = match entry.status() {
4904 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4905 proto::tree_diff_status::Status::Modified => {
4906 TreeDiffStatus::Modified {
4907 old: git::Oid::from_str(
4908 &entry.oid.context("missing oid").log_err()?,
4909 )
4910 .log_err()?,
4911 }
4912 }
4913 proto::tree_diff_status::Status::Deleted => {
4914 TreeDiffStatus::Deleted {
4915 old: git::Oid::from_str(
4916 &entry.oid.context("missing oid").log_err()?,
4917 )
4918 .log_err()?,
4919 }
4920 }
4921 };
4922 Some((
4923 RepoPath::from_rel_path(
4924 &RelPath::from_proto(&entry.path).log_err()?,
4925 ),
4926 status,
4927 ))
4928 })
4929 .collect();
4930
4931 Ok(TreeDiff { entries })
4932 }
4933 }
4934 })
4935 }
4936
4937 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4938 let id = self.id;
4939 self.send_job(None, move |repo, _cx| async move {
4940 match repo {
4941 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4942 backend.diff(diff_type).await
4943 }
4944 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4945 let response = client
4946 .request(proto::GitDiff {
4947 project_id: project_id.0,
4948 repository_id: id.to_proto(),
4949 diff_type: match diff_type {
4950 DiffType::HeadToIndex => {
4951 proto::git_diff::DiffType::HeadToIndex.into()
4952 }
4953 DiffType::HeadToWorktree => {
4954 proto::git_diff::DiffType::HeadToWorktree.into()
4955 }
4956 },
4957 })
4958 .await?;
4959
4960 Ok(response.diff)
4961 }
4962 }
4963 })
4964 }
4965
4966 pub fn create_branch(
4967 &mut self,
4968 branch_name: String,
4969 base_branch: Option<String>,
4970 ) -> oneshot::Receiver<Result<()>> {
4971 let id = self.id;
4972 let status_msg = if let Some(ref base) = base_branch {
4973 format!("git switch -c {branch_name} {base}").into()
4974 } else {
4975 format!("git switch -c {branch_name}").into()
4976 };
4977 self.send_job(Some(status_msg), move |repo, _cx| async move {
4978 match repo {
4979 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4980 backend.create_branch(branch_name, base_branch).await
4981 }
4982 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4983 client
4984 .request(proto::GitCreateBranch {
4985 project_id: project_id.0,
4986 repository_id: id.to_proto(),
4987 branch_name,
4988 })
4989 .await?;
4990
4991 Ok(())
4992 }
4993 }
4994 })
4995 }
4996
4997 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
4998 let id = self.id;
4999 self.send_job(
5000 Some(format!("git switch {branch_name}").into()),
5001 move |repo, _cx| async move {
5002 match repo {
5003 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5004 backend.change_branch(branch_name).await
5005 }
5006 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5007 client
5008 .request(proto::GitChangeBranch {
5009 project_id: project_id.0,
5010 repository_id: id.to_proto(),
5011 branch_name,
5012 })
5013 .await?;
5014
5015 Ok(())
5016 }
5017 }
5018 },
5019 )
5020 }
5021
5022 pub fn rename_branch(
5023 &mut self,
5024 branch: String,
5025 new_name: String,
5026 ) -> oneshot::Receiver<Result<()>> {
5027 let id = self.id;
5028 self.send_job(
5029 Some(format!("git branch -m {branch} {new_name}").into()),
5030 move |repo, _cx| async move {
5031 match repo {
5032 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5033 backend.rename_branch(branch, new_name).await
5034 }
5035 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5036 client
5037 .request(proto::GitRenameBranch {
5038 project_id: project_id.0,
5039 repository_id: id.to_proto(),
5040 branch,
5041 new_name,
5042 })
5043 .await?;
5044
5045 Ok(())
5046 }
5047 }
5048 },
5049 )
5050 }
5051
5052 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5053 let id = self.id;
5054 self.send_job(None, move |repo, _cx| async move {
5055 match repo {
5056 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5057 backend.check_for_pushed_commit().await
5058 }
5059 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5060 let response = client
5061 .request(proto::CheckForPushedCommits {
5062 project_id: project_id.0,
5063 repository_id: id.to_proto(),
5064 })
5065 .await?;
5066
5067 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5068
5069 Ok(branches)
5070 }
5071 }
5072 })
5073 }
5074
5075 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5076 self.send_job(None, |repo, _cx| async move {
5077 match repo {
5078 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5079 backend.checkpoint().await
5080 }
5081 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5082 }
5083 })
5084 }
5085
5086 pub fn restore_checkpoint(
5087 &mut self,
5088 checkpoint: GitRepositoryCheckpoint,
5089 ) -> oneshot::Receiver<Result<()>> {
5090 self.send_job(None, move |repo, _cx| async move {
5091 match repo {
5092 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5093 backend.restore_checkpoint(checkpoint).await
5094 }
5095 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5096 }
5097 })
5098 }
5099
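/// Applies a repository update received from the remote host to the local snapshot, emitting events for branch, stash, and status changes.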
5100 pub(crate) fn apply_remote_update(
5101 &mut self,
5102 update: proto::UpdateRepository,
5103 cx: &mut Context<Self>,
5104 ) -> Result<()> {
5105 let conflicted_paths = TreeSet::from_ordered_entries(
5106 update
5107 .current_merge_conflicts
5108 .into_iter()
5109 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5110 );
5111 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5112 let new_head_commit = update
5113 .head_commit_details
5114 .as_ref()
5115 .map(proto_to_commit_details);
5116 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5117 cx.emit(RepositoryEvent::BranchChanged)
5118 }
5119 self.snapshot.branch = new_branch;
5120 self.snapshot.head_commit = new_head_commit;
5121
5122 self.snapshot.merge.conflicted_paths = conflicted_paths;
5123 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5124 let new_stash_entries = GitStash {
5125 entries: update
5126 .stash_entries
5127 .iter()
5128 .filter_map(|entry| proto_to_stash(entry).ok())
5129 .collect(),
5130 };
5131 if self.snapshot.stash_entries != new_stash_entries {
5132 cx.emit(RepositoryEvent::StashEntriesChanged)
5133 }
5134 self.snapshot.stash_entries = new_stash_entries;
5135
5136 let edits = update
5137 .removed_statuses
5138 .into_iter()
5139 .filter_map(|path| {
5140 Some(sum_tree::Edit::Remove(PathKey(
5141 RelPath::from_proto(&path).log_err()?,
5142 )))
5143 })
5144 .chain(
5145 update
5146 .updated_statuses
5147 .into_iter()
5148 .filter_map(|updated_status| {
5149 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5150 }),
5151 )
5152 .collect::<Vec<_>>();
5153 if !edits.is_empty() {
5154 cx.emit(RepositoryEvent::StatusesChanged);
5155 }
5156 self.snapshot.statuses_by_path.edit(edits, ());
5157 if update.is_last_update {
5158 self.snapshot.scan_id = update.scan_id;
5159 }
5160 self.clear_pending_ops(cx);
5161 Ok(())
5162 }
5163
5164 pub fn compare_checkpoints(
5165 &mut self,
5166 left: GitRepositoryCheckpoint,
5167 right: GitRepositoryCheckpoint,
5168 ) -> oneshot::Receiver<Result<bool>> {
5169 self.send_job(None, move |repo, _cx| async move {
5170 match repo {
5171 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5172 backend.compare_checkpoints(left, right).await
5173 }
5174 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5175 }
5176 })
5177 }
5178
5179 pub fn diff_checkpoints(
5180 &mut self,
5181 base_checkpoint: GitRepositoryCheckpoint,
5182 target_checkpoint: GitRepositoryCheckpoint,
5183 ) -> oneshot::Receiver<Result<String>> {
5184 self.send_job(None, move |repo, _cx| async move {
5185 match repo {
5186 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5187 backend
5188 .diff_checkpoints(base_checkpoint, target_checkpoint)
5189 .await
5190 }
5191 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5192 }
5193 })
5194 }
5195
5196 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5197 let updated = SumTree::from_iter(
5198 self.pending_ops.iter().filter_map(|ops| {
5199 let inner_ops: Vec<PendingOp> =
5200 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5201 if inner_ops.is_empty() {
5202 None
5203 } else {
5204 Some(PendingOps {
5205 repo_path: ops.repo_path.clone(),
5206 ops: inner_ops,
5207 })
5208 }
5209 }),
5210 (),
5211 );
5212
5213 if updated != self.pending_ops {
5214 cx.emit(RepositoryEvent::PendingOpsChanged {
5215 pending_ops: self.pending_ops.clone(),
5216 })
5217 }
5218
5219 self.pending_ops = updated;
5220 }
5221
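/// Schedules a full git status rescan on the worker; a queued scan is skipped when another full scan is already waiting.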
5222 fn schedule_scan(
5223 &mut self,
5224 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5225 cx: &mut Context<Self>,
5226 ) {
5227 let this = cx.weak_entity();
5228 let _ = self.send_keyed_job(
5229 Some(GitJobKey::ReloadGitState),
5230 None,
5231 |state, mut cx| async move {
5232 log::debug!("run scheduled git status scan");
5233
5234 let Some(this) = this.upgrade() else {
5235 return Ok(());
5236 };
5237 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5238 bail!("not a local repository")
5239 };
5240 let (snapshot, events) = this
5241 .update(&mut cx, |this, _| {
5242 this.paths_needing_status_update.clear();
5243 compute_snapshot(
5244 this.id,
5245 this.work_directory_abs_path.clone(),
5246 this.snapshot.clone(),
5247 backend.clone(),
5248 )
5249 })?
5250 .await?;
5251 this.update(&mut cx, |this, cx| {
5252 this.snapshot = snapshot.clone();
5253 this.clear_pending_ops(cx);
5254 for event in events {
5255 cx.emit(event);
5256 }
5257 })?;
5258 if let Some(updates_tx) = updates_tx {
5259 updates_tx
5260 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5261 .ok();
5262 }
5263 Ok(())
5264 },
5265 );
5266 }
5267
5268 fn spawn_local_git_worker(
5269 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5270 cx: &mut Context<Self>,
5271 ) -> mpsc::UnboundedSender<GitJob> {
5272 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5273
5274 cx.spawn(async move |_, cx| {
5275 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5276 if let Some(git_hosting_provider_registry) =
5277 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5278 {
5279 git_hosting_providers::register_additional_providers(
5280 git_hosting_provider_registry,
5281 state.backend.clone(),
5282 );
5283 }
5284 let state = RepositoryState::Local(state);
5285 let mut jobs = VecDeque::new();
5286 loop {
5287 while let Ok(Some(next_job)) = job_rx.try_next() {
5288 jobs.push_back(next_job);
5289 }
5290
5291 if let Some(job) = jobs.pop_front() {
5292 if let Some(current_key) = &job.key
5293 && jobs
5294 .iter()
5295 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5296 {
5297 continue;
5298 }
5299 (job.job)(state.clone(), cx).await;
5300 } else if let Some(job) = job_rx.next().await {
5301 jobs.push_back(job);
5302 } else {
5303 break;
5304 }
5305 }
5306 anyhow::Ok(())
5307 })
5308 .detach_and_log_err(cx);
5309
5310 job_tx
5311 }
5312
5313 fn spawn_remote_git_worker(
5314 state: RemoteRepositoryState,
5315 cx: &mut Context<Self>,
5316 ) -> mpsc::UnboundedSender<GitJob> {
5317 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5318
5319 cx.spawn(async move |_, cx| {
5320 let state = RepositoryState::Remote(state);
5321 let mut jobs = VecDeque::new();
5322 loop {
5323 while let Ok(Some(next_job)) = job_rx.try_next() {
5324 jobs.push_back(next_job);
5325 }
5326
5327 if let Some(job) = jobs.pop_front() {
5328 if let Some(current_key) = &job.key
5329 && jobs
5330 .iter()
5331 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5332 {
5333 continue;
5334 }
5335 (job.job)(state.clone(), cx).await;
5336 } else if let Some(job) = job_rx.next().await {
5337 jobs.push_back(job);
5338 } else {
5339 break;
5340 }
5341 }
5342 anyhow::Ok(())
5343 })
5344 .detach_and_log_err(cx);
5345
5346 job_tx
5347 }
5348
5349 fn load_staged_text(
5350 &mut self,
5351 buffer_id: BufferId,
5352 repo_path: RepoPath,
5353 cx: &App,
5354 ) -> Task<Result<Option<String>>> {
5355 let rx = self.send_job(None, move |state, _| async move {
5356 match state {
5357 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5358 anyhow::Ok(backend.load_index_text(repo_path).await)
5359 }
5360 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5361 let response = client
5362 .request(proto::OpenUnstagedDiff {
5363 project_id: project_id.to_proto(),
5364 buffer_id: buffer_id.to_proto(),
5365 })
5366 .await?;
5367 Ok(response.staged_text)
5368 }
5369 }
5370 });
5371 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5372 }
5373
5374 fn load_committed_text(
5375 &mut self,
5376 buffer_id: BufferId,
5377 repo_path: RepoPath,
5378 cx: &App,
5379 ) -> Task<Result<DiffBasesChange>> {
5380 let rx = self.send_job(None, move |state, _| async move {
5381 match state {
5382 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5383 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5384 let staged_text = backend.load_index_text(repo_path).await;
5385 let diff_bases_change = if committed_text == staged_text {
5386 DiffBasesChange::SetBoth(committed_text)
5387 } else {
5388 DiffBasesChange::SetEach {
5389 index: staged_text,
5390 head: committed_text,
5391 }
5392 };
5393 anyhow::Ok(diff_bases_change)
5394 }
5395 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5396 use proto::open_uncommitted_diff_response::Mode;
5397
5398 let response = client
5399 .request(proto::OpenUncommittedDiff {
5400 project_id: project_id.to_proto(),
5401 buffer_id: buffer_id.to_proto(),
5402 })
5403 .await?;
5404 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5405 let bases = match mode {
5406 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5407 Mode::IndexAndHead => DiffBasesChange::SetEach {
5408 head: response.committed_text,
5409 index: response.staged_text,
5410 },
5411 };
5412 Ok(bases)
5413 }
5414 }
5415 });
5416
5417 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5418 }

5419 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5420 let repository_id = self.snapshot.id;
5421 let rx = self.send_job(None, move |state, _| async move {
5422 match state {
5423 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5424 backend.load_blob_content(oid).await
5425 }
5426 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5427 let response = client
5428 .request(proto::GetBlobContent {
5429 project_id: project_id.to_proto(),
5430 repository_id: repository_id.0,
5431 oid: oid.to_string(),
5432 })
5433 .await?;
5434 Ok(response.content)
5435 }
5436 }
5437 });
5438 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5439 }
5440
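/// Records that the given paths may have changed on disk and queues a status refresh for them.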
5441 fn paths_changed(
5442 &mut self,
5443 paths: Vec<RepoPath>,
5444 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5445 cx: &mut Context<Self>,
5446 ) {
5447 self.paths_needing_status_update.extend(paths);
5448
5449 let this = cx.weak_entity();
5450 let _ = self.send_keyed_job(
5451 Some(GitJobKey::RefreshStatuses),
5452 None,
5453 |state, mut cx| async move {
5454 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5455 (
5456 this.snapshot.clone(),
5457 mem::take(&mut this.paths_needing_status_update),
5458 )
5459 })?;
5460 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5461 bail!("not a local repository")
5462 };
5463
5464 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5465 if paths.is_empty() {
5466 return Ok(());
5467 }
5468 let statuses = backend.status(&paths).await?;
5469 let stash_entries = backend.stash_entries().await?;
5470
5471 let changed_path_statuses = cx
5472 .background_spawn(async move {
5473 let mut changed_path_statuses = Vec::new();
5474 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5475 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5476
5477 for (repo_path, status) in &*statuses.entries {
5478 changed_paths.remove(repo_path);
5479 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5480 && cursor.item().is_some_and(|entry| entry.status == *status)
5481 {
5482 continue;
5483 }
5484
5485 changed_path_statuses.push(Edit::Insert(StatusEntry {
5486 repo_path: repo_path.clone(),
5487 status: *status,
5488 }));
5489 }
5490 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5491 for path in changed_paths.into_iter() {
5492 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5493 changed_path_statuses
5494 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5495 }
5496 }
5497 changed_path_statuses
5498 })
5499 .await;
5500
5501 this.update(&mut cx, |this, cx| {
5502 if this.snapshot.stash_entries != stash_entries {
5503 cx.emit(RepositoryEvent::StashEntriesChanged);
5504 this.snapshot.stash_entries = stash_entries;
5505 }
5506
5507 if !changed_path_statuses.is_empty() {
5508 cx.emit(RepositoryEvent::StatusesChanged);
5509 this.snapshot
5510 .statuses_by_path
5511 .edit(changed_path_statuses, ());
5512 this.snapshot.scan_id += 1;
5513 }
5514
5515 if let Some(updates_tx) = updates_tx {
5516 updates_tx
5517 .unbounded_send(DownstreamUpdate::UpdateRepository(
5518 this.snapshot.clone(),
5519 ))
5520 .ok();
5521 }
5522 })
5523 },
5524 );
5525 }
5526
5527 /// Returns a currently running git command and when it started, if any.
5528 pub fn current_job(&self) -> Option<JobInfo> {
5529 self.active_jobs.values().next().cloned()
5530 }
5531
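/// Enqueues a no-op job; the returned receiver resolves once all previously queued jobs have run.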
5532 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5533 self.send_job(None, |_, _| async {})
5534 }
5535
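/// Registers pending ops for `paths` with the given status, runs `f`, and then marks those ops as finished, skipped (on cancellation), or errored.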
5536 fn spawn_job_with_tracking<AsyncFn>(
5537 &mut self,
5538 paths: Vec<RepoPath>,
5539 git_status: pending_op::GitStatus,
5540 cx: &mut Context<Self>,
5541 f: AsyncFn,
5542 ) -> Task<Result<()>>
5543 where
5544 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5545 {
5546 let ids = self.new_pending_ops_for_paths(paths, git_status);
5547
5548 cx.spawn(async move |this, cx| {
5549 let (job_status, result) = match f(this.clone(), cx).await {
5550 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5551 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5552 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5553 };
5554
5555 this.update(cx, |this, _| {
5556 let mut edits = Vec::with_capacity(ids.len());
5557 for (id, entry) in ids {
5558 if let Some(mut ops) = this
5559 .pending_ops
5560 .get(&PathKey(entry.as_ref().clone()), ())
5561 .cloned()
5562 {
5563 if let Some(op) = ops.op_by_id_mut(id) {
5564 op.job_status = job_status;
5565 }
5566 edits.push(sum_tree::Edit::Insert(ops));
5567 }
5568 }
5569 this.pending_ops.edit(edits, ());
5570 })?;
5571
5572 result
5573 })
5574 }
5575
5576 fn new_pending_ops_for_paths(
5577 &mut self,
5578 paths: Vec<RepoPath>,
5579 git_status: pending_op::GitStatus,
5580 ) -> Vec<(PendingOpId, RepoPath)> {
5581 let mut edits = Vec::with_capacity(paths.len());
5582 let mut ids = Vec::with_capacity(paths.len());
5583 for path in paths {
5584 let mut ops = self
5585 .pending_ops
5586 .get(&PathKey(path.as_ref().clone()), ())
5587 .cloned()
5588 .unwrap_or_else(|| PendingOps::new(&path));
5589 let id = ops.max_id() + 1;
5590 ops.ops.push(PendingOp {
5591 id,
5592 git_status,
5593 job_status: pending_op::JobStatus::Running,
5594 });
5595 edits.push(sum_tree::Edit::Insert(ops));
5596 ids.push((id, path));
5597 }
5598 self.pending_ops.edit(edits, ());
5599 ids
5600 }
5601}
5602
5603fn get_permalink_in_rust_registry_src(
5604 provider_registry: Arc<GitHostingProviderRegistry>,
5605 path: PathBuf,
5606 selection: Range<u32>,
5607) -> Result<url::Url> {
5608 #[derive(Deserialize)]
5609 struct CargoVcsGit {
5610 sha1: String,
5611 }
5612
5613 #[derive(Deserialize)]
5614 struct CargoVcsInfo {
5615 git: CargoVcsGit,
5616 path_in_vcs: String,
5617 }
5618
5619 #[derive(Deserialize)]
5620 struct CargoPackage {
5621 repository: String,
5622 }
5623
5624 #[derive(Deserialize)]
5625 struct CargoToml {
5626 package: CargoPackage,
5627 }
5628
5629 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5630 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5631 Some((dir, json))
5632 }) else {
5633 bail!("No .cargo_vcs_info.json found in parent directories")
5634 };
5635 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5636 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5637 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5638 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5639 .context("parsing package.repository field of manifest")?;
5640 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5641 let permalink = provider.build_permalink(
5642 remote,
5643 BuildPermalinkParams::new(
5644 &cargo_vcs_info.git.sha1,
5645 &RepoPath::from_rel_path(
5646 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5647 ),
5648 Some(selection),
5649 ),
5650 );
5651 Ok(permalink)
5652}
5653
5654fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5655 let Some(blame) = blame else {
5656 return proto::BlameBufferResponse {
5657 blame_response: None,
5658 };
5659 };
5660
5661 let entries = blame
5662 .entries
5663 .into_iter()
5664 .map(|entry| proto::BlameEntry {
5665 sha: entry.sha.as_bytes().into(),
5666 start_line: entry.range.start,
5667 end_line: entry.range.end,
5668 original_line_number: entry.original_line_number,
5669 author: entry.author,
5670 author_mail: entry.author_mail,
5671 author_time: entry.author_time,
5672 author_tz: entry.author_tz,
5673 committer: entry.committer_name,
5674 committer_mail: entry.committer_email,
5675 committer_time: entry.committer_time,
5676 committer_tz: entry.committer_tz,
5677 summary: entry.summary,
5678 previous: entry.previous,
5679 filename: entry.filename,
5680 })
5681 .collect::<Vec<_>>();
5682
5683 let messages = blame
5684 .messages
5685 .into_iter()
5686 .map(|(oid, message)| proto::CommitMessage {
5687 oid: oid.as_bytes().into(),
5688 message,
5689 })
5690 .collect::<Vec<_>>();
5691
5692 proto::BlameBufferResponse {
5693 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5694 entries,
5695 messages,
5696 remote_url: blame.remote_url,
5697 }),
5698 }
5699}
5700
5701fn deserialize_blame_buffer_response(
5702 response: proto::BlameBufferResponse,
5703) -> Option<git::blame::Blame> {
5704 let response = response.blame_response?;
5705 let entries = response
5706 .entries
5707 .into_iter()
5708 .filter_map(|entry| {
5709 Some(git::blame::BlameEntry {
5710 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5711 range: entry.start_line..entry.end_line,
5712 original_line_number: entry.original_line_number,
5713 committer_name: entry.committer,
5714 committer_time: entry.committer_time,
5715 committer_tz: entry.committer_tz,
5716 committer_email: entry.committer_mail,
5717 author: entry.author,
5718 author_mail: entry.author_mail,
5719 author_time: entry.author_time,
5720 author_tz: entry.author_tz,
5721 summary: entry.summary,
5722 previous: entry.previous,
5723 filename: entry.filename,
5724 })
5725 })
5726 .collect::<Vec<_>>();
5727
5728 let messages = response
5729 .messages
5730 .into_iter()
5731 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5732 .collect::<HashMap<_, _>>();
5733
5734 Some(Blame {
5735 entries,
5736 messages,
5737 remote_url: response.remote_url,
5738 })
5739}
5740
5741fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5742 proto::Branch {
5743 is_head: branch.is_head,
5744 ref_name: branch.ref_name.to_string(),
5745 unix_timestamp: branch
5746 .most_recent_commit
5747 .as_ref()
5748 .map(|commit| commit.commit_timestamp as u64),
5749 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5750 ref_name: upstream.ref_name.to_string(),
5751 tracking: upstream
5752 .tracking
5753 .status()
5754 .map(|upstream| proto::UpstreamTracking {
5755 ahead: upstream.ahead as u64,
5756 behind: upstream.behind as u64,
5757 }),
5758 }),
5759 most_recent_commit: branch
5760 .most_recent_commit
5761 .as_ref()
5762 .map(|commit| proto::CommitSummary {
5763 sha: commit.sha.to_string(),
5764 subject: commit.subject.to_string(),
5765 commit_timestamp: commit.commit_timestamp,
5766 author_name: commit.author_name.to_string(),
5767 }),
5768 }
5769}
5770
fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree.ref_name.to_string(),
        sha: worktree.sha.to_string(),
    }
}

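/// Reconstructs a git worktree entry from its proto representation.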
fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

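/// Reconstructs a branch from its proto representation. A missing tracking
/// payload is treated as a gone upstream, and the most recent commit is
/// assumed to have a parent, since the proto does not carry that bit.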
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                has_parent: true,
            }
        }),
    }
}

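/// Converts commit details into their proto representation.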
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

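/// Reconstructs commit details from their proto representation.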
fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

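/// Recomputes a repository snapshot from the git backend and compares it with
/// the previous snapshot to determine which `RepositoryEvent`s to emit
/// (merge heads, statuses, and branch/HEAD changes).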
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged);
    }

    // Resolve the HEAD commit directly from the backend; this covers the
    // detached-HEAD case, where `branch` is `None`.
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    // Remote URLs are captured for edit prediction data collection.
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

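/// Decodes a file status from proto, falling back to the legacy
/// `simple_status` code when no structured variant is present.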
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

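/// Encodes a file status as its structured proto variant.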
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

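/// Maps an unmerged status code onto the shared `proto::GitStatus` enum.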
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

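/// Maps a tracked status code onto the shared `proto::GitStatus` enum.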
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}
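
// A minimal round-trip sketch (illustrative, not part of the original file):
// it assumes `FileStatus` implements `PartialEq` and `Debug`, and exercises
// `status_to_proto`/`status_from_proto` on a tracked status.
#[cfg(test)]
mod status_proto_round_trip_sketch {
    use super::*;

    #[test]
    fn tracked_status_round_trips_through_proto() {
        let status: FileStatus = TrackedStatus {
            index_status: StatusCode::Modified,
            worktree_status: StatusCode::Unmodified,
        }
        .into();
        // The legacy `simple_status` argument is ignored whenever a structured
        // variant is present, so its value does not matter here.
        let decoded = status_from_proto(0, Some(status_to_proto(status))).unwrap();
        assert_eq!(decoded, status);
    }
}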