1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
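/// The project-wide store of git state: the set of detected repositories,
/// per-buffer diff and conflict state, and, when collaborating, the
/// upstream/downstream clients used to mirror that state over RPC.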
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
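/// Strong handles to the diffs that have been shared with a remote peer for a
/// particular buffer, kept so the diffs stay alive while the peer needs them.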
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
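/// Git state tracked for a single open buffer: weak handles to its unstaged
/// and uncommitted diffs and its conflict set, plus the bookkeeping needed to
/// recalculate them when the buffer, index, or HEAD text changes.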
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
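/// Describes which diff base texts changed and what they changed to: the
/// index, HEAD, each independently, or both set to the same text.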
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
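/// The base a buffer diff is computed against: the index (unstaged changes)
/// or HEAD (uncommitted changes).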
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
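/// Whether this store operates on local repositories on disk, or proxies git
/// requests to the upstream client of a remote project.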
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
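/// A repository update to forward to downstream clients: either a new or
/// changed snapshot, or the removal of a repository.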
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
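/// Connection state used while a local project is shared: the downstream
/// client, the channel over which repository updates are queued, and the task
/// that drains that channel.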
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
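/// A checkpoint of every repository in the store, keyed by working-directory
/// path, which can later be restored or compared against another checkpoint.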
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
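/// The git status of a single path, as stored in a repository snapshot's
/// status tree.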
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
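/// Identifies a repository within the store. Allocated from a local counter
/// for local repositories and received over RPC for remote ones.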
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
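/// The state of an in-progress merge: the conflicted paths, the merge message
/// (if any), and the merge heads.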
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
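/// A cheaply clonable snapshot of a repository's state: statuses by path,
/// branch and head commit, merge and stash details, and remote URLs. Snapshots
/// are diffed against their predecessors to produce downstream updates.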
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
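/// Describes a git job that is currently running: when it started and a
/// human-readable message describing it.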
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
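/// A single git repository known to the `GitStore`. Dereferences to its
/// `RepositorySnapshot` and owns the channel through which git jobs for this
/// repository are dispatched.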
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
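/// Everything needed to run git operations against a repository on the local
/// filesystem: a filesystem handle, the repository backend, and the shell
/// environment of the repository's working directory.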
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
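/// The project id and client used to issue git requests for a repository that
/// lives in a remote project.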
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
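/// Where git operations for a repository are executed: against a local
/// backend on disk, or over RPC to the remote project that owns it.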
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
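/// Events emitted by a `Repository` when parts of its snapshot or its pending
/// operations change.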
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
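/// Event emitted by a `Repository` to signal that its set of active jobs has
/// changed.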
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
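/// Events emitted by the `GitStore`: active-repository changes, repository
/// lifecycle and update notifications, index-write failures, job updates, and
/// conflict updates.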
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
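/// A unit of git work, run with access to the repository's backend state. The
/// optional key identifies jobs that represent the same logical operation.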
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
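/// Keys identifying classes of jobs that represent the same logical operation
/// (for example, writing the index for a particular set of paths), so that
/// equivalent jobs can be recognized in the queue.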
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_delete_branch);
476 client.add_entity_request_handler(Self::handle_git_init);
477 client.add_entity_request_handler(Self::handle_push);
478 client.add_entity_request_handler(Self::handle_pull);
479 client.add_entity_request_handler(Self::handle_fetch);
480 client.add_entity_request_handler(Self::handle_stage);
481 client.add_entity_request_handler(Self::handle_unstage);
482 client.add_entity_request_handler(Self::handle_stash);
483 client.add_entity_request_handler(Self::handle_stash_pop);
484 client.add_entity_request_handler(Self::handle_stash_apply);
485 client.add_entity_request_handler(Self::handle_stash_drop);
486 client.add_entity_request_handler(Self::handle_commit);
487 client.add_entity_request_handler(Self::handle_run_hook);
488 client.add_entity_request_handler(Self::handle_reset);
489 client.add_entity_request_handler(Self::handle_show);
490 client.add_entity_request_handler(Self::handle_load_commit_diff);
491 client.add_entity_request_handler(Self::handle_checkout_files);
492 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
493 client.add_entity_request_handler(Self::handle_set_index_text);
494 client.add_entity_request_handler(Self::handle_askpass);
495 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
496 client.add_entity_request_handler(Self::handle_git_diff);
497 client.add_entity_request_handler(Self::handle_tree_diff);
498 client.add_entity_request_handler(Self::handle_get_blob_content);
499 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
500 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
501 client.add_entity_message_handler(Self::handle_update_diff_bases);
502 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
503 client.add_entity_request_handler(Self::handle_blame_buffer);
504 client.add_entity_message_handler(Self::handle_update_repository);
505 client.add_entity_message_handler(Self::handle_remove_repository);
506 client.add_entity_request_handler(Self::handle_git_clone);
507 client.add_entity_request_handler(Self::handle_get_worktrees);
508 client.add_entity_request_handler(Self::handle_create_worktree);
509 }
510
511 pub fn is_local(&self) -> bool {
512 matches!(self.state, GitStoreState::Local { .. })
513 }
514 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
515 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
516 let id = repo.read(cx).id;
517 if self.active_repo_id != Some(id) {
518 self.active_repo_id = Some(id);
519 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
520 }
521 }
522 }
523
524 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
525 match &mut self.state {
526 GitStoreState::Remote {
527 downstream: downstream_client,
528 ..
529 } => {
530 for repo in self.repositories.values() {
531 let update = repo.read(cx).snapshot.initial_update(project_id);
532 for update in split_repository_update(update) {
533 client.send(update).log_err();
534 }
535 }
536 *downstream_client = Some((client, ProjectId(project_id)));
537 }
538 GitStoreState::Local {
539 downstream: downstream_client,
540 ..
541 } => {
542 let mut snapshots = HashMap::default();
543 let (updates_tx, mut updates_rx) = mpsc::unbounded();
544 for repo in self.repositories.values() {
545 updates_tx
546 .unbounded_send(DownstreamUpdate::UpdateRepository(
547 repo.read(cx).snapshot.clone(),
548 ))
549 .ok();
550 }
551 *downstream_client = Some(LocalDownstreamState {
552 client: client.clone(),
553 project_id: ProjectId(project_id),
554 updates_tx,
555 _task: cx.spawn(async move |this, cx| {
556 cx.background_spawn(async move {
557 while let Some(update) = updates_rx.next().await {
558 match update {
559 DownstreamUpdate::UpdateRepository(snapshot) => {
560 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
561 {
562 let update =
563 snapshot.build_update(old_snapshot, project_id);
564 *old_snapshot = snapshot;
565 for update in split_repository_update(update) {
566 client.send(update)?;
567 }
568 } else {
569 let update = snapshot.initial_update(project_id);
570 for update in split_repository_update(update) {
571 client.send(update)?;
572 }
573 snapshots.insert(snapshot.id, snapshot);
574 }
575 }
576 DownstreamUpdate::RemoveRepository(id) => {
577 client.send(proto::RemoveRepository {
578 project_id,
579 id: id.to_proto(),
580 })?;
581 }
582 }
583 }
584 anyhow::Ok(())
585 })
586 .await
587 .ok();
588 this.update(cx, |this, _| {
589 if let GitStoreState::Local {
590 downstream: downstream_client,
591 ..
592 } = &mut this.state
593 {
594 downstream_client.take();
595 } else {
596 unreachable!("unshared called on remote store");
597 }
598 })
599 }),
600 });
601 }
602 }
603 }
604
605 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
606 match &mut self.state {
607 GitStoreState::Local {
608 downstream: downstream_client,
609 ..
610 } => {
611 downstream_client.take();
612 }
613 GitStoreState::Remote {
614 downstream: downstream_client,
615 ..
616 } => {
617 downstream_client.take();
618 }
619 }
620 self.shared_diffs.clear();
621 }
622
623 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
624 self.shared_diffs.remove(peer_id);
625 }
626
627 pub fn active_repository(&self) -> Option<Entity<Repository>> {
628 self.active_repo_id
629 .as_ref()
630 .map(|id| self.repositories[id].clone())
631 }
632
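    /// Returns a diff of the buffer's contents against its staged (index)
    /// text, creating the diff and loading the index text if needed.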
633 pub fn open_unstaged_diff(
634 &mut self,
635 buffer: Entity<Buffer>,
636 cx: &mut Context<Self>,
637 ) -> Task<Result<Entity<BufferDiff>>> {
638 let buffer_id = buffer.read(cx).remote_id();
639 if let Some(diff_state) = self.diffs.get(&buffer_id)
640 && let Some(unstaged_diff) = diff_state
641 .read(cx)
642 .unstaged_diff
643 .as_ref()
644 .and_then(|weak| weak.upgrade())
645 {
646 if let Some(task) =
647 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
648 {
649 return cx.background_executor().spawn(async move {
650 task.await;
651 Ok(unstaged_diff)
652 });
653 }
654 return Task::ready(Ok(unstaged_diff));
655 }
656
657 let Some((repo, repo_path)) =
658 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
659 else {
660 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
661 };
662
663 let task = self
664 .loading_diffs
665 .entry((buffer_id, DiffKind::Unstaged))
666 .or_insert_with(|| {
667 let staged_text = repo.update(cx, |repo, cx| {
668 repo.load_staged_text(buffer_id, repo_path, cx)
669 });
670 cx.spawn(async move |this, cx| {
671 Self::open_diff_internal(
672 this,
673 DiffKind::Unstaged,
674 staged_text.await.map(DiffBasesChange::SetIndex),
675 buffer,
676 cx,
677 )
678 .await
679 .map_err(Arc::new)
680 })
681 .shared()
682 })
683 .clone();
684
685 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
686 }
687
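    /// Returns a new diff of the buffer's contents against the blob with the
    /// given OID (or with no base text when `oid` is `None`), using the
    /// buffer's unstaged diff as the secondary diff.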
688 pub fn open_diff_since(
689 &mut self,
690 oid: Option<git::Oid>,
691 buffer: Entity<Buffer>,
692 repo: Entity<Repository>,
693 languages: Arc<LanguageRegistry>,
694 cx: &mut Context<Self>,
695 ) -> Task<Result<Entity<BufferDiff>>> {
696 cx.spawn(async move |this, cx| {
697 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
698 let content = match oid {
699 None => None,
700 Some(oid) => Some(
701 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
702 .await?,
703 ),
704 };
705 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
706
707 buffer_diff
708 .update(cx, |buffer_diff, cx| {
709 buffer_diff.set_base_text(
710 content.map(Arc::new),
711 buffer_snapshot.language().cloned(),
712 Some(languages.clone()),
713 buffer_snapshot.text,
714 cx,
715 )
716 })?
717 .await?;
718 let unstaged_diff = this
719 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
720 .await?;
721 buffer_diff.update(cx, |buffer_diff, _| {
722 buffer_diff.set_secondary_diff(unstaged_diff);
723 })?;
724
725 this.update(cx, |_, cx| {
726 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
727 .detach();
728 })?;
729
730 Ok(buffer_diff)
731 })
732 }
733
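    /// Returns a diff of the buffer's contents against its committed (HEAD)
    /// text, with the unstaged diff attached as the secondary diff. The diff
    /// is created and cached on first request.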
734 pub fn open_uncommitted_diff(
735 &mut self,
736 buffer: Entity<Buffer>,
737 cx: &mut Context<Self>,
738 ) -> Task<Result<Entity<BufferDiff>>> {
739 let buffer_id = buffer.read(cx).remote_id();
740
741 if let Some(diff_state) = self.diffs.get(&buffer_id)
742 && let Some(uncommitted_diff) = diff_state
743 .read(cx)
744 .uncommitted_diff
745 .as_ref()
746 .and_then(|weak| weak.upgrade())
747 {
748 if let Some(task) =
749 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
750 {
751 return cx.background_executor().spawn(async move {
752 task.await;
753 Ok(uncommitted_diff)
754 });
755 }
756 return Task::ready(Ok(uncommitted_diff));
757 }
758
759 let Some((repo, repo_path)) =
760 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
761 else {
762 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
763 };
764
765 let task = self
766 .loading_diffs
767 .entry((buffer_id, DiffKind::Uncommitted))
768 .or_insert_with(|| {
769 let changes = repo.update(cx, |repo, cx| {
770 repo.load_committed_text(buffer_id, repo_path, cx)
771 });
772
773 // todo(lw): hot foreground spawn
774 cx.spawn(async move |this, cx| {
775 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
776 .await
777 .map_err(Arc::new)
778 })
779 .shared()
780 })
781 .clone();
782
783 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
784 }
785
786 async fn open_diff_internal(
787 this: WeakEntity<Self>,
788 kind: DiffKind,
789 texts: Result<DiffBasesChange>,
790 buffer_entity: Entity<Buffer>,
791 cx: &mut AsyncApp,
792 ) -> Result<Entity<BufferDiff>> {
793 let diff_bases_change = match texts {
794 Err(e) => {
795 this.update(cx, |this, cx| {
796 let buffer = buffer_entity.read(cx);
797 let buffer_id = buffer.remote_id();
798 this.loading_diffs.remove(&(buffer_id, kind));
799 })?;
800 return Err(e);
801 }
802 Ok(change) => change,
803 };
804
805 this.update(cx, |this, cx| {
806 let buffer = buffer_entity.read(cx);
807 let buffer_id = buffer.remote_id();
808 let language = buffer.language().cloned();
809 let language_registry = buffer.language_registry();
810 let text_snapshot = buffer.text_snapshot();
811 this.loading_diffs.remove(&(buffer_id, kind));
812
813 let git_store = cx.weak_entity();
814 let diff_state = this
815 .diffs
816 .entry(buffer_id)
817 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
818
819 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
820
821 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
822 diff_state.update(cx, |diff_state, cx| {
823 diff_state.language = language;
824 diff_state.language_registry = language_registry;
825
826 match kind {
827 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
828 DiffKind::Uncommitted => {
829 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
830 diff
831 } else {
832 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
833 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
834 unstaged_diff
835 };
836
837 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
838 diff_state.uncommitted_diff = Some(diff.downgrade())
839 }
840 }
841
842 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
843 let rx = diff_state.wait_for_recalculation();
844
845 anyhow::Ok(async move {
846 if let Some(rx) = rx {
847 rx.await;
848 }
849 Ok(diff)
850 })
851 })
852 })??
853 .await
854 }
855
856 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
857 let diff_state = self.diffs.get(&buffer_id)?;
858 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
859 }
860
861 pub fn get_uncommitted_diff(
862 &self,
863 buffer_id: BufferId,
864 cx: &App,
865 ) -> Option<Entity<BufferDiff>> {
866 let diff_state = self.diffs.get(&buffer_id)?;
867 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
868 }
869
870 pub fn open_conflict_set(
871 &mut self,
872 buffer: Entity<Buffer>,
873 cx: &mut Context<Self>,
874 ) -> Entity<ConflictSet> {
875 log::debug!("open conflict set");
876 let buffer_id = buffer.read(cx).remote_id();
877
878 if let Some(git_state) = self.diffs.get(&buffer_id)
879 && let Some(conflict_set) = git_state
880 .read(cx)
881 .conflict_set
882 .as_ref()
883 .and_then(|weak| weak.upgrade())
884 {
885 let conflict_set = conflict_set;
886 let buffer_snapshot = buffer.read(cx).text_snapshot();
887
888 git_state.update(cx, |state, cx| {
889 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
890 });
891
892 return conflict_set;
893 }
894
895 let is_unmerged = self
896 .repository_and_path_for_buffer_id(buffer_id, cx)
897 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
898 let git_store = cx.weak_entity();
899 let buffer_git_state = self
900 .diffs
901 .entry(buffer_id)
902 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
903 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
904
905 self._subscriptions
906 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
907 cx.emit(GitStoreEvent::ConflictsUpdated);
908 }));
909
910 buffer_git_state.update(cx, |state, cx| {
911 state.conflict_set = Some(conflict_set.downgrade());
912 let buffer_snapshot = buffer.read(cx).text_snapshot();
913 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
914 });
915
916 conflict_set
917 }
918
919 pub fn project_path_git_status(
920 &self,
921 project_path: &ProjectPath,
922 cx: &App,
923 ) -> Option<FileStatus> {
924 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
925 Some(repo.read(cx).status_for_path(&repo_path)?.status)
926 }
927
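    /// Creates a checkpoint of every repository in the store, which can later
    /// be passed to [`Self::restore_checkpoint`] or [`Self::compare_checkpoints`].
    ///
    /// A minimal usage sketch (it assumes a `git_store: Entity<GitStore>` and a
    /// `cx: &mut App` are already in hand):
    ///
    /// ```ignore
    /// let checkpoint = git_store.update(cx, |store, cx| store.checkpoint(cx));
    /// cx.spawn(async move |_cx| {
    ///     match checkpoint.await {
    ///         Ok(_checkpoint) => { /* keep it around for a later restore or compare */ }
    ///         Err(error) => log::error!("failed to checkpoint repositories: {error:?}"),
    ///     }
    /// })
    /// .detach();
    /// ```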
928 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
929 let mut work_directory_abs_paths = Vec::new();
930 let mut checkpoints = Vec::new();
931 for repository in self.repositories.values() {
932 repository.update(cx, |repository, _| {
933 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
934 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
935 });
936 }
937
938 cx.background_executor().spawn(async move {
939 let checkpoints = future::try_join_all(checkpoints).await?;
940 Ok(GitStoreCheckpoint {
941 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
942 .into_iter()
943 .zip(checkpoints)
944 .collect(),
945 })
946 })
947 }
948
949 pub fn restore_checkpoint(
950 &self,
951 checkpoint: GitStoreCheckpoint,
952 cx: &mut App,
953 ) -> Task<Result<()>> {
954 let repositories_by_work_dir_abs_path = self
955 .repositories
956 .values()
957 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
958 .collect::<HashMap<_, _>>();
959
960 let mut tasks = Vec::new();
961 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
962 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
963 let restore = repository.update(cx, |repository, _| {
964 repository.restore_checkpoint(checkpoint)
965 });
966 tasks.push(async move { restore.await? });
967 }
968 }
969 cx.background_spawn(async move {
970 future::try_join_all(tasks).await?;
971 Ok(())
972 })
973 }
974
975 /// Compares two checkpoints, returning true if they are equal.
976 pub fn compare_checkpoints(
977 &self,
978 left: GitStoreCheckpoint,
979 mut right: GitStoreCheckpoint,
980 cx: &mut App,
981 ) -> Task<Result<bool>> {
982 let repositories_by_work_dir_abs_path = self
983 .repositories
984 .values()
985 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
986 .collect::<HashMap<_, _>>();
987
988 let mut tasks = Vec::new();
989 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
990 if let Some(right_checkpoint) = right
991 .checkpoints_by_work_dir_abs_path
992 .remove(&work_dir_abs_path)
993 {
994 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
995 {
996 let compare = repository.update(cx, |repository, _| {
997 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
998 });
999
1000 tasks.push(async move { compare.await? });
1001 }
1002 } else {
1003 return Task::ready(Ok(false));
1004 }
1005 }
1006 cx.background_spawn(async move {
1007 Ok(future::try_join_all(tasks)
1008 .await?
1009 .into_iter()
1010 .all(|result| result))
1011 })
1012 }
1013
1014 /// Blames a buffer.
1015 pub fn blame_buffer(
1016 &self,
1017 buffer: &Entity<Buffer>,
1018 version: Option<clock::Global>,
1019 cx: &mut Context<Self>,
1020 ) -> Task<Result<Option<Blame>>> {
1021 let buffer = buffer.read(cx);
1022 let Some((repo, repo_path)) =
1023 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1024 else {
1025 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1026 };
1027 let content = match &version {
1028 Some(version) => buffer.rope_for_version(version),
1029 None => buffer.as_rope().clone(),
1030 };
1031 let version = version.unwrap_or(buffer.version());
1032 let buffer_id = buffer.remote_id();
1033
1034 let repo = repo.downgrade();
1035 cx.spawn(async move |_, cx| {
1036 let repository_state = repo
1037 .update(cx, |repo, _| repo.repository_state.clone())?
1038 .await
1039 .map_err(|err| anyhow::anyhow!(err))?;
1040 match repository_state {
1041 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1042 .blame(repo_path.clone(), content)
1043 .await
1044 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1045 .map(Some),
1046 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1047 let response = client
1048 .request(proto::BlameBuffer {
1049 project_id: project_id.to_proto(),
1050 buffer_id: buffer_id.into(),
1051 version: serialize_version(&version),
1052 })
1053 .await?;
1054 Ok(deserialize_blame_buffer_response(response))
1055 }
1056 }
1057 })
1058 }
1059
1060 pub fn get_permalink_to_line(
1061 &self,
1062 buffer: &Entity<Buffer>,
1063 selection: Range<u32>,
1064 cx: &mut App,
1065 ) -> Task<Result<url::Url>> {
1066 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1067 return Task::ready(Err(anyhow!("buffer has no file")));
1068 };
1069
1070 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1071 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1072 cx,
1073 ) else {
1074 // If we're not in a Git repo, check whether this is a Rust source
1075 // file in the Cargo registry (presumably opened with go-to-definition
1076 // from a normal Rust file). If so, we can put together a permalink
1077 // using crate metadata.
1078 if buffer
1079 .read(cx)
1080 .language()
1081 .is_none_or(|lang| lang.name() != "Rust".into())
1082 {
1083 return Task::ready(Err(anyhow!("no permalink available")));
1084 }
1085 let file_path = file.worktree.read(cx).absolutize(&file.path);
1086 return cx.spawn(async move |cx| {
1087 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1088 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1089 .context("no permalink available")
1090 });
1091 };
1092
1093 let buffer_id = buffer.read(cx).remote_id();
1094 let branch = repo.read(cx).branch.clone();
1095 let remote = branch
1096 .as_ref()
1097 .and_then(|b| b.upstream.as_ref())
1098 .and_then(|b| b.remote_name())
1099 .unwrap_or("origin")
1100 .to_string();
1101
1102 let rx = repo.update(cx, |repo, _| {
1103 repo.send_job(None, move |state, cx| async move {
1104 match state {
1105 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1106 let origin_url = backend
1107 .remote_url(&remote)
1108 .with_context(|| format!("remote \"{remote}\" not found"))?;
1109
1110 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1111
1112 let provider_registry =
1113 cx.update(GitHostingProviderRegistry::default_global)?;
1114
1115 let (provider, remote) =
1116 parse_git_remote_url(provider_registry, &origin_url)
1117 .context("parsing Git remote URL")?;
1118
1119 Ok(provider.build_permalink(
1120 remote,
1121 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1122 ))
1123 }
1124 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1125 let response = client
1126 .request(proto::GetPermalinkToLine {
1127 project_id: project_id.to_proto(),
1128 buffer_id: buffer_id.into(),
1129 selection: Some(proto::Range {
1130 start: selection.start as u64,
1131 end: selection.end as u64,
1132 }),
1133 })
1134 .await?;
1135
1136 url::Url::parse(&response.permalink).context("failed to parse permalink")
1137 }
1138 }
1139 })
1140 });
1141 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1142 }
1143
1144 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1145 match &self.state {
1146 GitStoreState::Local {
1147 downstream: downstream_client,
1148 ..
1149 } => downstream_client
1150 .as_ref()
1151 .map(|state| (state.client.clone(), state.project_id)),
1152 GitStoreState::Remote {
1153 downstream: downstream_client,
1154 ..
1155 } => downstream_client.clone(),
1156 }
1157 }
1158
1159 fn upstream_client(&self) -> Option<AnyProtoClient> {
1160 match &self.state {
1161 GitStoreState::Local { .. } => None,
1162 GitStoreState::Remote {
1163 upstream_client, ..
1164 } => Some(upstream_client.clone()),
1165 }
1166 }
1167
1168 fn on_worktree_store_event(
1169 &mut self,
1170 worktree_store: Entity<WorktreeStore>,
1171 event: &WorktreeStoreEvent,
1172 cx: &mut Context<Self>,
1173 ) {
1174 let GitStoreState::Local {
1175 project_environment,
1176 downstream,
1177 next_repository_id,
1178 fs,
1179 } = &self.state
1180 else {
1181 return;
1182 };
1183
1184 match event {
1185 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1186 if let Some(worktree) = self
1187 .worktree_store
1188 .read(cx)
1189 .worktree_for_id(*worktree_id, cx)
1190 {
1191 let paths_by_git_repo =
1192 self.process_updated_entries(&worktree, updated_entries, cx);
1193 let downstream = downstream
1194 .as_ref()
1195 .map(|downstream| downstream.updates_tx.clone());
1196 cx.spawn(async move |_, cx| {
1197 let paths_by_git_repo = paths_by_git_repo.await;
1198 for (repo, paths) in paths_by_git_repo {
1199 repo.update(cx, |repo, cx| {
1200 repo.paths_changed(paths, downstream.clone(), cx);
1201 })
1202 .ok();
1203 }
1204 })
1205 .detach();
1206 }
1207 }
1208 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1209 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1210 else {
1211 return;
1212 };
1213 if !worktree.read(cx).is_visible() {
1214 log::debug!(
1215 "not adding repositories for local worktree {:?} because it's not visible",
1216 worktree.read(cx).abs_path()
1217 );
1218 return;
1219 }
1220 self.update_repositories_from_worktree(
1221 *worktree_id,
1222 project_environment.clone(),
1223 next_repository_id.clone(),
1224 downstream
1225 .as_ref()
1226 .map(|downstream| downstream.updates_tx.clone()),
1227 changed_repos.clone(),
1228 fs.clone(),
1229 cx,
1230 );
1231 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1232 }
1233 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1234 let repos_without_worktree: Vec<RepositoryId> = self
1235 .worktree_ids
1236 .iter_mut()
1237 .filter_map(|(repo_id, worktree_ids)| {
1238 worktree_ids.remove(worktree_id);
1239 if worktree_ids.is_empty() {
1240 Some(*repo_id)
1241 } else {
1242 None
1243 }
1244 })
1245 .collect();
1246 let is_active_repo_removed = repos_without_worktree
1247 .iter()
1248 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1249
1250 for repo_id in repos_without_worktree {
1251 self.repositories.remove(&repo_id);
1252 self.worktree_ids.remove(&repo_id);
1253 if let Some(updates_tx) =
1254 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1255 {
1256 updates_tx
1257 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1258 .ok();
1259 }
1260 }
1261
1262 if is_active_repo_removed {
1263 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1264 self.active_repo_id = Some(repo_id);
1265 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1266 } else {
1267 self.active_repo_id = None;
1268 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1269 }
1270 }
1271 }
1272 _ => {}
1273 }
1274 }
1275 fn on_repository_event(
1276 &mut self,
1277 repo: Entity<Repository>,
1278 event: &RepositoryEvent,
1279 cx: &mut Context<Self>,
1280 ) {
1281 let id = repo.read(cx).id;
1282 let repo_snapshot = repo.read(cx).snapshot.clone();
1283 for (buffer_id, diff) in self.diffs.iter() {
1284 if let Some((buffer_repo, repo_path)) =
1285 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1286 && buffer_repo == repo
1287 {
1288 diff.update(cx, |diff, cx| {
1289 if let Some(conflict_set) = &diff.conflict_set {
1290 let conflict_status_changed =
1291 conflict_set.update(cx, |conflict_set, cx| {
1292 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1293 conflict_set.set_has_conflict(has_conflict, cx)
1294 })?;
1295 if conflict_status_changed {
1296 let buffer_store = self.buffer_store.read(cx);
1297 if let Some(buffer) = buffer_store.get(*buffer_id) {
1298 let _ = diff
1299 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1300 }
1301 }
1302 }
1303 anyhow::Ok(())
1304 })
1305 .ok();
1306 }
1307 }
1308 cx.emit(GitStoreEvent::RepositoryUpdated(
1309 id,
1310 event.clone(),
1311 self.active_repo_id == Some(id),
1312 ))
1313 }
1314
1315 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1316 cx.emit(GitStoreEvent::JobsUpdated)
1317 }
1318
    /// Update our list of repositories and schedule git scans in response to
    /// a notification from a worktree.
1320 fn update_repositories_from_worktree(
1321 &mut self,
1322 worktree_id: WorktreeId,
1323 project_environment: Entity<ProjectEnvironment>,
1324 next_repository_id: Arc<AtomicU64>,
1325 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1326 updated_git_repositories: UpdatedGitRepositoriesSet,
1327 fs: Arc<dyn Fs>,
1328 cx: &mut Context<Self>,
1329 ) {
1330 let mut removed_ids = Vec::new();
1331 for update in updated_git_repositories.iter() {
1332 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1333 let existing_work_directory_abs_path =
1334 repo.read(cx).work_directory_abs_path.clone();
1335 Some(&existing_work_directory_abs_path)
1336 == update.old_work_directory_abs_path.as_ref()
1337 || Some(&existing_work_directory_abs_path)
1338 == update.new_work_directory_abs_path.as_ref()
1339 }) {
1340 let repo_id = *id;
1341 if let Some(new_work_directory_abs_path) =
1342 update.new_work_directory_abs_path.clone()
1343 {
1344 self.worktree_ids
1345 .entry(repo_id)
1346 .or_insert_with(HashSet::new)
1347 .insert(worktree_id);
1348 existing.update(cx, |existing, cx| {
1349 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1350 existing.schedule_scan(updates_tx.clone(), cx);
1351 });
1352 } else {
1353 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1354 worktree_ids.remove(&worktree_id);
1355 if worktree_ids.is_empty() {
1356 removed_ids.push(repo_id);
1357 }
1358 }
1359 }
1360 } else if let UpdatedGitRepository {
1361 new_work_directory_abs_path: Some(work_directory_abs_path),
1362 dot_git_abs_path: Some(dot_git_abs_path),
1363 repository_dir_abs_path: Some(_repository_dir_abs_path),
1364 common_dir_abs_path: Some(_common_dir_abs_path),
1365 ..
1366 } = update
1367 {
1368 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1369 let git_store = cx.weak_entity();
1370 let repo = cx.new(|cx| {
1371 let mut repo = Repository::local(
1372 id,
1373 work_directory_abs_path.clone(),
1374 dot_git_abs_path.clone(),
1375 project_environment.downgrade(),
1376 fs.clone(),
1377 git_store,
1378 cx,
1379 );
1380 if let Some(updates_tx) = updates_tx.as_ref() {
1381 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1382 updates_tx
1383 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1384 .ok();
1385 }
1386 repo.schedule_scan(updates_tx.clone(), cx);
1387 repo
1388 });
1389 self._subscriptions
1390 .push(cx.subscribe(&repo, Self::on_repository_event));
1391 self._subscriptions
1392 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1393 self.repositories.insert(id, repo);
1394 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1395 cx.emit(GitStoreEvent::RepositoryAdded);
1396 self.active_repo_id.get_or_insert_with(|| {
1397 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1398 id
1399 });
1400 }
1401 }
1402
1403 for id in removed_ids {
1404 if self.active_repo_id == Some(id) {
1405 self.active_repo_id = None;
1406 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1407 }
1408 self.repositories.remove(&id);
1409 if let Some(updates_tx) = updates_tx.as_ref() {
1410 updates_tx
1411 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1412 .ok();
1413 }
1414 }
1415 }
1416
1417 fn on_buffer_store_event(
1418 &mut self,
1419 _: Entity<BufferStore>,
1420 event: &BufferStoreEvent,
1421 cx: &mut Context<Self>,
1422 ) {
1423 match event {
1424 BufferStoreEvent::BufferAdded(buffer) => {
1425 cx.subscribe(buffer, |this, buffer, event, cx| {
1426 if let BufferEvent::LanguageChanged = event {
1427 let buffer_id = buffer.read(cx).remote_id();
1428 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1429 diff_state.update(cx, |diff_state, cx| {
1430 diff_state.buffer_language_changed(buffer, cx);
1431 });
1432 }
1433 }
1434 })
1435 .detach();
1436 }
1437 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1438 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1439 diffs.remove(buffer_id);
1440 }
1441 }
1442 BufferStoreEvent::BufferDropped(buffer_id) => {
1443 self.diffs.remove(buffer_id);
1444 for diffs in self.shared_diffs.values_mut() {
1445 diffs.remove(buffer_id);
1446 }
1447 }
1448 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1453 let buffer_id = buffer.read(cx).remote_id();
1454 let diff_state = self.diffs.get(&buffer_id);
1455 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1456
1457 if let Some(diff_state) = diff_state
1458 && let Some((repo, repo_path)) = repo
1459 {
1460 let buffer = buffer.clone();
1461 let diff_state = diff_state.clone();
1462
1463 cx.spawn(async move |_git_store, cx| {
1464 async {
1465 let diff_bases_change = repo
1466 .update(cx, |repo, cx| {
1467 repo.load_committed_text(buffer_id, repo_path, cx)
1468 })?
1469 .await?;
1470
1471 diff_state.update(cx, |diff_state, cx| {
1472 let buffer_snapshot = buffer.read(cx).text_snapshot();
1473 diff_state.diff_bases_changed(
1474 buffer_snapshot,
1475 Some(diff_bases_change),
1476 cx,
1477 );
1478 })
1479 }
1480 .await
1481 .log_err();
1482 })
1483 .detach();
1484 }
1485 }
1486 _ => {}
1487 }
1488 }
1489
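    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that completes once all recalculations are done.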
1490 pub fn recalculate_buffer_diffs(
1491 &mut self,
1492 buffers: Vec<Entity<Buffer>>,
1493 cx: &mut Context<Self>,
1494 ) -> impl Future<Output = ()> + use<> {
1495 let mut futures = Vec::new();
1496 for buffer in buffers {
1497 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1498 let buffer = buffer.read(cx).text_snapshot();
1499 diff_state.update(cx, |diff_state, cx| {
1500 diff_state.recalculate_diffs(buffer.clone(), cx);
1501 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1502 });
1503 futures.push(diff_state.update(cx, |diff_state, cx| {
1504 diff_state
1505 .reparse_conflict_markers(buffer, cx)
1506 .map(|_| {})
1507 .boxed()
1508 }));
1509 }
1510 }
1511 async move {
1512 futures::future::join_all(futures).await;
1513 }
1514 }
1515
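    /// Responds to hunks being staged or unstaged in a `BufferDiff` by writing
    /// the new index text back to the repository; if the write fails, the
    /// pending hunks are cleared and an `IndexWriteError` event is emitted.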
1516 fn on_buffer_diff_event(
1517 &mut self,
1518 diff: Entity<buffer_diff::BufferDiff>,
1519 event: &BufferDiffEvent,
1520 cx: &mut Context<Self>,
1521 ) {
1522 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1523 let buffer_id = diff.read(cx).buffer_id;
1524 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1525 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1526 diff_state.hunk_staging_operation_count += 1;
1527 diff_state.hunk_staging_operation_count
1528 });
1529 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1530 let recv = repo.update(cx, |repo, cx| {
1531 log::debug!("hunks changed for {}", path.as_unix_str());
1532 repo.spawn_set_index_text_job(
1533 path,
1534 new_index_text.as_ref().map(|rope| rope.to_string()),
1535 Some(hunk_staging_operation_count),
1536 cx,
1537 )
1538 });
1539 let diff = diff.downgrade();
1540 cx.spawn(async move |this, cx| {
1541 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1542 diff.update(cx, |diff, cx| {
1543 diff.clear_pending_hunks(cx);
1544 })
1545 .ok();
1546 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1547 .ok();
1548 }
1549 })
1550 .detach();
1551 }
1552 }
1553 }
1554 }
1555
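    /// Reloads buffer diff bases for any repositories whose on-disk state was
    /// reported changed by a local worktree.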
1556 fn local_worktree_git_repos_changed(
1557 &mut self,
1558 worktree: Entity<Worktree>,
1559 changed_repos: &UpdatedGitRepositoriesSet,
1560 cx: &mut Context<Self>,
1561 ) {
1562 log::debug!("local worktree repos changed");
1563 debug_assert!(worktree.read(cx).is_local());
1564
1565 for repository in self.repositories.values() {
1566 repository.update(cx, |repository, cx| {
1567 let repo_abs_path = &repository.work_directory_abs_path;
1568 if changed_repos.iter().any(|update| {
1569 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1570 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1571 }) {
1572 repository.reload_buffer_diff_bases(cx);
1573 }
1574 });
1575 }
1576 }
1577
1578 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1579 &self.repositories
1580 }
1581
1582 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1583 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1584 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1585 Some(status.status)
1586 }
1587
1588 pub fn repository_and_path_for_buffer_id(
1589 &self,
1590 buffer_id: BufferId,
1591 cx: &App,
1592 ) -> Option<(Entity<Repository>, RepoPath)> {
1593 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1594 let project_path = buffer.read(cx).project_path(cx)?;
1595 self.repository_and_path_for_project_path(&project_path, cx)
1596 }
1597
1598 pub fn repository_and_path_for_project_path(
1599 &self,
1600 path: &ProjectPath,
1601 cx: &App,
1602 ) -> Option<(Entity<Repository>, RepoPath)> {
1603 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
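        // When nested repositories contain this path, prefer the innermost
        // one: among ancestors of `abs_path`, the deepest work directory
        // compares greatest.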
1604 self.repositories
1605 .values()
1606 .filter_map(|repo| {
1607 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1608 Some((repo.clone(), repo_path))
1609 })
1610 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1611 }
1612
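    /// Initializes a git repository at `path`, either directly on the local
    /// filesystem or by forwarding the request to the upstream project.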
1613 pub fn git_init(
1614 &self,
1615 path: Arc<Path>,
1616 fallback_branch_name: String,
1617 cx: &App,
1618 ) -> Task<Result<()>> {
1619 match &self.state {
1620 GitStoreState::Local { fs, .. } => {
1621 let fs = fs.clone();
1622 cx.background_executor()
1623 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1624 }
1625 GitStoreState::Remote {
1626 upstream_client,
1627 upstream_project_id: project_id,
1628 ..
1629 } => {
1630 let client = upstream_client.clone();
1631 let project_id = *project_id;
1632 cx.background_executor().spawn(async move {
1633 client
1634 .request(proto::GitInit {
                            project_id,
1636 abs_path: path.to_string_lossy().into_owned(),
1637 fallback_branch_name,
1638 })
1639 .await?;
1640 Ok(())
1641 })
1642 }
1643 }
1644 }
1645
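    /// Clones `repo` into `path`, either locally or by forwarding the request
    /// to the upstream project. Cloning is not available to collab guests.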
1646 pub fn git_clone(
1647 &self,
1648 repo: String,
1649 path: impl Into<Arc<std::path::Path>>,
1650 cx: &App,
1651 ) -> Task<Result<()>> {
1652 let path = path.into();
1653 match &self.state {
1654 GitStoreState::Local { fs, .. } => {
1655 let fs = fs.clone();
1656 cx.background_executor()
1657 .spawn(async move { fs.git_clone(&repo, &path).await })
1658 }
1659 GitStoreState::Remote {
1660 upstream_client,
1661 upstream_project_id,
1662 ..
1663 } => {
1664 if upstream_client.is_via_collab() {
1665 return Task::ready(Err(anyhow!(
1666 "Git Clone isn't supported for project guests"
1667 )));
1668 }
1669 let request = upstream_client.request(proto::GitClone {
1670 project_id: *upstream_project_id,
1671 abs_path: path.to_string_lossy().into_owned(),
1672 remote_repo: repo,
1673 });
1674
1675 cx.background_spawn(async move {
1676 let result = request.await?;
1677
1678 match result.success {
1679 true => Ok(()),
1680 false => Err(anyhow!("Git Clone failed")),
1681 }
1682 })
1683 }
1684 }
1685 }
1686
1687 async fn handle_update_repository(
1688 this: Entity<Self>,
1689 envelope: TypedEnvelope<proto::UpdateRepository>,
1690 mut cx: AsyncApp,
1691 ) -> Result<()> {
1692 this.update(&mut cx, |this, cx| {
1693 let path_style = this.worktree_store.read(cx).path_style();
1694 let mut update = envelope.payload;
1695
1696 let id = RepositoryId::from_proto(update.id);
1697 let client = this.upstream_client().context("no upstream client")?;
1698
1699 let mut repo_subscription = None;
1700 let repo = this.repositories.entry(id).or_insert_with(|| {
1701 let git_store = cx.weak_entity();
1702 let repo = cx.new(|cx| {
1703 Repository::remote(
1704 id,
1705 Path::new(&update.abs_path).into(),
1706 path_style,
1707 ProjectId(update.project_id),
1708 client,
1709 git_store,
1710 cx,
1711 )
1712 });
1713 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1714 cx.emit(GitStoreEvent::RepositoryAdded);
1715 repo
1716 });
1717 this._subscriptions.extend(repo_subscription);
1718
1719 repo.update(cx, {
1720 let update = update.clone();
1721 |repo, cx| repo.apply_remote_update(update, cx)
1722 })?;
1723
1724 this.active_repo_id.get_or_insert_with(|| {
1725 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1726 id
1727 });
1728
1729 if let Some((client, project_id)) = this.downstream_client() {
1730 update.project_id = project_id.to_proto();
1731 client.send(update).log_err();
1732 }
1733 Ok(())
1734 })?
1735 }
1736
1737 async fn handle_remove_repository(
1738 this: Entity<Self>,
1739 envelope: TypedEnvelope<proto::RemoveRepository>,
1740 mut cx: AsyncApp,
1741 ) -> Result<()> {
1742 this.update(&mut cx, |this, cx| {
1743 let mut update = envelope.payload;
1744 let id = RepositoryId::from_proto(update.id);
1745 this.repositories.remove(&id);
1746 if let Some((client, project_id)) = this.downstream_client() {
1747 update.project_id = project_id.to_proto();
1748 client.send(update).log_err();
1749 }
1750 if this.active_repo_id == Some(id) {
1751 this.active_repo_id = None;
1752 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1753 }
1754 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1755 })
1756 }
1757
1758 async fn handle_git_init(
1759 this: Entity<Self>,
1760 envelope: TypedEnvelope<proto::GitInit>,
1761 cx: AsyncApp,
1762 ) -> Result<proto::Ack> {
1763 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1764 let name = envelope.payload.fallback_branch_name;
1765 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1766 .await?;
1767
1768 Ok(proto::Ack {})
1769 }
1770
1771 async fn handle_git_clone(
1772 this: Entity<Self>,
1773 envelope: TypedEnvelope<proto::GitClone>,
1774 cx: AsyncApp,
1775 ) -> Result<proto::GitCloneResponse> {
1776 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1777 let repo_name = envelope.payload.remote_repo;
1778 let result = cx
1779 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1780 .await;
1781
1782 Ok(proto::GitCloneResponse {
1783 success: result.is_ok(),
1784 })
1785 }
1786
1787 async fn handle_fetch(
1788 this: Entity<Self>,
1789 envelope: TypedEnvelope<proto::Fetch>,
1790 mut cx: AsyncApp,
1791 ) -> Result<proto::RemoteMessageResponse> {
1792 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1793 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1794 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1795 let askpass_id = envelope.payload.askpass_id;
1796
1797 let askpass = make_remote_delegate(
1798 this,
1799 envelope.payload.project_id,
1800 repository_id,
1801 askpass_id,
1802 &mut cx,
1803 );
1804
1805 let remote_output = repository_handle
1806 .update(&mut cx, |repository_handle, cx| {
1807 repository_handle.fetch(fetch_options, askpass, cx)
1808 })?
1809 .await??;
1810
1811 Ok(proto::RemoteMessageResponse {
1812 stdout: remote_output.stdout,
1813 stderr: remote_output.stderr,
1814 })
1815 }
1816
1817 async fn handle_push(
1818 this: Entity<Self>,
1819 envelope: TypedEnvelope<proto::Push>,
1820 mut cx: AsyncApp,
1821 ) -> Result<proto::RemoteMessageResponse> {
1822 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1823 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1824
1825 let askpass_id = envelope.payload.askpass_id;
1826 let askpass = make_remote_delegate(
1827 this,
1828 envelope.payload.project_id,
1829 repository_id,
1830 askpass_id,
1831 &mut cx,
1832 );
1833
1834 let options = envelope
1835 .payload
1836 .options
1837 .as_ref()
1838 .map(|_| match envelope.payload.options() {
1839 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1840 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1841 });
1842
1843 let branch_name = envelope.payload.branch_name.into();
1844 let remote_name = envelope.payload.remote_name.into();
1845
1846 let remote_output = repository_handle
1847 .update(&mut cx, |repository_handle, cx| {
1848 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1849 })?
1850 .await??;
1851 Ok(proto::RemoteMessageResponse {
1852 stdout: remote_output.stdout,
1853 stderr: remote_output.stderr,
1854 })
1855 }
1856
1857 async fn handle_pull(
1858 this: Entity<Self>,
1859 envelope: TypedEnvelope<proto::Pull>,
1860 mut cx: AsyncApp,
1861 ) -> Result<proto::RemoteMessageResponse> {
1862 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1863 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1864 let askpass_id = envelope.payload.askpass_id;
1865 let askpass = make_remote_delegate(
1866 this,
1867 envelope.payload.project_id,
1868 repository_id,
1869 askpass_id,
1870 &mut cx,
1871 );
1872
1873 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1874 let remote_name = envelope.payload.remote_name.into();
1875 let rebase = envelope.payload.rebase;
1876
1877 let remote_message = repository_handle
1878 .update(&mut cx, |repository_handle, cx| {
1879 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1880 })?
1881 .await??;
1882
1883 Ok(proto::RemoteMessageResponse {
1884 stdout: remote_message.stdout,
1885 stderr: remote_message.stderr,
1886 })
1887 }
1888
1889 async fn handle_stage(
1890 this: Entity<Self>,
1891 envelope: TypedEnvelope<proto::Stage>,
1892 mut cx: AsyncApp,
1893 ) -> Result<proto::Ack> {
1894 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1895 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1896
1897 let entries = envelope
1898 .payload
1899 .paths
1900 .into_iter()
1901 .map(|path| RepoPath::new(&path))
1902 .collect::<Result<Vec<_>>>()?;
1903
1904 repository_handle
1905 .update(&mut cx, |repository_handle, cx| {
1906 repository_handle.stage_entries(entries, cx)
1907 })?
1908 .await?;
1909 Ok(proto::Ack {})
1910 }
1911
1912 async fn handle_unstage(
1913 this: Entity<Self>,
1914 envelope: TypedEnvelope<proto::Unstage>,
1915 mut cx: AsyncApp,
1916 ) -> Result<proto::Ack> {
1917 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1918 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1919
1920 let entries = envelope
1921 .payload
1922 .paths
1923 .into_iter()
1924 .map(|path| RepoPath::new(&path))
1925 .collect::<Result<Vec<_>>>()?;
1926
1927 repository_handle
1928 .update(&mut cx, |repository_handle, cx| {
1929 repository_handle.unstage_entries(entries, cx)
1930 })?
1931 .await?;
1932
1933 Ok(proto::Ack {})
1934 }
1935
1936 async fn handle_stash(
1937 this: Entity<Self>,
1938 envelope: TypedEnvelope<proto::Stash>,
1939 mut cx: AsyncApp,
1940 ) -> Result<proto::Ack> {
1941 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1942 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1943
1944 let entries = envelope
1945 .payload
1946 .paths
1947 .into_iter()
1948 .map(|path| RepoPath::new(&path))
1949 .collect::<Result<Vec<_>>>()?;
1950
1951 repository_handle
1952 .update(&mut cx, |repository_handle, cx| {
1953 repository_handle.stash_entries(entries, cx)
1954 })?
1955 .await?;
1956
1957 Ok(proto::Ack {})
1958 }
1959
1960 async fn handle_stash_pop(
1961 this: Entity<Self>,
1962 envelope: TypedEnvelope<proto::StashPop>,
1963 mut cx: AsyncApp,
1964 ) -> Result<proto::Ack> {
1965 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1966 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1967 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1968
1969 repository_handle
1970 .update(&mut cx, |repository_handle, cx| {
1971 repository_handle.stash_pop(stash_index, cx)
1972 })?
1973 .await?;
1974
1975 Ok(proto::Ack {})
1976 }
1977
1978 async fn handle_stash_apply(
1979 this: Entity<Self>,
1980 envelope: TypedEnvelope<proto::StashApply>,
1981 mut cx: AsyncApp,
1982 ) -> Result<proto::Ack> {
1983 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1984 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1985 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1986
1987 repository_handle
1988 .update(&mut cx, |repository_handle, cx| {
1989 repository_handle.stash_apply(stash_index, cx)
1990 })?
1991 .await?;
1992
1993 Ok(proto::Ack {})
1994 }
1995
1996 async fn handle_stash_drop(
1997 this: Entity<Self>,
1998 envelope: TypedEnvelope<proto::StashDrop>,
1999 mut cx: AsyncApp,
2000 ) -> Result<proto::Ack> {
2001 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2002 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2003 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2004
2005 repository_handle
2006 .update(&mut cx, |repository_handle, cx| {
2007 repository_handle.stash_drop(stash_index, cx)
2008 })?
2009 .await??;
2010
2011 Ok(proto::Ack {})
2012 }
2013
2014 async fn handle_set_index_text(
2015 this: Entity<Self>,
2016 envelope: TypedEnvelope<proto::SetIndexText>,
2017 mut cx: AsyncApp,
2018 ) -> Result<proto::Ack> {
2019 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2020 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2021 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2022
2023 repository_handle
2024 .update(&mut cx, |repository_handle, cx| {
2025 repository_handle.spawn_set_index_text_job(
2026 repo_path,
2027 envelope.payload.text,
2028 None,
2029 cx,
2030 )
2031 })?
2032 .await??;
2033 Ok(proto::Ack {})
2034 }
2035
2036 async fn handle_run_hook(
2037 this: Entity<Self>,
2038 envelope: TypedEnvelope<proto::RunGitHook>,
2039 mut cx: AsyncApp,
2040 ) -> Result<proto::Ack> {
2041 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2042 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2043 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2044 repository_handle
2045 .update(&mut cx, |repository_handle, cx| {
2046 repository_handle.run_hook(hook, cx)
2047 })?
2048 .await??;
2049 Ok(proto::Ack {})
2050 }
2051
2052 async fn handle_commit(
2053 this: Entity<Self>,
2054 envelope: TypedEnvelope<proto::Commit>,
2055 mut cx: AsyncApp,
2056 ) -> Result<proto::Ack> {
2057 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2058 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2059 let askpass_id = envelope.payload.askpass_id;
2060
2061 let askpass = make_remote_delegate(
2062 this,
2063 envelope.payload.project_id,
2064 repository_id,
2065 askpass_id,
2066 &mut cx,
2067 );
2068
2069 let message = SharedString::from(envelope.payload.message);
2070 let name = envelope.payload.name.map(SharedString::from);
2071 let email = envelope.payload.email.map(SharedString::from);
2072 let options = envelope.payload.options.unwrap_or_default();
2073
2074 repository_handle
2075 .update(&mut cx, |repository_handle, cx| {
2076 repository_handle.commit(
2077 message,
2078 name.zip(email),
2079 CommitOptions {
2080 amend: options.amend,
2081 signoff: options.signoff,
2082 },
2083 askpass,
2084 cx,
2085 )
2086 })?
2087 .await??;
2088 Ok(proto::Ack {})
2089 }
2090
2091 async fn handle_get_remotes(
2092 this: Entity<Self>,
2093 envelope: TypedEnvelope<proto::GetRemotes>,
2094 mut cx: AsyncApp,
2095 ) -> Result<proto::GetRemotesResponse> {
2096 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2097 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2098
2099 let branch_name = envelope.payload.branch_name;
2100 let is_push = envelope.payload.is_push;
2101
2102 let remotes = repository_handle
2103 .update(&mut cx, |repository_handle, _| {
2104 repository_handle.get_remotes(branch_name, is_push)
2105 })?
2106 .await??;
2107
2108 Ok(proto::GetRemotesResponse {
2109 remotes: remotes
2110 .into_iter()
2111                .map(|remote| proto::get_remotes_response::Remote {
2112                    name: remote.name.to_string(),
2113 })
2114 .collect::<Vec<_>>(),
2115 })
2116 }
2117
2118 async fn handle_get_worktrees(
2119 this: Entity<Self>,
2120 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2121 mut cx: AsyncApp,
2122 ) -> Result<proto::GitWorktreesResponse> {
2123 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2124 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2125
2126 let worktrees = repository_handle
2127 .update(&mut cx, |repository_handle, _| {
2128 repository_handle.worktrees()
2129 })?
2130 .await??;
2131
2132 Ok(proto::GitWorktreesResponse {
2133 worktrees: worktrees
2134 .into_iter()
2135 .map(|worktree| worktree_to_proto(&worktree))
2136 .collect::<Vec<_>>(),
2137 })
2138 }
2139
2140 async fn handle_create_worktree(
2141 this: Entity<Self>,
2142 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2143 mut cx: AsyncApp,
2144 ) -> Result<proto::Ack> {
2145 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2146 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2147 let directory = PathBuf::from(envelope.payload.directory);
2148 let name = envelope.payload.name;
2149 let commit = envelope.payload.commit;
2150
2151 repository_handle
2152 .update(&mut cx, |repository_handle, _| {
2153 repository_handle.create_worktree(name, directory, commit)
2154 })?
2155 .await??;
2156
2157 Ok(proto::Ack {})
2158 }
2159
2160 async fn handle_get_branches(
2161 this: Entity<Self>,
2162 envelope: TypedEnvelope<proto::GitGetBranches>,
2163 mut cx: AsyncApp,
2164 ) -> Result<proto::GitBranchesResponse> {
2165 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2166 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2167
2168 let branches = repository_handle
2169 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2170 .await??;
2171
2172 Ok(proto::GitBranchesResponse {
2173 branches: branches
2174 .into_iter()
2175 .map(|branch| branch_to_proto(&branch))
2176 .collect::<Vec<_>>(),
2177 })
2178    }

2179 async fn handle_get_default_branch(
2180 this: Entity<Self>,
2181 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2182 mut cx: AsyncApp,
2183 ) -> Result<proto::GetDefaultBranchResponse> {
2184 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2185 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2186
2187 let branch = repository_handle
2188 .update(&mut cx, |repository_handle, _| {
2189 repository_handle.default_branch()
2190 })?
2191 .await??
2192 .map(Into::into);
2193
2194 Ok(proto::GetDefaultBranchResponse { branch })
2195    }

2196 async fn handle_create_branch(
2197 this: Entity<Self>,
2198 envelope: TypedEnvelope<proto::GitCreateBranch>,
2199 mut cx: AsyncApp,
2200 ) -> Result<proto::Ack> {
2201 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2202 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2203 let branch_name = envelope.payload.branch_name;
2204
2205 repository_handle
2206 .update(&mut cx, |repository_handle, _| {
2207 repository_handle.create_branch(branch_name, None)
2208 })?
2209 .await??;
2210
2211 Ok(proto::Ack {})
2212 }
2213
2214 async fn handle_change_branch(
2215 this: Entity<Self>,
2216 envelope: TypedEnvelope<proto::GitChangeBranch>,
2217 mut cx: AsyncApp,
2218 ) -> Result<proto::Ack> {
2219 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2220 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2221 let branch_name = envelope.payload.branch_name;
2222
2223 repository_handle
2224 .update(&mut cx, |repository_handle, _| {
2225 repository_handle.change_branch(branch_name)
2226 })?
2227 .await??;
2228
2229 Ok(proto::Ack {})
2230 }
2231
2232 async fn handle_rename_branch(
2233 this: Entity<Self>,
2234 envelope: TypedEnvelope<proto::GitRenameBranch>,
2235 mut cx: AsyncApp,
2236 ) -> Result<proto::Ack> {
2237 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2238 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2239 let branch = envelope.payload.branch;
2240 let new_name = envelope.payload.new_name;
2241
2242 repository_handle
2243 .update(&mut cx, |repository_handle, _| {
2244 repository_handle.rename_branch(branch, new_name)
2245 })?
2246 .await??;
2247
2248 Ok(proto::Ack {})
2249 }
2250
2251 async fn handle_delete_branch(
2252 this: Entity<Self>,
2253 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2254 mut cx: AsyncApp,
2255 ) -> Result<proto::Ack> {
2256 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2257 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2258 let branch_name = envelope.payload.branch_name;
2259
2260 repository_handle
2261 .update(&mut cx, |repository_handle, _| {
2262 repository_handle.delete_branch(branch_name)
2263 })?
2264 .await??;
2265
2266 Ok(proto::Ack {})
2267 }
2268
2269 async fn handle_show(
2270 this: Entity<Self>,
2271 envelope: TypedEnvelope<proto::GitShow>,
2272 mut cx: AsyncApp,
2273 ) -> Result<proto::GitCommitDetails> {
2274 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2275 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2276
2277 let commit = repository_handle
2278 .update(&mut cx, |repository_handle, _| {
2279 repository_handle.show(envelope.payload.commit)
2280 })?
2281 .await??;
2282 Ok(proto::GitCommitDetails {
2283 sha: commit.sha.into(),
2284 message: commit.message.into(),
2285 commit_timestamp: commit.commit_timestamp,
2286 author_email: commit.author_email.into(),
2287 author_name: commit.author_name.into(),
2288 })
2289 }
2290
2291 async fn handle_load_commit_diff(
2292 this: Entity<Self>,
2293 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2294 mut cx: AsyncApp,
2295 ) -> Result<proto::LoadCommitDiffResponse> {
2296 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2297 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2298
2299 let commit_diff = repository_handle
2300 .update(&mut cx, |repository_handle, _| {
2301 repository_handle.load_commit_diff(envelope.payload.commit)
2302 })?
2303 .await??;
2304 Ok(proto::LoadCommitDiffResponse {
2305 files: commit_diff
2306 .files
2307 .into_iter()
2308 .map(|file| proto::CommitFile {
2309 path: file.path.to_proto(),
2310 old_text: file.old_text,
2311 new_text: file.new_text,
2312 })
2313 .collect(),
2314 })
2315 }
2316
2317 async fn handle_reset(
2318 this: Entity<Self>,
2319 envelope: TypedEnvelope<proto::GitReset>,
2320 mut cx: AsyncApp,
2321 ) -> Result<proto::Ack> {
2322 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2323 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2324
2325 let mode = match envelope.payload.mode() {
2326 git_reset::ResetMode::Soft => ResetMode::Soft,
2327 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2328 };
2329
2330 repository_handle
2331 .update(&mut cx, |repository_handle, cx| {
2332 repository_handle.reset(envelope.payload.commit, mode, cx)
2333 })?
2334 .await??;
2335 Ok(proto::Ack {})
2336 }
2337
2338 async fn handle_checkout_files(
2339 this: Entity<Self>,
2340 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2341 mut cx: AsyncApp,
2342 ) -> Result<proto::Ack> {
2343 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2344 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2345 let paths = envelope
2346 .payload
2347 .paths
2348 .iter()
2349 .map(|s| RepoPath::from_proto(s))
2350 .collect::<Result<Vec<_>>>()?;
2351
2352 repository_handle
2353 .update(&mut cx, |repository_handle, cx| {
2354 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2355 })?
2356 .await?;
2357 Ok(proto::Ack {})
2358 }
2359
2360 async fn handle_open_commit_message_buffer(
2361 this: Entity<Self>,
2362 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2363 mut cx: AsyncApp,
2364 ) -> Result<proto::OpenBufferResponse> {
2365 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2366 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2367 let buffer = repository
2368 .update(&mut cx, |repository, cx| {
2369 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2370 })?
2371 .await?;
2372
2373 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2374 this.update(&mut cx, |this, cx| {
2375 this.buffer_store.update(cx, |buffer_store, cx| {
2376 buffer_store
2377 .create_buffer_for_peer(
2378 &buffer,
2379 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2380 cx,
2381 )
2382 .detach_and_log_err(cx);
2383 })
2384 })?;
2385
2386 Ok(proto::OpenBufferResponse {
2387 buffer_id: buffer_id.to_proto(),
2388 })
2389 }
2390
2391 async fn handle_askpass(
2392 this: Entity<Self>,
2393 envelope: TypedEnvelope<proto::AskPassRequest>,
2394 mut cx: AsyncApp,
2395 ) -> Result<proto::AskPassResponse> {
2396 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2397 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2398
2399 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2400 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2401 debug_panic!("no askpass found");
2402 anyhow::bail!("no askpass found");
2403 };
2404
2405 let response = askpass
2406 .ask_password(envelope.payload.prompt)
2407 .await
2408 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2409
2410 delegates
2411 .lock()
2412 .insert(envelope.payload.askpass_id, askpass);
2413
2414        // Note: the askpass response is decrypted here and forwarded to the requesting peer as plain text.
2415 Ok(proto::AskPassResponse {
2416 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2417 })
2418 }
2419
2420 async fn handle_check_for_pushed_commits(
2421 this: Entity<Self>,
2422 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2423 mut cx: AsyncApp,
2424 ) -> Result<proto::CheckForPushedCommitsResponse> {
2425 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2426 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2427
2428 let branches = repository_handle
2429 .update(&mut cx, |repository_handle, _| {
2430 repository_handle.check_for_pushed_commits()
2431 })?
2432 .await??;
2433 Ok(proto::CheckForPushedCommitsResponse {
2434 pushed_to: branches
2435 .into_iter()
2436 .map(|commit| commit.to_string())
2437 .collect(),
2438 })
2439 }
2440
2441 async fn handle_git_diff(
2442 this: Entity<Self>,
2443 envelope: TypedEnvelope<proto::GitDiff>,
2444 mut cx: AsyncApp,
2445 ) -> Result<proto::GitDiffResponse> {
2446 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2447 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2448 let diff_type = match envelope.payload.diff_type() {
2449 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2450 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2451 };
2452
2453 let mut diff = repository_handle
2454 .update(&mut cx, |repository_handle, cx| {
2455 repository_handle.diff(diff_type, cx)
2456 })?
2457 .await??;
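        // Cap the diff sent back over RPC at roughly one megabyte of characters to keep the response bounded.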
2458 const ONE_MB: usize = 1_000_000;
2459 if diff.len() > ONE_MB {
2460            diff = diff.chars().take(ONE_MB).collect();
2461 }
2462
2463 Ok(proto::GitDiffResponse { diff })
2464 }
2465
2466 async fn handle_tree_diff(
2467 this: Entity<Self>,
2468 request: TypedEnvelope<proto::GetTreeDiff>,
2469 mut cx: AsyncApp,
2470 ) -> Result<proto::GetTreeDiffResponse> {
2471 let repository_id = RepositoryId(request.payload.repository_id);
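        // When `is_merge` is set, diff against the merge base of `base` and `head`; otherwise diff `base` directly against `head`.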
2472 let diff_type = if request.payload.is_merge {
2473 DiffTreeType::MergeBase {
2474 base: request.payload.base.into(),
2475 head: request.payload.head.into(),
2476 }
2477 } else {
2478 DiffTreeType::Since {
2479 base: request.payload.base.into(),
2480 head: request.payload.head.into(),
2481 }
2482 };
2483
2484 let diff = this
2485 .update(&mut cx, |this, cx| {
2486 let repository = this.repositories().get(&repository_id)?;
2487 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2488 })?
2489 .context("missing repository")?
2490 .await??;
2491
2492 Ok(proto::GetTreeDiffResponse {
2493 entries: diff
2494 .entries
2495 .into_iter()
2496 .map(|(path, status)| proto::TreeDiffStatus {
2497 path: path.as_ref().to_proto(),
2498 status: match status {
2499 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2500 TreeDiffStatus::Modified { .. } => {
2501 proto::tree_diff_status::Status::Modified.into()
2502 }
2503 TreeDiffStatus::Deleted { .. } => {
2504 proto::tree_diff_status::Status::Deleted.into()
2505 }
2506 },
2507 oid: match status {
2508 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2509 Some(old.to_string())
2510 }
2511 TreeDiffStatus::Added => None,
2512 },
2513 })
2514 .collect(),
2515 })
2516 }
2517
2518 async fn handle_get_blob_content(
2519 this: Entity<Self>,
2520 request: TypedEnvelope<proto::GetBlobContent>,
2521 mut cx: AsyncApp,
2522 ) -> Result<proto::GetBlobContentResponse> {
2523 let oid = git::Oid::from_str(&request.payload.oid)?;
2524 let repository_id = RepositoryId(request.payload.repository_id);
2525 let content = this
2526 .update(&mut cx, |this, cx| {
2527 let repository = this.repositories().get(&repository_id)?;
2528 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2529 })?
2530 .context("missing repository")?
2531 .await?;
2532 Ok(proto::GetBlobContentResponse { content })
2533 }
2534
2535 async fn handle_open_unstaged_diff(
2536 this: Entity<Self>,
2537 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2538 mut cx: AsyncApp,
2539 ) -> Result<proto::OpenUnstagedDiffResponse> {
2540 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2541 let diff = this
2542 .update(&mut cx, |this, cx| {
2543 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2544 Some(this.open_unstaged_diff(buffer, cx))
2545 })?
2546 .context("missing buffer")?
2547 .await?;
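        // Record the diff as shared with the requesting peer so the store keeps a strong handle to it.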
2548 this.update(&mut cx, |this, _| {
2549 let shared_diffs = this
2550 .shared_diffs
2551 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2552 .or_default();
2553 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2554 })?;
2555 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2556 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2557 }
2558
2559 async fn handle_open_uncommitted_diff(
2560 this: Entity<Self>,
2561 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2562 mut cx: AsyncApp,
2563 ) -> Result<proto::OpenUncommittedDiffResponse> {
2564 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2565 let diff = this
2566 .update(&mut cx, |this, cx| {
2567 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2568 Some(this.open_uncommitted_diff(buffer, cx))
2569 })?
2570 .context("missing buffer")?
2571 .await?;
2572 this.update(&mut cx, |this, _| {
2573 let shared_diffs = this
2574 .shared_diffs
2575 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2576 .or_default();
2577 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2578 })?;
2579 diff.read_with(&cx, |diff, cx| {
2580 use proto::open_uncommitted_diff_response::Mode;
2581
2582 let unstaged_diff = diff.secondary_diff();
2583 let index_snapshot = unstaged_diff.and_then(|diff| {
2584 let diff = diff.read(cx);
2585 diff.base_text_exists().then(|| diff.base_text())
2586 });
2587
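            // Decide which base texts to send back: when the index matches HEAD the committed text is sent once,
            // otherwise the staged and committed texts are reported separately.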
2588 let mode;
2589 let staged_text;
2590 let committed_text;
2591 if diff.base_text_exists() {
2592 let committed_snapshot = diff.base_text();
2593 committed_text = Some(committed_snapshot.text());
2594 if let Some(index_text) = index_snapshot {
2595 if index_text.remote_id() == committed_snapshot.remote_id() {
2596 mode = Mode::IndexMatchesHead;
2597 staged_text = None;
2598 } else {
2599 mode = Mode::IndexAndHead;
2600 staged_text = Some(index_text.text());
2601 }
2602 } else {
2603 mode = Mode::IndexAndHead;
2604 staged_text = None;
2605 }
2606 } else {
2607 mode = Mode::IndexAndHead;
2608 committed_text = None;
2609 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2610 }
2611
2612 proto::OpenUncommittedDiffResponse {
2613 committed_text,
2614 staged_text,
2615 mode: mode.into(),
2616 }
2617 })
2618 }
2619
2620 async fn handle_update_diff_bases(
2621 this: Entity<Self>,
2622 request: TypedEnvelope<proto::UpdateDiffBases>,
2623 mut cx: AsyncApp,
2624 ) -> Result<()> {
2625 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2626 this.update(&mut cx, |this, cx| {
2627 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2628 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2629 {
2630 let buffer = buffer.read(cx).text_snapshot();
2631 diff_state.update(cx, |diff_state, cx| {
2632 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2633 })
2634 }
2635 })
2636 }
2637
2638 async fn handle_blame_buffer(
2639 this: Entity<Self>,
2640 envelope: TypedEnvelope<proto::BlameBuffer>,
2641 mut cx: AsyncApp,
2642 ) -> Result<proto::BlameBufferResponse> {
2643 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2644 let version = deserialize_version(&envelope.payload.version);
2645 let buffer = this.read_with(&cx, |this, cx| {
2646 this.buffer_store.read(cx).get_existing(buffer_id)
2647 })??;
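        // Wait for the buffer to catch up to the requested version before computing the blame.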
2648 buffer
2649 .update(&mut cx, |buffer, _| {
2650 buffer.wait_for_version(version.clone())
2651 })?
2652 .await?;
2653 let blame = this
2654 .update(&mut cx, |this, cx| {
2655 this.blame_buffer(&buffer, Some(version), cx)
2656 })?
2657 .await?;
2658 Ok(serialize_blame_buffer_response(blame))
2659 }
2660
2661 async fn handle_get_permalink_to_line(
2662 this: Entity<Self>,
2663 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2664 mut cx: AsyncApp,
2665 ) -> Result<proto::GetPermalinkToLineResponse> {
2666 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2667 // let version = deserialize_version(&envelope.payload.version);
2668 let selection = {
2669 let proto_selection = envelope
2670 .payload
2671 .selection
2672 .context("no selection to get permalink for defined")?;
2673 proto_selection.start as u32..proto_selection.end as u32
2674 };
2675 let buffer = this.read_with(&cx, |this, cx| {
2676 this.buffer_store.read(cx).get_existing(buffer_id)
2677 })??;
2678 let permalink = this
2679 .update(&mut cx, |this, cx| {
2680 this.get_permalink_to_line(&buffer, selection, cx)
2681 })?
2682 .await?;
2683 Ok(proto::GetPermalinkToLineResponse {
2684 permalink: permalink.to_string(),
2685 })
2686 }
2687
2688 fn repository_for_request(
2689 this: &Entity<Self>,
2690 id: RepositoryId,
2691 cx: &mut AsyncApp,
2692 ) -> Result<Entity<Repository>> {
2693 this.read_with(cx, |this, _| {
2694 this.repositories
2695 .get(&id)
2696 .context("missing repository handle")
2697 .cloned()
2698 })?
2699 }
2700
2701 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2702 self.repositories
2703 .iter()
2704 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2705 .collect()
2706 }
2707
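    /// Groups the updated worktree entries by the repository whose work directory contains them,
    /// returning repository-relative paths; each path is assigned to its innermost repository.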
2708 fn process_updated_entries(
2709 &self,
2710 worktree: &Entity<Worktree>,
2711 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2712 cx: &mut App,
2713 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2714 let path_style = worktree.read(cx).path_style();
2715 let mut repo_paths = self
2716 .repositories
2717 .values()
2718 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2719 .collect::<Vec<_>>();
2720 let mut entries: Vec<_> = updated_entries
2721 .iter()
2722 .map(|(path, _, _)| path.clone())
2723 .collect();
2724 entries.sort();
2725 let worktree = worktree.read(cx);
2726
2727 let entries = entries
2728 .into_iter()
2729 .map(|path| worktree.absolutize(&path))
2730 .collect::<Arc<[_]>>();
2731
2732 let executor = cx.background_executor().clone();
2733 cx.background_executor().spawn(async move {
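            // Sort repositories by work directory path, then binary-search (partition_point) the sorted
            // absolute paths to find the contiguous range of entries that fall under each repository.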
2734 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2735 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2736 let mut tasks = FuturesOrdered::new();
2737 for (repo_path, repo) in repo_paths.into_iter().rev() {
2738 let entries = entries.clone();
2739 let task = executor.spawn(async move {
2740 // Find all repository paths that belong to this repo
2741 let mut ix = entries.partition_point(|path| path < &*repo_path);
2742 if ix == entries.len() {
2743 return None;
2744 };
2745
2746 let mut paths = Vec::new();
2747                    // All paths under a given repository's work directory form a contiguous range in the sorted entry list.
2748 while let Some(path) = entries.get(ix)
2749 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2750 &repo_path, path, path_style,
2751 )
2752 {
2753 paths.push((repo_path, ix));
2754 ix += 1;
2755 }
2756 if paths.is_empty() {
2757 None
2758 } else {
2759 Some((repo, paths))
2760 }
2761 });
2762 tasks.push_back(task);
2763 }
2764
2765 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2766 let mut path_was_used = vec![false; entries.len()];
2767 let tasks = tasks.collect::<Vec<_>>().await;
2768            // Repositories were iterated in reverse order (deepest work directories first), so more-specific
2769            // repositories claim their paths first; each path is assigned to its innermost repository.
2770 for t in tasks {
2771 let Some((repo, paths)) = t else {
2772 continue;
2773 };
2774 let entry = paths_by_git_repo.entry(repo).or_default();
2775 for (repo_path, ix) in paths {
2776 if path_was_used[ix] {
2777 continue;
2778 }
2779 path_was_used[ix] = true;
2780 entry.push(repo_path);
2781 }
2782 }
2783
2784 paths_by_git_repo
2785 })
2786 }
2787}
2788
2789impl BufferGitState {
2790 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2791 Self {
2792 unstaged_diff: Default::default(),
2793 uncommitted_diff: Default::default(),
2794 recalculate_diff_task: Default::default(),
2795 language: Default::default(),
2796 language_registry: Default::default(),
2797 recalculating_tx: postage::watch::channel_with(false).0,
2798 hunk_staging_operation_count: 0,
2799 hunk_staging_operation_count_as_of_write: 0,
2800 head_text: Default::default(),
2801 index_text: Default::default(),
2802 head_changed: Default::default(),
2803 index_changed: Default::default(),
2804 language_changed: Default::default(),
2805 conflict_updated_futures: Default::default(),
2806 conflict_set: Default::default(),
2807 reparse_conflict_markers_task: Default::default(),
2808 }
2809 }
2810
2811 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2812 self.language = buffer.read(cx).language().cloned();
2813 self.language_changed = true;
2814 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2815 }
2816
2817 fn reparse_conflict_markers(
2818 &mut self,
2819 buffer: text::BufferSnapshot,
2820 cx: &mut Context<Self>,
2821 ) -> oneshot::Receiver<()> {
2822 let (tx, rx) = oneshot::channel();
2823
2824 let Some(conflict_set) = self
2825 .conflict_set
2826 .as_ref()
2827 .and_then(|conflict_set| conflict_set.upgrade())
2828 else {
2829 return rx;
2830 };
2831
2832 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2833 if conflict_set.has_conflict {
2834 Some(conflict_set.snapshot())
2835 } else {
2836 None
2837 }
2838 });
2839
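        // Only reparse when the conflict set previously contained conflicts; otherwise the sender is
        // dropped and the returned receiver resolves as canceled.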
2840 if let Some(old_snapshot) = old_snapshot {
2841 self.conflict_updated_futures.push(tx);
2842 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2843 let (snapshot, changed_range) = cx
2844 .background_spawn(async move {
2845 let new_snapshot = ConflictSet::parse(&buffer);
2846 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2847 (new_snapshot, changed_range)
2848 })
2849 .await;
2850 this.update(cx, |this, cx| {
2851 if let Some(conflict_set) = &this.conflict_set {
2852 conflict_set
2853 .update(cx, |conflict_set, cx| {
2854 conflict_set.set_snapshot(snapshot, changed_range, cx);
2855 })
2856 .ok();
2857 }
2858 let futures = std::mem::take(&mut this.conflict_updated_futures);
2859 for tx in futures {
2860 tx.send(()).ok();
2861 }
2862 })
2863 }))
2864 }
2865
2866 rx
2867 }
2868
2869 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2870 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2871 }
2872
2873 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2874 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2875 }
2876
2877 fn handle_base_texts_updated(
2878 &mut self,
2879 buffer: text::BufferSnapshot,
2880 message: proto::UpdateDiffBases,
2881 cx: &mut Context<Self>,
2882 ) {
2883 use proto::update_diff_bases::Mode;
2884
2885 let Some(mode) = Mode::from_i32(message.mode) else {
2886 return;
2887 };
2888
2889 let diff_bases_change = match mode {
2890 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2891 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2892 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2893 Mode::IndexAndHead => DiffBasesChange::SetEach {
2894 index: message.staged_text,
2895 head: message.committed_text,
2896 },
2897 };
2898
2899 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2900 }
2901
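    /// Returns a future that resolves once the in-progress diff recalculation for this buffer
    /// completes, or `None` if no recalculation is currently running.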
2902 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2903 if *self.recalculating_tx.borrow() {
2904 let mut rx = self.recalculating_tx.subscribe();
2905 Some(async move {
2906 loop {
2907 let is_recalculating = rx.recv().await;
2908 if is_recalculating != Some(true) {
2909 break;
2910 }
2911 }
2912 })
2913 } else {
2914 None
2915 }
2916 }
2917
2918 fn diff_bases_changed(
2919 &mut self,
2920 buffer: text::BufferSnapshot,
2921 diff_bases_change: Option<DiffBasesChange>,
2922 cx: &mut Context<Self>,
2923 ) {
2924 match diff_bases_change {
2925 Some(DiffBasesChange::SetIndex(index)) => {
2926 self.index_text = index.map(|mut index| {
2927 text::LineEnding::normalize(&mut index);
2928 Arc::new(index)
2929 });
2930 self.index_changed = true;
2931 }
2932 Some(DiffBasesChange::SetHead(head)) => {
2933 self.head_text = head.map(|mut head| {
2934 text::LineEnding::normalize(&mut head);
2935 Arc::new(head)
2936 });
2937 self.head_changed = true;
2938 }
2939 Some(DiffBasesChange::SetBoth(text)) => {
2940 let text = text.map(|mut text| {
2941 text::LineEnding::normalize(&mut text);
2942 Arc::new(text)
2943 });
2944 self.head_text = text.clone();
2945 self.index_text = text;
2946 self.head_changed = true;
2947 self.index_changed = true;
2948 }
2949 Some(DiffBasesChange::SetEach { index, head }) => {
2950 self.index_text = index.map(|mut index| {
2951 text::LineEnding::normalize(&mut index);
2952 Arc::new(index)
2953 });
2954 self.index_changed = true;
2955 self.head_text = head.map(|mut head| {
2956 text::LineEnding::normalize(&mut head);
2957 Arc::new(head)
2958 });
2959 self.head_changed = true;
2960 }
2961 None => {}
2962 }
2963
2964 self.recalculate_diffs(buffer, cx)
2965 }
2966
2967 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
2968 *self.recalculating_tx.borrow_mut() = true;
2969
2970 let language = self.language.clone();
2971 let language_registry = self.language_registry.clone();
2972 let unstaged_diff = self.unstaged_diff();
2973 let uncommitted_diff = self.uncommitted_diff();
2974 let head = self.head_text.clone();
2975 let index = self.index_text.clone();
2976 let index_changed = self.index_changed;
2977 let head_changed = self.head_changed;
2978 let language_changed = self.language_changed;
2979 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
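        // `DiffBasesChange::SetBoth` stores the same `Arc` for the index and HEAD texts, so pointer
        // equality tells us whether the unstaged diff can be reused as the uncommitted diff.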
2980 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
2981 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
2982 (None, None) => true,
2983 _ => false,
2984 };
2985 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
2986 log::debug!(
2987 "start recalculating diffs for buffer {}",
2988 buffer.remote_id()
2989 );
2990
2991 let mut new_unstaged_diff = None;
2992 if let Some(unstaged_diff) = &unstaged_diff {
2993 new_unstaged_diff = Some(
2994 BufferDiff::update_diff(
2995 unstaged_diff.clone(),
2996 buffer.clone(),
2997 index,
2998 index_changed,
2999 language_changed,
3000 language.clone(),
3001 language_registry.clone(),
3002 cx,
3003 )
3004 .await?,
3005 );
3006 }
3007
3008 // Dropping BufferDiff can be expensive, so yield back to the event loop
3009 // for a bit
3010 yield_now().await;
3011
3012 let mut new_uncommitted_diff = None;
3013 if let Some(uncommitted_diff) = &uncommitted_diff {
3014 new_uncommitted_diff = if index_matches_head {
3015 new_unstaged_diff.clone()
3016 } else {
3017 Some(
3018 BufferDiff::update_diff(
3019 uncommitted_diff.clone(),
3020 buffer.clone(),
3021 head,
3022 head_changed,
3023 language_changed,
3024 language.clone(),
3025 language_registry.clone(),
3026 cx,
3027 )
3028 .await?,
3029 )
3030 }
3031 }
3032
3033 // Dropping BufferDiff can be expensive, so yield back to the event loop
3034 // for a bit
3035 yield_now().await;
3036
3037 let cancel = this.update(cx, |this, _| {
3038 // This checks whether all pending stage/unstage operations
3039 // have quiesced (i.e. both the corresponding write and the
3040 // read of that write have completed). If not, then we cancel
3041 // this recalculation attempt to avoid invalidating pending
3042 // state too quickly; another recalculation will come along
3043 // later and clear the pending state once the state of the index has settled.
3044 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3045 *this.recalculating_tx.borrow_mut() = false;
3046 true
3047 } else {
3048 false
3049 }
3050 })?;
3051 if cancel {
3052 log::debug!(
3053 concat!(
3054                        "aborting recalculating diffs for buffer {} ",
3055                        "due to subsequent hunk staging operations",
3056 ),
3057 buffer.remote_id()
3058 );
3059 return Ok(());
3060 }
3061
3062 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3063 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3064 {
3065 unstaged_diff.update(cx, |diff, cx| {
3066 if language_changed {
3067 diff.language_changed(cx);
3068 }
3069 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3070 })?
3071 } else {
3072 None
3073 };
3074
3075 yield_now().await;
3076
3077 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3078 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3079 {
3080 uncommitted_diff.update(cx, |diff, cx| {
3081 if language_changed {
3082 diff.language_changed(cx);
3083 }
3084 diff.set_snapshot_with_secondary(
3085 new_uncommitted_diff,
3086 &buffer,
3087 unstaged_changed_range,
3088 true,
3089 cx,
3090 );
3091 })?;
3092 }
3093
3094 log::debug!(
3095 "finished recalculating diffs for buffer {}",
3096 buffer.remote_id()
3097 );
3098
3099 if let Some(this) = this.upgrade() {
3100 this.update(cx, |this, _| {
3101 this.index_changed = false;
3102 this.head_changed = false;
3103 this.language_changed = false;
3104 *this.recalculating_tx.borrow_mut() = false;
3105 })?;
3106 }
3107
3108 Ok(())
3109 }));
3110 }
3111}
3112
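/// Builds an askpass delegate that forwards credential prompts to the downstream client over RPC
/// and zeroizes the response buffer after handing the password off.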
3113fn make_remote_delegate(
3114 this: Entity<GitStore>,
3115 project_id: u64,
3116 repository_id: RepositoryId,
3117 askpass_id: u64,
3118 cx: &mut AsyncApp,
3119) -> AskPassDelegate {
3120 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3121 this.update(cx, |this, cx| {
3122 let Some((client, _)) = this.downstream_client() else {
3123 return;
3124 };
3125 let response = client.request(proto::AskPassRequest {
3126 project_id,
3127 repository_id: repository_id.to_proto(),
3128 askpass_id,
3129 prompt,
3130 });
3131 cx.spawn(async move |_, _| {
3132 let mut response = response.await?.response;
3133 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3134 .ok();
3135 response.zeroize();
3136 anyhow::Ok(())
3137 })
3138 .detach_and_log_err(cx);
3139 })
3140 .log_err();
3141 })
3142}
3143
3144impl RepositoryId {
3145 pub fn to_proto(self) -> u64 {
3146 self.0
3147 }
3148
3149 pub fn from_proto(id: u64) -> Self {
3150 RepositoryId(id)
3151 }
3152}
3153
3154impl RepositorySnapshot {
3155 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3156 Self {
3157 id,
3158 statuses_by_path: Default::default(),
3159 work_directory_abs_path,
3160 branch: None,
3161 head_commit: None,
3162 scan_id: 0,
3163 merge: Default::default(),
3164 remote_origin_url: None,
3165 remote_upstream_url: None,
3166 stash_entries: Default::default(),
3167 path_style,
3168 }
3169 }
3170
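    /// Builds the initial `UpdateRepository` message, containing every status entry and no removals.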
3171 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3172 proto::UpdateRepository {
3173 branch_summary: self.branch.as_ref().map(branch_to_proto),
3174 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3175 updated_statuses: self
3176 .statuses_by_path
3177 .iter()
3178 .map(|entry| entry.to_proto())
3179 .collect(),
3180 removed_statuses: Default::default(),
3181 current_merge_conflicts: self
3182 .merge
3183 .conflicted_paths
3184 .iter()
3185 .map(|repo_path| repo_path.to_proto())
3186 .collect(),
3187 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3188 project_id,
3189 id: self.id.to_proto(),
3190 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3191 entry_ids: vec![self.id.to_proto()],
3192 scan_id: self.scan_id,
3193 is_last_update: true,
3194 stash_entries: self
3195 .stash_entries
3196 .entries
3197 .iter()
3198 .map(stash_to_proto)
3199 .collect(),
3200 }
3201 }
3202
3203 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3204 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3205 let mut removed_statuses: Vec<String> = Vec::new();
3206
3207 let mut new_statuses = self.statuses_by_path.iter().peekable();
3208 let mut old_statuses = old.statuses_by_path.iter().peekable();
3209
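        // Walk both status lists (ordered by repo path) in lock step to compute which entries were
        // added or changed and which were removed relative to `old`.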
3210 let mut current_new_entry = new_statuses.next();
3211 let mut current_old_entry = old_statuses.next();
3212 loop {
3213 match (current_new_entry, current_old_entry) {
3214 (Some(new_entry), Some(old_entry)) => {
3215 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3216 Ordering::Less => {
3217 updated_statuses.push(new_entry.to_proto());
3218 current_new_entry = new_statuses.next();
3219 }
3220 Ordering::Equal => {
3221 if new_entry.status != old_entry.status {
3222 updated_statuses.push(new_entry.to_proto());
3223 }
3224 current_old_entry = old_statuses.next();
3225 current_new_entry = new_statuses.next();
3226 }
3227 Ordering::Greater => {
3228 removed_statuses.push(old_entry.repo_path.to_proto());
3229 current_old_entry = old_statuses.next();
3230 }
3231 }
3232 }
3233 (None, Some(old_entry)) => {
3234 removed_statuses.push(old_entry.repo_path.to_proto());
3235 current_old_entry = old_statuses.next();
3236 }
3237 (Some(new_entry), None) => {
3238 updated_statuses.push(new_entry.to_proto());
3239 current_new_entry = new_statuses.next();
3240 }
3241 (None, None) => break,
3242 }
3243 }
3244
3245 proto::UpdateRepository {
3246 branch_summary: self.branch.as_ref().map(branch_to_proto),
3247 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3248 updated_statuses,
3249 removed_statuses,
3250 current_merge_conflicts: self
3251 .merge
3252 .conflicted_paths
3253 .iter()
3254 .map(|path| path.to_proto())
3255 .collect(),
3256 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3257 project_id,
3258 id: self.id.to_proto(),
3259 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3260 entry_ids: vec![],
3261 scan_id: self.scan_id,
3262 is_last_update: true,
3263 stash_entries: self
3264 .stash_entries
3265 .entries
3266 .iter()
3267 .map(stash_to_proto)
3268 .collect(),
3269 }
3270 }
3271
3272 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3273 self.statuses_by_path.iter().cloned()
3274 }
3275
3276 pub fn status_summary(&self) -> GitSummary {
3277 self.statuses_by_path.summary().item_summary
3278 }
3279
3280 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3281 self.statuses_by_path
3282 .get(&PathKey(path.as_ref().clone()), ())
3283 .cloned()
3284 }
3285
3286 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3287 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3288 }
3289
3290 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3291 self.path_style
3292 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3293 .unwrap()
3294 .into()
3295 }
3296
3297 #[inline]
3298 fn abs_path_to_repo_path_inner(
3299 work_directory_abs_path: &Path,
3300 abs_path: &Path,
3301 path_style: PathStyle,
3302 ) -> Option<RepoPath> {
3303 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3304 Some(RepoPath::from_rel_path(&rel_path))
3305 }
3306
3307 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3308 self.merge.conflicted_paths.contains(repo_path)
3309 }
3310
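    /// Whether the path is currently conflicted in the status, or was conflicted when the merge
    /// heads last changed.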
3311 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3312 let had_conflict_on_last_merge_head_change =
3313 self.merge.conflicted_paths.contains(repo_path);
3314 let has_conflict_currently = self
3315 .status_for_path(repo_path)
3316 .is_some_and(|entry| entry.status.is_conflicted());
3317 had_conflict_on_last_merge_head_change || has_conflict_currently
3318 }
3319
3320 /// This is the name that will be displayed in the repository selector for this repository.
3321 pub fn display_name(&self) -> SharedString {
3322 self.work_directory_abs_path
3323 .file_name()
3324 .unwrap_or_default()
3325 .to_string_lossy()
3326 .to_string()
3327 .into()
3328 }
3329}
3330
3331pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3332 proto::StashEntry {
3333 oid: entry.oid.as_bytes().to_vec(),
3334 message: entry.message.clone(),
3335 branch: entry.branch.clone(),
3336 index: entry.index as u64,
3337 timestamp: entry.timestamp,
3338 }
3339}
3340
3341pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3342 Ok(StashEntry {
3343 oid: Oid::from_bytes(&entry.oid)?,
3344 message: entry.message.clone(),
3345 index: entry.index as usize,
3346 branch: entry.branch.clone(),
3347 timestamp: entry.timestamp,
3348 })
3349}
3350
3351impl MergeDetails {
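    /// Recomputes the merge state from the repository, returning the new details along with whether
    /// the recorded merge heads changed relative to `prev_snapshot`.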
3352 async fn load(
3353 backend: &Arc<dyn GitRepository>,
3354 status: &SumTree<StatusEntry>,
3355 prev_snapshot: &RepositorySnapshot,
3356 ) -> Result<(MergeDetails, bool)> {
3357 log::debug!("load merge details");
3358 let message = backend.merge_message().await;
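        // Resolve all in-progress operation heads in one batch; a `Some` entry means the
        // corresponding operation (merge, cherry-pick, rebase, revert, or apply) is in progress.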
3359 let heads = backend
3360 .revparse_batch(vec![
3361 "MERGE_HEAD".into(),
3362 "CHERRY_PICK_HEAD".into(),
3363 "REBASE_HEAD".into(),
3364 "REVERT_HEAD".into(),
3365 "APPLY_HEAD".into(),
3366 ])
3367 .await
3368 .log_err()
3369 .unwrap_or_default()
3370 .into_iter()
3371 .map(|opt| opt.map(SharedString::from))
3372 .collect::<Vec<_>>();
3373 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3374 let conflicted_paths = if merge_heads_changed {
3375 let current_conflicted_paths = TreeSet::from_ordered_entries(
3376 status
3377 .iter()
3378 .filter(|entry| entry.status.is_conflicted())
3379 .map(|entry| entry.repo_path.clone()),
3380 );
3381
3382            // A scan can run while a lengthy merge is in progress that will eventually
3383            // produce conflicts, but before those conflicts are reported by `git status`.
3384            // Since we currently only track the merge heads for the purpose of detecting
3385            // conflicts, defer updating that state until conflicts actually show up in
3386            // the status.
3387 if heads.iter().any(Option::is_some)
3388 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3389 && current_conflicted_paths.is_empty()
3390 {
3391 log::debug!("not updating merge heads because no conflicts found");
3392 return Ok((
3393 MergeDetails {
3394 message: message.map(SharedString::from),
3395 ..prev_snapshot.merge.clone()
3396 },
3397 false,
3398 ));
3399 }
3400
3401 current_conflicted_paths
3402 } else {
3403 prev_snapshot.merge.conflicted_paths.clone()
3404 };
3405 let details = MergeDetails {
3406 conflicted_paths,
3407 message: message.map(SharedString::from),
3408 heads,
3409 };
3410 Ok((details, merge_heads_changed))
3411 }
3412}
3413
3414impl Repository {
3415 pub fn snapshot(&self) -> RepositorySnapshot {
3416 self.snapshot.clone()
3417 }
3418
3419 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3420 self.pending_ops.iter().cloned()
3421 }
3422
3423 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3424 self.pending_ops.summary().clone()
3425 }
3426
3427 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3428 self.pending_ops
3429 .get(&PathKey(path.as_ref().clone()), ())
3430 .cloned()
3431 }
3432
3433 fn local(
3434 id: RepositoryId,
3435 work_directory_abs_path: Arc<Path>,
3436 dot_git_abs_path: Arc<Path>,
3437 project_environment: WeakEntity<ProjectEnvironment>,
3438 fs: Arc<dyn Fs>,
3439 git_store: WeakEntity<GitStore>,
3440 cx: &mut Context<Self>,
3441 ) -> Self {
3442 let snapshot =
3443 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
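        // Initialize the local repository state asynchronously; the shared task lets both the job
        // worker and this handle await the same initialization result.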
3444 let state = cx
3445 .spawn(async move |_, cx| {
3446 LocalRepositoryState::new(
3447 work_directory_abs_path,
3448 dot_git_abs_path,
3449 project_environment,
3450 fs,
3451 cx,
3452 )
3453 .await
3454 .map_err(|err| err.to_string())
3455 })
3456 .shared();
3457 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3458 let state = cx
3459 .spawn(async move |_, _| {
3460 let state = state.await?;
3461 Ok(RepositoryState::Local(state))
3462 })
3463 .shared();
3464
3465 Repository {
3466 this: cx.weak_entity(),
3467 git_store,
3468 snapshot,
3469 pending_ops: Default::default(),
3470 repository_state: state,
3471 commit_message_buffer: None,
3472 askpass_delegates: Default::default(),
3473 paths_needing_status_update: Default::default(),
3474 latest_askpass_id: 0,
3475 job_sender,
3476 job_id: 0,
3477 active_jobs: Default::default(),
3478 }
3479 }
3480
3481 fn remote(
3482 id: RepositoryId,
3483 work_directory_abs_path: Arc<Path>,
3484 path_style: PathStyle,
3485 project_id: ProjectId,
3486 client: AnyProtoClient,
3487 git_store: WeakEntity<GitStore>,
3488 cx: &mut Context<Self>,
3489 ) -> Self {
3490 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3491 let repository_state = RemoteRepositoryState { project_id, client };
3492 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3493 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3494 Self {
3495 this: cx.weak_entity(),
3496 snapshot,
3497 commit_message_buffer: None,
3498 git_store,
3499 pending_ops: Default::default(),
3500 paths_needing_status_update: Default::default(),
3501 job_sender,
3502 repository_state,
3503 askpass_delegates: Default::default(),
3504 latest_askpass_id: 0,
3505 active_jobs: Default::default(),
3506 job_id: 0,
3507 }
3508 }
3509
3510 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3511 self.git_store.upgrade()
3512 }
3513
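    /// Reloads the index and HEAD base texts for buffers in this repository that have diffs open,
    /// forwards the new bases to any downstream client, and triggers diff recalculation.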
3514 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3515 let this = cx.weak_entity();
3516 let git_store = self.git_store.clone();
3517 let _ = self.send_keyed_job(
3518 Some(GitJobKey::ReloadBufferDiffBases),
3519 None,
3520 |state, mut cx| async move {
3521 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3522 log::error!("tried to recompute diffs for a non-local repository");
3523 return Ok(());
3524 };
3525
3526 let Some(this) = this.upgrade() else {
3527 return Ok(());
3528 };
3529
3530 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3531 git_store.update(cx, |git_store, cx| {
3532 git_store
3533 .diffs
3534 .iter()
3535 .filter_map(|(buffer_id, diff_state)| {
3536 let buffer_store = git_store.buffer_store.read(cx);
3537 let buffer = buffer_store.get(*buffer_id)?;
3538 let file = File::from_dyn(buffer.read(cx).file())?;
3539 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3540 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3541 log::debug!(
3542 "start reload diff bases for repo path {}",
3543 repo_path.as_unix_str()
3544 );
3545 diff_state.update(cx, |diff_state, _| {
3546 let has_unstaged_diff = diff_state
3547 .unstaged_diff
3548 .as_ref()
3549 .is_some_and(|diff| diff.is_upgradable());
3550 let has_uncommitted_diff = diff_state
3551 .uncommitted_diff
3552 .as_ref()
3553 .is_some_and(|set| set.is_upgradable());
3554
3555 Some((
3556 buffer,
3557 repo_path,
3558 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3559 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3560 ))
3561 })
3562 })
3563 .collect::<Vec<_>>()
3564 })
3565 })??;
3566
3567 let buffer_diff_base_changes = cx
3568 .background_spawn(async move {
3569 let mut changes = Vec::new();
3570 for (buffer, repo_path, current_index_text, current_head_text) in
3571 &repo_diff_state_updates
3572 {
3573 let index_text = if current_index_text.is_some() {
3574 backend.load_index_text(repo_path.clone()).await
3575 } else {
3576 None
3577 };
3578 let head_text = if current_head_text.is_some() {
3579 backend.load_committed_text(repo_path.clone()).await
3580 } else {
3581 None
3582 };
3583
3584 let change =
3585 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3586 (Some(current_index), Some(current_head)) => {
3587 let index_changed =
3588 index_text.as_ref() != current_index.as_deref();
3589 let head_changed =
3590 head_text.as_ref() != current_head.as_deref();
3591 if index_changed && head_changed {
3592 if index_text == head_text {
3593 Some(DiffBasesChange::SetBoth(head_text))
3594 } else {
3595 Some(DiffBasesChange::SetEach {
3596 index: index_text,
3597 head: head_text,
3598 })
3599 }
3600 } else if index_changed {
3601 Some(DiffBasesChange::SetIndex(index_text))
3602 } else if head_changed {
3603 Some(DiffBasesChange::SetHead(head_text))
3604 } else {
3605 None
3606 }
3607 }
3608 (Some(current_index), None) => {
3609 let index_changed =
3610 index_text.as_ref() != current_index.as_deref();
3611 index_changed
3612 .then_some(DiffBasesChange::SetIndex(index_text))
3613 }
3614 (None, Some(current_head)) => {
3615 let head_changed =
3616 head_text.as_ref() != current_head.as_deref();
3617 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3618 }
3619 (None, None) => None,
3620 };
3621
3622 changes.push((buffer.clone(), change))
3623 }
3624 changes
3625 })
3626 .await;
3627
3628 git_store.update(&mut cx, |git_store, cx| {
3629 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3630 let buffer_snapshot = buffer.read(cx).text_snapshot();
3631 let buffer_id = buffer_snapshot.remote_id();
3632 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3633 continue;
3634 };
3635
3636 let downstream_client = git_store.downstream_client();
3637 diff_state.update(cx, |diff_state, cx| {
3638 use proto::update_diff_bases::Mode;
3639
3640 if let Some((diff_bases_change, (client, project_id))) =
3641 diff_bases_change.clone().zip(downstream_client)
3642 {
3643 let (staged_text, committed_text, mode) = match diff_bases_change {
3644 DiffBasesChange::SetIndex(index) => {
3645 (index, None, Mode::IndexOnly)
3646 }
3647 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3648 DiffBasesChange::SetEach { index, head } => {
3649 (index, head, Mode::IndexAndHead)
3650 }
3651 DiffBasesChange::SetBoth(text) => {
3652 (None, text, Mode::IndexMatchesHead)
3653 }
3654 };
3655 client
3656 .send(proto::UpdateDiffBases {
3657 project_id: project_id.to_proto(),
3658 buffer_id: buffer_id.to_proto(),
3659 staged_text,
3660 committed_text,
3661 mode: mode as i32,
3662 })
3663 .log_err();
3664 }
3665
3666 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3667 });
3668 }
3669 })
3670 },
3671 );
3672 }
3673
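    /// Enqueues a git operation on this repository's worker. If `status` is provided, it is recorded
    /// as an active job while the operation runs; the returned receiver yields the job's result.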
3674 pub fn send_job<F, Fut, R>(
3675 &mut self,
3676 status: Option<SharedString>,
3677 job: F,
3678 ) -> oneshot::Receiver<R>
3679 where
3680 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3681 Fut: Future<Output = R> + 'static,
3682 R: Send + 'static,
3683 {
3684 self.send_keyed_job(None, status, job)
3685 }
3686
3687 fn send_keyed_job<F, Fut, R>(
3688 &mut self,
3689 key: Option<GitJobKey>,
3690 status: Option<SharedString>,
3691 job: F,
3692 ) -> oneshot::Receiver<R>
3693 where
3694 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3695 Fut: Future<Output = R> + 'static,
3696 R: Send + 'static,
3697 {
3698 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3699 let job_id = post_inc(&mut self.job_id);
3700 let this = self.this.clone();
3701 self.job_sender
3702 .unbounded_send(GitJob {
3703 key,
3704 job: Box::new(move |state, cx: &mut AsyncApp| {
3705 let job = job(state, cx.clone());
3706 cx.spawn(async move |cx| {
3707 if let Some(s) = status.clone() {
3708 this.update(cx, |this, cx| {
3709 this.active_jobs.insert(
3710 job_id,
3711 JobInfo {
3712 start: Instant::now(),
3713 message: s.clone(),
3714 },
3715 );
3716
3717 cx.notify();
3718 })
3719 .ok();
3720 }
3721 let result = job.await;
3722
3723 this.update(cx, |this, cx| {
3724 this.active_jobs.remove(&job_id);
3725 cx.notify();
3726 })
3727 .ok();
3728
3729 result_tx.send(result).ok();
3730 })
3731 }),
3732 })
3733 .ok();
3734 result_rx
3735 }
3736
3737 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3738 let Some(git_store) = self.git_store.upgrade() else {
3739 return;
3740 };
3741 let entity = cx.entity();
3742 git_store.update(cx, |git_store, cx| {
3743 let Some((&id, _)) = git_store
3744 .repositories
3745 .iter()
3746 .find(|(_, handle)| *handle == &entity)
3747 else {
3748 return;
3749 };
3750 git_store.active_repo_id = Some(id);
3751 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3752 });
3753 }
3754
3755 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3756 self.snapshot.status()
3757 }
3758
3759 pub fn cached_stash(&self) -> GitStash {
3760 self.snapshot.stash_entries.clone()
3761 }
3762
3763 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3764 let git_store = self.git_store.upgrade()?;
3765 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3766 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3767 let abs_path = SanitizedPath::new(&abs_path);
3768 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3769 Some(ProjectPath {
3770 worktree_id: worktree.read(cx).id(),
3771 path: relative_path,
3772 })
3773 }
3774
3775 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3776 let git_store = self.git_store.upgrade()?;
3777 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3778 let abs_path = worktree_store.absolutize(path, cx)?;
3779 self.snapshot.abs_path_to_repo_path(&abs_path)
3780 }
3781
3782 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3783 other
3784 .read(cx)
3785 .snapshot
3786 .work_directory_abs_path
3787 .starts_with(&self.snapshot.work_directory_abs_path)
3788 }
3789
3790 pub fn open_commit_buffer(
3791 &mut self,
3792 languages: Option<Arc<LanguageRegistry>>,
3793 buffer_store: Entity<BufferStore>,
3794 cx: &mut Context<Self>,
3795 ) -> Task<Result<Entity<Buffer>>> {
3796 let id = self.id;
3797 if let Some(buffer) = self.commit_message_buffer.clone() {
3798 return Task::ready(Ok(buffer));
3799 }
3800 let this = cx.weak_entity();
3801
3802 let rx = self.send_job(None, move |state, mut cx| async move {
3803 let Some(this) = this.upgrade() else {
3804 bail!("git store was dropped");
3805 };
3806 match state {
3807 RepositoryState::Local(..) => {
3808 this.update(&mut cx, |_, cx| {
3809 Self::open_local_commit_buffer(languages, buffer_store, cx)
3810 })?
3811 .await
3812 }
3813 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3814 let request = client.request(proto::OpenCommitMessageBuffer {
3815 project_id: project_id.0,
3816 repository_id: id.to_proto(),
3817 });
3818 let response = request.await.context("requesting to open commit buffer")?;
3819 let buffer_id = BufferId::new(response.buffer_id)?;
3820 let buffer = buffer_store
3821 .update(&mut cx, |buffer_store, cx| {
3822 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3823 })?
3824 .await?;
3825 if let Some(language_registry) = languages {
3826 let git_commit_language =
3827 language_registry.language_for_name("Git Commit").await?;
3828 buffer.update(&mut cx, |buffer, cx| {
3829 buffer.set_language(Some(git_commit_language), cx);
3830 })?;
3831 }
3832 this.update(&mut cx, |this, _| {
3833 this.commit_message_buffer = Some(buffer.clone());
3834 })?;
3835 Ok(buffer)
3836 }
3837 }
3838 });
3839
3840 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3841 }
3842
3843 fn open_local_commit_buffer(
3844 language_registry: Option<Arc<LanguageRegistry>>,
3845 buffer_store: Entity<BufferStore>,
3846 cx: &mut Context<Self>,
3847 ) -> Task<Result<Entity<Buffer>>> {
3848 cx.spawn(async move |repository, cx| {
3849 let buffer = buffer_store
3850 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3851 .await?;
3852
3853 if let Some(language_registry) = language_registry {
3854 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3855 buffer.update(cx, |buffer, cx| {
3856 buffer.set_language(Some(git_commit_language), cx);
3857 })?;
3858 }
3859
3860 repository.update(cx, |repository, _| {
3861 repository.commit_message_buffer = Some(buffer.clone());
3862 })?;
3863 Ok(buffer)
3864 })
3865 }
3866
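    /// Restores the given paths to their contents at `commit`, tracking the
    /// operation as a pending revert op for those paths while it runs.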
3867 pub fn checkout_files(
3868 &mut self,
3869 commit: &str,
3870 paths: Vec<RepoPath>,
3871 cx: &mut Context<Self>,
3872 ) -> Task<Result<()>> {
3873 let commit = commit.to_string();
3874 let id = self.id;
3875
3876 self.spawn_job_with_tracking(
3877 paths.clone(),
3878 pending_op::GitStatus::Reverted,
3879 cx,
3880 async move |this, cx| {
3881 this.update(cx, |this, _cx| {
3882 this.send_job(
3883 Some(format!("git checkout {}", commit).into()),
3884 move |git_repo, _| async move {
3885 match git_repo {
3886 RepositoryState::Local(LocalRepositoryState {
3887 backend,
3888 environment,
3889 ..
3890 }) => {
3891 backend
3892 .checkout_files(commit, paths, environment.clone())
3893 .await
3894 }
3895 RepositoryState::Remote(RemoteRepositoryState {
3896 project_id,
3897 client,
3898 }) => {
3899 client
3900 .request(proto::GitCheckoutFiles {
3901 project_id: project_id.0,
3902 repository_id: id.to_proto(),
3903 commit,
3904 paths: paths
3905 .into_iter()
3906 .map(|p| p.to_proto())
3907 .collect(),
3908 })
3909 .await?;
3910
3911 Ok(())
3912 }
3913 }
3914 },
3915 )
3916 })?
3917 .await?
3918 },
3919 )
3920 }
3921
3922 pub fn reset(
3923 &mut self,
3924 commit: String,
3925 reset_mode: ResetMode,
3926 _cx: &mut App,
3927 ) -> oneshot::Receiver<Result<()>> {
3928 let id = self.id;
3929
3930 self.send_job(None, move |git_repo, _| async move {
3931 match git_repo {
3932 RepositoryState::Local(LocalRepositoryState {
3933 backend,
3934 environment,
3935 ..
3936 }) => backend.reset(commit, reset_mode, environment).await,
3937 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3938 client
3939 .request(proto::GitReset {
3940 project_id: project_id.0,
3941 repository_id: id.to_proto(),
3942 commit,
3943 mode: match reset_mode {
3944 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
3945 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
3946 },
3947 })
3948 .await?;
3949
3950 Ok(())
3951 }
3952 }
3953 })
3954 }
3955
3956 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
3957 let id = self.id;
3958 self.send_job(None, move |git_repo, _cx| async move {
3959 match git_repo {
3960 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3961 backend.show(commit).await
3962 }
3963 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3964 let resp = client
3965 .request(proto::GitShow {
3966 project_id: project_id.0,
3967 repository_id: id.to_proto(),
3968 commit,
3969 })
3970 .await?;
3971
3972 Ok(CommitDetails {
3973 sha: resp.sha.into(),
3974 message: resp.message.into(),
3975 commit_timestamp: resp.commit_timestamp,
3976 author_email: resp.author_email.into(),
3977 author_name: resp.author_name.into(),
3978 })
3979 }
3980 }
3981 })
3982 }
3983
3984 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
3985 let id = self.id;
3986 self.send_job(None, move |git_repo, cx| async move {
3987 match git_repo {
3988 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
3989 backend.load_commit(commit, cx).await
3990 }
3991 RepositoryState::Remote(RemoteRepositoryState {
3992 client, project_id, ..
3993 }) => {
3994 let response = client
3995 .request(proto::LoadCommitDiff {
3996 project_id: project_id.0,
3997 repository_id: id.to_proto(),
3998 commit,
3999 })
4000 .await?;
4001 Ok(CommitDiff {
4002 files: response
4003 .files
4004 .into_iter()
4005 .map(|file| {
4006 Ok(CommitFile {
4007 path: RepoPath::from_proto(&file.path)?,
4008 old_text: file.old_text,
4009 new_text: file.new_text,
4010 })
4011 })
4012 .collect::<Result<Vec<_>>>()?,
4013 })
4014 }
4015 }
4016 })
4017 }
4018
4019 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4020 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4021 }
4022
4023 fn save_buffers<'a>(
4024 &self,
4025 entries: impl IntoIterator<Item = &'a RepoPath>,
4026 cx: &mut Context<Self>,
4027 ) -> Vec<Task<anyhow::Result<()>>> {
4028 let mut save_futures = Vec::new();
4029 if let Some(buffer_store) = self.buffer_store(cx) {
4030 buffer_store.update(cx, |buffer_store, cx| {
4031 for path in entries {
4032 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4033 continue;
4034 };
4035 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4036 && buffer
4037 .read(cx)
4038 .file()
4039 .is_some_and(|file| file.disk_state().exists())
4040 && buffer.read(cx).has_unsaved_edits()
4041 {
4042 save_futures.push(buffer_store.save_buffer(buffer, cx));
4043 }
4044 }
4045 })
4046 }
4047 save_futures
4048 }
4049
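    /// Stages the given paths, first saving any open, disk-backed buffers for
    /// those paths that still have unsaved edits.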
4050 pub fn stage_entries(
4051 &mut self,
4052 entries: Vec<RepoPath>,
4053 cx: &mut Context<Self>,
4054 ) -> Task<anyhow::Result<()>> {
4055 if entries.is_empty() {
4056 return Task::ready(Ok(()));
4057 }
4058 let id = self.id;
4059 let save_tasks = self.save_buffers(&entries, cx);
4060 let paths = entries
4061 .iter()
4062 .map(|p| p.as_unix_str())
4063 .collect::<Vec<_>>()
4064 .join(" ");
4065 let status = format!("git add {paths}");
4066 let job_key = GitJobKey::WriteIndex(entries.clone());
4067
4068 self.spawn_job_with_tracking(
4069 entries.clone(),
4070 pending_op::GitStatus::Staged,
4071 cx,
4072 async move |this, cx| {
4073 for save_task in save_tasks {
4074 save_task.await?;
4075 }
4076
4077 this.update(cx, |this, _| {
4078 this.send_keyed_job(
4079 Some(job_key),
4080 Some(status.into()),
4081 move |git_repo, _cx| async move {
4082 match git_repo {
4083 RepositoryState::Local(LocalRepositoryState {
4084 backend,
4085 environment,
4086 ..
4087 }) => backend.stage_paths(entries, environment.clone()).await,
4088 RepositoryState::Remote(RemoteRepositoryState {
4089 project_id,
4090 client,
4091 }) => {
4092 client
4093 .request(proto::Stage {
4094 project_id: project_id.0,
4095 repository_id: id.to_proto(),
4096 paths: entries
4097 .into_iter()
4098 .map(|repo_path| repo_path.to_proto())
4099 .collect(),
4100 })
4101 .await
4102 .context("sending stage request")?;
4103
4104 Ok(())
4105 }
4106 }
4107 },
4108 )
4109 })?
4110 .await?
4111 },
4112 )
4113 }
4114
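    /// Counterpart to [`Self::stage_entries`]: unstages the given paths, again
    /// saving any dirty buffers for them first.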
4115 pub fn unstage_entries(
4116 &mut self,
4117 entries: Vec<RepoPath>,
4118 cx: &mut Context<Self>,
4119 ) -> Task<anyhow::Result<()>> {
4120 if entries.is_empty() {
4121 return Task::ready(Ok(()));
4122 }
4123 let id = self.id;
4124 let save_tasks = self.save_buffers(&entries, cx);
4125 let paths = entries
4126 .iter()
4127 .map(|p| p.as_unix_str())
4128 .collect::<Vec<_>>()
4129 .join(" ");
4130 let status = format!("git reset {paths}");
4131 let job_key = GitJobKey::WriteIndex(entries.clone());
4132
4133 self.spawn_job_with_tracking(
4134 entries.clone(),
4135 pending_op::GitStatus::Unstaged,
4136 cx,
4137 async move |this, cx| {
4138 for save_task in save_tasks {
4139 save_task.await?;
4140 }
4141
4142 this.update(cx, |this, _| {
4143 this.send_keyed_job(
4144 Some(job_key),
4145 Some(status.into()),
4146 move |git_repo, _cx| async move {
4147 match git_repo {
4148 RepositoryState::Local(LocalRepositoryState {
4149 backend,
4150 environment,
4151 ..
4152 }) => backend.unstage_paths(entries, environment).await,
4153 RepositoryState::Remote(RemoteRepositoryState {
4154 project_id,
4155 client,
4156 }) => {
4157 client
4158 .request(proto::Unstage {
4159 project_id: project_id.0,
4160 repository_id: id.to_proto(),
4161 paths: entries
4162 .into_iter()
4163 .map(|repo_path| repo_path.to_proto())
4164 .collect(),
4165 })
4166 .await
4167 .context("sending unstage request")?;
4168
4169 Ok(())
4170 }
4171 }
4172 },
4173 )
4174 })?
4175 .await?
4176 },
4177 )
4178 }
4179
4180 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4181 let to_stage = self
4182 .cached_status()
4183 .filter_map(|entry| {
4184 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4185 if ops.staging() || ops.staged() {
4186 None
4187 } else {
4188 Some(entry.repo_path)
4189 }
4190 } else if entry.status.staging().is_fully_staged() {
4191 None
4192 } else {
4193 Some(entry.repo_path)
4194 }
4195 })
4196 .collect();
4197 self.stage_entries(to_stage, cx)
4198 }
4199
4200 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4201 let to_unstage = self
4202 .cached_status()
4203 .filter_map(|entry| {
4204 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4205 if !ops.staging() && !ops.staged() {
4206 None
4207 } else {
4208 Some(entry.repo_path)
4209 }
4210 } else if entry.status.staging().is_fully_unstaged() {
4211 None
4212 } else {
4213 Some(entry.repo_path)
4214 }
4215 })
4216 .collect();
4217 self.unstage_entries(to_unstage, cx)
4218 }
4219
4220 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4221 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4222
4223 self.stash_entries(to_stash, cx)
4224 }
4225
4226 pub fn stash_entries(
4227 &mut self,
4228 entries: Vec<RepoPath>,
4229 cx: &mut Context<Self>,
4230 ) -> Task<anyhow::Result<()>> {
4231 let id = self.id;
4232
4233 cx.spawn(async move |this, cx| {
4234 this.update(cx, |this, _| {
4235 this.send_job(None, move |git_repo, _cx| async move {
4236 match git_repo {
4237 RepositoryState::Local(LocalRepositoryState {
4238 backend,
4239 environment,
4240 ..
4241 }) => backend.stash_paths(entries, environment).await,
4242 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4243 client
4244 .request(proto::Stash {
4245 project_id: project_id.0,
4246 repository_id: id.to_proto(),
4247 paths: entries
4248 .into_iter()
4249 .map(|repo_path| repo_path.to_proto())
4250 .collect(),
4251 })
4252 .await
4253 .context("sending stash request")?;
4254 Ok(())
4255 }
4256 }
4257 })
4258 })?
4259 .await??;
4260 Ok(())
4261 })
4262 }
4263
4264 pub fn stash_pop(
4265 &mut self,
4266 index: Option<usize>,
4267 cx: &mut Context<Self>,
4268 ) -> Task<anyhow::Result<()>> {
4269 let id = self.id;
4270 cx.spawn(async move |this, cx| {
4271 this.update(cx, |this, _| {
4272 this.send_job(None, move |git_repo, _cx| async move {
4273 match git_repo {
4274 RepositoryState::Local(LocalRepositoryState {
4275 backend,
4276 environment,
4277 ..
4278 }) => backend.stash_pop(index, environment).await,
4279 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4280 client
4281 .request(proto::StashPop {
4282 project_id: project_id.0,
4283 repository_id: id.to_proto(),
4284 stash_index: index.map(|i| i as u64),
4285 })
4286 .await
4287 .context("sending stash pop request")?;
4288 Ok(())
4289 }
4290 }
4291 })
4292 })?
4293 .await??;
4294 Ok(())
4295 })
4296 }
4297
4298 pub fn stash_apply(
4299 &mut self,
4300 index: Option<usize>,
4301 cx: &mut Context<Self>,
4302 ) -> Task<anyhow::Result<()>> {
4303 let id = self.id;
4304 cx.spawn(async move |this, cx| {
4305 this.update(cx, |this, _| {
4306 this.send_job(None, move |git_repo, _cx| async move {
4307 match git_repo {
4308 RepositoryState::Local(LocalRepositoryState {
4309 backend,
4310 environment,
4311 ..
4312 }) => backend.stash_apply(index, environment).await,
4313 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4314 client
4315 .request(proto::StashApply {
4316 project_id: project_id.0,
4317 repository_id: id.to_proto(),
4318 stash_index: index.map(|i| i as u64),
4319 })
4320 .await
4321 .context("sending stash apply request")?;
4322 Ok(())
4323 }
4324 }
4325 })
4326 })?
4327 .await??;
4328 Ok(())
4329 })
4330 }
4331
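    /// Drops a stash entry. For local repositories the cached stash list is
    /// refreshed afterwards and the updated snapshot is forwarded to any
    /// downstream client.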
4332 pub fn stash_drop(
4333 &mut self,
4334 index: Option<usize>,
4335 cx: &mut Context<Self>,
4336 ) -> oneshot::Receiver<anyhow::Result<()>> {
4337 let id = self.id;
4338 let updates_tx = self
4339 .git_store()
4340 .and_then(|git_store| match &git_store.read(cx).state {
4341 GitStoreState::Local { downstream, .. } => downstream
4342 .as_ref()
4343 .map(|downstream| downstream.updates_tx.clone()),
4344 _ => None,
4345 });
4346 let this = cx.weak_entity();
4347 self.send_job(None, move |git_repo, mut cx| async move {
4348 match git_repo {
4349 RepositoryState::Local(LocalRepositoryState {
4350 backend,
4351 environment,
4352 ..
4353 }) => {
                    // TODO: it would be nice not to have to refresh the stash entries manually here.
4355 let result = backend.stash_drop(index, environment).await;
4356 if result.is_ok()
4357 && let Ok(stash_entries) = backend.stash_entries().await
4358 {
4359 let snapshot = this.update(&mut cx, |this, cx| {
4360 this.snapshot.stash_entries = stash_entries;
4361 cx.emit(RepositoryEvent::StashEntriesChanged);
4362 this.snapshot.clone()
4363 })?;
4364 if let Some(updates_tx) = updates_tx {
4365 updates_tx
4366 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4367 .ok();
4368 }
4369 }
4370
4371 result
4372 }
4373 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4374 client
4375 .request(proto::StashDrop {
4376 project_id: project_id.0,
4377 repository_id: id.to_proto(),
4378 stash_index: index.map(|i| i as u64),
4379 })
4380 .await
                        .context("sending stash drop request")?;
4382 Ok(())
4383 }
4384 }
4385 })
4386 }
4387
4388 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4389 let id = self.id;
4390 self.send_job(
4391 Some(format!("git hook {}", hook.as_str()).into()),
4392 move |git_repo, _cx| async move {
4393 match git_repo {
4394 RepositoryState::Local(LocalRepositoryState {
4395 backend,
4396 environment,
4397 ..
4398 }) => backend.run_hook(hook, environment.clone()).await,
4399 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4400 client
4401 .request(proto::RunGitHook {
4402 project_id: project_id.0,
4403 repository_id: id.to_proto(),
4404 hook: hook.to_proto(),
4405 })
4406 .await?;
4407
4408 Ok(())
4409 }
4410 }
4411 },
4412 )
4413 }
4414
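    /// Commits the staged changes with the given message, running the
    /// pre-commit hook first; if the hook fails, the commit is not attempted.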
4415 pub fn commit(
4416 &mut self,
4417 message: SharedString,
4418 name_and_email: Option<(SharedString, SharedString)>,
4419 options: CommitOptions,
4420 askpass: AskPassDelegate,
4421 cx: &mut App,
4422 ) -> oneshot::Receiver<Result<()>> {
4423 let id = self.id;
4424 let askpass_delegates = self.askpass_delegates.clone();
4425 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4426
4427 let rx = self.run_hook(RunHook::PreCommit, cx);
4428
4429 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4430 rx.await??;
4431
4432 match git_repo {
4433 RepositoryState::Local(LocalRepositoryState {
4434 backend,
4435 environment,
4436 ..
4437 }) => {
4438 backend
4439 .commit(message, name_and_email, options, askpass, environment)
4440 .await
4441 }
4442 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4443 askpass_delegates.lock().insert(askpass_id, askpass);
4444 let _defer = util::defer(|| {
4445 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4446 debug_assert!(askpass_delegate.is_some());
4447 });
4448 let (name, email) = name_and_email.unzip();
4449 client
4450 .request(proto::Commit {
4451 project_id: project_id.0,
4452 repository_id: id.to_proto(),
4453 message: String::from(message),
4454 name: name.map(String::from),
4455 email: email.map(String::from),
4456 options: Some(proto::commit::CommitOptions {
4457 amend: options.amend,
4458 signoff: options.signoff,
4459 }),
4460 askpass_id,
4461 })
4462 .await
4463 .context("sending commit request")?;
4464
4465 Ok(())
4466 }
4467 }
4468 })
4469 }
4470
4471 pub fn fetch(
4472 &mut self,
4473 fetch_options: FetchOptions,
4474 askpass: AskPassDelegate,
4475 _cx: &mut App,
4476 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4477 let askpass_delegates = self.askpass_delegates.clone();
4478 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4479 let id = self.id;
4480
4481 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4482 match git_repo {
4483 RepositoryState::Local(LocalRepositoryState {
4484 backend,
4485 environment,
4486 ..
4487 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4488 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4489 askpass_delegates.lock().insert(askpass_id, askpass);
4490 let _defer = util::defer(|| {
4491 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4492 debug_assert!(askpass_delegate.is_some());
4493 });
4494
4495 let response = client
4496 .request(proto::Fetch {
4497 project_id: project_id.0,
4498 repository_id: id.to_proto(),
4499 askpass_id,
4500 remote: fetch_options.to_proto(),
4501 })
4502 .await
4503 .context("sending fetch request")?;
4504
4505 Ok(RemoteCommandOutput {
4506 stdout: response.stdout,
4507 stderr: response.stderr,
4508 })
4509 }
4510 }
4511 })
4512 }
4513
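    /// Pushes `branch` to `remote`. After a successful local push, the cached
    /// head branch is re-read from the backend and the updated snapshot is
    /// forwarded to any downstream client.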
4514 pub fn push(
4515 &mut self,
4516 branch: SharedString,
4517 remote: SharedString,
4518 options: Option<PushOptions>,
4519 askpass: AskPassDelegate,
4520 cx: &mut Context<Self>,
4521 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4522 let askpass_delegates = self.askpass_delegates.clone();
4523 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4524 let id = self.id;
4525
4526 let args = options
4527 .map(|option| match option {
4528 PushOptions::SetUpstream => " --set-upstream",
4529 PushOptions::Force => " --force-with-lease",
4530 })
4531 .unwrap_or("");
4532
4533 let updates_tx = self
4534 .git_store()
4535 .and_then(|git_store| match &git_store.read(cx).state {
4536 GitStoreState::Local { downstream, .. } => downstream
4537 .as_ref()
4538 .map(|downstream| downstream.updates_tx.clone()),
4539 _ => None,
4540 });
4541
4542 let this = cx.weak_entity();
4543 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4545 move |git_repo, mut cx| async move {
4546 match git_repo {
4547 RepositoryState::Local(LocalRepositoryState {
4548 backend,
4549 environment,
4550 ..
4551 }) => {
4552 let result = backend
4553 .push(
4554 branch.to_string(),
4555 remote.to_string(),
4556 options,
4557 askpass,
4558 environment.clone(),
4559 cx.clone(),
4560 )
4561 .await;
                        // TODO: it would be nice not to have to refresh the branch manually here.
4563 if result.is_ok() {
4564 let branches = backend.branches().await?;
4565 let branch = branches.into_iter().find(|branch| branch.is_head);
4566 log::info!("head branch after scan is {branch:?}");
4567 let snapshot = this.update(&mut cx, |this, cx| {
4568 this.snapshot.branch = branch;
4569 cx.emit(RepositoryEvent::BranchChanged);
4570 this.snapshot.clone()
4571 })?;
4572 if let Some(updates_tx) = updates_tx {
4573 updates_tx
4574 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4575 .ok();
4576 }
4577 }
4578 result
4579 }
4580 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4581 askpass_delegates.lock().insert(askpass_id, askpass);
4582 let _defer = util::defer(|| {
4583 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4584 debug_assert!(askpass_delegate.is_some());
4585 });
4586 let response = client
4587 .request(proto::Push {
4588 project_id: project_id.0,
4589 repository_id: id.to_proto(),
4590 askpass_id,
4591 branch_name: branch.to_string(),
4592 remote_name: remote.to_string(),
4593 options: options.map(|options| match options {
4594 PushOptions::Force => proto::push::PushOptions::Force,
4595 PushOptions::SetUpstream => {
4596 proto::push::PushOptions::SetUpstream
4597 }
4598 }
4599 as i32),
4600 })
4601 .await
4602 .context("sending push request")?;
4603
4604 Ok(RemoteCommandOutput {
4605 stdout: response.stdout,
4606 stderr: response.stderr,
4607 })
4608 }
4609 }
4610 },
4611 )
4612 }
4613
4614 pub fn pull(
4615 &mut self,
4616 branch: Option<SharedString>,
4617 remote: SharedString,
4618 rebase: bool,
4619 askpass: AskPassDelegate,
4620 _cx: &mut App,
4621 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4622 let askpass_delegates = self.askpass_delegates.clone();
4623 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4624 let id = self.id;
4625
4626 let mut status = "git pull".to_string();
4627 if rebase {
4628 status.push_str(" --rebase");
4629 }
4630 status.push_str(&format!(" {}", remote));
4631 if let Some(b) = &branch {
4632 status.push_str(&format!(" {}", b));
4633 }
4634
4635 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4636 match git_repo {
4637 RepositoryState::Local(LocalRepositoryState {
4638 backend,
4639 environment,
4640 ..
4641 }) => {
4642 backend
4643 .pull(
4644 branch.as_ref().map(|b| b.to_string()),
4645 remote.to_string(),
4646 rebase,
4647 askpass,
4648 environment.clone(),
4649 cx,
4650 )
4651 .await
4652 }
4653 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4654 askpass_delegates.lock().insert(askpass_id, askpass);
4655 let _defer = util::defer(|| {
4656 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4657 debug_assert!(askpass_delegate.is_some());
4658 });
4659 let response = client
4660 .request(proto::Pull {
4661 project_id: project_id.0,
4662 repository_id: id.to_proto(),
4663 askpass_id,
4664 rebase,
4665 branch_name: branch.as_ref().map(|b| b.to_string()),
4666 remote_name: remote.to_string(),
4667 })
4668 .await
4669 .context("sending pull request")?;
4670
4671 Ok(RemoteCommandOutput {
4672 stdout: response.stdout,
4673 stderr: response.stderr,
4674 })
4675 }
4676 }
4677 })
4678 }
4679
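    /// Replaces the index contents for `path` with `content`, keyed so that
    /// redundant writes for the same path are coalesced. When
    /// `hunk_staging_operation_count` is provided, it is recorded on the
    /// buffer's diff state once the write completes.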
4680 fn spawn_set_index_text_job(
4681 &mut self,
4682 path: RepoPath,
4683 content: Option<String>,
4684 hunk_staging_operation_count: Option<usize>,
4685 cx: &mut Context<Self>,
4686 ) -> oneshot::Receiver<anyhow::Result<()>> {
4687 let id = self.id;
4688 let this = cx.weak_entity();
4689 let git_store = self.git_store.clone();
4690 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4691 self.send_keyed_job(
4692 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4693 None,
4694 move |git_repo, mut cx| async move {
4695 log::debug!(
4696 "start updating index text for buffer {}",
4697 path.as_unix_str()
4698 );
4699
4700 match git_repo {
4701 RepositoryState::Local(LocalRepositoryState {
4702 fs,
4703 backend,
4704 environment,
4705 ..
4706 }) => {
4707 let executable = match fs.metadata(&abs_path).await {
4708 Ok(Some(meta)) => meta.is_executable,
4709 Ok(None) => false,
4710 Err(_err) => false,
4711 };
4712 backend
4713 .set_index_text(path.clone(), content, environment.clone(), executable)
4714 .await?;
4715 }
4716 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4717 client
4718 .request(proto::SetIndexText {
4719 project_id: project_id.0,
4720 repository_id: id.to_proto(),
4721 path: path.to_proto(),
4722 text: content,
4723 })
4724 .await?;
4725 }
4726 }
4727 log::debug!(
4728 "finish updating index text for buffer {}",
4729 path.as_unix_str()
4730 );
4731
4732 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4733 let project_path = this
4734 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4735 .ok()
4736 .flatten();
4737 git_store.update(&mut cx, |git_store, cx| {
4738 let buffer_id = git_store
4739 .buffer_store
4740 .read(cx)
4741 .get_by_path(&project_path?)?
4742 .read(cx)
4743 .remote_id();
4744 let diff_state = git_store.diffs.get(&buffer_id)?;
4745 diff_state.update(cx, |diff_state, _| {
4746 diff_state.hunk_staging_operation_count_as_of_write =
4747 hunk_staging_operation_count;
4748 });
4749 Some(())
4750 })?;
4751 }
4752 Ok(())
4753 },
4754 )
4755 }
4756
4757 pub fn get_remotes(
4758 &mut self,
4759 branch_name: Option<String>,
4760 is_push: bool,
4761 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4762 let id = self.id;
4763 self.send_job(None, move |repo, _cx| async move {
4764 match repo {
4765 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4766 let remote = if let Some(branch_name) = branch_name {
4767 if is_push {
4768 backend.get_push_remote(branch_name).await?
4769 } else {
4770 backend.get_branch_remote(branch_name).await?
4771 }
4772 } else {
4773 None
4774 };
4775
4776 match remote {
4777 Some(remote) => Ok(vec![remote]),
4778 None => backend.get_all_remotes().await,
4779 }
4780 }
4781 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4782 let response = client
4783 .request(proto::GetRemotes {
4784 project_id: project_id.0,
4785 repository_id: id.to_proto(),
4786 branch_name,
4787 is_push,
4788 })
4789 .await?;
4790
                    let remotes = response
                        .remotes
                        .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
                        })
                        .collect();
4798
4799 Ok(remotes)
4800 }
4801 }
4802 })
4803 }
4804
4805 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4806 let id = self.id;
4807 self.send_job(None, move |repo, _| async move {
4808 match repo {
4809 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4810 backend.branches().await
4811 }
4812 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4813 let response = client
4814 .request(proto::GitGetBranches {
4815 project_id: project_id.0,
4816 repository_id: id.to_proto(),
4817 })
4818 .await?;
4819
4820 let branches = response
4821 .branches
4822 .into_iter()
4823 .map(|branch| proto_to_branch(&branch))
4824 .collect();
4825
4826 Ok(branches)
4827 }
4828 }
4829 })
4830 }
4831
4832 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4833 let id = self.id;
4834 self.send_job(None, move |repo, _| async move {
4835 match repo {
4836 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4837 backend.worktrees().await
4838 }
4839 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4840 let response = client
4841 .request(proto::GitGetWorktrees {
4842 project_id: project_id.0,
4843 repository_id: id.to_proto(),
4844 })
4845 .await?;
4846
4847 let worktrees = response
4848 .worktrees
4849 .into_iter()
4850 .map(|worktree| proto_to_worktree(&worktree))
4851 .collect();
4852
4853 Ok(worktrees)
4854 }
4855 }
4856 })
4857 }
4858
4859 pub fn create_worktree(
4860 &mut self,
4861 name: String,
4862 path: PathBuf,
4863 commit: Option<String>,
4864 ) -> oneshot::Receiver<Result<()>> {
4865 let id = self.id;
4866 self.send_job(
4867 Some("git worktree add".into()),
4868 move |repo, _cx| async move {
4869 match repo {
4870 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4871 backend.create_worktree(name, path, commit).await
4872 }
4873 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4874 client
4875 .request(proto::GitCreateWorktree {
4876 project_id: project_id.0,
4877 repository_id: id.to_proto(),
4878 name,
4879 directory: path.to_string_lossy().to_string(),
4880 commit,
4881 })
4882 .await?;
4883
4884 Ok(())
4885 }
4886 }
4887 },
4888 )
4889 }
4890
4891 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4892 let id = self.id;
4893 self.send_job(None, move |repo, _| async move {
4894 match repo {
4895 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4896 backend.default_branch().await
4897 }
4898 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4899 let response = client
4900 .request(proto::GetDefaultBranch {
4901 project_id: project_id.0,
4902 repository_id: id.to_proto(),
4903 })
4904 .await?;
4905
4906 anyhow::Ok(response.branch.map(SharedString::from))
4907 }
4908 }
4909 })
4910 }
4911
4912 pub fn diff_tree(
4913 &mut self,
4914 diff_type: DiffTreeType,
4915 _cx: &App,
4916 ) -> oneshot::Receiver<Result<TreeDiff>> {
4917 let repository_id = self.snapshot.id;
4918 self.send_job(None, move |repo, _cx| async move {
4919 match repo {
4920 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4921 backend.diff_tree(diff_type).await
4922 }
4923 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4924 let response = client
4925 .request(proto::GetTreeDiff {
4926 project_id: project_id.0,
4927 repository_id: repository_id.0,
4928 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
4929 base: diff_type.base().to_string(),
4930 head: diff_type.head().to_string(),
4931 })
4932 .await?;
4933
4934 let entries = response
4935 .entries
4936 .into_iter()
4937 .filter_map(|entry| {
4938 let status = match entry.status() {
4939 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
4940 proto::tree_diff_status::Status::Modified => {
4941 TreeDiffStatus::Modified {
4942 old: git::Oid::from_str(
4943 &entry.oid.context("missing oid").log_err()?,
4944 )
4945 .log_err()?,
4946 }
4947 }
4948 proto::tree_diff_status::Status::Deleted => {
4949 TreeDiffStatus::Deleted {
4950 old: git::Oid::from_str(
4951 &entry.oid.context("missing oid").log_err()?,
4952 )
4953 .log_err()?,
4954 }
4955 }
4956 };
4957 Some((
4958 RepoPath::from_rel_path(
4959 &RelPath::from_proto(&entry.path).log_err()?,
4960 ),
4961 status,
4962 ))
4963 })
4964 .collect();
4965
4966 Ok(TreeDiff { entries })
4967 }
4968 }
4969 })
4970 }
4971
4972 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
4973 let id = self.id;
4974 self.send_job(None, move |repo, _cx| async move {
4975 match repo {
4976 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4977 backend.diff(diff_type).await
4978 }
4979 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4980 let response = client
4981 .request(proto::GitDiff {
4982 project_id: project_id.0,
4983 repository_id: id.to_proto(),
4984 diff_type: match diff_type {
4985 DiffType::HeadToIndex => {
4986 proto::git_diff::DiffType::HeadToIndex.into()
4987 }
4988 DiffType::HeadToWorktree => {
4989 proto::git_diff::DiffType::HeadToWorktree.into()
4990 }
4991 },
4992 })
4993 .await?;
4994
4995 Ok(response.diff)
4996 }
4997 }
4998 })
4999 }
5000
5001 pub fn create_branch(
5002 &mut self,
5003 branch_name: String,
5004 base_branch: Option<String>,
5005 ) -> oneshot::Receiver<Result<()>> {
5006 let id = self.id;
5007 let status_msg = if let Some(ref base) = base_branch {
5008 format!("git switch -c {branch_name} {base}").into()
5009 } else {
5010 format!("git switch -c {branch_name}").into()
5011 };
5012 self.send_job(Some(status_msg), move |repo, _cx| async move {
5013 match repo {
5014 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5015 backend.create_branch(branch_name, base_branch).await
5016 }
5017 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5018 client
5019 .request(proto::GitCreateBranch {
5020 project_id: project_id.0,
5021 repository_id: id.to_proto(),
5022 branch_name,
5023 })
5024 .await?;
5025
5026 Ok(())
5027 }
5028 }
5029 })
5030 }
5031
5032 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5033 let id = self.id;
5034 self.send_job(
5035 Some(format!("git switch {branch_name}").into()),
5036 move |repo, _cx| async move {
5037 match repo {
5038 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5039 backend.change_branch(branch_name).await
5040 }
5041 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5042 client
5043 .request(proto::GitChangeBranch {
5044 project_id: project_id.0,
5045 repository_id: id.to_proto(),
5046 branch_name,
5047 })
5048 .await?;
5049
5050 Ok(())
5051 }
5052 }
5053 },
5054 )
5055 }
5056
5057 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5058 let id = self.id;
5059 self.send_job(
5060 Some(format!("git branch -d {branch_name}").into()),
5061 move |repo, _cx| async move {
5062 match repo {
5063 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5064 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5065 client
5066 .request(proto::GitDeleteBranch {
5067 project_id: project_id.0,
5068 repository_id: id.to_proto(),
5069 branch_name,
5070 })
5071 .await?;
5072
5073 Ok(())
5074 }
5075 }
5076 },
5077 )
5078 }
5079
5080 pub fn rename_branch(
5081 &mut self,
5082 branch: String,
5083 new_name: String,
5084 ) -> oneshot::Receiver<Result<()>> {
5085 let id = self.id;
5086 self.send_job(
5087 Some(format!("git branch -m {branch} {new_name}").into()),
5088 move |repo, _cx| async move {
5089 match repo {
5090 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5091 backend.rename_branch(branch, new_name).await
5092 }
5093 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5094 client
5095 .request(proto::GitRenameBranch {
5096 project_id: project_id.0,
5097 repository_id: id.to_proto(),
5098 branch,
5099 new_name,
5100 })
5101 .await?;
5102
5103 Ok(())
5104 }
5105 }
5106 },
5107 )
5108 }
5109
5110 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5111 let id = self.id;
5112 self.send_job(None, move |repo, _cx| async move {
5113 match repo {
5114 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5115 backend.check_for_pushed_commit().await
5116 }
5117 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5118 let response = client
5119 .request(proto::CheckForPushedCommits {
5120 project_id: project_id.0,
5121 repository_id: id.to_proto(),
5122 })
5123 .await?;
5124
5125 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5126
5127 Ok(branches)
5128 }
5129 }
5130 })
5131 }
5132
5133 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5134 self.send_job(None, |repo, _cx| async move {
5135 match repo {
5136 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5137 backend.checkpoint().await
5138 }
5139 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5140 }
5141 })
5142 }
5143
5144 pub fn restore_checkpoint(
5145 &mut self,
5146 checkpoint: GitRepositoryCheckpoint,
5147 ) -> oneshot::Receiver<Result<()>> {
5148 self.send_job(None, move |repo, _cx| async move {
5149 match repo {
5150 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5151 backend.restore_checkpoint(checkpoint).await
5152 }
5153 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5154 }
5155 })
5156 }
5157
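    /// Applies an `UpdateRepository` message received over RPC, updating the
    /// local snapshot (branch, head commit, merge state, stash, statuses) and
    /// emitting the corresponding repository events.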
5158 pub(crate) fn apply_remote_update(
5159 &mut self,
5160 update: proto::UpdateRepository,
5161 cx: &mut Context<Self>,
5162 ) -> Result<()> {
5163 let conflicted_paths = TreeSet::from_ordered_entries(
5164 update
5165 .current_merge_conflicts
5166 .into_iter()
5167 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5168 );
5169 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5170 let new_head_commit = update
5171 .head_commit_details
5172 .as_ref()
5173 .map(proto_to_commit_details);
5174 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5175 cx.emit(RepositoryEvent::BranchChanged)
5176 }
5177 self.snapshot.branch = new_branch;
5178 self.snapshot.head_commit = new_head_commit;
5179
5180 self.snapshot.merge.conflicted_paths = conflicted_paths;
5181 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5182 let new_stash_entries = GitStash {
5183 entries: update
5184 .stash_entries
5185 .iter()
5186 .filter_map(|entry| proto_to_stash(entry).ok())
5187 .collect(),
5188 };
5189 if self.snapshot.stash_entries != new_stash_entries {
5190 cx.emit(RepositoryEvent::StashEntriesChanged)
5191 }
5192 self.snapshot.stash_entries = new_stash_entries;
5193
5194 let edits = update
5195 .removed_statuses
5196 .into_iter()
5197 .filter_map(|path| {
5198 Some(sum_tree::Edit::Remove(PathKey(
5199 RelPath::from_proto(&path).log_err()?,
5200 )))
5201 })
5202 .chain(
5203 update
5204 .updated_statuses
5205 .into_iter()
5206 .filter_map(|updated_status| {
5207 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5208 }),
5209 )
5210 .collect::<Vec<_>>();
5211 if !edits.is_empty() {
5212 cx.emit(RepositoryEvent::StatusesChanged);
5213 }
5214 self.snapshot.statuses_by_path.edit(edits, ());
5215 if update.is_last_update {
5216 self.snapshot.scan_id = update.scan_id;
5217 }
5218 self.clear_pending_ops(cx);
5219 Ok(())
5220 }
5221
5222 pub fn compare_checkpoints(
5223 &mut self,
5224 left: GitRepositoryCheckpoint,
5225 right: GitRepositoryCheckpoint,
5226 ) -> oneshot::Receiver<Result<bool>> {
5227 self.send_job(None, move |repo, _cx| async move {
5228 match repo {
5229 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5230 backend.compare_checkpoints(left, right).await
5231 }
5232 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5233 }
5234 })
5235 }
5236
5237 pub fn diff_checkpoints(
5238 &mut self,
5239 base_checkpoint: GitRepositoryCheckpoint,
5240 target_checkpoint: GitRepositoryCheckpoint,
5241 ) -> oneshot::Receiver<Result<String>> {
5242 self.send_job(None, move |repo, _cx| async move {
5243 match repo {
5244 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5245 backend
5246 .diff_checkpoints(base_checkpoint, target_checkpoint)
5247 .await
5248 }
5249 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5250 }
5251 })
5252 }
5253
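    /// Discards pending ops that are no longer running, keeping only paths
    /// that still have in-flight operations, and notifies observers if the
    /// set changed.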
5254 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5255 let updated = SumTree::from_iter(
5256 self.pending_ops.iter().filter_map(|ops| {
5257 let inner_ops: Vec<PendingOp> =
5258 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5259 if inner_ops.is_empty() {
5260 None
5261 } else {
5262 Some(PendingOps {
5263 repo_path: ops.repo_path.clone(),
5264 ops: inner_ops,
5265 })
5266 }
5267 }),
5268 (),
5269 );
5270
5271 if updated != self.pending_ops {
5272 cx.emit(RepositoryEvent::PendingOpsChanged {
5273 pending_ops: self.pending_ops.clone(),
5274 })
5275 }
5276
5277 self.pending_ops = updated;
5278 }
5279
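    /// Schedules a full git status rescan on the worker. Scans share a job
    /// key, so if several are queued up only the most recent one runs.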
5280 fn schedule_scan(
5281 &mut self,
5282 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5283 cx: &mut Context<Self>,
5284 ) {
5285 let this = cx.weak_entity();
5286 let _ = self.send_keyed_job(
5287 Some(GitJobKey::ReloadGitState),
5288 None,
5289 |state, mut cx| async move {
5290 log::debug!("run scheduled git status scan");
5291
5292 let Some(this) = this.upgrade() else {
5293 return Ok(());
5294 };
5295 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5296 bail!("not a local repository")
5297 };
5298 let (snapshot, events) = this
5299 .update(&mut cx, |this, _| {
5300 this.paths_needing_status_update.clear();
5301 compute_snapshot(
5302 this.id,
5303 this.work_directory_abs_path.clone(),
5304 this.snapshot.clone(),
5305 backend.clone(),
5306 )
5307 })?
5308 .await?;
5309 this.update(&mut cx, |this, cx| {
5310 this.snapshot = snapshot.clone();
5311 this.clear_pending_ops(cx);
5312 for event in events {
5313 cx.emit(event);
5314 }
5315 })?;
5316 if let Some(updates_tx) = updates_tx {
5317 updates_tx
5318 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5319 .ok();
5320 }
5321 Ok(())
5322 },
5323 );
5324 }
5325
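    /// Spawns the background worker that runs git jobs for a local repository,
    /// one at a time and in order, once the backend state is available. Keyed
    /// jobs are skipped when a newer job with the same key is already queued.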
5326 fn spawn_local_git_worker(
5327 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5328 cx: &mut Context<Self>,
5329 ) -> mpsc::UnboundedSender<GitJob> {
5330 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5331
5332 cx.spawn(async move |_, cx| {
5333 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5334 if let Some(git_hosting_provider_registry) =
5335 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5336 {
5337 git_hosting_providers::register_additional_providers(
5338 git_hosting_provider_registry,
5339 state.backend.clone(),
5340 );
5341 }
5342 let state = RepositoryState::Local(state);
5343 let mut jobs = VecDeque::new();
5344 loop {
5345 while let Ok(Some(next_job)) = job_rx.try_next() {
5346 jobs.push_back(next_job);
5347 }
5348
5349 if let Some(job) = jobs.pop_front() {
5350 if let Some(current_key) = &job.key
5351 && jobs
5352 .iter()
5353 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5354 {
5355 continue;
5356 }
5357 (job.job)(state.clone(), cx).await;
5358 } else if let Some(job) = job_rx.next().await {
5359 jobs.push_back(job);
5360 } else {
5361 break;
5362 }
5363 }
5364 anyhow::Ok(())
5365 })
5366 .detach_and_log_err(cx);
5367
5368 job_tx
5369 }
5370
5371 fn spawn_remote_git_worker(
5372 state: RemoteRepositoryState,
5373 cx: &mut Context<Self>,
5374 ) -> mpsc::UnboundedSender<GitJob> {
5375 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5376
5377 cx.spawn(async move |_, cx| {
5378 let state = RepositoryState::Remote(state);
5379 let mut jobs = VecDeque::new();
5380 loop {
5381 while let Ok(Some(next_job)) = job_rx.try_next() {
5382 jobs.push_back(next_job);
5383 }
5384
5385 if let Some(job) = jobs.pop_front() {
5386 if let Some(current_key) = &job.key
5387 && jobs
5388 .iter()
5389 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5390 {
5391 continue;
5392 }
5393 (job.job)(state.clone(), cx).await;
5394 } else if let Some(job) = job_rx.next().await {
5395 jobs.push_back(job);
5396 } else {
5397 break;
5398 }
5399 }
5400 anyhow::Ok(())
5401 })
5402 .detach_and_log_err(cx);
5403
5404 job_tx
5405 }
5406
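    /// Loads the index (staged) text for `repo_path`, going through an RPC
    /// request when the repository lives on a remote host.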
5407 fn load_staged_text(
5408 &mut self,
5409 buffer_id: BufferId,
5410 repo_path: RepoPath,
5411 cx: &App,
5412 ) -> Task<Result<Option<String>>> {
5413 let rx = self.send_job(None, move |state, _| async move {
5414 match state {
5415 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5416 anyhow::Ok(backend.load_index_text(repo_path).await)
5417 }
5418 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5419 let response = client
5420 .request(proto::OpenUnstagedDiff {
5421 project_id: project_id.to_proto(),
5422 buffer_id: buffer_id.to_proto(),
5423 })
5424 .await?;
5425 Ok(response.staged_text)
5426 }
5427 }
5428 });
5429 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5430 }
5431
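    /// Loads the committed (HEAD) and staged (index) texts for `repo_path`,
    /// collapsing them into [`DiffBasesChange::SetBoth`] when they are
    /// identical.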
5432 fn load_committed_text(
5433 &mut self,
5434 buffer_id: BufferId,
5435 repo_path: RepoPath,
5436 cx: &App,
5437 ) -> Task<Result<DiffBasesChange>> {
5438 let rx = self.send_job(None, move |state, _| async move {
5439 match state {
5440 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5441 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5442 let staged_text = backend.load_index_text(repo_path).await;
5443 let diff_bases_change = if committed_text == staged_text {
5444 DiffBasesChange::SetBoth(committed_text)
5445 } else {
5446 DiffBasesChange::SetEach {
5447 index: staged_text,
5448 head: committed_text,
5449 }
5450 };
5451 anyhow::Ok(diff_bases_change)
5452 }
5453 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5454 use proto::open_uncommitted_diff_response::Mode;
5455
5456 let response = client
5457 .request(proto::OpenUncommittedDiff {
5458 project_id: project_id.to_proto(),
5459 buffer_id: buffer_id.to_proto(),
5460 })
5461 .await?;
5462 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5463 let bases = match mode {
5464 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5465 Mode::IndexAndHead => DiffBasesChange::SetEach {
5466 head: response.committed_text,
5467 index: response.staged_text,
5468 },
5469 };
5470 Ok(bases)
5471 }
5472 }
5473 });
5474
5475 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5476 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5478 let repository_id = self.snapshot.id;
5479 let rx = self.send_job(None, move |state, _| async move {
5480 match state {
5481 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5482 backend.load_blob_content(oid).await
5483 }
5484 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5485 let response = client
5486 .request(proto::GetBlobContent {
5487 project_id: project_id.to_proto(),
5488 repository_id: repository_id.0,
5489 oid: oid.to_string(),
5490 })
5491 .await?;
5492 Ok(response.content)
5493 }
5494 }
5495 });
5496 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5497 }
5498
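    /// Records that the given paths may have changed on disk and schedules a
    /// keyed job that re-runs `git status` for just those paths, applying any
    /// resulting status and stash changes to the snapshot.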
5499 fn paths_changed(
5500 &mut self,
5501 paths: Vec<RepoPath>,
5502 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5503 cx: &mut Context<Self>,
5504 ) {
5505 self.paths_needing_status_update.extend(paths);
5506
5507 let this = cx.weak_entity();
5508 let _ = self.send_keyed_job(
5509 Some(GitJobKey::RefreshStatuses),
5510 None,
5511 |state, mut cx| async move {
5512 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5513 (
5514 this.snapshot.clone(),
5515 mem::take(&mut this.paths_needing_status_update),
5516 )
5517 })?;
5518 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5519 bail!("not a local repository")
5520 };
5521
5522 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5523 if paths.is_empty() {
5524 return Ok(());
5525 }
5526 let statuses = backend.status(&paths).await?;
5527 let stash_entries = backend.stash_entries().await?;
5528
5529 let changed_path_statuses = cx
5530 .background_spawn(async move {
5531 let mut changed_path_statuses = Vec::new();
5532 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5533 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5534
5535 for (repo_path, status) in &*statuses.entries {
5536 changed_paths.remove(repo_path);
5537 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5538 && cursor.item().is_some_and(|entry| entry.status == *status)
5539 {
5540 continue;
5541 }
5542
5543 changed_path_statuses.push(Edit::Insert(StatusEntry {
5544 repo_path: repo_path.clone(),
5545 status: *status,
5546 }));
5547 }
5548 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5549 for path in changed_paths.into_iter() {
5550 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5551 changed_path_statuses
5552 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5553 }
5554 }
5555 changed_path_statuses
5556 })
5557 .await;
5558
5559 this.update(&mut cx, |this, cx| {
5560 if this.snapshot.stash_entries != stash_entries {
5561 cx.emit(RepositoryEvent::StashEntriesChanged);
5562 this.snapshot.stash_entries = stash_entries;
5563 }
5564
5565 if !changed_path_statuses.is_empty() {
5566 cx.emit(RepositoryEvent::StatusesChanged);
5567 this.snapshot
5568 .statuses_by_path
5569 .edit(changed_path_statuses, ());
5570 this.snapshot.scan_id += 1;
5571 }
5572
5573 if let Some(updates_tx) = updates_tx {
5574 updates_tx
5575 .unbounded_send(DownstreamUpdate::UpdateRepository(
5576 this.snapshot.clone(),
5577 ))
5578 .ok();
5579 }
5580 })
5581 },
5582 );
5583 }
5584
    /// Returns the currently running git command and when it started, if any.
5586 pub fn current_job(&self) -> Option<JobInfo> {
5587 self.active_jobs.values().next().cloned()
5588 }
5589
5590 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5591 self.send_job(None, |_, _| async {})
5592 }
5593
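    /// Runs `f`, recording a pending op with the given `git_status` for each
    /// path while the work is in flight, then marks those ops as finished,
    /// skipped (on cancellation), or errored once it completes.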
5594 fn spawn_job_with_tracking<AsyncFn>(
5595 &mut self,
5596 paths: Vec<RepoPath>,
5597 git_status: pending_op::GitStatus,
5598 cx: &mut Context<Self>,
5599 f: AsyncFn,
5600 ) -> Task<Result<()>>
5601 where
5602 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5603 {
5604 let ids = self.new_pending_ops_for_paths(paths, git_status);
5605
5606 cx.spawn(async move |this, cx| {
5607 let (job_status, result) = match f(this.clone(), cx).await {
5608 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5609 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5610 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5611 };
5612
5613 this.update(cx, |this, _| {
5614 let mut edits = Vec::with_capacity(ids.len());
5615 for (id, entry) in ids {
5616 if let Some(mut ops) = this
5617 .pending_ops
5618 .get(&PathKey(entry.as_ref().clone()), ())
5619 .cloned()
5620 {
5621 if let Some(op) = ops.op_by_id_mut(id) {
5622 op.job_status = job_status;
5623 }
5624 edits.push(sum_tree::Edit::Insert(ops));
5625 }
5626 }
5627 this.pending_ops.edit(edits, ());
5628 })?;
5629
5630 result
5631 })
5632 }
5633
5634 fn new_pending_ops_for_paths(
5635 &mut self,
5636 paths: Vec<RepoPath>,
5637 git_status: pending_op::GitStatus,
5638 ) -> Vec<(PendingOpId, RepoPath)> {
5639 let mut edits = Vec::with_capacity(paths.len());
5640 let mut ids = Vec::with_capacity(paths.len());
5641 for path in paths {
5642 let mut ops = self
5643 .pending_ops
5644 .get(&PathKey(path.as_ref().clone()), ())
5645 .cloned()
5646 .unwrap_or_else(|| PendingOps::new(&path));
5647 let id = ops.max_id() + 1;
5648 ops.ops.push(PendingOp {
5649 id,
5650 git_status,
5651 job_status: pending_op::JobStatus::Running,
5652 });
5653 edits.push(sum_tree::Edit::Insert(ops));
5654 ids.push((id, path));
5655 }
5656 self.pending_ops.edit(edits, ());
5657 ids
5658 }
5659}
5660
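/// Builds a permalink for a file inside a crate downloaded from a registry by
/// reading the crate's `.cargo_vcs_info.json` and `Cargo.toml` to recover the
/// upstream repository URL and the commit the crate was packaged from.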
5661fn get_permalink_in_rust_registry_src(
5662 provider_registry: Arc<GitHostingProviderRegistry>,
5663 path: PathBuf,
5664 selection: Range<u32>,
5665) -> Result<url::Url> {
5666 #[derive(Deserialize)]
5667 struct CargoVcsGit {
5668 sha1: String,
5669 }
5670
5671 #[derive(Deserialize)]
5672 struct CargoVcsInfo {
5673 git: CargoVcsGit,
5674 path_in_vcs: String,
5675 }
5676
5677 #[derive(Deserialize)]
5678 struct CargoPackage {
5679 repository: String,
5680 }
5681
5682 #[derive(Deserialize)]
5683 struct CargoToml {
5684 package: CargoPackage,
5685 }
5686
5687 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5688 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5689 Some((dir, json))
5690 }) else {
5691 bail!("No .cargo_vcs_info.json found in parent directories")
5692 };
5693 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5694 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5695 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5696 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5697 .context("parsing package.repository field of manifest")?;
5698 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5699 let permalink = provider.build_permalink(
5700 remote,
5701 BuildPermalinkParams::new(
5702 &cargo_vcs_info.git.sha1,
5703 &RepoPath::from_rel_path(
5704 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5705 ),
5706 Some(selection),
5707 ),
5708 );
5709 Ok(permalink)
5710}
5711
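/// Converts a `git blame` result into its protobuf representation; a `None`
/// blame becomes an empty response.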
5712fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5713 let Some(blame) = blame else {
5714 return proto::BlameBufferResponse {
5715 blame_response: None,
5716 };
5717 };
5718
5719 let entries = blame
5720 .entries
5721 .into_iter()
5722 .map(|entry| proto::BlameEntry {
5723 sha: entry.sha.as_bytes().into(),
5724 start_line: entry.range.start,
5725 end_line: entry.range.end,
5726 original_line_number: entry.original_line_number,
5727 author: entry.author,
5728 author_mail: entry.author_mail,
5729 author_time: entry.author_time,
5730 author_tz: entry.author_tz,
5731 committer: entry.committer_name,
5732 committer_mail: entry.committer_email,
5733 committer_time: entry.committer_time,
5734 committer_tz: entry.committer_tz,
5735 summary: entry.summary,
5736 previous: entry.previous,
5737 filename: entry.filename,
5738 })
5739 .collect::<Vec<_>>();
5740
5741 let messages = blame
5742 .messages
5743 .into_iter()
5744 .map(|(oid, message)| proto::CommitMessage {
5745 oid: oid.as_bytes().into(),
5746 message,
5747 })
5748 .collect::<Vec<_>>();
5749
5750 proto::BlameBufferResponse {
5751 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5752 entries,
5753 messages,
5754 remote_url: blame.remote_url,
5755 }),
5756 }
5757}
5758
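/// Inverse of [`serialize_blame_buffer_response`]; entries and messages whose
/// SHAs fail to parse are silently dropped.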
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

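/// Converts a [`Branch`] into its proto representation, including upstream
/// tracking state and the most recent commit summary when available.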
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}

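/// Converts a linked git worktree into its proto representation.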
fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree.ref_name.to_string(),
        sha: worktree.sha.to_string(),
    }
}

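/// Reconstructs a linked git worktree from its proto representation.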
fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

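/// Reconstructs a [`Branch`] from its proto representation. A missing
/// tracking status is interpreted as a gone upstream, and since `has_parent`
/// is not carried by the proto, it defaults to `true`.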
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                has_parent: true,
            }
        }),
    }
}

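/// Converts [`CommitDetails`] into its proto representation.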
fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

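/// Reconstructs [`CommitDetails`] from its proto representation.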
fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

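/// Recomputes a repository snapshot from the git backend and diffs it against
/// `prev_snapshot`, returning the new snapshot together with the
/// [`RepositoryEvent`]s (merge heads, statuses, branch/head) produced by the
/// comparison.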
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    // Resolve the head commit directly from the backend; this is what we rely on
    // when `branch` is None (detached HEAD state).
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    // The remote URLs are captured for edit prediction data collection.
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

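/// Deserializes a [`FileStatus`] from its proto representation. When the
/// structured `variant` is absent, falls back to mapping the legacy
/// `simple_status` code onto a tracked or unmerged status.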
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

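/// Serializes a [`FileStatus`] into its structured proto variant.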
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

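/// Maps an [`UnmergedStatusCode`] onto the shared `proto::GitStatus` enum.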
fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

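/// Maps a tracked-file [`StatusCode`] onto the shared `proto::GitStatus` enum.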
fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}