1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
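/// Tracks the git state of a project: the repositories discovered in its
/// worktrees, the currently active repository, and the per-buffer diffs and
/// conflict sets derived from them.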
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
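/// The diffs for a single buffer that have been shared with a remote peer.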
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
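/// Git-related state tracked per open buffer: weak handles to its unstaged and
/// uncommitted diffs and its conflict set, plus the bookkeeping used to keep
/// them in sync with the repository.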
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, once the new index text has been
    /// written to disk, `hunk_staging_operation_count_as_of_write` is updated to
    /// reflect the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
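/// A change to one or both of a buffer's diff base texts (index and/or HEAD).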
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
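/// Which base a buffer diff is computed against: the index (unstaged) or HEAD
/// (uncommitted).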
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
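/// A checkpoint of every repository in the store, keyed by working directory
/// path. Produced by [`GitStore::checkpoint`].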
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
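/// A point-in-time view of a repository's state: path statuses, current branch
/// and head commit, merge details, remote URLs, and stash entries.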
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
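/// A single git repository tracked by the [`GitStore`]: its latest
/// [`RepositorySnapshot`] along with the queue of git jobs to run against the
/// underlying backend.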
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
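    /// Opens the git backend for `dot_git_abs_path`, using the working
    /// directory's environment to locate a system `git` binary on `PATH` when
    /// one is available.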
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(
                    &Shell::System,
                    work_directory_abs_path.clone(),
                    cx,
                )
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
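/// A queued unit of git work, run with access to the repository's
/// [`RepositoryState`] and optionally tagged with a [`GitJobKey`].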
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_delete_branch);
476 client.add_entity_request_handler(Self::handle_git_init);
477 client.add_entity_request_handler(Self::handle_push);
478 client.add_entity_request_handler(Self::handle_pull);
479 client.add_entity_request_handler(Self::handle_fetch);
480 client.add_entity_request_handler(Self::handle_stage);
481 client.add_entity_request_handler(Self::handle_unstage);
482 client.add_entity_request_handler(Self::handle_stash);
483 client.add_entity_request_handler(Self::handle_stash_pop);
484 client.add_entity_request_handler(Self::handle_stash_apply);
485 client.add_entity_request_handler(Self::handle_stash_drop);
486 client.add_entity_request_handler(Self::handle_commit);
487 client.add_entity_request_handler(Self::handle_run_hook);
488 client.add_entity_request_handler(Self::handle_reset);
489 client.add_entity_request_handler(Self::handle_show);
490 client.add_entity_request_handler(Self::handle_load_commit_diff);
491 client.add_entity_request_handler(Self::handle_file_history);
492 client.add_entity_request_handler(Self::handle_checkout_files);
493 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
494 client.add_entity_request_handler(Self::handle_set_index_text);
495 client.add_entity_request_handler(Self::handle_askpass);
496 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
497 client.add_entity_request_handler(Self::handle_git_diff);
498 client.add_entity_request_handler(Self::handle_tree_diff);
499 client.add_entity_request_handler(Self::handle_get_blob_content);
500 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
501 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
502 client.add_entity_message_handler(Self::handle_update_diff_bases);
503 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
504 client.add_entity_request_handler(Self::handle_blame_buffer);
505 client.add_entity_message_handler(Self::handle_update_repository);
506 client.add_entity_message_handler(Self::handle_remove_repository);
507 client.add_entity_request_handler(Self::handle_git_clone);
508 client.add_entity_request_handler(Self::handle_get_worktrees);
509 client.add_entity_request_handler(Self::handle_create_worktree);
510 }
511
512 pub fn is_local(&self) -> bool {
513 matches!(self.state, GitStoreState::Local { .. })
514 }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
516 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
517 let id = repo.read(cx).id;
518 if self.active_repo_id != Some(id) {
519 self.active_repo_id = Some(id);
520 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
521 }
522 }
523 }
524
525 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
526 match &mut self.state {
527 GitStoreState::Remote {
528 downstream: downstream_client,
529 ..
530 } => {
531 for repo in self.repositories.values() {
532 let update = repo.read(cx).snapshot.initial_update(project_id);
533 for update in split_repository_update(update) {
534 client.send(update).log_err();
535 }
536 }
537 *downstream_client = Some((client, ProjectId(project_id)));
538 }
539 GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } => {
543 let mut snapshots = HashMap::default();
544 let (updates_tx, mut updates_rx) = mpsc::unbounded();
545 for repo in self.repositories.values() {
546 updates_tx
547 .unbounded_send(DownstreamUpdate::UpdateRepository(
548 repo.read(cx).snapshot.clone(),
549 ))
550 .ok();
551 }
552 *downstream_client = Some(LocalDownstreamState {
553 client: client.clone(),
554 project_id: ProjectId(project_id),
555 updates_tx,
556 _task: cx.spawn(async move |this, cx| {
557 cx.background_spawn(async move {
558 while let Some(update) = updates_rx.next().await {
559 match update {
560 DownstreamUpdate::UpdateRepository(snapshot) => {
561 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
562 {
563 let update =
564 snapshot.build_update(old_snapshot, project_id);
565 *old_snapshot = snapshot;
566 for update in split_repository_update(update) {
567 client.send(update)?;
568 }
569 } else {
570 let update = snapshot.initial_update(project_id);
571 for update in split_repository_update(update) {
572 client.send(update)?;
573 }
574 snapshots.insert(snapshot.id, snapshot);
575 }
576 }
577 DownstreamUpdate::RemoveRepository(id) => {
578 client.send(proto::RemoveRepository {
579 project_id,
580 id: id.to_proto(),
581 })?;
582 }
583 }
584 }
585 anyhow::Ok(())
586 })
587 .await
588 .ok();
589 this.update(cx, |this, _| {
590 if let GitStoreState::Local {
591 downstream: downstream_client,
592 ..
593 } = &mut this.state
594 {
595 downstream_client.take();
596 } else {
597 unreachable!("unshared called on remote store");
598 }
599 })
600 }),
601 });
602 }
603 }
604 }
605
606 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
607 match &mut self.state {
608 GitStoreState::Local {
609 downstream: downstream_client,
610 ..
611 } => {
612 downstream_client.take();
613 }
614 GitStoreState::Remote {
615 downstream: downstream_client,
616 ..
617 } => {
618 downstream_client.take();
619 }
620 }
621 self.shared_diffs.clear();
622 }
623
624 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
625 self.shared_diffs.remove(peer_id);
626 }
627
628 pub fn active_repository(&self) -> Option<Entity<Repository>> {
629 self.active_repo_id
630 .as_ref()
631 .map(|id| self.repositories[id].clone())
632 }
633
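    /// Returns a diff of the buffer's contents against the git index, loading
    /// the staged text on first request and reusing the cached diff thereafter.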
634 pub fn open_unstaged_diff(
635 &mut self,
636 buffer: Entity<Buffer>,
637 cx: &mut Context<Self>,
638 ) -> Task<Result<Entity<BufferDiff>>> {
639 let buffer_id = buffer.read(cx).remote_id();
640 if let Some(diff_state) = self.diffs.get(&buffer_id)
641 && let Some(unstaged_diff) = diff_state
642 .read(cx)
643 .unstaged_diff
644 .as_ref()
645 .and_then(|weak| weak.upgrade())
646 {
647 if let Some(task) =
648 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
649 {
650 return cx.background_executor().spawn(async move {
651 task.await;
652 Ok(unstaged_diff)
653 });
654 }
655 return Task::ready(Ok(unstaged_diff));
656 }
657
658 let Some((repo, repo_path)) =
659 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
660 else {
661 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
662 };
663
664 let task = self
665 .loading_diffs
666 .entry((buffer_id, DiffKind::Unstaged))
667 .or_insert_with(|| {
668 let staged_text = repo.update(cx, |repo, cx| {
669 repo.load_staged_text(buffer_id, repo_path, cx)
670 });
671 cx.spawn(async move |this, cx| {
672 Self::open_diff_internal(
673 this,
674 DiffKind::Unstaged,
675 staged_text.await.map(DiffBasesChange::SetIndex),
676 buffer,
677 cx,
678 )
679 .await
680 .map_err(Arc::new)
681 })
682 .shared()
683 })
684 .clone();
685
686 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
687 }
688
689 pub fn open_diff_since(
690 &mut self,
691 oid: Option<git::Oid>,
692 buffer: Entity<Buffer>,
693 repo: Entity<Repository>,
694 languages: Arc<LanguageRegistry>,
695 cx: &mut Context<Self>,
696 ) -> Task<Result<Entity<BufferDiff>>> {
697 cx.spawn(async move |this, cx| {
698 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
699 let content = match oid {
700 None => None,
701 Some(oid) => Some(
702 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
703 .await?,
704 ),
705 };
706 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
707
708 buffer_diff
709 .update(cx, |buffer_diff, cx| {
710 buffer_diff.set_base_text(
711 content.map(Arc::new),
712 buffer_snapshot.language().cloned(),
713 Some(languages.clone()),
714 buffer_snapshot.text,
715 cx,
716 )
717 })?
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 })?;
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
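    /// Returns a diff of the buffer's contents against HEAD, with the unstaged
    /// diff attached as its secondary diff. The diff is created on first request
    /// and cached per buffer thereafter.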
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language = language;
825 diff_state.language_registry = language_registry;
826
827 match kind {
828 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
829 DiffKind::Uncommitted => {
830 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
831 diff
832 } else {
833 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
834 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
835 unstaged_diff
836 };
837
838 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
839 diff_state.uncommitted_diff = Some(diff.downgrade())
840 }
841 }
842
843 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
844 let rx = diff_state.wait_for_recalculation();
845
846 anyhow::Ok(async move {
847 if let Some(rx) = rx {
848 rx.await;
849 }
850 Ok(diff)
851 })
852 })
853 })??
854 .await
855 }
856
857 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
858 let diff_state = self.diffs.get(&buffer_id)?;
859 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
860 }
861
862 pub fn get_uncommitted_diff(
863 &self,
864 buffer_id: BufferId,
865 cx: &App,
866 ) -> Option<Entity<BufferDiff>> {
867 let diff_state = self.diffs.get(&buffer_id)?;
868 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
869 }
870
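    /// Returns the conflict set for the given buffer, creating it if necessary
    /// and scheduling a reparse of the buffer's conflict markers.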
871 pub fn open_conflict_set(
872 &mut self,
873 buffer: Entity<Buffer>,
874 cx: &mut Context<Self>,
875 ) -> Entity<ConflictSet> {
876 log::debug!("open conflict set");
877 let buffer_id = buffer.read(cx).remote_id();
878
879 if let Some(git_state) = self.diffs.get(&buffer_id)
880 && let Some(conflict_set) = git_state
881 .read(cx)
882 .conflict_set
883 .as_ref()
884 .and_then(|weak| weak.upgrade())
885 {
            let buffer_snapshot = buffer.read(cx).text_snapshot();
888
889 git_state.update(cx, |state, cx| {
890 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
891 });
892
893 return conflict_set;
894 }
895
896 let is_unmerged = self
897 .repository_and_path_for_buffer_id(buffer_id, cx)
898 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
899 let git_store = cx.weak_entity();
900 let buffer_git_state = self
901 .diffs
902 .entry(buffer_id)
903 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
904 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
905
906 self._subscriptions
907 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
908 cx.emit(GitStoreEvent::ConflictsUpdated);
909 }));
910
911 buffer_git_state.update(cx, |state, cx| {
912 state.conflict_set = Some(conflict_set.downgrade());
913 let buffer_snapshot = buffer.read(cx).text_snapshot();
914 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
915 });
916
917 conflict_set
918 }
919
920 pub fn project_path_git_status(
921 &self,
922 project_path: &ProjectPath,
923 cx: &App,
924 ) -> Option<FileStatus> {
925 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
926 Some(repo.read(cx).status_for_path(&repo_path)?.status)
927 }
928
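    /// Captures a checkpoint of every repository in the store.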
929 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
930 let mut work_directory_abs_paths = Vec::new();
931 let mut checkpoints = Vec::new();
932 for repository in self.repositories.values() {
933 repository.update(cx, |repository, _| {
934 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
935 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
936 });
937 }
938
939 cx.background_executor().spawn(async move {
940 let checkpoints = future::try_join_all(checkpoints).await?;
941 Ok(GitStoreCheckpoint {
942 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
943 .into_iter()
944 .zip(checkpoints)
945 .collect(),
946 })
947 })
948 }
949
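    /// Restores each repository to the state recorded in the given checkpoint,
    /// skipping entries whose repository is no longer present.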
950 pub fn restore_checkpoint(
951 &self,
952 checkpoint: GitStoreCheckpoint,
953 cx: &mut App,
954 ) -> Task<Result<()>> {
955 let repositories_by_work_dir_abs_path = self
956 .repositories
957 .values()
958 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
959 .collect::<HashMap<_, _>>();
960
961 let mut tasks = Vec::new();
962 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
963 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
964 let restore = repository.update(cx, |repository, _| {
965 repository.restore_checkpoint(checkpoint)
966 });
967 tasks.push(async move { restore.await? });
968 }
969 }
970 cx.background_spawn(async move {
971 future::try_join_all(tasks).await?;
972 Ok(())
973 })
974 }
975
976 /// Compares two checkpoints, returning true if they are equal.
977 pub fn compare_checkpoints(
978 &self,
979 left: GitStoreCheckpoint,
980 mut right: GitStoreCheckpoint,
981 cx: &mut App,
982 ) -> Task<Result<bool>> {
983 let repositories_by_work_dir_abs_path = self
984 .repositories
985 .values()
986 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
987 .collect::<HashMap<_, _>>();
988
989 let mut tasks = Vec::new();
990 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
991 if let Some(right_checkpoint) = right
992 .checkpoints_by_work_dir_abs_path
993 .remove(&work_dir_abs_path)
994 {
995 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
996 {
997 let compare = repository.update(cx, |repository, _| {
998 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
999 });
1000
1001 tasks.push(async move { compare.await? });
1002 }
1003 } else {
1004 return Task::ready(Ok(false));
1005 }
1006 }
1007 cx.background_spawn(async move {
1008 Ok(future::try_join_all(tasks)
1009 .await?
1010 .into_iter()
1011 .all(|result| result))
1012 })
1013 }
1014
1015 /// Blames a buffer.
1016 pub fn blame_buffer(
1017 &self,
1018 buffer: &Entity<Buffer>,
1019 version: Option<clock::Global>,
1020 cx: &mut Context<Self>,
1021 ) -> Task<Result<Option<Blame>>> {
1022 let buffer = buffer.read(cx);
1023 let Some((repo, repo_path)) =
1024 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1025 else {
1026 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1027 };
1028 let content = match &version {
1029 Some(version) => buffer.rope_for_version(version),
1030 None => buffer.as_rope().clone(),
1031 };
1032 let version = version.unwrap_or(buffer.version());
1033 let buffer_id = buffer.remote_id();
1034
1035 let repo = repo.downgrade();
1036 cx.spawn(async move |_, cx| {
1037 let repository_state = repo
1038 .update(cx, |repo, _| repo.repository_state.clone())?
1039 .await
1040 .map_err(|err| anyhow::anyhow!(err))?;
1041 match repository_state {
1042 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1043 .blame(repo_path.clone(), content)
1044 .await
1045 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1046 .map(Some),
1047 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1048 let response = client
1049 .request(proto::BlameBuffer {
1050 project_id: project_id.to_proto(),
1051 buffer_id: buffer_id.into(),
1052 version: serialize_version(&version),
1053 })
1054 .await?;
1055 Ok(deserialize_blame_buffer_response(response))
1056 }
1057 }
1058 })
1059 }
1060
1061 pub fn file_history(
1062 &self,
1063 repo: &Entity<Repository>,
1064 path: RepoPath,
1065 cx: &mut App,
1066 ) -> Task<Result<git::repository::FileHistory>> {
1067 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1068
1069 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1070 }
1071
1072 pub fn file_history_paginated(
1073 &self,
1074 repo: &Entity<Repository>,
1075 path: RepoPath,
1076 skip: usize,
1077 limit: Option<usize>,
1078 cx: &mut App,
1079 ) -> Task<Result<git::repository::FileHistory>> {
1080 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1081
1082 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1083 }
1084
1085 pub fn get_permalink_to_line(
1086 &self,
1087 buffer: &Entity<Buffer>,
1088 selection: Range<u32>,
1089 cx: &mut App,
1090 ) -> Task<Result<url::Url>> {
1091 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1092 return Task::ready(Err(anyhow!("buffer has no file")));
1093 };
1094
1095 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1096 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1097 cx,
1098 ) else {
1099 // If we're not in a Git repo, check whether this is a Rust source
1100 // file in the Cargo registry (presumably opened with go-to-definition
1101 // from a normal Rust file). If so, we can put together a permalink
1102 // using crate metadata.
1103 if buffer
1104 .read(cx)
1105 .language()
1106 .is_none_or(|lang| lang.name() != "Rust".into())
1107 {
1108 return Task::ready(Err(anyhow!("no permalink available")));
1109 }
1110 let file_path = file.worktree.read(cx).absolutize(&file.path);
1111 return cx.spawn(async move |cx| {
1112 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1113 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1114 .context("no permalink available")
1115 });
1116 };
1117
1118 let buffer_id = buffer.read(cx).remote_id();
1119 let branch = repo.read(cx).branch.clone();
1120 let remote = branch
1121 .as_ref()
1122 .and_then(|b| b.upstream.as_ref())
1123 .and_then(|b| b.remote_name())
1124 .unwrap_or("origin")
1125 .to_string();
1126
1127 let rx = repo.update(cx, |repo, _| {
1128 repo.send_job(None, move |state, cx| async move {
1129 match state {
1130 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1131 let origin_url = backend
1132 .remote_url(&remote)
1133 .with_context(|| format!("remote \"{remote}\" not found"))?;
1134
1135 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1136
1137 let provider_registry =
1138 cx.update(GitHostingProviderRegistry::default_global)?;
1139
1140 let (provider, remote) =
1141 parse_git_remote_url(provider_registry, &origin_url)
1142 .context("parsing Git remote URL")?;
1143
1144 Ok(provider.build_permalink(
1145 remote,
1146 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1147 ))
1148 }
1149 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1150 let response = client
1151 .request(proto::GetPermalinkToLine {
1152 project_id: project_id.to_proto(),
1153 buffer_id: buffer_id.into(),
1154 selection: Some(proto::Range {
1155 start: selection.start as u64,
1156 end: selection.end as u64,
1157 }),
1158 })
1159 .await?;
1160
1161 url::Url::parse(&response.permalink).context("failed to parse permalink")
1162 }
1163 }
1164 })
1165 });
1166 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1167 }
1168
1169 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1170 match &self.state {
1171 GitStoreState::Local {
1172 downstream: downstream_client,
1173 ..
1174 } => downstream_client
1175 .as_ref()
1176 .map(|state| (state.client.clone(), state.project_id)),
1177 GitStoreState::Remote {
1178 downstream: downstream_client,
1179 ..
1180 } => downstream_client.clone(),
1181 }
1182 }
1183
1184 fn upstream_client(&self) -> Option<AnyProtoClient> {
1185 match &self.state {
1186 GitStoreState::Local { .. } => None,
1187 GitStoreState::Remote {
1188 upstream_client, ..
1189 } => Some(upstream_client.clone()),
1190 }
1191 }
1192
1193 fn on_worktree_store_event(
1194 &mut self,
1195 worktree_store: Entity<WorktreeStore>,
1196 event: &WorktreeStoreEvent,
1197 cx: &mut Context<Self>,
1198 ) {
1199 let GitStoreState::Local {
1200 project_environment,
1201 downstream,
1202 next_repository_id,
1203 fs,
1204 } = &self.state
1205 else {
1206 return;
1207 };
1208
1209 match event {
1210 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1211 if let Some(worktree) = self
1212 .worktree_store
1213 .read(cx)
1214 .worktree_for_id(*worktree_id, cx)
1215 {
1216 let paths_by_git_repo =
1217 self.process_updated_entries(&worktree, updated_entries, cx);
1218 let downstream = downstream
1219 .as_ref()
1220 .map(|downstream| downstream.updates_tx.clone());
1221 cx.spawn(async move |_, cx| {
1222 let paths_by_git_repo = paths_by_git_repo.await;
1223 for (repo, paths) in paths_by_git_repo {
1224 repo.update(cx, |repo, cx| {
1225 repo.paths_changed(paths, downstream.clone(), cx);
1226 })
1227 .ok();
1228 }
1229 })
1230 .detach();
1231 }
1232 }
1233 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1234 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1235 else {
1236 return;
1237 };
1238 if !worktree.read(cx).is_visible() {
1239 log::debug!(
1240 "not adding repositories for local worktree {:?} because it's not visible",
1241 worktree.read(cx).abs_path()
1242 );
1243 return;
1244 }
1245 self.update_repositories_from_worktree(
1246 *worktree_id,
1247 project_environment.clone(),
1248 next_repository_id.clone(),
1249 downstream
1250 .as_ref()
1251 .map(|downstream| downstream.updates_tx.clone()),
1252 changed_repos.clone(),
1253 fs.clone(),
1254 cx,
1255 );
1256 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1257 }
1258 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1259 let repos_without_worktree: Vec<RepositoryId> = self
1260 .worktree_ids
1261 .iter_mut()
1262 .filter_map(|(repo_id, worktree_ids)| {
1263 worktree_ids.remove(worktree_id);
1264 if worktree_ids.is_empty() {
1265 Some(*repo_id)
1266 } else {
1267 None
1268 }
1269 })
1270 .collect();
1271 let is_active_repo_removed = repos_without_worktree
1272 .iter()
1273 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1274
1275 for repo_id in repos_without_worktree {
1276 self.repositories.remove(&repo_id);
1277 self.worktree_ids.remove(&repo_id);
1278 if let Some(updates_tx) =
1279 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1280 {
1281 updates_tx
1282 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1283 .ok();
1284 }
1285 }
1286
1287 if is_active_repo_removed {
1288 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1289 self.active_repo_id = Some(repo_id);
1290 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1291 } else {
1292 self.active_repo_id = None;
1293 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1294 }
1295 }
1296 }
1297 _ => {}
1298 }
1299 }

    fn on_repository_event(
1301 &mut self,
1302 repo: Entity<Repository>,
1303 event: &RepositoryEvent,
1304 cx: &mut Context<Self>,
1305 ) {
1306 let id = repo.read(cx).id;
1307 let repo_snapshot = repo.read(cx).snapshot.clone();
1308 for (buffer_id, diff) in self.diffs.iter() {
1309 if let Some((buffer_repo, repo_path)) =
1310 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1311 && buffer_repo == repo
1312 {
1313 diff.update(cx, |diff, cx| {
1314 if let Some(conflict_set) = &diff.conflict_set {
1315 let conflict_status_changed =
1316 conflict_set.update(cx, |conflict_set, cx| {
1317 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1318 conflict_set.set_has_conflict(has_conflict, cx)
1319 })?;
1320 if conflict_status_changed {
1321 let buffer_store = self.buffer_store.read(cx);
1322 if let Some(buffer) = buffer_store.get(*buffer_id) {
1323 let _ = diff
1324 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1325 }
1326 }
1327 }
1328 anyhow::Ok(())
1329 })
1330 .ok();
1331 }
1332 }
1333 cx.emit(GitStoreEvent::RepositoryUpdated(
1334 id,
1335 event.clone(),
1336 self.active_repo_id == Some(id),
1337 ))
1338 }
1339
1340 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1341 cx.emit(GitStoreEvent::JobsUpdated)
1342 }
1343
    /// Updates our list of repositories and schedules git scans in response to
    /// a notification from a worktree.
1345 fn update_repositories_from_worktree(
1346 &mut self,
1347 worktree_id: WorktreeId,
1348 project_environment: Entity<ProjectEnvironment>,
1349 next_repository_id: Arc<AtomicU64>,
1350 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1351 updated_git_repositories: UpdatedGitRepositoriesSet,
1352 fs: Arc<dyn Fs>,
1353 cx: &mut Context<Self>,
1354 ) {
1355 let mut removed_ids = Vec::new();
1356 for update in updated_git_repositories.iter() {
1357 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1358 let existing_work_directory_abs_path =
1359 repo.read(cx).work_directory_abs_path.clone();
1360 Some(&existing_work_directory_abs_path)
1361 == update.old_work_directory_abs_path.as_ref()
1362 || Some(&existing_work_directory_abs_path)
1363 == update.new_work_directory_abs_path.as_ref()
1364 }) {
1365 let repo_id = *id;
1366 if let Some(new_work_directory_abs_path) =
1367 update.new_work_directory_abs_path.clone()
1368 {
1369 self.worktree_ids
1370 .entry(repo_id)
1371 .or_insert_with(HashSet::new)
1372 .insert(worktree_id);
1373 existing.update(cx, |existing, cx| {
1374 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1375 existing.schedule_scan(updates_tx.clone(), cx);
1376 });
                } else if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                    worktree_ids.remove(&worktree_id);
                    if worktree_ids.is_empty() {
                        removed_ids.push(repo_id);
                    }
                }
1385 } else if let UpdatedGitRepository {
1386 new_work_directory_abs_path: Some(work_directory_abs_path),
1387 dot_git_abs_path: Some(dot_git_abs_path),
1388 repository_dir_abs_path: Some(_repository_dir_abs_path),
1389 common_dir_abs_path: Some(_common_dir_abs_path),
1390 ..
1391 } = update
1392 {
1393 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1394 let git_store = cx.weak_entity();
1395 let repo = cx.new(|cx| {
1396 let mut repo = Repository::local(
1397 id,
1398 work_directory_abs_path.clone(),
1399 dot_git_abs_path.clone(),
1400 project_environment.downgrade(),
1401 fs.clone(),
1402 git_store,
1403 cx,
1404 );
1405 if let Some(updates_tx) = updates_tx.as_ref() {
1406 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1407 updates_tx
1408 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1409 .ok();
1410 }
1411 repo.schedule_scan(updates_tx.clone(), cx);
1412 repo
1413 });
1414 self._subscriptions
1415 .push(cx.subscribe(&repo, Self::on_repository_event));
1416 self._subscriptions
1417 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1418 self.repositories.insert(id, repo);
1419 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1420 cx.emit(GitStoreEvent::RepositoryAdded);
1421 self.active_repo_id.get_or_insert_with(|| {
1422 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1423 id
1424 });
1425 }
1426 }
1427
1428 for id in removed_ids {
1429 if self.active_repo_id == Some(id) {
1430 self.active_repo_id = None;
1431 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1432 }
1433 self.repositories.remove(&id);
1434 if let Some(updates_tx) = updates_tx.as_ref() {
1435 updates_tx
1436 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1437 .ok();
1438 }
1439 }
1440 }
1441
1442 fn on_buffer_store_event(
1443 &mut self,
1444 _: Entity<BufferStore>,
1445 event: &BufferStoreEvent,
1446 cx: &mut Context<Self>,
1447 ) {
1448 match event {
1449 BufferStoreEvent::BufferAdded(buffer) => {
1450 cx.subscribe(buffer, |this, buffer, event, cx| {
1451 if let BufferEvent::LanguageChanged = event {
1452 let buffer_id = buffer.read(cx).remote_id();
1453 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1454 diff_state.update(cx, |diff_state, cx| {
1455 diff_state.buffer_language_changed(buffer, cx);
1456 });
1457 }
1458 }
1459 })
1460 .detach();
1461 }
1462 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1463 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1464 diffs.remove(buffer_id);
1465 }
1466 }
1467 BufferStoreEvent::BufferDropped(buffer_id) => {
1468 self.diffs.remove(buffer_id);
1469 for diffs in self.shared_diffs.values_mut() {
1470 diffs.remove(buffer_id);
1471 }
1472 }
1473 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1474 // Whenever a buffer's file path changes, it's possible that the
1475 // new path is actually a path that is being tracked by a git
1476 // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1478 let buffer_id = buffer.read(cx).remote_id();
1479 let diff_state = self.diffs.get(&buffer_id);
1480 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1481
1482 if let Some(diff_state) = diff_state
1483 && let Some((repo, repo_path)) = repo
1484 {
1485 let buffer = buffer.clone();
1486 let diff_state = diff_state.clone();
1487
1488 cx.spawn(async move |_git_store, cx| {
1489 async {
1490 let diff_bases_change = repo
1491 .update(cx, |repo, cx| {
1492 repo.load_committed_text(buffer_id, repo_path, cx)
1493 })?
1494 .await?;
1495
1496 diff_state.update(cx, |diff_state, cx| {
1497 let buffer_snapshot = buffer.read(cx).text_snapshot();
1498 diff_state.diff_bases_changed(
1499 buffer_snapshot,
1500 Some(diff_bases_change),
1501 cx,
1502 );
1503 })
1504 }
1505 .await
1506 .log_err();
1507 })
1508 .detach();
1509 }
1510 }
1511 _ => {}
1512 }
1513 }
1514
1515 pub fn recalculate_buffer_diffs(
1516 &mut self,
1517 buffers: Vec<Entity<Buffer>>,
1518 cx: &mut Context<Self>,
1519 ) -> impl Future<Output = ()> + use<> {
1520 let mut futures = Vec::new();
1521 for buffer in buffers {
1522 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1523 let buffer = buffer.read(cx).text_snapshot();
1524 diff_state.update(cx, |diff_state, cx| {
1525 diff_state.recalculate_diffs(buffer.clone(), cx);
1526 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1527 });
1528 futures.push(diff_state.update(cx, |diff_state, cx| {
1529 diff_state
1530 .reparse_conflict_markers(buffer, cx)
1531 .map(|_| {})
1532 .boxed()
1533 }));
1534 }
1535 }
1536 async move {
1537 futures::future::join_all(futures).await;
1538 }
1539 }
1540
1541 fn on_buffer_diff_event(
1542 &mut self,
1543 diff: Entity<buffer_diff::BufferDiff>,
1544 event: &BufferDiffEvent,
1545 cx: &mut Context<Self>,
1546 ) {
1547 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1548 let buffer_id = diff.read(cx).buffer_id;
1549 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1550 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1551 diff_state.hunk_staging_operation_count += 1;
1552 diff_state.hunk_staging_operation_count
1553 });
1554 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1555 let recv = repo.update(cx, |repo, cx| {
1556 log::debug!("hunks changed for {}", path.as_unix_str());
1557 repo.spawn_set_index_text_job(
1558 path,
1559 new_index_text.as_ref().map(|rope| rope.to_string()),
1560 Some(hunk_staging_operation_count),
1561 cx,
1562 )
1563 });
1564 let diff = diff.downgrade();
1565 cx.spawn(async move |this, cx| {
1566 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1567 diff.update(cx, |diff, cx| {
1568 diff.clear_pending_hunks(cx);
1569 })
1570 .ok();
1571 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1572 .ok();
1573 }
1574 })
1575 .detach();
1576 }
1577 }
1578 }
1579 }
1580
1581 fn local_worktree_git_repos_changed(
1582 &mut self,
1583 worktree: Entity<Worktree>,
1584 changed_repos: &UpdatedGitRepositoriesSet,
1585 cx: &mut Context<Self>,
1586 ) {
1587 log::debug!("local worktree repos changed");
1588 debug_assert!(worktree.read(cx).is_local());
1589
1590 for repository in self.repositories.values() {
1591 repository.update(cx, |repository, cx| {
1592 let repo_abs_path = &repository.work_directory_abs_path;
1593 if changed_repos.iter().any(|update| {
1594 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1595 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1596 }) {
1597 repository.reload_buffer_diff_bases(cx);
1598 }
1599 });
1600 }
1601 }
1602
1603 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1604 &self.repositories
1605 }
1606
1607 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1608 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1609 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1610 Some(status.status)
1611 }
1612
1613 pub fn repository_and_path_for_buffer_id(
1614 &self,
1615 buffer_id: BufferId,
1616 cx: &App,
1617 ) -> Option<(Entity<Repository>, RepoPath)> {
1618 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1619 let project_path = buffer.read(cx).project_path(cx)?;
1620 self.repository_and_path_for_project_path(&project_path, cx)
1621 }
1622
1623 pub fn repository_and_path_for_project_path(
1624 &self,
1625 path: &ProjectPath,
1626 cx: &App,
1627 ) -> Option<(Entity<Repository>, RepoPath)> {
1628 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
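        // When repositories are nested, prefer the innermost repository that
        // contains the path: its work directory is the longest, and therefore
        // greatest, ancestor path.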
1629 self.repositories
1630 .values()
1631 .filter_map(|repo| {
1632 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1633 Some((repo.clone(), repo_path))
1634 })
1635 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1636 }
1637
1638 pub fn git_init(
1639 &self,
1640 path: Arc<Path>,
1641 fallback_branch_name: String,
1642 cx: &App,
1643 ) -> Task<Result<()>> {
1644 match &self.state {
1645 GitStoreState::Local { fs, .. } => {
1646 let fs = fs.clone();
1647 cx.background_executor()
1648 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1649 }
1650 GitStoreState::Remote {
1651 upstream_client,
1652 upstream_project_id: project_id,
1653 ..
1654 } => {
1655 let client = upstream_client.clone();
1656 let project_id = *project_id;
1657 cx.background_executor().spawn(async move {
1658 client
1659 .request(proto::GitInit {
                            project_id,
1661 abs_path: path.to_string_lossy().into_owned(),
1662 fallback_branch_name,
1663 })
1664 .await?;
1665 Ok(())
1666 })
1667 }
1668 }
1669 }
1670
1671 pub fn git_clone(
1672 &self,
1673 repo: String,
1674 path: impl Into<Arc<std::path::Path>>,
1675 cx: &App,
1676 ) -> Task<Result<()>> {
1677 let path = path.into();
1678 match &self.state {
1679 GitStoreState::Local { fs, .. } => {
1680 let fs = fs.clone();
1681 cx.background_executor()
1682 .spawn(async move { fs.git_clone(&repo, &path).await })
1683 }
1684 GitStoreState::Remote {
1685 upstream_client,
1686 upstream_project_id,
1687 ..
1688 } => {
1689 if upstream_client.is_via_collab() {
1690 return Task::ready(Err(anyhow!(
1691 "Git Clone isn't supported for project guests"
1692 )));
1693 }
1694 let request = upstream_client.request(proto::GitClone {
1695 project_id: *upstream_project_id,
1696 abs_path: path.to_string_lossy().into_owned(),
1697 remote_repo: repo,
1698 });
1699
1700 cx.background_spawn(async move {
1701 let result = request.await?;
1702
1703 match result.success {
1704 true => Ok(()),
1705 false => Err(anyhow!("Git Clone failed")),
1706 }
1707 })
1708 }
1709 }
1710 }
1711
1712 async fn handle_update_repository(
1713 this: Entity<Self>,
1714 envelope: TypedEnvelope<proto::UpdateRepository>,
1715 mut cx: AsyncApp,
1716 ) -> Result<()> {
1717 this.update(&mut cx, |this, cx| {
1718 let path_style = this.worktree_store.read(cx).path_style();
1719 let mut update = envelope.payload;
1720
1721 let id = RepositoryId::from_proto(update.id);
1722 let client = this.upstream_client().context("no upstream client")?;
1723
1724 let mut repo_subscription = None;
1725 let repo = this.repositories.entry(id).or_insert_with(|| {
1726 let git_store = cx.weak_entity();
1727 let repo = cx.new(|cx| {
1728 Repository::remote(
1729 id,
1730 Path::new(&update.abs_path).into(),
1731 path_style,
1732 ProjectId(update.project_id),
1733 client,
1734 git_store,
1735 cx,
1736 )
1737 });
1738 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1739 cx.emit(GitStoreEvent::RepositoryAdded);
1740 repo
1741 });
1742 this._subscriptions.extend(repo_subscription);
1743
1744 repo.update(cx, {
1745 let update = update.clone();
1746 |repo, cx| repo.apply_remote_update(update, cx)
1747 })?;
1748
1749 this.active_repo_id.get_or_insert_with(|| {
1750 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1751 id
1752 });
1753
1754 if let Some((client, project_id)) = this.downstream_client() {
1755 update.project_id = project_id.to_proto();
1756 client.send(update).log_err();
1757 }
1758 Ok(())
1759 })?
1760 }
1761
1762 async fn handle_remove_repository(
1763 this: Entity<Self>,
1764 envelope: TypedEnvelope<proto::RemoveRepository>,
1765 mut cx: AsyncApp,
1766 ) -> Result<()> {
1767 this.update(&mut cx, |this, cx| {
1768 let mut update = envelope.payload;
1769 let id = RepositoryId::from_proto(update.id);
1770 this.repositories.remove(&id);
1771 if let Some((client, project_id)) = this.downstream_client() {
1772 update.project_id = project_id.to_proto();
1773 client.send(update).log_err();
1774 }
1775 if this.active_repo_id == Some(id) {
1776 this.active_repo_id = None;
1777 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1778 }
1779 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1780 })
1781 }
1782
1783 async fn handle_git_init(
1784 this: Entity<Self>,
1785 envelope: TypedEnvelope<proto::GitInit>,
1786 cx: AsyncApp,
1787 ) -> Result<proto::Ack> {
1788 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1789 let name = envelope.payload.fallback_branch_name;
1790 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1791 .await?;
1792
1793 Ok(proto::Ack {})
1794 }
1795
1796 async fn handle_git_clone(
1797 this: Entity<Self>,
1798 envelope: TypedEnvelope<proto::GitClone>,
1799 cx: AsyncApp,
1800 ) -> Result<proto::GitCloneResponse> {
1801 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1802 let repo_name = envelope.payload.remote_repo;
1803 let result = cx
1804 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1805 .await;
1806
1807 Ok(proto::GitCloneResponse {
1808 success: result.is_ok(),
1809 })
1810 }
1811
1812 async fn handle_fetch(
1813 this: Entity<Self>,
1814 envelope: TypedEnvelope<proto::Fetch>,
1815 mut cx: AsyncApp,
1816 ) -> Result<proto::RemoteMessageResponse> {
1817 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1818 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1819 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1820 let askpass_id = envelope.payload.askpass_id;
1821
1822 let askpass = make_remote_delegate(
1823 this,
1824 envelope.payload.project_id,
1825 repository_id,
1826 askpass_id,
1827 &mut cx,
1828 );
1829
1830 let remote_output = repository_handle
1831 .update(&mut cx, |repository_handle, cx| {
1832 repository_handle.fetch(fetch_options, askpass, cx)
1833 })?
1834 .await??;
1835
1836 Ok(proto::RemoteMessageResponse {
1837 stdout: remote_output.stdout,
1838 stderr: remote_output.stderr,
1839 })
1840 }
1841
1842 async fn handle_push(
1843 this: Entity<Self>,
1844 envelope: TypedEnvelope<proto::Push>,
1845 mut cx: AsyncApp,
1846 ) -> Result<proto::RemoteMessageResponse> {
1847 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1848 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1849
1850 let askpass_id = envelope.payload.askpass_id;
1851 let askpass = make_remote_delegate(
1852 this,
1853 envelope.payload.project_id,
1854 repository_id,
1855 askpass_id,
1856 &mut cx,
1857 );
1858
1859 let options = envelope
1860 .payload
1861 .options
1862 .as_ref()
1863 .map(|_| match envelope.payload.options() {
1864 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1865 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1866 });
1867
1868 let branch_name = envelope.payload.branch_name.into();
1869 let remote_name = envelope.payload.remote_name.into();
1870
1871 let remote_output = repository_handle
1872 .update(&mut cx, |repository_handle, cx| {
1873 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1874 })?
1875 .await??;
1876 Ok(proto::RemoteMessageResponse {
1877 stdout: remote_output.stdout,
1878 stderr: remote_output.stderr,
1879 })
1880 }
1881
1882 async fn handle_pull(
1883 this: Entity<Self>,
1884 envelope: TypedEnvelope<proto::Pull>,
1885 mut cx: AsyncApp,
1886 ) -> Result<proto::RemoteMessageResponse> {
1887 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1888 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1889 let askpass_id = envelope.payload.askpass_id;
1890 let askpass = make_remote_delegate(
1891 this,
1892 envelope.payload.project_id,
1893 repository_id,
1894 askpass_id,
1895 &mut cx,
1896 );
1897
1898 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1899 let remote_name = envelope.payload.remote_name.into();
1900 let rebase = envelope.payload.rebase;
1901
1902 let remote_message = repository_handle
1903 .update(&mut cx, |repository_handle, cx| {
1904 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1905 })?
1906 .await??;
1907
1908 Ok(proto::RemoteMessageResponse {
1909 stdout: remote_message.stdout,
1910 stderr: remote_message.stderr,
1911 })
1912 }
1913
1914 async fn handle_stage(
1915 this: Entity<Self>,
1916 envelope: TypedEnvelope<proto::Stage>,
1917 mut cx: AsyncApp,
1918 ) -> Result<proto::Ack> {
1919 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1920 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1921
1922 let entries = envelope
1923 .payload
1924 .paths
1925 .into_iter()
1926 .map(|path| RepoPath::new(&path))
1927 .collect::<Result<Vec<_>>>()?;
1928
1929 repository_handle
1930 .update(&mut cx, |repository_handle, cx| {
1931 repository_handle.stage_entries(entries, cx)
1932 })?
1933 .await?;
1934 Ok(proto::Ack {})
1935 }
1936
1937 async fn handle_unstage(
1938 this: Entity<Self>,
1939 envelope: TypedEnvelope<proto::Unstage>,
1940 mut cx: AsyncApp,
1941 ) -> Result<proto::Ack> {
1942 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1943 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1944
1945 let entries = envelope
1946 .payload
1947 .paths
1948 .into_iter()
1949 .map(|path| RepoPath::new(&path))
1950 .collect::<Result<Vec<_>>>()?;
1951
1952 repository_handle
1953 .update(&mut cx, |repository_handle, cx| {
1954 repository_handle.unstage_entries(entries, cx)
1955 })?
1956 .await?;
1957
1958 Ok(proto::Ack {})
1959 }
1960
1961 async fn handle_stash(
1962 this: Entity<Self>,
1963 envelope: TypedEnvelope<proto::Stash>,
1964 mut cx: AsyncApp,
1965 ) -> Result<proto::Ack> {
1966 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1967 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1968
1969 let entries = envelope
1970 .payload
1971 .paths
1972 .into_iter()
1973 .map(|path| RepoPath::new(&path))
1974 .collect::<Result<Vec<_>>>()?;
1975
1976 repository_handle
1977 .update(&mut cx, |repository_handle, cx| {
1978 repository_handle.stash_entries(entries, cx)
1979 })?
1980 .await?;
1981
1982 Ok(proto::Ack {})
1983 }
1984
1985 async fn handle_stash_pop(
1986 this: Entity<Self>,
1987 envelope: TypedEnvelope<proto::StashPop>,
1988 mut cx: AsyncApp,
1989 ) -> Result<proto::Ack> {
1990 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1991 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1992 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1993
1994 repository_handle
1995 .update(&mut cx, |repository_handle, cx| {
1996 repository_handle.stash_pop(stash_index, cx)
1997 })?
1998 .await?;
1999
2000 Ok(proto::Ack {})
2001 }
2002
2003 async fn handle_stash_apply(
2004 this: Entity<Self>,
2005 envelope: TypedEnvelope<proto::StashApply>,
2006 mut cx: AsyncApp,
2007 ) -> Result<proto::Ack> {
2008 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2009 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2010 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2011
2012 repository_handle
2013 .update(&mut cx, |repository_handle, cx| {
2014 repository_handle.stash_apply(stash_index, cx)
2015 })?
2016 .await?;
2017
2018 Ok(proto::Ack {})
2019 }
2020
2021 async fn handle_stash_drop(
2022 this: Entity<Self>,
2023 envelope: TypedEnvelope<proto::StashDrop>,
2024 mut cx: AsyncApp,
2025 ) -> Result<proto::Ack> {
2026 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2027 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2028 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2029
2030 repository_handle
2031 .update(&mut cx, |repository_handle, cx| {
2032 repository_handle.stash_drop(stash_index, cx)
2033 })?
2034 .await??;
2035
2036 Ok(proto::Ack {})
2037 }
2038
2039 async fn handle_set_index_text(
2040 this: Entity<Self>,
2041 envelope: TypedEnvelope<proto::SetIndexText>,
2042 mut cx: AsyncApp,
2043 ) -> Result<proto::Ack> {
2044 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2045 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2046 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2047
2048 repository_handle
2049 .update(&mut cx, |repository_handle, cx| {
2050 repository_handle.spawn_set_index_text_job(
2051 repo_path,
2052 envelope.payload.text,
2053 None,
2054 cx,
2055 )
2056 })?
2057 .await??;
2058 Ok(proto::Ack {})
2059 }
2060
2061 async fn handle_run_hook(
2062 this: Entity<Self>,
2063 envelope: TypedEnvelope<proto::RunGitHook>,
2064 mut cx: AsyncApp,
2065 ) -> Result<proto::Ack> {
2066 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2067 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2068 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2069 repository_handle
2070 .update(&mut cx, |repository_handle, cx| {
2071 repository_handle.run_hook(hook, cx)
2072 })?
2073 .await??;
2074 Ok(proto::Ack {})
2075 }
2076
2077 async fn handle_commit(
2078 this: Entity<Self>,
2079 envelope: TypedEnvelope<proto::Commit>,
2080 mut cx: AsyncApp,
2081 ) -> Result<proto::Ack> {
2082 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2083 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2084 let askpass_id = envelope.payload.askpass_id;
2085
2086 let askpass = make_remote_delegate(
2087 this,
2088 envelope.payload.project_id,
2089 repository_id,
2090 askpass_id,
2091 &mut cx,
2092 );
2093
2094 let message = SharedString::from(envelope.payload.message);
2095 let name = envelope.payload.name.map(SharedString::from);
2096 let email = envelope.payload.email.map(SharedString::from);
2097 let options = envelope.payload.options.unwrap_or_default();
2098
2099 repository_handle
2100 .update(&mut cx, |repository_handle, cx| {
2101 repository_handle.commit(
2102 message,
2103 name.zip(email),
2104 CommitOptions {
2105 amend: options.amend,
2106 signoff: options.signoff,
2107 },
2108 askpass,
2109 cx,
2110 )
2111 })?
2112 .await??;
2113 Ok(proto::Ack {})
2114 }
2115
2116 async fn handle_get_remotes(
2117 this: Entity<Self>,
2118 envelope: TypedEnvelope<proto::GetRemotes>,
2119 mut cx: AsyncApp,
2120 ) -> Result<proto::GetRemotesResponse> {
2121 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2122 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2123
2124 let branch_name = envelope.payload.branch_name;
2125 let is_push = envelope.payload.is_push;
2126
2127 let remotes = repository_handle
2128 .update(&mut cx, |repository_handle, _| {
2129 repository_handle.get_remotes(branch_name, is_push)
2130 })?
2131 .await??;
2132
2133 Ok(proto::GetRemotesResponse {
2134 remotes: remotes
2135 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2138 })
2139 .collect::<Vec<_>>(),
2140 })
2141 }
2142
2143 async fn handle_get_worktrees(
2144 this: Entity<Self>,
2145 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2146 mut cx: AsyncApp,
2147 ) -> Result<proto::GitWorktreesResponse> {
2148 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2149 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2150
2151 let worktrees = repository_handle
2152 .update(&mut cx, |repository_handle, _| {
2153 repository_handle.worktrees()
2154 })?
2155 .await??;
2156
2157 Ok(proto::GitWorktreesResponse {
2158 worktrees: worktrees
2159 .into_iter()
2160 .map(|worktree| worktree_to_proto(&worktree))
2161 .collect::<Vec<_>>(),
2162 })
2163 }
2164
2165 async fn handle_create_worktree(
2166 this: Entity<Self>,
2167 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2168 mut cx: AsyncApp,
2169 ) -> Result<proto::Ack> {
2170 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2171 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2172 let directory = PathBuf::from(envelope.payload.directory);
2173 let name = envelope.payload.name;
2174 let commit = envelope.payload.commit;
2175
2176 repository_handle
2177 .update(&mut cx, |repository_handle, _| {
2178 repository_handle.create_worktree(name, directory, commit)
2179 })?
2180 .await??;
2181
2182 Ok(proto::Ack {})
2183 }
2184
2185 async fn handle_get_branches(
2186 this: Entity<Self>,
2187 envelope: TypedEnvelope<proto::GitGetBranches>,
2188 mut cx: AsyncApp,
2189 ) -> Result<proto::GitBranchesResponse> {
2190 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2191 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2192
2193 let branches = repository_handle
2194 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2195 .await??;
2196
2197 Ok(proto::GitBranchesResponse {
2198 branches: branches
2199 .into_iter()
2200 .map(|branch| branch_to_proto(&branch))
2201 .collect::<Vec<_>>(),
2202 })
2203 }

    async fn handle_get_default_branch(
2205 this: Entity<Self>,
2206 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2207 mut cx: AsyncApp,
2208 ) -> Result<proto::GetDefaultBranchResponse> {
2209 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2210 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2211
2212 let branch = repository_handle
2213 .update(&mut cx, |repository_handle, _| {
2214 repository_handle.default_branch()
2215 })?
2216 .await??
2217 .map(Into::into);
2218
2219 Ok(proto::GetDefaultBranchResponse { branch })
2220 }

    async fn handle_create_branch(
2222 this: Entity<Self>,
2223 envelope: TypedEnvelope<proto::GitCreateBranch>,
2224 mut cx: AsyncApp,
2225 ) -> Result<proto::Ack> {
2226 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2227 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2228 let branch_name = envelope.payload.branch_name;
2229
2230 repository_handle
2231 .update(&mut cx, |repository_handle, _| {
2232 repository_handle.create_branch(branch_name, None)
2233 })?
2234 .await??;
2235
2236 Ok(proto::Ack {})
2237 }
2238
2239 async fn handle_change_branch(
2240 this: Entity<Self>,
2241 envelope: TypedEnvelope<proto::GitChangeBranch>,
2242 mut cx: AsyncApp,
2243 ) -> Result<proto::Ack> {
2244 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2245 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2246 let branch_name = envelope.payload.branch_name;
2247
2248 repository_handle
2249 .update(&mut cx, |repository_handle, _| {
2250 repository_handle.change_branch(branch_name)
2251 })?
2252 .await??;
2253
2254 Ok(proto::Ack {})
2255 }
2256
2257 async fn handle_rename_branch(
2258 this: Entity<Self>,
2259 envelope: TypedEnvelope<proto::GitRenameBranch>,
2260 mut cx: AsyncApp,
2261 ) -> Result<proto::Ack> {
2262 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2263 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2264 let branch = envelope.payload.branch;
2265 let new_name = envelope.payload.new_name;
2266
2267 repository_handle
2268 .update(&mut cx, |repository_handle, _| {
2269 repository_handle.rename_branch(branch, new_name)
2270 })?
2271 .await??;
2272
2273 Ok(proto::Ack {})
2274 }
2275
2276 async fn handle_delete_branch(
2277 this: Entity<Self>,
2278 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2279 mut cx: AsyncApp,
2280 ) -> Result<proto::Ack> {
2281 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2282 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2283 let branch_name = envelope.payload.branch_name;
2284
2285 repository_handle
2286 .update(&mut cx, |repository_handle, _| {
2287 repository_handle.delete_branch(branch_name)
2288 })?
2289 .await??;
2290
2291 Ok(proto::Ack {})
2292 }
2293
2294 async fn handle_show(
2295 this: Entity<Self>,
2296 envelope: TypedEnvelope<proto::GitShow>,
2297 mut cx: AsyncApp,
2298 ) -> Result<proto::GitCommitDetails> {
2299 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2300 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2301
2302 let commit = repository_handle
2303 .update(&mut cx, |repository_handle, _| {
2304 repository_handle.show(envelope.payload.commit)
2305 })?
2306 .await??;
2307 Ok(proto::GitCommitDetails {
2308 sha: commit.sha.into(),
2309 message: commit.message.into(),
2310 commit_timestamp: commit.commit_timestamp,
2311 author_email: commit.author_email.into(),
2312 author_name: commit.author_name.into(),
2313 })
2314 }
2315
2316 async fn handle_load_commit_diff(
2317 this: Entity<Self>,
2318 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2319 mut cx: AsyncApp,
2320 ) -> Result<proto::LoadCommitDiffResponse> {
2321 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2322 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2323
2324 let commit_diff = repository_handle
2325 .update(&mut cx, |repository_handle, _| {
2326 repository_handle.load_commit_diff(envelope.payload.commit)
2327 })?
2328 .await??;
2329 Ok(proto::LoadCommitDiffResponse {
2330 files: commit_diff
2331 .files
2332 .into_iter()
2333 .map(|file| proto::CommitFile {
2334 path: file.path.to_proto(),
2335 old_text: file.old_text,
2336 new_text: file.new_text,
2337 })
2338 .collect(),
2339 })
2340 }
2341
2342 async fn handle_file_history(
2343 this: Entity<Self>,
2344 envelope: TypedEnvelope<proto::GitFileHistory>,
2345 mut cx: AsyncApp,
2346 ) -> Result<proto::GitFileHistoryResponse> {
2347 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2348 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2349 let path = RepoPath::from_proto(&envelope.payload.path)?;
2350 let skip = envelope.payload.skip as usize;
2351 let limit = envelope.payload.limit.map(|l| l as usize);
2352
2353 let file_history = repository_handle
2354 .update(&mut cx, |repository_handle, _| {
2355 repository_handle.file_history_paginated(path, skip, limit)
2356 })?
2357 .await??;
2358
2359 Ok(proto::GitFileHistoryResponse {
2360 entries: file_history
2361 .entries
2362 .into_iter()
2363 .map(|entry| proto::FileHistoryEntry {
2364 sha: entry.sha.to_string(),
2365 subject: entry.subject.to_string(),
2366 message: entry.message.to_string(),
2367 commit_timestamp: entry.commit_timestamp,
2368 author_name: entry.author_name.to_string(),
2369 author_email: entry.author_email.to_string(),
2370 })
2371 .collect(),
2372 path: file_history.path.to_proto(),
2373 })
2374 }
2375
2376 async fn handle_reset(
2377 this: Entity<Self>,
2378 envelope: TypedEnvelope<proto::GitReset>,
2379 mut cx: AsyncApp,
2380 ) -> Result<proto::Ack> {
2381 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2382 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2383
2384 let mode = match envelope.payload.mode() {
2385 git_reset::ResetMode::Soft => ResetMode::Soft,
2386 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2387 };
2388
2389 repository_handle
2390 .update(&mut cx, |repository_handle, cx| {
2391 repository_handle.reset(envelope.payload.commit, mode, cx)
2392 })?
2393 .await??;
2394 Ok(proto::Ack {})
2395 }
2396
2397 async fn handle_checkout_files(
2398 this: Entity<Self>,
2399 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2400 mut cx: AsyncApp,
2401 ) -> Result<proto::Ack> {
2402 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2403 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2404 let paths = envelope
2405 .payload
2406 .paths
2407 .iter()
2408 .map(|s| RepoPath::from_proto(s))
2409 .collect::<Result<Vec<_>>>()?;
2410
2411 repository_handle
2412 .update(&mut cx, |repository_handle, cx| {
2413 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2414 })?
2415 .await?;
2416 Ok(proto::Ack {})
2417 }
2418
2419 async fn handle_open_commit_message_buffer(
2420 this: Entity<Self>,
2421 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2422 mut cx: AsyncApp,
2423 ) -> Result<proto::OpenBufferResponse> {
2424 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2425 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2426 let buffer = repository
2427 .update(&mut cx, |repository, cx| {
2428 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2429 })?
2430 .await?;
2431
2432 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2433 this.update(&mut cx, |this, cx| {
2434 this.buffer_store.update(cx, |buffer_store, cx| {
2435 buffer_store
2436 .create_buffer_for_peer(
2437 &buffer,
2438 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2439 cx,
2440 )
2441 .detach_and_log_err(cx);
2442 })
2443 })?;
2444
2445 Ok(proto::OpenBufferResponse {
2446 buffer_id: buffer_id.to_proto(),
2447 })
2448 }
2449
2450 async fn handle_askpass(
2451 this: Entity<Self>,
2452 envelope: TypedEnvelope<proto::AskPassRequest>,
2453 mut cx: AsyncApp,
2454 ) -> Result<proto::AskPassResponse> {
2455 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2456 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2457
2458 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2459 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2460 debug_panic!("no askpass found");
2461 anyhow::bail!("no askpass found");
2462 };
2463
2464 let response = askpass
2465 .ask_password(envelope.payload.prompt)
2466 .await
2467 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2468
2469 delegates
2470 .lock()
2471 .insert(envelope.payload.askpass_id, askpass);
2472
        // The askpass password is decrypted and sent back to the peer unencrypted here;
        // passing `IKnowWhatIAmDoingAndIHaveReadTheDocs` acknowledges that trade-off.
2474 Ok(proto::AskPassResponse {
2475 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2476 })
2477 }
2478
2479 async fn handle_check_for_pushed_commits(
2480 this: Entity<Self>,
2481 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2482 mut cx: AsyncApp,
2483 ) -> Result<proto::CheckForPushedCommitsResponse> {
2484 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2485 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2486
2487 let branches = repository_handle
2488 .update(&mut cx, |repository_handle, _| {
2489 repository_handle.check_for_pushed_commits()
2490 })?
2491 .await??;
2492 Ok(proto::CheckForPushedCommitsResponse {
2493 pushed_to: branches
2494 .into_iter()
2495 .map(|commit| commit.to_string())
2496 .collect(),
2497 })
2498 }
2499
2500 async fn handle_git_diff(
2501 this: Entity<Self>,
2502 envelope: TypedEnvelope<proto::GitDiff>,
2503 mut cx: AsyncApp,
2504 ) -> Result<proto::GitDiffResponse> {
2505 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2506 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2507 let diff_type = match envelope.payload.diff_type() {
2508 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2509 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2510 };
2511
2512 let mut diff = repository_handle
2513 .update(&mut cx, |repository_handle, cx| {
2514 repository_handle.diff(diff_type, cx)
2515 })?
2516 .await??;
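        // Cap the payload at the first million characters so huge diffs don't blow up the RPC response.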
2517 const ONE_MB: usize = 1_000_000;
2518 if diff.len() > ONE_MB {
2519 diff = diff.chars().take(ONE_MB).collect()
2520 }
2521
2522 Ok(proto::GitDiffResponse { diff })
2523 }
2524
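    /// Handles a request for the tree-level diff between two revisions, either comparing them
    /// directly or against their merge base, and returns each changed path with its status and
    /// (for modified or deleted files) the old blob OID.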
2525 async fn handle_tree_diff(
2526 this: Entity<Self>,
2527 request: TypedEnvelope<proto::GetTreeDiff>,
2528 mut cx: AsyncApp,
2529 ) -> Result<proto::GetTreeDiffResponse> {
2530 let repository_id = RepositoryId(request.payload.repository_id);
2531 let diff_type = if request.payload.is_merge {
2532 DiffTreeType::MergeBase {
2533 base: request.payload.base.into(),
2534 head: request.payload.head.into(),
2535 }
2536 } else {
2537 DiffTreeType::Since {
2538 base: request.payload.base.into(),
2539 head: request.payload.head.into(),
2540 }
2541 };
2542
2543 let diff = this
2544 .update(&mut cx, |this, cx| {
2545 let repository = this.repositories().get(&repository_id)?;
2546 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2547 })?
2548 .context("missing repository")?
2549 .await??;
2550
2551 Ok(proto::GetTreeDiffResponse {
2552 entries: diff
2553 .entries
2554 .into_iter()
2555 .map(|(path, status)| proto::TreeDiffStatus {
2556 path: path.as_ref().to_proto(),
2557 status: match status {
2558 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2559 TreeDiffStatus::Modified { .. } => {
2560 proto::tree_diff_status::Status::Modified.into()
2561 }
2562 TreeDiffStatus::Deleted { .. } => {
2563 proto::tree_diff_status::Status::Deleted.into()
2564 }
2565 },
2566 oid: match status {
2567 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2568 Some(old.to_string())
2569 }
2570 TreeDiffStatus::Added => None,
2571 },
2572 })
2573 .collect(),
2574 })
2575 }
2576
2577 async fn handle_get_blob_content(
2578 this: Entity<Self>,
2579 request: TypedEnvelope<proto::GetBlobContent>,
2580 mut cx: AsyncApp,
2581 ) -> Result<proto::GetBlobContentResponse> {
2582 let oid = git::Oid::from_str(&request.payload.oid)?;
2583 let repository_id = RepositoryId(request.payload.repository_id);
2584 let content = this
2585 .update(&mut cx, |this, cx| {
2586 let repository = this.repositories().get(&repository_id)?;
2587 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2588 })?
2589 .context("missing repository")?
2590 .await?;
2591 Ok(proto::GetBlobContentResponse { content })
2592 }
2593
2594 async fn handle_open_unstaged_diff(
2595 this: Entity<Self>,
2596 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2597 mut cx: AsyncApp,
2598 ) -> Result<proto::OpenUnstagedDiffResponse> {
2599 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2600 let diff = this
2601 .update(&mut cx, |this, cx| {
2602 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2603 Some(this.open_unstaged_diff(buffer, cx))
2604 })?
2605 .context("missing buffer")?
2606 .await?;
2607 this.update(&mut cx, |this, _| {
2608 let shared_diffs = this
2609 .shared_diffs
2610 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2611 .or_default();
2612 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2613 })?;
2614 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2615 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2616 }
2617
2618 async fn handle_open_uncommitted_diff(
2619 this: Entity<Self>,
2620 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2621 mut cx: AsyncApp,
2622 ) -> Result<proto::OpenUncommittedDiffResponse> {
2623 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2624 let diff = this
2625 .update(&mut cx, |this, cx| {
2626 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2627 Some(this.open_uncommitted_diff(buffer, cx))
2628 })?
2629 .context("missing buffer")?
2630 .await?;
2631 this.update(&mut cx, |this, _| {
2632 let shared_diffs = this
2633 .shared_diffs
2634 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2635 .or_default();
2636 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2637 })?;
2638 diff.read_with(&cx, |diff, cx| {
2639 use proto::open_uncommitted_diff_response::Mode;
2640
2641 let unstaged_diff = diff.secondary_diff();
2642 let index_snapshot = unstaged_diff.and_then(|diff| {
2643 let diff = diff.read(cx);
2644 diff.base_text_exists().then(|| diff.base_text())
2645 });
2646
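            // Decide how much base text to send: when the index snapshot is the same text
            // snapshot as HEAD, signal `IndexMatchesHead` and omit the staged text rather than
            // sending it twice.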
2647 let mode;
2648 let staged_text;
2649 let committed_text;
2650 if diff.base_text_exists() {
2651 let committed_snapshot = diff.base_text();
2652 committed_text = Some(committed_snapshot.text());
2653 if let Some(index_text) = index_snapshot {
2654 if index_text.remote_id() == committed_snapshot.remote_id() {
2655 mode = Mode::IndexMatchesHead;
2656 staged_text = None;
2657 } else {
2658 mode = Mode::IndexAndHead;
2659 staged_text = Some(index_text.text());
2660 }
2661 } else {
2662 mode = Mode::IndexAndHead;
2663 staged_text = None;
2664 }
2665 } else {
2666 mode = Mode::IndexAndHead;
2667 committed_text = None;
2668 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2669 }
2670
2671 proto::OpenUncommittedDiffResponse {
2672 committed_text,
2673 staged_text,
2674 mode: mode.into(),
2675 }
2676 })
2677 }
2678
2679 async fn handle_update_diff_bases(
2680 this: Entity<Self>,
2681 request: TypedEnvelope<proto::UpdateDiffBases>,
2682 mut cx: AsyncApp,
2683 ) -> Result<()> {
2684 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2685 this.update(&mut cx, |this, cx| {
2686 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2687 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2688 {
2689 let buffer = buffer.read(cx).text_snapshot();
2690 diff_state.update(cx, |diff_state, cx| {
2691 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2692 })
2693 }
2694 })
2695 }
2696
2697 async fn handle_blame_buffer(
2698 this: Entity<Self>,
2699 envelope: TypedEnvelope<proto::BlameBuffer>,
2700 mut cx: AsyncApp,
2701 ) -> Result<proto::BlameBufferResponse> {
2702 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2703 let version = deserialize_version(&envelope.payload.version);
2704 let buffer = this.read_with(&cx, |this, cx| {
2705 this.buffer_store.read(cx).get_existing(buffer_id)
2706 })??;
2707 buffer
2708 .update(&mut cx, |buffer, _| {
2709 buffer.wait_for_version(version.clone())
2710 })?
2711 .await?;
2712 let blame = this
2713 .update(&mut cx, |this, cx| {
2714 this.blame_buffer(&buffer, Some(version), cx)
2715 })?
2716 .await?;
2717 Ok(serialize_blame_buffer_response(blame))
2718 }
2719
2720 async fn handle_get_permalink_to_line(
2721 this: Entity<Self>,
2722 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2723 mut cx: AsyncApp,
2724 ) -> Result<proto::GetPermalinkToLineResponse> {
2725 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2727 let selection = {
2728 let proto_selection = envelope
2729 .payload
2730 .selection
2731 .context("no selection to get permalink for defined")?;
2732 proto_selection.start as u32..proto_selection.end as u32
2733 };
2734 let buffer = this.read_with(&cx, |this, cx| {
2735 this.buffer_store.read(cx).get_existing(buffer_id)
2736 })??;
2737 let permalink = this
2738 .update(&mut cx, |this, cx| {
2739 this.get_permalink_to_line(&buffer, selection, cx)
2740 })?
2741 .await?;
2742 Ok(proto::GetPermalinkToLineResponse {
2743 permalink: permalink.to_string(),
2744 })
2745 }
2746
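    /// Resolves the repository entity referenced by an incoming request, failing if it is no
    /// longer tracked by this store.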
2747 fn repository_for_request(
2748 this: &Entity<Self>,
2749 id: RepositoryId,
2750 cx: &mut AsyncApp,
2751 ) -> Result<Entity<Repository>> {
2752 this.read_with(cx, |this, _| {
2753 this.repositories
2754 .get(&id)
2755 .context("missing repository handle")
2756 .cloned()
2757 })?
2758 }
2759
2760 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2761 self.repositories
2762 .iter()
2763 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2764 .collect()
2765 }
2766
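    /// Groups the updated worktree entries by the repository whose working directory contains
    /// them, assigning each path to its innermost containing repository.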
2767 fn process_updated_entries(
2768 &self,
2769 worktree: &Entity<Worktree>,
2770 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2771 cx: &mut App,
2772 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2773 let path_style = worktree.read(cx).path_style();
2774 let mut repo_paths = self
2775 .repositories
2776 .values()
2777 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2778 .collect::<Vec<_>>();
2779 let mut entries: Vec<_> = updated_entries
2780 .iter()
2781 .map(|(path, _, _)| path.clone())
2782 .collect();
2783 entries.sort();
2784 let worktree = worktree.read(cx);
2785
2786 let entries = entries
2787 .into_iter()
2788 .map(|path| worktree.absolutize(&path))
2789 .collect::<Arc<[_]>>();
2790
2791 let executor = cx.background_executor().clone();
2792 cx.background_executor().spawn(async move {
2793 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2794 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2795 let mut tasks = FuturesOrdered::new();
2796 for (repo_path, repo) in repo_paths.into_iter().rev() {
2797 let entries = entries.clone();
2798 let task = executor.spawn(async move {
                    // Find the contiguous range of updated paths that fall under this repository's working directory.
2800 let mut ix = entries.partition_point(|path| path < &*repo_path);
2801 if ix == entries.len() {
2802 return None;
2803 };
2804
2805 let mut paths = Vec::new();
                    // All paths prefixed by a given repo's working directory form a contiguous range in the sorted list.
2807 while let Some(path) = entries.get(ix)
2808 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2809 &repo_path, path, path_style,
2810 )
2811 {
2812 paths.push((repo_path, ix));
2813 ix += 1;
2814 }
2815 if paths.is_empty() {
2816 None
2817 } else {
2818 Some((repo, paths))
2819 }
2820 });
2821 tasks.push_back(task);
2822 }
2823
2824 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2825 let mut path_was_used = vec![false; entries.len()];
2826 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were enqueued in reverse path order, so more deeply nested repositories come first.
            // Together with `path_was_used`, this assigns each path to its innermost repository.
2829 for t in tasks {
2830 let Some((repo, paths)) = t else {
2831 continue;
2832 };
2833 let entry = paths_by_git_repo.entry(repo).or_default();
2834 for (repo_path, ix) in paths {
2835 if path_was_used[ix] {
2836 continue;
2837 }
2838 path_was_used[ix] = true;
2839 entry.push(repo_path);
2840 }
2841 }
2842
2843 paths_by_git_repo
2844 })
2845 }
2846}
2847
2848impl BufferGitState {
2849 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2850 Self {
2851 unstaged_diff: Default::default(),
2852 uncommitted_diff: Default::default(),
2853 recalculate_diff_task: Default::default(),
2854 language: Default::default(),
2855 language_registry: Default::default(),
2856 recalculating_tx: postage::watch::channel_with(false).0,
2857 hunk_staging_operation_count: 0,
2858 hunk_staging_operation_count_as_of_write: 0,
2859 head_text: Default::default(),
2860 index_text: Default::default(),
2861 head_changed: Default::default(),
2862 index_changed: Default::default(),
2863 language_changed: Default::default(),
2864 conflict_updated_futures: Default::default(),
2865 conflict_set: Default::default(),
2866 reparse_conflict_markers_task: Default::default(),
2867 }
2868 }
2869
2870 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2871 self.language = buffer.read(cx).language().cloned();
2872 self.language_changed = true;
2873 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2874 }
2875
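    /// Re-parses merge conflict markers from the given buffer snapshot on a background task and
    /// applies the result to the associated `ConflictSet`. The returned receiver fires once the
    /// update has been applied, and is dropped without firing if there is nothing to update.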
2876 fn reparse_conflict_markers(
2877 &mut self,
2878 buffer: text::BufferSnapshot,
2879 cx: &mut Context<Self>,
2880 ) -> oneshot::Receiver<()> {
2881 let (tx, rx) = oneshot::channel();
2882
2883 let Some(conflict_set) = self
2884 .conflict_set
2885 .as_ref()
2886 .and_then(|conflict_set| conflict_set.upgrade())
2887 else {
2888 return rx;
2889 };
2890
2891 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2892 if conflict_set.has_conflict {
2893 Some(conflict_set.snapshot())
2894 } else {
2895 None
2896 }
2897 });
2898
2899 if let Some(old_snapshot) = old_snapshot {
2900 self.conflict_updated_futures.push(tx);
2901 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2902 let (snapshot, changed_range) = cx
2903 .background_spawn(async move {
2904 let new_snapshot = ConflictSet::parse(&buffer);
2905 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2906 (new_snapshot, changed_range)
2907 })
2908 .await;
2909 this.update(cx, |this, cx| {
2910 if let Some(conflict_set) = &this.conflict_set {
2911 conflict_set
2912 .update(cx, |conflict_set, cx| {
2913 conflict_set.set_snapshot(snapshot, changed_range, cx);
2914 })
2915 .ok();
2916 }
2917 let futures = std::mem::take(&mut this.conflict_updated_futures);
2918 for tx in futures {
2919 tx.send(()).ok();
2920 }
2921 })
2922 }))
2923 }
2924
2925 rx
2926 }
2927
2928 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2929 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2930 }
2931
2932 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2933 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2934 }
2935
2936 fn handle_base_texts_updated(
2937 &mut self,
2938 buffer: text::BufferSnapshot,
2939 message: proto::UpdateDiffBases,
2940 cx: &mut Context<Self>,
2941 ) {
2942 use proto::update_diff_bases::Mode;
2943
2944 let Some(mode) = Mode::from_i32(message.mode) else {
2945 return;
2946 };
2947
2948 let diff_bases_change = match mode {
2949 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2950 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2951 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2952 Mode::IndexAndHead => DiffBasesChange::SetEach {
2953 index: message.staged_text,
2954 head: message.committed_text,
2955 },
2956 };
2957
2958 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2959 }
2960
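    /// Returns a future that resolves once the in-flight diff recalculation (if any) finishes,
    /// or `None` if no recalculation is currently running.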
2961 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2962 if *self.recalculating_tx.borrow() {
2963 let mut rx = self.recalculating_tx.subscribe();
2964 Some(async move {
2965 loop {
2966 let is_recalculating = rx.recv().await;
2967 if is_recalculating != Some(true) {
2968 break;
2969 }
2970 }
2971 })
2972 } else {
2973 None
2974 }
2975 }
2976
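    /// Records new index and/or HEAD base texts (normalizing their line endings) and then
    /// recalculates the affected diffs.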
2977 fn diff_bases_changed(
2978 &mut self,
2979 buffer: text::BufferSnapshot,
2980 diff_bases_change: Option<DiffBasesChange>,
2981 cx: &mut Context<Self>,
2982 ) {
2983 match diff_bases_change {
2984 Some(DiffBasesChange::SetIndex(index)) => {
2985 self.index_text = index.map(|mut index| {
2986 text::LineEnding::normalize(&mut index);
2987 Arc::new(index)
2988 });
2989 self.index_changed = true;
2990 }
2991 Some(DiffBasesChange::SetHead(head)) => {
2992 self.head_text = head.map(|mut head| {
2993 text::LineEnding::normalize(&mut head);
2994 Arc::new(head)
2995 });
2996 self.head_changed = true;
2997 }
2998 Some(DiffBasesChange::SetBoth(text)) => {
2999 let text = text.map(|mut text| {
3000 text::LineEnding::normalize(&mut text);
3001 Arc::new(text)
3002 });
3003 self.head_text = text.clone();
3004 self.index_text = text;
3005 self.head_changed = true;
3006 self.index_changed = true;
3007 }
3008 Some(DiffBasesChange::SetEach { index, head }) => {
3009 self.index_text = index.map(|mut index| {
3010 text::LineEnding::normalize(&mut index);
3011 Arc::new(index)
3012 });
3013 self.index_changed = true;
3014 self.head_text = head.map(|mut head| {
3015 text::LineEnding::normalize(&mut head);
3016 Arc::new(head)
3017 });
3018 self.head_changed = true;
3019 }
3020 None => {}
3021 }
3022
3023 self.recalculate_diffs(buffer, cx)
3024 }
3025
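    /// Spawns a background task that recomputes the unstaged and uncommitted diffs against the
    /// current index and HEAD texts. When the index matches HEAD the unstaged diff is reused for
    /// the uncommitted diff, and the pass is abandoned if new hunk staging operations arrive
    /// while it is running.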
3026 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3027 *self.recalculating_tx.borrow_mut() = true;
3028
3029 let language = self.language.clone();
3030 let language_registry = self.language_registry.clone();
3031 let unstaged_diff = self.unstaged_diff();
3032 let uncommitted_diff = self.uncommitted_diff();
3033 let head = self.head_text.clone();
3034 let index = self.index_text.clone();
3035 let index_changed = self.index_changed;
3036 let head_changed = self.head_changed;
3037 let language_changed = self.language_changed;
3038 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3039 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3040 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3041 (None, None) => true,
3042 _ => false,
3043 };
3044 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3045 log::debug!(
3046 "start recalculating diffs for buffer {}",
3047 buffer.remote_id()
3048 );
3049
3050 let mut new_unstaged_diff = None;
3051 if let Some(unstaged_diff) = &unstaged_diff {
3052 new_unstaged_diff = Some(
3053 BufferDiff::update_diff(
3054 unstaged_diff.clone(),
3055 buffer.clone(),
3056 index,
3057 index_changed,
3058 language_changed,
3059 language.clone(),
3060 language_registry.clone(),
3061 cx,
3062 )
3063 .await?,
3064 );
3065 }
3066
3067 // Dropping BufferDiff can be expensive, so yield back to the event loop
3068 // for a bit
3069 yield_now().await;
3070
3071 let mut new_uncommitted_diff = None;
3072 if let Some(uncommitted_diff) = &uncommitted_diff {
3073 new_uncommitted_diff = if index_matches_head {
3074 new_unstaged_diff.clone()
3075 } else {
3076 Some(
3077 BufferDiff::update_diff(
3078 uncommitted_diff.clone(),
3079 buffer.clone(),
3080 head,
3081 head_changed,
3082 language_changed,
3083 language.clone(),
3084 language_registry.clone(),
3085 cx,
3086 )
3087 .await?,
3088 )
3089 }
3090 }
3091
3092 // Dropping BufferDiff can be expensive, so yield back to the event loop
3093 // for a bit
3094 yield_now().await;
3095
3096 let cancel = this.update(cx, |this, _| {
3097 // This checks whether all pending stage/unstage operations
3098 // have quiesced (i.e. both the corresponding write and the
3099 // read of that write have completed). If not, then we cancel
3100 // this recalculation attempt to avoid invalidating pending
3101 // state too quickly; another recalculation will come along
3102 // later and clear the pending state once the state of the index has settled.
3103 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3104 *this.recalculating_tx.borrow_mut() = false;
3105 true
3106 } else {
3107 false
3108 }
3109 })?;
3110 if cancel {
3111 log::debug!(
3112 concat!(
3113 "aborting recalculating diffs for buffer {}",
3114 "due to subsequent hunk operations",
3115 ),
3116 buffer.remote_id()
3117 );
3118 return Ok(());
3119 }
3120
3121 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3122 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3123 {
3124 unstaged_diff.update(cx, |diff, cx| {
3125 if language_changed {
3126 diff.language_changed(cx);
3127 }
3128 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3129 })?
3130 } else {
3131 None
3132 };
3133
3134 yield_now().await;
3135
3136 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3137 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3138 {
3139 uncommitted_diff.update(cx, |diff, cx| {
3140 if language_changed {
3141 diff.language_changed(cx);
3142 }
3143 diff.set_snapshot_with_secondary(
3144 new_uncommitted_diff,
3145 &buffer,
3146 unstaged_changed_range,
3147 true,
3148 cx,
3149 );
3150 })?;
3151 }
3152
3153 log::debug!(
3154 "finished recalculating diffs for buffer {}",
3155 buffer.remote_id()
3156 );
3157
3158 if let Some(this) = this.upgrade() {
3159 this.update(cx, |this, _| {
3160 this.index_changed = false;
3161 this.head_changed = false;
3162 this.language_changed = false;
3163 *this.recalculating_tx.borrow_mut() = false;
3164 })?;
3165 }
3166
3167 Ok(())
3168 }));
3169 }
3170}
3171
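/// Builds an `AskPassDelegate` that forwards credential prompts to the downstream client over
/// RPC and zeroizes the response once it has been handed off.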
3172fn make_remote_delegate(
3173 this: Entity<GitStore>,
3174 project_id: u64,
3175 repository_id: RepositoryId,
3176 askpass_id: u64,
3177 cx: &mut AsyncApp,
3178) -> AskPassDelegate {
3179 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3180 this.update(cx, |this, cx| {
3181 let Some((client, _)) = this.downstream_client() else {
3182 return;
3183 };
3184 let response = client.request(proto::AskPassRequest {
3185 project_id,
3186 repository_id: repository_id.to_proto(),
3187 askpass_id,
3188 prompt,
3189 });
3190 cx.spawn(async move |_, _| {
3191 let mut response = response.await?.response;
3192 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3193 .ok();
3194 response.zeroize();
3195 anyhow::Ok(())
3196 })
3197 .detach_and_log_err(cx);
3198 })
3199 .log_err();
3200 })
3201}
3202
3203impl RepositoryId {
3204 pub fn to_proto(self) -> u64 {
3205 self.0
3206 }
3207
3208 pub fn from_proto(id: u64) -> Self {
3209 RepositoryId(id)
3210 }
3211}
3212
3213impl RepositorySnapshot {
3214 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3215 Self {
3216 id,
3217 statuses_by_path: Default::default(),
3218 work_directory_abs_path,
3219 branch: None,
3220 head_commit: None,
3221 scan_id: 0,
3222 merge: Default::default(),
3223 remote_origin_url: None,
3224 remote_upstream_url: None,
3225 stash_entries: Default::default(),
3226 path_style,
3227 }
3228 }
3229
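    /// Builds a complete `UpdateRepository` message describing this snapshot from scratch,
    /// including every status entry, as opposed to the incremental `build_update` below.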
3230 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3231 proto::UpdateRepository {
3232 branch_summary: self.branch.as_ref().map(branch_to_proto),
3233 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3234 updated_statuses: self
3235 .statuses_by_path
3236 .iter()
3237 .map(|entry| entry.to_proto())
3238 .collect(),
3239 removed_statuses: Default::default(),
3240 current_merge_conflicts: self
3241 .merge
3242 .conflicted_paths
3243 .iter()
3244 .map(|repo_path| repo_path.to_proto())
3245 .collect(),
3246 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3247 project_id,
3248 id: self.id.to_proto(),
3249 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3250 entry_ids: vec![self.id.to_proto()],
3251 scan_id: self.scan_id,
3252 is_last_update: true,
3253 stash_entries: self
3254 .stash_entries
3255 .entries
3256 .iter()
3257 .map(stash_to_proto)
3258 .collect(),
3259 }
3260 }
3261
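    /// Builds an incremental `UpdateRepository` message by walking this snapshot's status entries
    /// and `old`'s in lockstep, emitting only the entries that were added, changed, or removed.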
3262 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3263 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3264 let mut removed_statuses: Vec<String> = Vec::new();
3265
3266 let mut new_statuses = self.statuses_by_path.iter().peekable();
3267 let mut old_statuses = old.statuses_by_path.iter().peekable();
3268
3269 let mut current_new_entry = new_statuses.next();
3270 let mut current_old_entry = old_statuses.next();
3271 loop {
3272 match (current_new_entry, current_old_entry) {
3273 (Some(new_entry), Some(old_entry)) => {
3274 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3275 Ordering::Less => {
3276 updated_statuses.push(new_entry.to_proto());
3277 current_new_entry = new_statuses.next();
3278 }
3279 Ordering::Equal => {
3280 if new_entry.status != old_entry.status {
3281 updated_statuses.push(new_entry.to_proto());
3282 }
3283 current_old_entry = old_statuses.next();
3284 current_new_entry = new_statuses.next();
3285 }
3286 Ordering::Greater => {
3287 removed_statuses.push(old_entry.repo_path.to_proto());
3288 current_old_entry = old_statuses.next();
3289 }
3290 }
3291 }
3292 (None, Some(old_entry)) => {
3293 removed_statuses.push(old_entry.repo_path.to_proto());
3294 current_old_entry = old_statuses.next();
3295 }
3296 (Some(new_entry), None) => {
3297 updated_statuses.push(new_entry.to_proto());
3298 current_new_entry = new_statuses.next();
3299 }
3300 (None, None) => break,
3301 }
3302 }
3303
3304 proto::UpdateRepository {
3305 branch_summary: self.branch.as_ref().map(branch_to_proto),
3306 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3307 updated_statuses,
3308 removed_statuses,
3309 current_merge_conflicts: self
3310 .merge
3311 .conflicted_paths
3312 .iter()
3313 .map(|path| path.to_proto())
3314 .collect(),
3315 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3316 project_id,
3317 id: self.id.to_proto(),
3318 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3319 entry_ids: vec![],
3320 scan_id: self.scan_id,
3321 is_last_update: true,
3322 stash_entries: self
3323 .stash_entries
3324 .entries
3325 .iter()
3326 .map(stash_to_proto)
3327 .collect(),
3328 }
3329 }
3330
3331 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3332 self.statuses_by_path.iter().cloned()
3333 }
3334
3335 pub fn status_summary(&self) -> GitSummary {
3336 self.statuses_by_path.summary().item_summary
3337 }
3338
3339 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3340 self.statuses_by_path
3341 .get(&PathKey(path.as_ref().clone()), ())
3342 .cloned()
3343 }
3344
3345 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3346 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3347 }
3348
3349 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3350 self.path_style
3351 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3352 .unwrap()
3353 .into()
3354 }
3355
3356 #[inline]
3357 fn abs_path_to_repo_path_inner(
3358 work_directory_abs_path: &Path,
3359 abs_path: &Path,
3360 path_style: PathStyle,
3361 ) -> Option<RepoPath> {
3362 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3363 Some(RepoPath::from_rel_path(&rel_path))
3364 }
3365
3366 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3367 self.merge.conflicted_paths.contains(repo_path)
3368 }
3369
3370 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3371 let had_conflict_on_last_merge_head_change =
3372 self.merge.conflicted_paths.contains(repo_path);
3373 let has_conflict_currently = self
3374 .status_for_path(repo_path)
3375 .is_some_and(|entry| entry.status.is_conflicted());
3376 had_conflict_on_last_merge_head_change || has_conflict_currently
3377 }
3378
3379 /// This is the name that will be displayed in the repository selector for this repository.
3380 pub fn display_name(&self) -> SharedString {
3381 self.work_directory_abs_path
3382 .file_name()
3383 .unwrap_or_default()
3384 .to_string_lossy()
3385 .to_string()
3386 .into()
3387 }
3388}
3389
3390pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3391 proto::StashEntry {
3392 oid: entry.oid.as_bytes().to_vec(),
3393 message: entry.message.clone(),
3394 branch: entry.branch.clone(),
3395 index: entry.index as u64,
3396 timestamp: entry.timestamp,
3397 }
3398}
3399
3400pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3401 Ok(StashEntry {
3402 oid: Oid::from_bytes(&entry.oid)?,
3403 message: entry.message.clone(),
3404 index: entry.index as usize,
3405 branch: entry.branch.clone(),
3406 timestamp: entry.timestamp,
3407 })
3408}
3409
3410impl MergeDetails {
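    /// Reads the in-progress merge/rebase/cherry-pick/revert/apply head refs and the current set
    /// of conflicted paths to build an updated `MergeDetails`; the returned flag reports whether
    /// the recorded merge heads changed.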
3411 async fn load(
3412 backend: &Arc<dyn GitRepository>,
3413 status: &SumTree<StatusEntry>,
3414 prev_snapshot: &RepositorySnapshot,
3415 ) -> Result<(MergeDetails, bool)> {
3416 log::debug!("load merge details");
3417 let message = backend.merge_message().await;
3418 let heads = backend
3419 .revparse_batch(vec![
3420 "MERGE_HEAD".into(),
3421 "CHERRY_PICK_HEAD".into(),
3422 "REBASE_HEAD".into(),
3423 "REVERT_HEAD".into(),
3424 "APPLY_HEAD".into(),
3425 ])
3426 .await
3427 .log_err()
3428 .unwrap_or_default()
3429 .into_iter()
3430 .map(|opt| opt.map(SharedString::from))
3431 .collect::<Vec<_>>();
3432 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3433 let conflicted_paths = if merge_heads_changed {
3434 let current_conflicted_paths = TreeSet::from_ordered_entries(
3435 status
3436 .iter()
3437 .filter(|entry| entry.status.is_conflicted())
3438 .map(|entry| entry.repo_path.clone()),
3439 );
3440
3441 // It can happen that we run a scan while a lengthy merge is in progress
3442 // that will eventually result in conflicts, but before those conflicts
3443 // are reported by `git status`. Since for the moment we only care about
3444 // the merge heads state for the purposes of tracking conflicts, don't update
3445 // this state until we see some conflicts.
3446 if heads.iter().any(Option::is_some)
3447 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3448 && current_conflicted_paths.is_empty()
3449 {
3450 log::debug!("not updating merge heads because no conflicts found");
3451 return Ok((
3452 MergeDetails {
3453 message: message.map(SharedString::from),
3454 ..prev_snapshot.merge.clone()
3455 },
3456 false,
3457 ));
3458 }
3459
3460 current_conflicted_paths
3461 } else {
3462 prev_snapshot.merge.conflicted_paths.clone()
3463 };
3464 let details = MergeDetails {
3465 conflicted_paths,
3466 message: message.map(SharedString::from),
3467 heads,
3468 };
3469 Ok((details, merge_heads_changed))
3470 }
3471}
3472
3473impl Repository {
3474 pub fn snapshot(&self) -> RepositorySnapshot {
3475 self.snapshot.clone()
3476 }
3477
3478 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3479 self.pending_ops.iter().cloned()
3480 }
3481
3482 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3483 self.pending_ops.summary().clone()
3484 }
3485
3486 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3487 self.pending_ops
3488 .get(&PathKey(path.as_ref().clone()), ())
3489 .cloned()
3490 }
3491
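    /// Constructs a repository backed by a local git working directory. The backend is
    /// initialized asynchronously, and all git operations are queued onto a dedicated local
    /// git worker.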
3492 fn local(
3493 id: RepositoryId,
3494 work_directory_abs_path: Arc<Path>,
3495 dot_git_abs_path: Arc<Path>,
3496 project_environment: WeakEntity<ProjectEnvironment>,
3497 fs: Arc<dyn Fs>,
3498 git_store: WeakEntity<GitStore>,
3499 cx: &mut Context<Self>,
3500 ) -> Self {
3501 let snapshot =
3502 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3503 let state = cx
3504 .spawn(async move |_, cx| {
3505 LocalRepositoryState::new(
3506 work_directory_abs_path,
3507 dot_git_abs_path,
3508 project_environment,
3509 fs,
3510 cx,
3511 )
3512 .await
3513 .map_err(|err| err.to_string())
3514 })
3515 .shared();
3516 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3517 let state = cx
3518 .spawn(async move |_, _| {
3519 let state = state.await?;
3520 Ok(RepositoryState::Local(state))
3521 })
3522 .shared();
3523
3524 Repository {
3525 this: cx.weak_entity(),
3526 git_store,
3527 snapshot,
3528 pending_ops: Default::default(),
3529 repository_state: state,
3530 commit_message_buffer: None,
3531 askpass_delegates: Default::default(),
3532 paths_needing_status_update: Default::default(),
3533 latest_askpass_id: 0,
3534 job_sender,
3535 job_id: 0,
3536 active_jobs: Default::default(),
3537 }
3538 }
3539
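    /// Constructs a repository whose git operations are proxied over `client` to the remote
    /// side that owns the underlying repository.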
3540 fn remote(
3541 id: RepositoryId,
3542 work_directory_abs_path: Arc<Path>,
3543 path_style: PathStyle,
3544 project_id: ProjectId,
3545 client: AnyProtoClient,
3546 git_store: WeakEntity<GitStore>,
3547 cx: &mut Context<Self>,
3548 ) -> Self {
3549 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3550 let repository_state = RemoteRepositoryState { project_id, client };
3551 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3552 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3553 Self {
3554 this: cx.weak_entity(),
3555 snapshot,
3556 commit_message_buffer: None,
3557 git_store,
3558 pending_ops: Default::default(),
3559 paths_needing_status_update: Default::default(),
3560 job_sender,
3561 repository_state,
3562 askpass_delegates: Default::default(),
3563 latest_askpass_id: 0,
3564 active_jobs: Default::default(),
3565 job_id: 0,
3566 }
3567 }
3568
3569 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3570 self.git_store.upgrade()
3571 }
3572
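    /// Reloads the index and HEAD base texts for every open buffer belonging to this repository,
    /// forwards any changes to the downstream client, and triggers diff recalculation for the
    /// affected buffers.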
3573 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3574 let this = cx.weak_entity();
3575 let git_store = self.git_store.clone();
3576 let _ = self.send_keyed_job(
3577 Some(GitJobKey::ReloadBufferDiffBases),
3578 None,
3579 |state, mut cx| async move {
3580 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3581 log::error!("tried to recompute diffs for a non-local repository");
3582 return Ok(());
3583 };
3584
3585 let Some(this) = this.upgrade() else {
3586 return Ok(());
3587 };
3588
3589 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3590 git_store.update(cx, |git_store, cx| {
3591 git_store
3592 .diffs
3593 .iter()
3594 .filter_map(|(buffer_id, diff_state)| {
3595 let buffer_store = git_store.buffer_store.read(cx);
3596 let buffer = buffer_store.get(*buffer_id)?;
3597 let file = File::from_dyn(buffer.read(cx).file())?;
3598 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3599 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3600 log::debug!(
3601 "start reload diff bases for repo path {}",
3602 repo_path.as_unix_str()
3603 );
3604 diff_state.update(cx, |diff_state, _| {
3605 let has_unstaged_diff = diff_state
3606 .unstaged_diff
3607 .as_ref()
3608 .is_some_and(|diff| diff.is_upgradable());
3609 let has_uncommitted_diff = diff_state
3610 .uncommitted_diff
3611 .as_ref()
3612 .is_some_and(|set| set.is_upgradable());
3613
3614 Some((
3615 buffer,
3616 repo_path,
3617 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3618 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3619 ))
3620 })
3621 })
3622 .collect::<Vec<_>>()
3623 })
3624 })??;
3625
3626 let buffer_diff_base_changes = cx
3627 .background_spawn(async move {
3628 let mut changes = Vec::new();
3629 for (buffer, repo_path, current_index_text, current_head_text) in
3630 &repo_diff_state_updates
3631 {
3632 let index_text = if current_index_text.is_some() {
3633 backend.load_index_text(repo_path.clone()).await
3634 } else {
3635 None
3636 };
3637 let head_text = if current_head_text.is_some() {
3638 backend.load_committed_text(repo_path.clone()).await
3639 } else {
3640 None
3641 };
3642
3643 let change =
3644 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3645 (Some(current_index), Some(current_head)) => {
3646 let index_changed =
3647 index_text.as_ref() != current_index.as_deref();
3648 let head_changed =
3649 head_text.as_ref() != current_head.as_deref();
3650 if index_changed && head_changed {
3651 if index_text == head_text {
3652 Some(DiffBasesChange::SetBoth(head_text))
3653 } else {
3654 Some(DiffBasesChange::SetEach {
3655 index: index_text,
3656 head: head_text,
3657 })
3658 }
3659 } else if index_changed {
3660 Some(DiffBasesChange::SetIndex(index_text))
3661 } else if head_changed {
3662 Some(DiffBasesChange::SetHead(head_text))
3663 } else {
3664 None
3665 }
3666 }
3667 (Some(current_index), None) => {
3668 let index_changed =
3669 index_text.as_ref() != current_index.as_deref();
3670 index_changed
3671 .then_some(DiffBasesChange::SetIndex(index_text))
3672 }
3673 (None, Some(current_head)) => {
3674 let head_changed =
3675 head_text.as_ref() != current_head.as_deref();
3676 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3677 }
3678 (None, None) => None,
3679 };
3680
3681 changes.push((buffer.clone(), change))
3682 }
3683 changes
3684 })
3685 .await;
3686
3687 git_store.update(&mut cx, |git_store, cx| {
3688 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3689 let buffer_snapshot = buffer.read(cx).text_snapshot();
3690 let buffer_id = buffer_snapshot.remote_id();
3691 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3692 continue;
3693 };
3694
3695 let downstream_client = git_store.downstream_client();
3696 diff_state.update(cx, |diff_state, cx| {
3697 use proto::update_diff_bases::Mode;
3698
3699 if let Some((diff_bases_change, (client, project_id))) =
3700 diff_bases_change.clone().zip(downstream_client)
3701 {
3702 let (staged_text, committed_text, mode) = match diff_bases_change {
3703 DiffBasesChange::SetIndex(index) => {
3704 (index, None, Mode::IndexOnly)
3705 }
3706 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3707 DiffBasesChange::SetEach { index, head } => {
3708 (index, head, Mode::IndexAndHead)
3709 }
3710 DiffBasesChange::SetBoth(text) => {
3711 (None, text, Mode::IndexMatchesHead)
3712 }
3713 };
3714 client
3715 .send(proto::UpdateDiffBases {
3716 project_id: project_id.to_proto(),
3717 buffer_id: buffer_id.to_proto(),
3718 staged_text,
3719 committed_text,
3720 mode: mode as i32,
3721 })
3722 .log_err();
3723 }
3724
3725 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3726 });
3727 }
3728 })
3729 },
3730 );
3731 }
3732
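/// Enqueues `job` to run on this repository's git worker, optionally
/// reporting `status` as the in-progress message while it runs. The
/// returned receiver resolves with the job's result.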
3733 pub fn send_job<F, Fut, R>(
3734 &mut self,
3735 status: Option<SharedString>,
3736 job: F,
3737 ) -> oneshot::Receiver<R>
3738 where
3739 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3740 Fut: Future<Output = R> + 'static,
3741 R: Send + 'static,
3742 {
3743 self.send_keyed_job(None, status, job)
3744 }
3745
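/// Like `send_job`, but tagged with an optional `GitJobKey`: the worker
/// skips a keyed job when a newer job with the same key is already queued
/// behind it. While the job runs, any `status` message is recorded in
/// `active_jobs` for display via `current_job`.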
3746 fn send_keyed_job<F, Fut, R>(
3747 &mut self,
3748 key: Option<GitJobKey>,
3749 status: Option<SharedString>,
3750 job: F,
3751 ) -> oneshot::Receiver<R>
3752 where
3753 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3754 Fut: Future<Output = R> + 'static,
3755 R: Send + 'static,
3756 {
3757 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3758 let job_id = post_inc(&mut self.job_id);
3759 let this = self.this.clone();
3760 self.job_sender
3761 .unbounded_send(GitJob {
3762 key,
3763 job: Box::new(move |state, cx: &mut AsyncApp| {
3764 let job = job(state, cx.clone());
3765 cx.spawn(async move |cx| {
3766 if let Some(s) = status.clone() {
3767 this.update(cx, |this, cx| {
3768 this.active_jobs.insert(
3769 job_id,
3770 JobInfo {
3771 start: Instant::now(),
3772 message: s.clone(),
3773 },
3774 );
3775
3776 cx.notify();
3777 })
3778 .ok();
3779 }
3780 let result = job.await;
3781
3782 this.update(cx, |this, cx| {
3783 this.active_jobs.remove(&job_id);
3784 cx.notify();
3785 })
3786 .ok();
3787
3788 result_tx.send(result).ok();
3789 })
3790 }),
3791 })
3792 .ok();
3793 result_rx
3794 }
3795
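/// Marks this repository as the active one in its `GitStore` and emits
/// `GitStoreEvent::ActiveRepositoryChanged`.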
3796 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3797 let Some(git_store) = self.git_store.upgrade() else {
3798 return;
3799 };
3800 let entity = cx.entity();
3801 git_store.update(cx, |git_store, cx| {
3802 let Some((&id, _)) = git_store
3803 .repositories
3804 .iter()
3805 .find(|(_, handle)| *handle == &entity)
3806 else {
3807 return;
3808 };
3809 git_store.active_repo_id = Some(id);
3810 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3811 });
3812 }
3813
3814 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3815 self.snapshot.status()
3816 }
3817
3818 pub fn cached_stash(&self) -> GitStash {
3819 self.snapshot.stash_entries.clone()
3820 }
3821
3822 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3823 let git_store = self.git_store.upgrade()?;
3824 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3825 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3826 let abs_path = SanitizedPath::new(&abs_path);
3827 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3828 Some(ProjectPath {
3829 worktree_id: worktree.read(cx).id(),
3830 path: relative_path,
3831 })
3832 }
3833
3834 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3835 let git_store = self.git_store.upgrade()?;
3836 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3837 let abs_path = worktree_store.absolutize(path, cx)?;
3838 self.snapshot.abs_path_to_repo_path(&abs_path)
3839 }
3840
3841 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3842 other
3843 .read(cx)
3844 .snapshot
3845 .work_directory_abs_path
3846 .starts_with(&self.snapshot.work_directory_abs_path)
3847 }
3848
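/// Returns the buffer used for composing commit messages, creating it on
/// first use. Local repositories get a fresh in-memory buffer; remote
/// repositories open the host's buffer over RPC. The buffer is given the
/// "Git Commit" language when a registry is provided, and cached for reuse.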
3849 pub fn open_commit_buffer(
3850 &mut self,
3851 languages: Option<Arc<LanguageRegistry>>,
3852 buffer_store: Entity<BufferStore>,
3853 cx: &mut Context<Self>,
3854 ) -> Task<Result<Entity<Buffer>>> {
3855 let id = self.id;
3856 if let Some(buffer) = self.commit_message_buffer.clone() {
3857 return Task::ready(Ok(buffer));
3858 }
3859 let this = cx.weak_entity();
3860
3861 let rx = self.send_job(None, move |state, mut cx| async move {
3862 let Some(this) = this.upgrade() else {
3863 bail!("git store was dropped");
3864 };
3865 match state {
3866 RepositoryState::Local(..) => {
3867 this.update(&mut cx, |_, cx| {
3868 Self::open_local_commit_buffer(languages, buffer_store, cx)
3869 })?
3870 .await
3871 }
3872 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3873 let request = client.request(proto::OpenCommitMessageBuffer {
3874 project_id: project_id.0,
3875 repository_id: id.to_proto(),
3876 });
3877 let response = request.await.context("requesting to open commit buffer")?;
3878 let buffer_id = BufferId::new(response.buffer_id)?;
3879 let buffer = buffer_store
3880 .update(&mut cx, |buffer_store, cx| {
3881 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3882 })?
3883 .await?;
3884 if let Some(language_registry) = languages {
3885 let git_commit_language =
3886 language_registry.language_for_name("Git Commit").await?;
3887 buffer.update(&mut cx, |buffer, cx| {
3888 buffer.set_language(Some(git_commit_language), cx);
3889 })?;
3890 }
3891 this.update(&mut cx, |this, _| {
3892 this.commit_message_buffer = Some(buffer.clone());
3893 })?;
3894 Ok(buffer)
3895 }
3896 }
3897 });
3898
3899 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3900 }
3901
3902 fn open_local_commit_buffer(
3903 language_registry: Option<Arc<LanguageRegistry>>,
3904 buffer_store: Entity<BufferStore>,
3905 cx: &mut Context<Self>,
3906 ) -> Task<Result<Entity<Buffer>>> {
3907 cx.spawn(async move |repository, cx| {
3908 let buffer = buffer_store
3909 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3910 .await?;
3911
3912 if let Some(language_registry) = language_registry {
3913 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3914 buffer.update(cx, |buffer, cx| {
3915 buffer.set_language(Some(git_commit_language), cx);
3916 })?;
3917 }
3918
3919 repository.update(cx, |repository, _| {
3920 repository.commit_message_buffer = Some(buffer.clone());
3921 })?;
3922 Ok(buffer)
3923 })
3924 }
3925
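/// Checks out the given `paths` from `commit`, tracking each path as a
/// pending `Reverted` operation while the job runs.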
3926 pub fn checkout_files(
3927 &mut self,
3928 commit: &str,
3929 paths: Vec<RepoPath>,
3930 cx: &mut Context<Self>,
3931 ) -> Task<Result<()>> {
3932 let commit = commit.to_string();
3933 let id = self.id;
3934
3935 self.spawn_job_with_tracking(
3936 paths.clone(),
3937 pending_op::GitStatus::Reverted,
3938 cx,
3939 async move |this, cx| {
3940 this.update(cx, |this, _cx| {
3941 this.send_job(
3942 Some(format!("git checkout {}", commit).into()),
3943 move |git_repo, _| async move {
3944 match git_repo {
3945 RepositoryState::Local(LocalRepositoryState {
3946 backend,
3947 environment,
3948 ..
3949 }) => {
3950 backend
3951 .checkout_files(commit, paths, environment.clone())
3952 .await
3953 }
3954 RepositoryState::Remote(RemoteRepositoryState {
3955 project_id,
3956 client,
3957 }) => {
3958 client
3959 .request(proto::GitCheckoutFiles {
3960 project_id: project_id.0,
3961 repository_id: id.to_proto(),
3962 commit,
3963 paths: paths
3964 .into_iter()
3965 .map(|p| p.to_proto())
3966 .collect(),
3967 })
3968 .await?;
3969
3970 Ok(())
3971 }
3972 }
3973 },
3974 )
3975 })?
3976 .await?
3977 },
3978 )
3979 }
3980
3981 pub fn reset(
3982 &mut self,
3983 commit: String,
3984 reset_mode: ResetMode,
3985 _cx: &mut App,
3986 ) -> oneshot::Receiver<Result<()>> {
3987 let id = self.id;
3988
3989 self.send_job(None, move |git_repo, _| async move {
3990 match git_repo {
3991 RepositoryState::Local(LocalRepositoryState {
3992 backend,
3993 environment,
3994 ..
3995 }) => backend.reset(commit, reset_mode, environment).await,
3996 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3997 client
3998 .request(proto::GitReset {
3999 project_id: project_id.0,
4000 repository_id: id.to_proto(),
4001 commit,
4002 mode: match reset_mode {
4003 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4004 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4005 },
4006 })
4007 .await?;
4008
4009 Ok(())
4010 }
4011 }
4012 })
4013 }
4014
4015 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4016 let id = self.id;
4017 self.send_job(None, move |git_repo, _cx| async move {
4018 match git_repo {
4019 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4020 backend.show(commit).await
4021 }
4022 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4023 let resp = client
4024 .request(proto::GitShow {
4025 project_id: project_id.0,
4026 repository_id: id.to_proto(),
4027 commit,
4028 })
4029 .await?;
4030
4031 Ok(CommitDetails {
4032 sha: resp.sha.into(),
4033 message: resp.message.into(),
4034 commit_timestamp: resp.commit_timestamp,
4035 author_email: resp.author_email.into(),
4036 author_name: resp.author_name.into(),
4037 })
4038 }
4039 }
4040 })
4041 }
4042
4043 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4044 let id = self.id;
4045 self.send_job(None, move |git_repo, cx| async move {
4046 match git_repo {
4047 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4048 backend.load_commit(commit, cx).await
4049 }
4050 RepositoryState::Remote(RemoteRepositoryState {
4051 client, project_id, ..
4052 }) => {
4053 let response = client
4054 .request(proto::LoadCommitDiff {
4055 project_id: project_id.0,
4056 repository_id: id.to_proto(),
4057 commit,
4058 })
4059 .await?;
4060 Ok(CommitDiff {
4061 files: response
4062 .files
4063 .into_iter()
4064 .map(|file| {
4065 Ok(CommitFile {
4066 path: RepoPath::from_proto(&file.path)?,
4067 old_text: file.old_text,
4068 new_text: file.new_text,
4069 })
4070 })
4071 .collect::<Result<Vec<_>>>()?,
4072 })
4073 }
4074 }
4075 })
4076 }
4077
4078 pub fn file_history(
4079 &mut self,
4080 path: RepoPath,
4081 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4082 self.file_history_paginated(path, 0, None)
4083 }
4084
4085 pub fn file_history_paginated(
4086 &mut self,
4087 path: RepoPath,
4088 skip: usize,
4089 limit: Option<usize>,
4090 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4091 let id = self.id;
4092 self.send_job(None, move |git_repo, _cx| async move {
4093 match git_repo {
4094 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4095 backend.file_history_paginated(path, skip, limit).await
4096 }
4097 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4098 let response = client
4099 .request(proto::GitFileHistory {
4100 project_id: project_id.0,
4101 repository_id: id.to_proto(),
4102 path: path.to_proto(),
4103 skip: skip as u64,
4104 limit: limit.map(|l| l as u64),
4105 })
4106 .await?;
4107 Ok(git::repository::FileHistory {
4108 entries: response
4109 .entries
4110 .into_iter()
4111 .map(|entry| git::repository::FileHistoryEntry {
4112 sha: entry.sha.into(),
4113 subject: entry.subject.into(),
4114 message: entry.message.into(),
4115 commit_timestamp: entry.commit_timestamp,
4116 author_name: entry.author_name.into(),
4117 author_email: entry.author_email.into(),
4118 })
4119 .collect(),
4120 path: RepoPath::from_proto(&response.path)?,
4121 })
4122 }
4123 }
4124 })
4125 }
4126
4127 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4128 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4129 }
4130
4131 fn save_buffers<'a>(
4132 &self,
4133 entries: impl IntoIterator<Item = &'a RepoPath>,
4134 cx: &mut Context<Self>,
4135 ) -> Vec<Task<anyhow::Result<()>>> {
4136 let mut save_futures = Vec::new();
4137 if let Some(buffer_store) = self.buffer_store(cx) {
4138 buffer_store.update(cx, |buffer_store, cx| {
4139 for path in entries {
4140 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4141 continue;
4142 };
4143 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4144 && buffer
4145 .read(cx)
4146 .file()
4147 .is_some_and(|file| file.disk_state().exists())
4148 && buffer.read(cx).has_unsaved_edits()
4149 {
4150 save_futures.push(buffer_store.save_buffer(buffer, cx));
4151 }
4152 }
4153 })
4154 }
4155 save_futures
4156 }
4157
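/// Stages `entries` in the index, first saving any open buffers for them
/// that have unsaved edits. The job is keyed on the written paths so
/// redundant queued index writes are coalesced, and each entry is tracked
/// as a pending `Staged` operation.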
4158 pub fn stage_entries(
4159 &mut self,
4160 entries: Vec<RepoPath>,
4161 cx: &mut Context<Self>,
4162 ) -> Task<anyhow::Result<()>> {
4163 if entries.is_empty() {
4164 return Task::ready(Ok(()));
4165 }
4166 let id = self.id;
4167 let save_tasks = self.save_buffers(&entries, cx);
4168 let paths = entries
4169 .iter()
4170 .map(|p| p.as_unix_str())
4171 .collect::<Vec<_>>()
4172 .join(" ");
4173 let status = format!("git add {paths}");
4174 let job_key = GitJobKey::WriteIndex(entries.clone());
4175
4176 self.spawn_job_with_tracking(
4177 entries.clone(),
4178 pending_op::GitStatus::Staged,
4179 cx,
4180 async move |this, cx| {
4181 for save_task in save_tasks {
4182 save_task.await?;
4183 }
4184
4185 this.update(cx, |this, _| {
4186 this.send_keyed_job(
4187 Some(job_key),
4188 Some(status.into()),
4189 move |git_repo, _cx| async move {
4190 match git_repo {
4191 RepositoryState::Local(LocalRepositoryState {
4192 backend,
4193 environment,
4194 ..
4195 }) => backend.stage_paths(entries, environment.clone()).await,
4196 RepositoryState::Remote(RemoteRepositoryState {
4197 project_id,
4198 client,
4199 }) => {
4200 client
4201 .request(proto::Stage {
4202 project_id: project_id.0,
4203 repository_id: id.to_proto(),
4204 paths: entries
4205 .into_iter()
4206 .map(|repo_path| repo_path.to_proto())
4207 .collect(),
4208 })
4209 .await
4210 .context("sending stage request")?;
4211
4212 Ok(())
4213 }
4214 }
4215 },
4216 )
4217 })?
4218 .await?
4219 },
4220 )
4221 }
4222
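/// Unstages `entries`, mirroring `stage_entries`: open buffers are saved
/// first, the job is keyed on the written paths, and each entry is tracked
/// as a pending `Unstaged` operation.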
4223 pub fn unstage_entries(
4224 &mut self,
4225 entries: Vec<RepoPath>,
4226 cx: &mut Context<Self>,
4227 ) -> Task<anyhow::Result<()>> {
4228 if entries.is_empty() {
4229 return Task::ready(Ok(()));
4230 }
4231 let id = self.id;
4232 let save_tasks = self.save_buffers(&entries, cx);
4233 let paths = entries
4234 .iter()
4235 .map(|p| p.as_unix_str())
4236 .collect::<Vec<_>>()
4237 .join(" ");
4238 let status = format!("git reset {paths}");
4239 let job_key = GitJobKey::WriteIndex(entries.clone());
4240
4241 self.spawn_job_with_tracking(
4242 entries.clone(),
4243 pending_op::GitStatus::Unstaged,
4244 cx,
4245 async move |this, cx| {
4246 for save_task in save_tasks {
4247 save_task.await?;
4248 }
4249
4250 this.update(cx, |this, _| {
4251 this.send_keyed_job(
4252 Some(job_key),
4253 Some(status.into()),
4254 move |git_repo, _cx| async move {
4255 match git_repo {
4256 RepositoryState::Local(LocalRepositoryState {
4257 backend,
4258 environment,
4259 ..
4260 }) => backend.unstage_paths(entries, environment).await,
4261 RepositoryState::Remote(RemoteRepositoryState {
4262 project_id,
4263 client,
4264 }) => {
4265 client
4266 .request(proto::Unstage {
4267 project_id: project_id.0,
4268 repository_id: id.to_proto(),
4269 paths: entries
4270 .into_iter()
4271 .map(|repo_path| repo_path.to_proto())
4272 .collect(),
4273 })
4274 .await
4275 .context("sending unstage request")?;
4276
4277 Ok(())
4278 }
4279 }
4280 },
4281 )
4282 })?
4283 .await?
4284 },
4285 )
4286 }
4287
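/// Stages every entry from the cached status that is not already fully
/// staged and has no pending staging operation.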
4288 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4289 let to_stage = self
4290 .cached_status()
4291 .filter_map(|entry| {
4292 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4293 if ops.staging() || ops.staged() {
4294 None
4295 } else {
4296 Some(entry.repo_path)
4297 }
4298 } else if entry.status.staging().is_fully_staged() {
4299 None
4300 } else {
4301 Some(entry.repo_path)
4302 }
4303 })
4304 .collect();
4305 self.stage_entries(to_stage, cx)
4306 }
4307
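/// Unstages every entry from the cached status that is at least partially
/// staged or has a pending staging operation.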
4308 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4309 let to_unstage = self
4310 .cached_status()
4311 .filter_map(|entry| {
4312 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4313 if !ops.staging() && !ops.staged() {
4314 None
4315 } else {
4316 Some(entry.repo_path)
4317 }
4318 } else if entry.status.staging().is_fully_unstaged() {
4319 None
4320 } else {
4321 Some(entry.repo_path)
4322 }
4323 })
4324 .collect();
4325 self.unstage_entries(to_unstage, cx)
4326 }
4327
4328 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4329 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4330
4331 self.stash_entries(to_stash, cx)
4332 }
4333
4334 pub fn stash_entries(
4335 &mut self,
4336 entries: Vec<RepoPath>,
4337 cx: &mut Context<Self>,
4338 ) -> Task<anyhow::Result<()>> {
4339 let id = self.id;
4340
4341 cx.spawn(async move |this, cx| {
4342 this.update(cx, |this, _| {
4343 this.send_job(None, move |git_repo, _cx| async move {
4344 match git_repo {
4345 RepositoryState::Local(LocalRepositoryState {
4346 backend,
4347 environment,
4348 ..
4349 }) => backend.stash_paths(entries, environment).await,
4350 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4351 client
4352 .request(proto::Stash {
4353 project_id: project_id.0,
4354 repository_id: id.to_proto(),
4355 paths: entries
4356 .into_iter()
4357 .map(|repo_path| repo_path.to_proto())
4358 .collect(),
4359 })
4360 .await
4361 .context("sending stash request")?;
4362 Ok(())
4363 }
4364 }
4365 })
4366 })?
4367 .await??;
4368 Ok(())
4369 })
4370 }
4371
4372 pub fn stash_pop(
4373 &mut self,
4374 index: Option<usize>,
4375 cx: &mut Context<Self>,
4376 ) -> Task<anyhow::Result<()>> {
4377 let id = self.id;
4378 cx.spawn(async move |this, cx| {
4379 this.update(cx, |this, _| {
4380 this.send_job(None, move |git_repo, _cx| async move {
4381 match git_repo {
4382 RepositoryState::Local(LocalRepositoryState {
4383 backend,
4384 environment,
4385 ..
4386 }) => backend.stash_pop(index, environment).await,
4387 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4388 client
4389 .request(proto::StashPop {
4390 project_id: project_id.0,
4391 repository_id: id.to_proto(),
4392 stash_index: index.map(|i| i as u64),
4393 })
4394 .await
4395 .context("sending stash pop request")?;
4396 Ok(())
4397 }
4398 }
4399 })
4400 })?
4401 .await??;
4402 Ok(())
4403 })
4404 }
4405
4406 pub fn stash_apply(
4407 &mut self,
4408 index: Option<usize>,
4409 cx: &mut Context<Self>,
4410 ) -> Task<anyhow::Result<()>> {
4411 let id = self.id;
4412 cx.spawn(async move |this, cx| {
4413 this.update(cx, |this, _| {
4414 this.send_job(None, move |git_repo, _cx| async move {
4415 match git_repo {
4416 RepositoryState::Local(LocalRepositoryState {
4417 backend,
4418 environment,
4419 ..
4420 }) => backend.stash_apply(index, environment).await,
4421 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4422 client
4423 .request(proto::StashApply {
4424 project_id: project_id.0,
4425 repository_id: id.to_proto(),
4426 stash_index: index.map(|i| i as u64),
4427 })
4428 .await
4429 .context("sending stash apply request")?;
4430 Ok(())
4431 }
4432 }
4433 })
4434 })?
4435 .await??;
4436 Ok(())
4437 })
4438 }
4439
4440 pub fn stash_drop(
4441 &mut self,
4442 index: Option<usize>,
4443 cx: &mut Context<Self>,
4444 ) -> oneshot::Receiver<anyhow::Result<()>> {
4445 let id = self.id;
4446 let updates_tx = self
4447 .git_store()
4448 .and_then(|git_store| match &git_store.read(cx).state {
4449 GitStoreState::Local { downstream, .. } => downstream
4450 .as_ref()
4451 .map(|downstream| downstream.updates_tx.clone()),
4452 _ => None,
4453 });
4454 let this = cx.weak_entity();
4455 self.send_job(None, move |git_repo, mut cx| async move {
4456 match git_repo {
4457 RepositoryState::Local(LocalRepositoryState {
4458 backend,
4459 environment,
4460 ..
4461 }) => {
4462 // TODO would be nice to not have to do this manually
4463 let result = backend.stash_drop(index, environment).await;
4464 if result.is_ok()
4465 && let Ok(stash_entries) = backend.stash_entries().await
4466 {
4467 let snapshot = this.update(&mut cx, |this, cx| {
4468 this.snapshot.stash_entries = stash_entries;
4469 cx.emit(RepositoryEvent::StashEntriesChanged);
4470 this.snapshot.clone()
4471 })?;
4472 if let Some(updates_tx) = updates_tx {
4473 updates_tx
4474 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4475 .ok();
4476 }
4477 }
4478
4479 result
4480 }
4481 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4482 client
4483 .request(proto::StashDrop {
4484 project_id: project_id.0,
4485 repository_id: id.to_proto(),
4486 stash_index: index.map(|i| i as u64),
4487 })
4488 .await
4489 .context("sending stash drop request")?;
4490 Ok(())
4491 }
4492 }
4493 })
4494 }
4495
4496 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4497 let id = self.id;
4498 self.send_job(
4499 Some(format!("git hook {}", hook.as_str()).into()),
4500 move |git_repo, _cx| async move {
4501 match git_repo {
4502 RepositoryState::Local(LocalRepositoryState {
4503 backend,
4504 environment,
4505 ..
4506 }) => backend.run_hook(hook, environment.clone()).await,
4507 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4508 client
4509 .request(proto::RunGitHook {
4510 project_id: project_id.0,
4511 repository_id: id.to_proto(),
4512 hook: hook.to_proto(),
4513 })
4514 .await?;
4515
4516 Ok(())
4517 }
4518 }
4519 },
4520 )
4521 }
4522
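/// Commits the staged changes with `message`, running the pre-commit hook
/// first. For remote repositories the `askpass` delegate is registered so
/// that credential prompts from the host can be routed back to this client.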
4523 pub fn commit(
4524 &mut self,
4525 message: SharedString,
4526 name_and_email: Option<(SharedString, SharedString)>,
4527 options: CommitOptions,
4528 askpass: AskPassDelegate,
4529 cx: &mut App,
4530 ) -> oneshot::Receiver<Result<()>> {
4531 let id = self.id;
4532 let askpass_delegates = self.askpass_delegates.clone();
4533 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4534
4535 let rx = self.run_hook(RunHook::PreCommit, cx);
4536
4537 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4538 rx.await??;
4539
4540 match git_repo {
4541 RepositoryState::Local(LocalRepositoryState {
4542 backend,
4543 environment,
4544 ..
4545 }) => {
4546 backend
4547 .commit(message, name_and_email, options, askpass, environment)
4548 .await
4549 }
4550 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4551 askpass_delegates.lock().insert(askpass_id, askpass);
4552 let _defer = util::defer(|| {
4553 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4554 debug_assert!(askpass_delegate.is_some());
4555 });
4556 let (name, email) = name_and_email.unzip();
4557 client
4558 .request(proto::Commit {
4559 project_id: project_id.0,
4560 repository_id: id.to_proto(),
4561 message: String::from(message),
4562 name: name.map(String::from),
4563 email: email.map(String::from),
4564 options: Some(proto::commit::CommitOptions {
4565 amend: options.amend,
4566 signoff: options.signoff,
4567 }),
4568 askpass_id,
4569 })
4570 .await
4571 .context("sending commit request")?;
4572
4573 Ok(())
4574 }
4575 }
4576 })
4577 }
4578
4579 pub fn fetch(
4580 &mut self,
4581 fetch_options: FetchOptions,
4582 askpass: AskPassDelegate,
4583 _cx: &mut App,
4584 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4585 let askpass_delegates = self.askpass_delegates.clone();
4586 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4587 let id = self.id;
4588
4589 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4590 match git_repo {
4591 RepositoryState::Local(LocalRepositoryState {
4592 backend,
4593 environment,
4594 ..
4595 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4596 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4597 askpass_delegates.lock().insert(askpass_id, askpass);
4598 let _defer = util::defer(|| {
4599 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4600 debug_assert!(askpass_delegate.is_some());
4601 });
4602
4603 let response = client
4604 .request(proto::Fetch {
4605 project_id: project_id.0,
4606 repository_id: id.to_proto(),
4607 askpass_id,
4608 remote: fetch_options.to_proto(),
4609 })
4610 .await
4611 .context("sending fetch request")?;
4612
4613 Ok(RemoteCommandOutput {
4614 stdout: response.stdout,
4615 stderr: response.stderr,
4616 })
4617 }
4618 }
4619 })
4620 }
4621
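/// Pushes `branch` to `remote` with the given options. For local
/// repositories the branch list is re-read on success so the snapshot's
/// head branch stays current, and the updated snapshot is forwarded to any
/// downstream clients when the project is shared.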
4622 pub fn push(
4623 &mut self,
4624 branch: SharedString,
4625 remote: SharedString,
4626 options: Option<PushOptions>,
4627 askpass: AskPassDelegate,
4628 cx: &mut Context<Self>,
4629 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4630 let askpass_delegates = self.askpass_delegates.clone();
4631 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4632 let id = self.id;
4633
4634 let args = options
4635 .map(|option| match option {
4636 PushOptions::SetUpstream => " --set-upstream",
4637 PushOptions::Force => " --force-with-lease",
4638 })
4639 .unwrap_or("");
4640
4641 let updates_tx = self
4642 .git_store()
4643 .and_then(|git_store| match &git_store.read(cx).state {
4644 GitStoreState::Local { downstream, .. } => downstream
4645 .as_ref()
4646 .map(|downstream| downstream.updates_tx.clone()),
4647 _ => None,
4648 });
4649
4650 let this = cx.weak_entity();
4651 self.send_job(
4652 Some(format!("git push {} {} {}", args, remote, branch).into()),
4653 move |git_repo, mut cx| async move {
4654 match git_repo {
4655 RepositoryState::Local(LocalRepositoryState {
4656 backend,
4657 environment,
4658 ..
4659 }) => {
4660 let result = backend
4661 .push(
4662 branch.to_string(),
4663 remote.to_string(),
4664 options,
4665 askpass,
4666 environment.clone(),
4667 cx.clone(),
4668 )
4669 .await;
4670 // TODO would be nice to not have to do this manually
4671 if result.is_ok() {
4672 let branches = backend.branches().await?;
4673 let branch = branches.into_iter().find(|branch| branch.is_head);
4674 log::info!("head branch after scan is {branch:?}");
4675 let snapshot = this.update(&mut cx, |this, cx| {
4676 this.snapshot.branch = branch;
4677 cx.emit(RepositoryEvent::BranchChanged);
4678 this.snapshot.clone()
4679 })?;
4680 if let Some(updates_tx) = updates_tx {
4681 updates_tx
4682 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4683 .ok();
4684 }
4685 }
4686 result
4687 }
4688 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4689 askpass_delegates.lock().insert(askpass_id, askpass);
4690 let _defer = util::defer(|| {
4691 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4692 debug_assert!(askpass_delegate.is_some());
4693 });
4694 let response = client
4695 .request(proto::Push {
4696 project_id: project_id.0,
4697 repository_id: id.to_proto(),
4698 askpass_id,
4699 branch_name: branch.to_string(),
4700 remote_name: remote.to_string(),
4701 options: options.map(|options| match options {
4702 PushOptions::Force => proto::push::PushOptions::Force,
4703 PushOptions::SetUpstream => {
4704 proto::push::PushOptions::SetUpstream
4705 }
4706 }
4707 as i32),
4708 })
4709 .await
4710 .context("sending push request")?;
4711
4712 Ok(RemoteCommandOutput {
4713 stdout: response.stdout,
4714 stderr: response.stderr,
4715 })
4716 }
4717 }
4718 },
4719 )
4720 }
4721
4722 pub fn pull(
4723 &mut self,
4724 branch: Option<SharedString>,
4725 remote: SharedString,
4726 rebase: bool,
4727 askpass: AskPassDelegate,
4728 _cx: &mut App,
4729 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4730 let askpass_delegates = self.askpass_delegates.clone();
4731 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4732 let id = self.id;
4733
4734 let mut status = "git pull".to_string();
4735 if rebase {
4736 status.push_str(" --rebase");
4737 }
4738 status.push_str(&format!(" {}", remote));
4739 if let Some(b) = &branch {
4740 status.push_str(&format!(" {}", b));
4741 }
4742
4743 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4744 match git_repo {
4745 RepositoryState::Local(LocalRepositoryState {
4746 backend,
4747 environment,
4748 ..
4749 }) => {
4750 backend
4751 .pull(
4752 branch.as_ref().map(|b| b.to_string()),
4753 remote.to_string(),
4754 rebase,
4755 askpass,
4756 environment.clone(),
4757 cx,
4758 )
4759 .await
4760 }
4761 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4762 askpass_delegates.lock().insert(askpass_id, askpass);
4763 let _defer = util::defer(|| {
4764 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4765 debug_assert!(askpass_delegate.is_some());
4766 });
4767 let response = client
4768 .request(proto::Pull {
4769 project_id: project_id.0,
4770 repository_id: id.to_proto(),
4771 askpass_id,
4772 rebase,
4773 branch_name: branch.as_ref().map(|b| b.to_string()),
4774 remote_name: remote.to_string(),
4775 })
4776 .await
4777 .context("sending pull request")?;
4778
4779 Ok(RemoteCommandOutput {
4780 stdout: response.stdout,
4781 stderr: response.stderr,
4782 })
4783 }
4784 }
4785 })
4786 }
4787
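/// Queues a keyed job that writes `content` as the index text for `path`,
/// preserving the file's executable bit for local repositories. When
/// `hunk_staging_operation_count` is provided, it is recorded on the
/// buffer's diff state after the write completes.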
4788 fn spawn_set_index_text_job(
4789 &mut self,
4790 path: RepoPath,
4791 content: Option<String>,
4792 hunk_staging_operation_count: Option<usize>,
4793 cx: &mut Context<Self>,
4794 ) -> oneshot::Receiver<anyhow::Result<()>> {
4795 let id = self.id;
4796 let this = cx.weak_entity();
4797 let git_store = self.git_store.clone();
4798 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4799 self.send_keyed_job(
4800 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4801 None,
4802 move |git_repo, mut cx| async move {
4803 log::debug!(
4804 "start updating index text for buffer {}",
4805 path.as_unix_str()
4806 );
4807
4808 match git_repo {
4809 RepositoryState::Local(LocalRepositoryState {
4810 fs,
4811 backend,
4812 environment,
4813 ..
4814 }) => {
4815 let executable = match fs.metadata(&abs_path).await {
4816 Ok(Some(meta)) => meta.is_executable,
4817 Ok(None) => false,
4818 Err(_err) => false,
4819 };
4820 backend
4821 .set_index_text(path.clone(), content, environment.clone(), executable)
4822 .await?;
4823 }
4824 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4825 client
4826 .request(proto::SetIndexText {
4827 project_id: project_id.0,
4828 repository_id: id.to_proto(),
4829 path: path.to_proto(),
4830 text: content,
4831 })
4832 .await?;
4833 }
4834 }
4835 log::debug!(
4836 "finish updating index text for buffer {}",
4837 path.as_unix_str()
4838 );
4839
4840 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4841 let project_path = this
4842 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4843 .ok()
4844 .flatten();
4845 git_store.update(&mut cx, |git_store, cx| {
4846 let buffer_id = git_store
4847 .buffer_store
4848 .read(cx)
4849 .get_by_path(&project_path?)?
4850 .read(cx)
4851 .remote_id();
4852 let diff_state = git_store.diffs.get(&buffer_id)?;
4853 diff_state.update(cx, |diff_state, _| {
4854 diff_state.hunk_staging_operation_count_as_of_write =
4855 hunk_staging_operation_count;
4856 });
4857 Some(())
4858 })?;
4859 }
4860 Ok(())
4861 },
4862 )
4863 }
4864
4865 pub fn get_remotes(
4866 &mut self,
4867 branch_name: Option<String>,
4868 is_push: bool,
4869 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4870 let id = self.id;
4871 self.send_job(None, move |repo, _cx| async move {
4872 match repo {
4873 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4874 let remote = if let Some(branch_name) = branch_name {
4875 if is_push {
4876 backend.get_push_remote(branch_name).await?
4877 } else {
4878 backend.get_branch_remote(branch_name).await?
4879 }
4880 } else {
4881 None
4882 };
4883
4884 match remote {
4885 Some(remote) => Ok(vec![remote]),
4886 None => backend.get_all_remotes().await,
4887 }
4888 }
4889 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4890 let response = client
4891 .request(proto::GetRemotes {
4892 project_id: project_id.0,
4893 repository_id: id.to_proto(),
4894 branch_name,
4895 is_push,
4896 })
4897 .await?;
4898
4899 let remotes = response
4900 .remotes
4901 .into_iter()
4902 .map(|remote| git::repository::Remote {
4903 name: remote.name.into(),
4904 })
4905 .collect();
4906
4907 Ok(remotes)
4908 }
4909 }
4910 })
4911 }
4912
4913 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4914 let id = self.id;
4915 self.send_job(None, move |repo, _| async move {
4916 match repo {
4917 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4918 backend.branches().await
4919 }
4920 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4921 let response = client
4922 .request(proto::GitGetBranches {
4923 project_id: project_id.0,
4924 repository_id: id.to_proto(),
4925 })
4926 .await?;
4927
4928 let branches = response
4929 .branches
4930 .into_iter()
4931 .map(|branch| proto_to_branch(&branch))
4932 .collect();
4933
4934 Ok(branches)
4935 }
4936 }
4937 })
4938 }
4939
4940 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4941 let id = self.id;
4942 self.send_job(None, move |repo, _| async move {
4943 match repo {
4944 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4945 backend.worktrees().await
4946 }
4947 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4948 let response = client
4949 .request(proto::GitGetWorktrees {
4950 project_id: project_id.0,
4951 repository_id: id.to_proto(),
4952 })
4953 .await?;
4954
4955 let worktrees = response
4956 .worktrees
4957 .into_iter()
4958 .map(|worktree| proto_to_worktree(&worktree))
4959 .collect();
4960
4961 Ok(worktrees)
4962 }
4963 }
4964 })
4965 }
4966
4967 pub fn create_worktree(
4968 &mut self,
4969 name: String,
4970 path: PathBuf,
4971 commit: Option<String>,
4972 ) -> oneshot::Receiver<Result<()>> {
4973 let id = self.id;
4974 self.send_job(
4975 Some("git worktree add".into()),
4976 move |repo, _cx| async move {
4977 match repo {
4978 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4979 backend.create_worktree(name, path, commit).await
4980 }
4981 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4982 client
4983 .request(proto::GitCreateWorktree {
4984 project_id: project_id.0,
4985 repository_id: id.to_proto(),
4986 name,
4987 directory: path.to_string_lossy().to_string(),
4988 commit,
4989 })
4990 .await?;
4991
4992 Ok(())
4993 }
4994 }
4995 },
4996 )
4997 }
4998
4999 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5000 let id = self.id;
5001 self.send_job(None, move |repo, _| async move {
5002 match repo {
5003 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5004 backend.default_branch().await
5005 }
5006 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5007 let response = client
5008 .request(proto::GetDefaultBranch {
5009 project_id: project_id.0,
5010 repository_id: id.to_proto(),
5011 })
5012 .await?;
5013
5014 anyhow::Ok(response.branch.map(SharedString::from))
5015 }
5016 }
5017 })
5018 }
5019
5020 pub fn diff_tree(
5021 &mut self,
5022 diff_type: DiffTreeType,
5023 _cx: &App,
5024 ) -> oneshot::Receiver<Result<TreeDiff>> {
5025 let repository_id = self.snapshot.id;
5026 self.send_job(None, move |repo, _cx| async move {
5027 match repo {
5028 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5029 backend.diff_tree(diff_type).await
5030 }
5031 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5032 let response = client
5033 .request(proto::GetTreeDiff {
5034 project_id: project_id.0,
5035 repository_id: repository_id.0,
5036 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5037 base: diff_type.base().to_string(),
5038 head: diff_type.head().to_string(),
5039 })
5040 .await?;
5041
5042 let entries = response
5043 .entries
5044 .into_iter()
5045 .filter_map(|entry| {
5046 let status = match entry.status() {
5047 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5048 proto::tree_diff_status::Status::Modified => {
5049 TreeDiffStatus::Modified {
5050 old: git::Oid::from_str(
5051 &entry.oid.context("missing oid").log_err()?,
5052 )
5053 .log_err()?,
5054 }
5055 }
5056 proto::tree_diff_status::Status::Deleted => {
5057 TreeDiffStatus::Deleted {
5058 old: git::Oid::from_str(
5059 &entry.oid.context("missing oid").log_err()?,
5060 )
5061 .log_err()?,
5062 }
5063 }
5064 };
5065 Some((
5066 RepoPath::from_rel_path(
5067 &RelPath::from_proto(&entry.path).log_err()?,
5068 ),
5069 status,
5070 ))
5071 })
5072 .collect();
5073
5074 Ok(TreeDiff { entries })
5075 }
5076 }
5077 })
5078 }
5079
5080 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5081 let id = self.id;
5082 self.send_job(None, move |repo, _cx| async move {
5083 match repo {
5084 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5085 backend.diff(diff_type).await
5086 }
5087 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5088 let response = client
5089 .request(proto::GitDiff {
5090 project_id: project_id.0,
5091 repository_id: id.to_proto(),
5092 diff_type: match diff_type {
5093 DiffType::HeadToIndex => {
5094 proto::git_diff::DiffType::HeadToIndex.into()
5095 }
5096 DiffType::HeadToWorktree => {
5097 proto::git_diff::DiffType::HeadToWorktree.into()
5098 }
5099 },
5100 })
5101 .await?;
5102
5103 Ok(response.diff)
5104 }
5105 }
5106 })
5107 }
5108
5109 pub fn create_branch(
5110 &mut self,
5111 branch_name: String,
5112 base_branch: Option<String>,
5113 ) -> oneshot::Receiver<Result<()>> {
5114 let id = self.id;
5115 let status_msg = if let Some(ref base) = base_branch {
5116 format!("git switch -c {branch_name} {base}").into()
5117 } else {
5118 format!("git switch -c {branch_name}").into()
5119 };
5120 self.send_job(Some(status_msg), move |repo, _cx| async move {
5121 match repo {
5122 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5123 backend.create_branch(branch_name, base_branch).await
5124 }
5125 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5126 client
5127 .request(proto::GitCreateBranch {
5128 project_id: project_id.0,
5129 repository_id: id.to_proto(),
5130 branch_name,
5131 })
5132 .await?;
5133
5134 Ok(())
5135 }
5136 }
5137 })
5138 }
5139
5140 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5141 let id = self.id;
5142 self.send_job(
5143 Some(format!("git switch {branch_name}").into()),
5144 move |repo, _cx| async move {
5145 match repo {
5146 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5147 backend.change_branch(branch_name).await
5148 }
5149 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5150 client
5151 .request(proto::GitChangeBranch {
5152 project_id: project_id.0,
5153 repository_id: id.to_proto(),
5154 branch_name,
5155 })
5156 .await?;
5157
5158 Ok(())
5159 }
5160 }
5161 },
5162 )
5163 }
5164
5165 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5166 let id = self.id;
5167 self.send_job(
5168 Some(format!("git branch -d {branch_name}").into()),
5169 move |repo, _cx| async move {
5170 match repo {
5171 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5172 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5173 client
5174 .request(proto::GitDeleteBranch {
5175 project_id: project_id.0,
5176 repository_id: id.to_proto(),
5177 branch_name,
5178 })
5179 .await?;
5180
5181 Ok(())
5182 }
5183 }
5184 },
5185 )
5186 }
5187
5188 pub fn rename_branch(
5189 &mut self,
5190 branch: String,
5191 new_name: String,
5192 ) -> oneshot::Receiver<Result<()>> {
5193 let id = self.id;
5194 self.send_job(
5195 Some(format!("git branch -m {branch} {new_name}").into()),
5196 move |repo, _cx| async move {
5197 match repo {
5198 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5199 backend.rename_branch(branch, new_name).await
5200 }
5201 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5202 client
5203 .request(proto::GitRenameBranch {
5204 project_id: project_id.0,
5205 repository_id: id.to_proto(),
5206 branch,
5207 new_name,
5208 })
5209 .await?;
5210
5211 Ok(())
5212 }
5213 }
5214 },
5215 )
5216 }
5217
5218 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5219 let id = self.id;
5220 self.send_job(None, move |repo, _cx| async move {
5221 match repo {
5222 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5223 backend.check_for_pushed_commit().await
5224 }
5225 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5226 let response = client
5227 .request(proto::CheckForPushedCommits {
5228 project_id: project_id.0,
5229 repository_id: id.to_proto(),
5230 })
5231 .await?;
5232
5233 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5234
5235 Ok(branches)
5236 }
5237 }
5238 })
5239 }
5240
5241 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5242 self.send_job(None, |repo, _cx| async move {
5243 match repo {
5244 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5245 backend.checkpoint().await
5246 }
5247 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5248 }
5249 })
5250 }
5251
5252 pub fn restore_checkpoint(
5253 &mut self,
5254 checkpoint: GitRepositoryCheckpoint,
5255 ) -> oneshot::Receiver<Result<()>> {
5256 self.send_job(None, move |repo, _cx| async move {
5257 match repo {
5258 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5259 backend.restore_checkpoint(checkpoint).await
5260 }
5261 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5262 }
5263 })
5264 }
5265
5266 pub(crate) fn apply_remote_update(
5267 &mut self,
5268 update: proto::UpdateRepository,
5269 cx: &mut Context<Self>,
5270 ) -> Result<()> {
5271 let conflicted_paths = TreeSet::from_ordered_entries(
5272 update
5273 .current_merge_conflicts
5274 .into_iter()
5275 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5276 );
5277 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5278 let new_head_commit = update
5279 .head_commit_details
5280 .as_ref()
5281 .map(proto_to_commit_details);
5282 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5283 cx.emit(RepositoryEvent::BranchChanged)
5284 }
5285 self.snapshot.branch = new_branch;
5286 self.snapshot.head_commit = new_head_commit;
5287
5288 self.snapshot.merge.conflicted_paths = conflicted_paths;
5289 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5290 let new_stash_entries = GitStash {
5291 entries: update
5292 .stash_entries
5293 .iter()
5294 .filter_map(|entry| proto_to_stash(entry).ok())
5295 .collect(),
5296 };
5297 if self.snapshot.stash_entries != new_stash_entries {
5298 cx.emit(RepositoryEvent::StashEntriesChanged)
5299 }
5300 self.snapshot.stash_entries = new_stash_entries;
5301
5302 let edits = update
5303 .removed_statuses
5304 .into_iter()
5305 .filter_map(|path| {
5306 Some(sum_tree::Edit::Remove(PathKey(
5307 RelPath::from_proto(&path).log_err()?,
5308 )))
5309 })
5310 .chain(
5311 update
5312 .updated_statuses
5313 .into_iter()
5314 .filter_map(|updated_status| {
5315 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5316 }),
5317 )
5318 .collect::<Vec<_>>();
5319 if !edits.is_empty() {
5320 cx.emit(RepositoryEvent::StatusesChanged);
5321 }
5322 self.snapshot.statuses_by_path.edit(edits, ());
5323 if update.is_last_update {
5324 self.snapshot.scan_id = update.scan_id;
5325 }
5326 self.clear_pending_ops(cx);
5327 Ok(())
5328 }
5329
5330 pub fn compare_checkpoints(
5331 &mut self,
5332 left: GitRepositoryCheckpoint,
5333 right: GitRepositoryCheckpoint,
5334 ) -> oneshot::Receiver<Result<bool>> {
5335 self.send_job(None, move |repo, _cx| async move {
5336 match repo {
5337 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5338 backend.compare_checkpoints(left, right).await
5339 }
5340 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5341 }
5342 })
5343 }
5344
5345 pub fn diff_checkpoints(
5346 &mut self,
5347 base_checkpoint: GitRepositoryCheckpoint,
5348 target_checkpoint: GitRepositoryCheckpoint,
5349 ) -> oneshot::Receiver<Result<String>> {
5350 self.send_job(None, move |repo, _cx| async move {
5351 match repo {
5352 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5353 backend
5354 .diff_checkpoints(base_checkpoint, target_checkpoint)
5355 .await
5356 }
5357 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5358 }
5359 })
5360 }
5361
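/// Drops pending operations whose jobs are no longer running, emitting
/// `RepositoryEvent::PendingOpsChanged` if the set changed.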
5362 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5363 let updated = SumTree::from_iter(
5364 self.pending_ops.iter().filter_map(|ops| {
5365 let inner_ops: Vec<PendingOp> =
5366 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5367 if inner_ops.is_empty() {
5368 None
5369 } else {
5370 Some(PendingOps {
5371 repo_path: ops.repo_path.clone(),
5372 ops: inner_ops,
5373 })
5374 }
5375 }),
5376 (),
5377 );
5378
5379 if updated != self.pending_ops {
5380 cx.emit(RepositoryEvent::PendingOpsChanged {
5381 pending_ops: self.pending_ops.clone(),
5382 })
5383 }
5384
5385 self.pending_ops = updated;
5386 }
5387
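/// Queues a full git status rescan, keyed so that redundant queued scans
/// are coalesced. The resulting snapshot replaces the current one, pending
/// ops are pruned, and the snapshot is forwarded downstream when the
/// project is shared.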
5388 fn schedule_scan(
5389 &mut self,
5390 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5391 cx: &mut Context<Self>,
5392 ) {
5393 let this = cx.weak_entity();
5394 let _ = self.send_keyed_job(
5395 Some(GitJobKey::ReloadGitState),
5396 None,
5397 |state, mut cx| async move {
5398 log::debug!("run scheduled git status scan");
5399
5400 let Some(this) = this.upgrade() else {
5401 return Ok(());
5402 };
5403 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5404 bail!("not a local repository")
5405 };
5406 let (snapshot, events) = this
5407 .update(&mut cx, |this, _| {
5408 this.paths_needing_status_update.clear();
5409 compute_snapshot(
5410 this.id,
5411 this.work_directory_abs_path.clone(),
5412 this.snapshot.clone(),
5413 backend.clone(),
5414 )
5415 })?
5416 .await?;
5417 this.update(&mut cx, |this, cx| {
5418 this.snapshot = snapshot.clone();
5419 this.clear_pending_ops(cx);
5420 for event in events {
5421 cx.emit(event);
5422 }
5423 })?;
5424 if let Some(updates_tx) = updates_tx {
5425 updates_tx
5426 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5427 .ok();
5428 }
5429 Ok(())
5430 },
5431 );
5432 }
5433
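/// Spawns the worker loop that executes queued `GitJob`s against a local
/// repository once its backend finishes loading. A keyed job is skipped
/// when a newer job with the same key is already waiting in the queue. It
/// also registers additional git hosting providers for the repository's
/// backend when a global registry is available.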
5434 fn spawn_local_git_worker(
5435 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5436 cx: &mut Context<Self>,
5437 ) -> mpsc::UnboundedSender<GitJob> {
5438 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5439
5440 cx.spawn(async move |_, cx| {
5441 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5442 if let Some(git_hosting_provider_registry) =
5443 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5444 {
5445 git_hosting_providers::register_additional_providers(
5446 git_hosting_provider_registry,
5447 state.backend.clone(),
5448 );
5449 }
5450 let state = RepositoryState::Local(state);
5451 let mut jobs = VecDeque::new();
5452 loop {
5453 while let Ok(Some(next_job)) = job_rx.try_next() {
5454 jobs.push_back(next_job);
5455 }
5456
5457 if let Some(job) = jobs.pop_front() {
5458 if let Some(current_key) = &job.key
5459 && jobs
5460 .iter()
5461 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5462 {
5463 continue;
5464 }
5465 (job.job)(state.clone(), cx).await;
5466 } else if let Some(job) = job_rx.next().await {
5467 jobs.push_back(job);
5468 } else {
5469 break;
5470 }
5471 }
5472 anyhow::Ok(())
5473 })
5474 .detach_and_log_err(cx);
5475
5476 job_tx
5477 }
5478
5479 fn spawn_remote_git_worker(
5480 state: RemoteRepositoryState,
5481 cx: &mut Context<Self>,
5482 ) -> mpsc::UnboundedSender<GitJob> {
5483 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5484
5485 cx.spawn(async move |_, cx| {
5486 let state = RepositoryState::Remote(state);
5487 let mut jobs = VecDeque::new();
5488 loop {
5489 while let Ok(Some(next_job)) = job_rx.try_next() {
5490 jobs.push_back(next_job);
5491 }
5492
5493 if let Some(job) = jobs.pop_front() {
5494 if let Some(current_key) = &job.key
5495 && jobs
5496 .iter()
5497 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5498 {
5499 continue;
5500 }
5501 (job.job)(state.clone(), cx).await;
5502 } else if let Some(job) = job_rx.next().await {
5503 jobs.push_back(job);
5504 } else {
5505 break;
5506 }
5507 }
5508 anyhow::Ok(())
5509 })
5510 .detach_and_log_err(cx);
5511
5512 job_tx
5513 }
5514
5515 fn load_staged_text(
5516 &mut self,
5517 buffer_id: BufferId,
5518 repo_path: RepoPath,
5519 cx: &App,
5520 ) -> Task<Result<Option<String>>> {
5521 let rx = self.send_job(None, move |state, _| async move {
5522 match state {
5523 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5524 anyhow::Ok(backend.load_index_text(repo_path).await)
5525 }
5526 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5527 let response = client
5528 .request(proto::OpenUnstagedDiff {
5529 project_id: project_id.to_proto(),
5530 buffer_id: buffer_id.to_proto(),
5531 })
5532 .await?;
5533 Ok(response.staged_text)
5534 }
5535 }
5536 });
5537 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5538 }
5539
5540 fn load_committed_text(
5541 &mut self,
5542 buffer_id: BufferId,
5543 repo_path: RepoPath,
5544 cx: &App,
5545 ) -> Task<Result<DiffBasesChange>> {
5546 let rx = self.send_job(None, move |state, _| async move {
5547 match state {
5548 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5549 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5550 let staged_text = backend.load_index_text(repo_path).await;
5551 let diff_bases_change = if committed_text == staged_text {
5552 DiffBasesChange::SetBoth(committed_text)
5553 } else {
5554 DiffBasesChange::SetEach {
5555 index: staged_text,
5556 head: committed_text,
5557 }
5558 };
5559 anyhow::Ok(diff_bases_change)
5560 }
5561 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5562 use proto::open_uncommitted_diff_response::Mode;
5563
5564 let response = client
5565 .request(proto::OpenUncommittedDiff {
5566 project_id: project_id.to_proto(),
5567 buffer_id: buffer_id.to_proto(),
5568 })
5569 .await?;
5570 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5571 let bases = match mode {
5572 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5573 Mode::IndexAndHead => DiffBasesChange::SetEach {
5574 head: response.committed_text,
5575 index: response.staged_text,
5576 },
5577 };
5578 Ok(bases)
5579 }
5580 }
5581 });
5582
5583 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5584 }
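
/// Loads the content of the blob with the given `oid`, either directly from
/// the local backend or via the host for remote repositories.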
5585 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5586 let repository_id = self.snapshot.id;
5587 let rx = self.send_job(None, move |state, _| async move {
5588 match state {
5589 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5590 backend.load_blob_content(oid).await
5591 }
5592 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5593 let response = client
5594 .request(proto::GetBlobContent {
5595 project_id: project_id.to_proto(),
5596 repository_id: repository_id.0,
5597 oid: oid.to_string(),
5598 })
5599 .await?;
5600 Ok(response.content)
5601 }
5602 }
5603 });
5604 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5605 }
5606
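/// Records `paths` as needing a status refresh and queues a keyed job that
/// re-queries git for them, inserting only the entries whose status actually
/// changed, removing entries that no longer have a status, and refreshing
/// the stash list. The updated snapshot is sent downstream when shared.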
5607 fn paths_changed(
5608 &mut self,
5609 paths: Vec<RepoPath>,
5610 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5611 cx: &mut Context<Self>,
5612 ) {
5613 self.paths_needing_status_update.extend(paths);
5614
5615 let this = cx.weak_entity();
5616 let _ = self.send_keyed_job(
5617 Some(GitJobKey::RefreshStatuses),
5618 None,
5619 |state, mut cx| async move {
5620 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5621 (
5622 this.snapshot.clone(),
5623 mem::take(&mut this.paths_needing_status_update),
5624 )
5625 })?;
5626 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5627 bail!("not a local repository")
5628 };
5629
5630 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5631 if paths.is_empty() {
5632 return Ok(());
5633 }
5634 let statuses = backend.status(&paths).await?;
5635 let stash_entries = backend.stash_entries().await?;
5636
5637 let changed_path_statuses = cx
5638 .background_spawn(async move {
5639 let mut changed_path_statuses = Vec::new();
5640 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5641 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5642
5643 for (repo_path, status) in &*statuses.entries {
5644 changed_paths.remove(repo_path);
5645 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5646 && cursor.item().is_some_and(|entry| entry.status == *status)
5647 {
5648 continue;
5649 }
5650
5651 changed_path_statuses.push(Edit::Insert(StatusEntry {
5652 repo_path: repo_path.clone(),
5653 status: *status,
5654 }));
5655 }
5656 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5657 for path in changed_paths.into_iter() {
5658 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5659 changed_path_statuses
5660 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5661 }
5662 }
5663 changed_path_statuses
5664 })
5665 .await;
5666
5667 this.update(&mut cx, |this, cx| {
5668 if this.snapshot.stash_entries != stash_entries {
5669 cx.emit(RepositoryEvent::StashEntriesChanged);
5670 this.snapshot.stash_entries = stash_entries;
5671 }
5672
5673 if !changed_path_statuses.is_empty() {
5674 cx.emit(RepositoryEvent::StatusesChanged);
5675 this.snapshot
5676 .statuses_by_path
5677 .edit(changed_path_statuses, ());
5678 this.snapshot.scan_id += 1;
5679 }
5680
5681 if let Some(updates_tx) = updates_tx {
5682 updates_tx
5683 .unbounded_send(DownstreamUpdate::UpdateRepository(
5684 this.snapshot.clone(),
5685 ))
5686 .ok();
5687 }
5688 })
5689 },
5690 );
5691 }
5692
5693 /// Returns the currently running git command and when it started, if any.
5694 pub fn current_job(&self) -> Option<JobInfo> {
5695 self.active_jobs.values().next().cloned()
5696 }
5697
5698 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5699 self.send_job(None, |_, _| async {})
5700 }
5701
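/// Runs `f` while tracking each of `paths` as a pending operation with the
/// given `git_status`. When the task completes, the ops are marked
/// `Finished`, `Skipped` (if the underlying job was canceled), or `Error`.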
5702 fn spawn_job_with_tracking<AsyncFn>(
5703 &mut self,
5704 paths: Vec<RepoPath>,
5705 git_status: pending_op::GitStatus,
5706 cx: &mut Context<Self>,
5707 f: AsyncFn,
5708 ) -> Task<Result<()>>
5709 where
5710 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5711 {
5712 let ids = self.new_pending_ops_for_paths(paths, git_status);
5713
5714 cx.spawn(async move |this, cx| {
5715 let (job_status, result) = match f(this.clone(), cx).await {
5716 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5717 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5718 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5719 };
5720
5721 this.update(cx, |this, _| {
5722 let mut edits = Vec::with_capacity(ids.len());
5723 for (id, entry) in ids {
5724 if let Some(mut ops) = this
5725 .pending_ops
5726 .get(&PathKey(entry.as_ref().clone()), ())
5727 .cloned()
5728 {
5729 if let Some(op) = ops.op_by_id_mut(id) {
5730 op.job_status = job_status;
5731 }
5732 edits.push(sum_tree::Edit::Insert(ops));
5733 }
5734 }
5735 this.pending_ops.edit(edits, ());
5736 })?;
5737
5738 result
5739 })
5740 }
5741
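/// Appends a new `Running` pending op with `git_status` for each path,
/// returning the `(id, path)` pairs so callers can update those ops once
/// their job finishes.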
5742 fn new_pending_ops_for_paths(
5743 &mut self,
5744 paths: Vec<RepoPath>,
5745 git_status: pending_op::GitStatus,
5746 ) -> Vec<(PendingOpId, RepoPath)> {
5747 let mut edits = Vec::with_capacity(paths.len());
5748 let mut ids = Vec::with_capacity(paths.len());
5749 for path in paths {
5750 let mut ops = self
5751 .pending_ops
5752 .get(&PathKey(path.as_ref().clone()), ())
5753 .cloned()
5754 .unwrap_or_else(|| PendingOps::new(&path));
5755 let id = ops.max_id() + 1;
5756 ops.ops.push(PendingOp {
5757 id,
5758 git_status,
5759 job_status: pending_op::JobStatus::Running,
5760 });
5761 edits.push(sum_tree::Edit::Insert(ops));
5762 ids.push((id, path));
5763 }
5764 self.pending_ops.edit(edits, ());
5765 ids
5766 }
5767}
5768
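/// Builds a permalink to the original repository for a file that lives in the extracted
/// source of a published crate, using the crate's `.cargo_vcs_info.json` for the commit SHA
/// and in-repo path prefix, and the `package.repository` field of `Cargo.toml` for the remote.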
5769fn get_permalink_in_rust_registry_src(
5770 provider_registry: Arc<GitHostingProviderRegistry>,
5771 path: PathBuf,
5772 selection: Range<u32>,
5773) -> Result<url::Url> {
5774 #[derive(Deserialize)]
5775 struct CargoVcsGit {
5776 sha1: String,
5777 }
5778
5779 #[derive(Deserialize)]
5780 struct CargoVcsInfo {
5781 git: CargoVcsGit,
5782 path_in_vcs: String,
5783 }
5784
5785 #[derive(Deserialize)]
5786 struct CargoPackage {
5787 repository: String,
5788 }
5789
5790 #[derive(Deserialize)]
5791 struct CargoToml {
5792 package: CargoPackage,
5793 }
5794
5795 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5796 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5797 Some((dir, json))
5798 }) else {
5799 bail!("No .cargo_vcs_info.json found in parent directories")
5800 };
5801 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5802 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5803 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5804 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5805 .context("parsing package.repository field of manifest")?;
5806 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5807 let permalink = provider.build_permalink(
5808 remote,
5809 BuildPermalinkParams::new(
5810 &cargo_vcs_info.git.sha1,
5811 &RepoPath::from_rel_path(
5812 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5813 ),
5814 Some(selection),
5815 ),
5816 );
5817 Ok(permalink)
5818}
5819
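/// Converts a `git::blame::Blame` into its protobuf representation; `None` maps to a
/// response with no `blame_response` payload.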
5820fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5821 let Some(blame) = blame else {
5822 return proto::BlameBufferResponse {
5823 blame_response: None,
5824 };
5825 };
5826
5827 let entries = blame
5828 .entries
5829 .into_iter()
5830 .map(|entry| proto::BlameEntry {
5831 sha: entry.sha.as_bytes().into(),
5832 start_line: entry.range.start,
5833 end_line: entry.range.end,
5834 original_line_number: entry.original_line_number,
5835 author: entry.author,
5836 author_mail: entry.author_mail,
5837 author_time: entry.author_time,
5838 author_tz: entry.author_tz,
5839 committer: entry.committer_name,
5840 committer_mail: entry.committer_email,
5841 committer_time: entry.committer_time,
5842 committer_tz: entry.committer_tz,
5843 summary: entry.summary,
5844 previous: entry.previous,
5845 filename: entry.filename,
5846 })
5847 .collect::<Vec<_>>();
5848
5849 let messages = blame
5850 .messages
5851 .into_iter()
5852 .map(|(oid, message)| proto::CommitMessage {
5853 oid: oid.as_bytes().into(),
5854 message,
5855 })
5856 .collect::<Vec<_>>();
5857
5858 proto::BlameBufferResponse {
5859 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5860 entries,
5861 messages,
5862 remote_url: blame.remote_url,
5863 }),
5864 }
5865}
5866
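/// Inverse of `serialize_blame_buffer_response`. Entries and commit messages whose object
/// ids fail to parse are dropped.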
5867fn deserialize_blame_buffer_response(
5868 response: proto::BlameBufferResponse,
5869) -> Option<git::blame::Blame> {
5870 let response = response.blame_response?;
5871 let entries = response
5872 .entries
5873 .into_iter()
5874 .filter_map(|entry| {
5875 Some(git::blame::BlameEntry {
5876 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5877 range: entry.start_line..entry.end_line,
5878 original_line_number: entry.original_line_number,
5879 committer_name: entry.committer,
5880 committer_time: entry.committer_time,
5881 committer_tz: entry.committer_tz,
5882 committer_email: entry.committer_mail,
5883 author: entry.author,
5884 author_mail: entry.author_mail,
5885 author_time: entry.author_time,
5886 author_tz: entry.author_tz,
5887 summary: entry.summary,
5888 previous: entry.previous,
5889 filename: entry.filename,
5890 })
5891 })
5892 .collect::<Vec<_>>();
5893
5894 let messages = response
5895 .messages
5896 .into_iter()
5897 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5898 .collect::<HashMap<_, _>>();
5899
5900 Some(Blame {
5901 entries,
5902 messages,
5903 remote_url: response.remote_url,
5904 })
5905}
5906
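/// Converts a `git::repository::Branch` into its protobuf representation; the timestamp and
/// commit summary are taken from the branch's most recent commit, when present.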
5907fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5908 proto::Branch {
5909 is_head: branch.is_head,
5910 ref_name: branch.ref_name.to_string(),
5911 unix_timestamp: branch
5912 .most_recent_commit
5913 .as_ref()
5914 .map(|commit| commit.commit_timestamp as u64),
5915 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5916 ref_name: upstream.ref_name.to_string(),
5917 tracking: upstream
5918 .tracking
5919 .status()
5920 .map(|upstream| proto::UpstreamTracking {
5921 ahead: upstream.ahead as u64,
5922 behind: upstream.behind as u64,
5923 }),
5924 }),
5925 most_recent_commit: branch
5926 .most_recent_commit
5927 .as_ref()
5928 .map(|commit| proto::CommitSummary {
5929 sha: commit.sha.to_string(),
5930 subject: commit.subject.to_string(),
5931 commit_timestamp: commit.commit_timestamp,
5932 author_name: commit.author_name.to_string(),
5933 }),
5934 }
5935}
5936
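/// Converts a `git::repository::Worktree` into its protobuf representation. Note that the
/// worktree path is serialized with `to_string_lossy`, so non-UTF-8 paths are not
/// round-tripped exactly.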
5937fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5938 proto::Worktree {
5939 path: worktree.path.to_string_lossy().to_string(),
5940 ref_name: worktree.ref_name.to_string(),
5941 sha: worktree.sha.to_string(),
5942 }
5943}
5944
5945fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5946 git::repository::Worktree {
5947 path: PathBuf::from(proto.path.clone()),
5948 ref_name: proto.ref_name.clone().into(),
5949 sha: proto.sha.clone().into(),
5950 }
5951}
5952
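/// Converts a protobuf branch back into a `git::repository::Branch`. A missing tracking
/// status maps to `UpstreamTracking::Gone`, and `has_parent` is always assumed to be true.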
5953fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5954 git::repository::Branch {
5955 is_head: proto.is_head,
5956 ref_name: proto.ref_name.clone().into(),
5957 upstream: proto
5958 .upstream
5959 .as_ref()
5960 .map(|upstream| git::repository::Upstream {
5961 ref_name: upstream.ref_name.to_string().into(),
5962 tracking: upstream
5963 .tracking
5964 .as_ref()
5965 .map(|tracking| {
5966 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5967 ahead: tracking.ahead as u32,
5968 behind: tracking.behind as u32,
5969 })
5970 })
5971 .unwrap_or(git::repository::UpstreamTracking::Gone),
5972 }),
5973 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5974 git::repository::CommitSummary {
5975 sha: commit.sha.to_string().into(),
5976 subject: commit.subject.to_string().into(),
5977 commit_timestamp: commit.commit_timestamp,
5978 author_name: commit.author_name.to_string().into(),
5979 has_parent: true,
5980 }
5981 }),
5982 }
5983}
5984
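/// Converts `CommitDetails` into its protobuf representation; see `proto_to_commit_details`
/// for the inverse.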
5985fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5986 proto::GitCommitDetails {
5987 sha: commit.sha.to_string(),
5988 message: commit.message.to_string(),
5989 commit_timestamp: commit.commit_timestamp,
5990 author_email: commit.author_email.to_string(),
5991 author_name: commit.author_name.to_string(),
5992 }
5993}
5994
5995fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
5996 CommitDetails {
5997 sha: proto.sha.clone().into(),
5998 message: proto.message.clone().into(),
5999 commit_timestamp: proto.commit_timestamp,
6000 author_email: proto.author_email.clone().into(),
6001 author_name: proto.author_name.clone().into(),
6002 }
6003}
6004
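/// Recomputes a full `RepositorySnapshot` from the git backend (branch, statuses, stash,
/// merge details, HEAD commit, and remote URLs), returning the new snapshot together with
/// the `RepositoryEvent`s describing what changed relative to `prev_snapshot`.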
6005async fn compute_snapshot(
6006 id: RepositoryId,
6007 work_directory_abs_path: Arc<Path>,
6008 prev_snapshot: RepositorySnapshot,
6009 backend: Arc<dyn GitRepository>,
6010) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6011 let mut events = Vec::new();
6012 let branches = backend.branches().await?;
6013 let branch = branches.into_iter().find(|branch| branch.is_head);
6014 let statuses = backend
6015 .status(&[RepoPath::from_rel_path(
6016 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6017 )])
6018 .await?;
6019 let stash_entries = backend.stash_entries().await?;
6020 let statuses_by_path = SumTree::from_iter(
6021 statuses
6022 .entries
6023 .iter()
6024 .map(|(repo_path, status)| StatusEntry {
6025 repo_path: repo_path.clone(),
6026 status: *status,
6027 }),
6028 (),
6029 );
6030 let (merge_details, merge_heads_changed) =
6031 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6032 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6033
6034 if merge_heads_changed {
6035 events.push(RepositoryEvent::MergeHeadsChanged);
6036 }
6037
6038 if statuses_by_path != prev_snapshot.statuses_by_path {
6039 events.push(RepositoryEvent::StatusesChanged)
6040 }
6041
6042 // Load the HEAD commit directly; this is useful when `branch` is `None`, e.g. in a detached HEAD state.
6043 let head_commit = match backend.head_sha().await {
6044 Some(head_sha) => backend.show(head_sha).await.log_err(),
6045 None => None,
6046 };
6047
6048 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6049 events.push(RepositoryEvent::BranchChanged);
6050 }
6051
6052 // The remote URLs are used by edit prediction data collection.
6053 let remote_origin_url = backend.remote_url("origin");
6054 let remote_upstream_url = backend.remote_url("upstream");
6055
6056 let snapshot = RepositorySnapshot {
6057 id,
6058 statuses_by_path,
6059 work_directory_abs_path,
6060 path_style: prev_snapshot.path_style,
6061 scan_id: prev_snapshot.scan_id + 1,
6062 branch,
6063 head_commit,
6064 merge: merge_details,
6065 remote_origin_url,
6066 remote_upstream_url,
6067 stash_entries,
6068 };
6069
6070 Ok((snapshot, events))
6071}
6072
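/// Decodes a file status from its protobuf representation. When the structured `variant`
/// is absent, the legacy `simple_status` code is used as a fallback.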
6073fn status_from_proto(
6074 simple_status: i32,
6075 status: Option<proto::GitFileStatus>,
6076) -> anyhow::Result<FileStatus> {
6077 use proto::git_file_status::Variant;
6078
6079 let Some(variant) = status.and_then(|status| status.variant) else {
6080 let code = proto::GitStatus::from_i32(simple_status)
6081 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6082 let result = match code {
6083 proto::GitStatus::Added => TrackedStatus {
6084 worktree_status: StatusCode::Added,
6085 index_status: StatusCode::Unmodified,
6086 }
6087 .into(),
6088 proto::GitStatus::Modified => TrackedStatus {
6089 worktree_status: StatusCode::Modified,
6090 index_status: StatusCode::Unmodified,
6091 }
6092 .into(),
6093 proto::GitStatus::Conflict => UnmergedStatus {
6094 first_head: UnmergedStatusCode::Updated,
6095 second_head: UnmergedStatusCode::Updated,
6096 }
6097 .into(),
6098 proto::GitStatus::Deleted => TrackedStatus {
6099 worktree_status: StatusCode::Deleted,
6100 index_status: StatusCode::Unmodified,
6101 }
6102 .into(),
6103 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6104 };
6105 return Ok(result);
6106 };
6107
6108 let result = match variant {
6109 Variant::Untracked(_) => FileStatus::Untracked,
6110 Variant::Ignored(_) => FileStatus::Ignored,
6111 Variant::Unmerged(unmerged) => {
6112 let [first_head, second_head] =
6113 [unmerged.first_head, unmerged.second_head].map(|head| {
6114 let code = proto::GitStatus::from_i32(head)
6115 .with_context(|| format!("Invalid git status code: {head}"))?;
6116 let result = match code {
6117 proto::GitStatus::Added => UnmergedStatusCode::Added,
6118 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6119 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6120 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6121 };
6122 Ok(result)
6123 });
6124 let [first_head, second_head] = [first_head?, second_head?];
6125 UnmergedStatus {
6126 first_head,
6127 second_head,
6128 }
6129 .into()
6130 }
6131 Variant::Tracked(tracked) => {
6132 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6133 .map(|status| {
6134 let code = proto::GitStatus::from_i32(status)
6135 .with_context(|| format!("Invalid git status code: {status}"))?;
6136 let result = match code {
6137 proto::GitStatus::Modified => StatusCode::Modified,
6138 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6139 proto::GitStatus::Added => StatusCode::Added,
6140 proto::GitStatus::Deleted => StatusCode::Deleted,
6141 proto::GitStatus::Renamed => StatusCode::Renamed,
6142 proto::GitStatus::Copied => StatusCode::Copied,
6143 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6144 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6145 };
6146 Ok(result)
6147 });
6148 let [index_status, worktree_status] = [index_status?, worktree_status?];
6149 TrackedStatus {
6150 index_status,
6151 worktree_status,
6152 }
6153 .into()
6154 }
6155 };
6156 Ok(result)
6157}
6158
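/// Encodes a `FileStatus` as its structured protobuf variant; the inverse of the `variant`
/// branch of `status_from_proto`.
///
/// A minimal round-trip sketch (illustrative only, not compiled here):
///
/// ```ignore
/// let status: FileStatus = TrackedStatus {
///     index_status: StatusCode::Unmodified,
///     worktree_status: StatusCode::Modified,
/// }
/// .into();
/// let proto = status_to_proto(status);
/// // The structured variant takes precedence, so the simple status code (0) is ignored.
/// assert!(status_from_proto(0, Some(proto)).unwrap() == status);
/// ```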
6159fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6160 use proto::git_file_status::{Tracked, Unmerged, Variant};
6161
6162 let variant = match status {
6163 FileStatus::Untracked => Variant::Untracked(Default::default()),
6164 FileStatus::Ignored => Variant::Ignored(Default::default()),
6165 FileStatus::Unmerged(UnmergedStatus {
6166 first_head,
6167 second_head,
6168 }) => Variant::Unmerged(Unmerged {
6169 first_head: unmerged_status_to_proto(first_head),
6170 second_head: unmerged_status_to_proto(second_head),
6171 }),
6172 FileStatus::Tracked(TrackedStatus {
6173 index_status,
6174 worktree_status,
6175 }) => Variant::Tracked(Tracked {
6176 index_status: tracked_status_to_proto(index_status),
6177 worktree_status: tracked_status_to_proto(worktree_status),
6178 }),
6179 };
6180 proto::GitFileStatus {
6181 variant: Some(variant),
6182 }
6183}
6184
6185fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6186 match code {
6187 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6188 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6189 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6190 }
6191}
6192
6193fn tracked_status_to_proto(code: StatusCode) -> i32 {
6194 match code {
6195 StatusCode::Added => proto::GitStatus::Added as _,
6196 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6197 StatusCode::Modified => proto::GitStatus::Modified as _,
6198 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6199 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6200 StatusCode::Copied => proto::GitStatus::Copied as _,
6201 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6202 }
6203}