1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
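/// Central store for a project's git state.
///
/// Tracks the [`Repository`] entities discovered in the project's worktrees,
/// owns the per-buffer diff and conflict-marker state, and relays repository
/// updates between local and remote (collab) peers.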
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
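/// Diffs for a single buffer that have been shared with a remote peer.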
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
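/// Per-buffer git state: weak handles to the buffer's unstaged and uncommitted
/// diffs and its conflict set, plus the background tasks that keep them current.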
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<String>>,
133 index_text: Option<Arc<String>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
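/// Describes which of a buffer's diff base texts changed: the index text, the
/// HEAD text, or both.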
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
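/// The kind of diff tracked for a buffer: unstaged (buffer vs. index) or
/// uncommitted (buffer vs. HEAD).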
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
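/// Whether this store operates on local repositories on disk or mirrors the
/// state of an upstream (remote) project.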
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
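/// A checkpoint of every repository in the store, keyed by working directory.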
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
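/// The git status of a single path within a repository.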
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
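/// Identifier for a repository within a [`GitStore`], also used in proto
/// messages exchanged with remote peers.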
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
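/// An immutable, cheaply clonable view of a repository's state (statuses,
/// branch, head commit, merge details, stash), used to compute downstream
/// updates.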
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
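/// A live handle to a git repository.
///
/// Dereferences to its current [`RepositorySnapshot`] and serializes git
/// operations through an internal job queue.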
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(
                    &Shell::System,
                    work_directory_abs_path.clone(),
                    cx,
                )
321 })?
322 .await
323 .unwrap_or_else(|| {
                log::error!(
                    "failed to get working directory environment for repository {work_directory_abs_path:?}"
                );
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
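/// Backing state for a repository: either a local git backend or a client
/// connection to the upstream project that owns it.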
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
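/// A unit of work to run against a repository's [`RepositoryState`].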
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
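/// Identifies a class of queued git jobs, such as index writes for a
/// particular set of paths.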
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_delete_branch);
476 client.add_entity_request_handler(Self::handle_git_init);
477 client.add_entity_request_handler(Self::handle_push);
478 client.add_entity_request_handler(Self::handle_pull);
479 client.add_entity_request_handler(Self::handle_fetch);
480 client.add_entity_request_handler(Self::handle_stage);
481 client.add_entity_request_handler(Self::handle_unstage);
482 client.add_entity_request_handler(Self::handle_stash);
483 client.add_entity_request_handler(Self::handle_stash_pop);
484 client.add_entity_request_handler(Self::handle_stash_apply);
485 client.add_entity_request_handler(Self::handle_stash_drop);
486 client.add_entity_request_handler(Self::handle_commit);
487 client.add_entity_request_handler(Self::handle_run_hook);
488 client.add_entity_request_handler(Self::handle_reset);
489 client.add_entity_request_handler(Self::handle_show);
490 client.add_entity_request_handler(Self::handle_load_commit_diff);
491 client.add_entity_request_handler(Self::handle_file_history);
492 client.add_entity_request_handler(Self::handle_checkout_files);
493 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
494 client.add_entity_request_handler(Self::handle_set_index_text);
495 client.add_entity_request_handler(Self::handle_askpass);
496 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
497 client.add_entity_request_handler(Self::handle_git_diff);
498 client.add_entity_request_handler(Self::handle_tree_diff);
499 client.add_entity_request_handler(Self::handle_get_blob_content);
500 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
501 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
502 client.add_entity_message_handler(Self::handle_update_diff_bases);
503 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
504 client.add_entity_request_handler(Self::handle_blame_buffer);
505 client.add_entity_message_handler(Self::handle_update_repository);
506 client.add_entity_message_handler(Self::handle_remove_repository);
507 client.add_entity_request_handler(Self::handle_git_clone);
508 client.add_entity_request_handler(Self::handle_get_worktrees);
509 client.add_entity_request_handler(Self::handle_create_worktree);
510 }
511
512 pub fn is_local(&self) -> bool {
513 matches!(self.state, GitStoreState::Local { .. })
    }

    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
516 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
517 let id = repo.read(cx).id;
518 if self.active_repo_id != Some(id) {
519 self.active_repo_id = Some(id);
520 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
521 }
522 }
523 }
524
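    /// Begin streaming repository updates to the given downstream client after
    /// the project has been shared.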
525 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
526 match &mut self.state {
527 GitStoreState::Remote {
528 downstream: downstream_client,
529 ..
530 } => {
531 for repo in self.repositories.values() {
532 let update = repo.read(cx).snapshot.initial_update(project_id);
533 for update in split_repository_update(update) {
534 client.send(update).log_err();
535 }
536 }
537 *downstream_client = Some((client, ProjectId(project_id)));
538 }
539 GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } => {
543 let mut snapshots = HashMap::default();
544 let (updates_tx, mut updates_rx) = mpsc::unbounded();
545 for repo in self.repositories.values() {
546 updates_tx
547 .unbounded_send(DownstreamUpdate::UpdateRepository(
548 repo.read(cx).snapshot.clone(),
549 ))
550 .ok();
551 }
552 *downstream_client = Some(LocalDownstreamState {
553 client: client.clone(),
554 project_id: ProjectId(project_id),
555 updates_tx,
556 _task: cx.spawn(async move |this, cx| {
557 cx.background_spawn(async move {
558 while let Some(update) = updates_rx.next().await {
559 match update {
560 DownstreamUpdate::UpdateRepository(snapshot) => {
561 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
562 {
563 let update =
564 snapshot.build_update(old_snapshot, project_id);
565 *old_snapshot = snapshot;
566 for update in split_repository_update(update) {
567 client.send(update)?;
568 }
569 } else {
570 let update = snapshot.initial_update(project_id);
571 for update in split_repository_update(update) {
572 client.send(update)?;
573 }
574 snapshots.insert(snapshot.id, snapshot);
575 }
576 }
577 DownstreamUpdate::RemoveRepository(id) => {
578 client.send(proto::RemoveRepository {
579 project_id,
580 id: id.to_proto(),
581 })?;
582 }
583 }
584 }
585 anyhow::Ok(())
586 })
587 .await
588 .ok();
589 this.update(cx, |this, _| {
590 if let GitStoreState::Local {
591 downstream: downstream_client,
592 ..
593 } = &mut this.state
594 {
595 downstream_client.take();
596 } else {
                                unreachable!("git store state changed from local to remote while shared");
598 }
599 })
600 }),
601 });
602 }
603 }
604 }
605
606 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
607 match &mut self.state {
608 GitStoreState::Local {
609 downstream: downstream_client,
610 ..
611 } => {
612 downstream_client.take();
613 }
614 GitStoreState::Remote {
615 downstream: downstream_client,
616 ..
617 } => {
618 downstream_client.take();
619 }
620 }
621 self.shared_diffs.clear();
622 }
623
624 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
625 self.shared_diffs.remove(peer_id);
626 }
627
628 pub fn active_repository(&self) -> Option<Entity<Repository>> {
629 self.active_repo_id
630 .as_ref()
631 .map(|id| self.repositories[id].clone())
632 }
633
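    /// Returns a diff of the buffer's contents against its index (staged) text,
    /// loading the staged text from the repository if needed.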
634 pub fn open_unstaged_diff(
635 &mut self,
636 buffer: Entity<Buffer>,
637 cx: &mut Context<Self>,
638 ) -> Task<Result<Entity<BufferDiff>>> {
639 let buffer_id = buffer.read(cx).remote_id();
640 if let Some(diff_state) = self.diffs.get(&buffer_id)
641 && let Some(unstaged_diff) = diff_state
642 .read(cx)
643 .unstaged_diff
644 .as_ref()
645 .and_then(|weak| weak.upgrade())
646 {
647 if let Some(task) =
648 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
649 {
650 return cx.background_executor().spawn(async move {
651 task.await;
652 Ok(unstaged_diff)
653 });
654 }
655 return Task::ready(Ok(unstaged_diff));
656 }
657
658 let Some((repo, repo_path)) =
659 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
660 else {
661 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
662 };
663
664 let task = self
665 .loading_diffs
666 .entry((buffer_id, DiffKind::Unstaged))
667 .or_insert_with(|| {
668 let staged_text = repo.update(cx, |repo, cx| {
669 repo.load_staged_text(buffer_id, repo_path, cx)
670 });
671 cx.spawn(async move |this, cx| {
672 Self::open_diff_internal(
673 this,
674 DiffKind::Unstaged,
675 staged_text.await.map(DiffBasesChange::SetIndex),
676 buffer,
677 cx,
678 )
679 .await
680 .map_err(Arc::new)
681 })
682 .shared()
683 })
684 .clone();
685
686 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
687 }
688
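    /// Returns a diff of the buffer's contents against the blob with the given
    /// OID (or against no base text when `oid` is `None`), with the buffer's
    /// unstaged diff attached as the secondary diff.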
689 pub fn open_diff_since(
690 &mut self,
691 oid: Option<git::Oid>,
692 buffer: Entity<Buffer>,
693 repo: Entity<Repository>,
694 languages: Arc<LanguageRegistry>,
695 cx: &mut Context<Self>,
696 ) -> Task<Result<Entity<BufferDiff>>> {
697 cx.spawn(async move |this, cx| {
698 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
699 let content = match oid {
700 None => None,
701 Some(oid) => Some(
702 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
703 .await?,
704 ),
705 };
706 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
707
708 buffer_diff
709 .update(cx, |buffer_diff, cx| {
710 buffer_diff.set_base_text(
711 content.map(Arc::new),
712 buffer_snapshot.language().cloned(),
713 Some(languages.clone()),
714 buffer_snapshot.text,
715 cx,
716 )
717 })?
718 .await?;
719 let unstaged_diff = this
720 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
721 .await?;
722 buffer_diff.update(cx, |buffer_diff, _| {
723 buffer_diff.set_secondary_diff(unstaged_diff);
724 })?;
725
726 this.update(cx, |_, cx| {
727 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
728 .detach();
729 })?;
730
731 Ok(buffer_diff)
732 })
733 }
734
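    /// Returns a diff of the buffer's contents against its committed (HEAD)
    /// text, loading the committed text from the repository if needed.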
735 pub fn open_uncommitted_diff(
736 &mut self,
737 buffer: Entity<Buffer>,
738 cx: &mut Context<Self>,
739 ) -> Task<Result<Entity<BufferDiff>>> {
740 let buffer_id = buffer.read(cx).remote_id();
741
742 if let Some(diff_state) = self.diffs.get(&buffer_id)
743 && let Some(uncommitted_diff) = diff_state
744 .read(cx)
745 .uncommitted_diff
746 .as_ref()
747 .and_then(|weak| weak.upgrade())
748 {
749 if let Some(task) =
750 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
751 {
752 return cx.background_executor().spawn(async move {
753 task.await;
754 Ok(uncommitted_diff)
755 });
756 }
757 return Task::ready(Ok(uncommitted_diff));
758 }
759
760 let Some((repo, repo_path)) =
761 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
762 else {
763 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
764 };
765
766 let task = self
767 .loading_diffs
768 .entry((buffer_id, DiffKind::Uncommitted))
769 .or_insert_with(|| {
770 let changes = repo.update(cx, |repo, cx| {
771 repo.load_committed_text(buffer_id, repo_path, cx)
772 });
773
774 // todo(lw): hot foreground spawn
775 cx.spawn(async move |this, cx| {
776 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
777 .await
778 .map_err(Arc::new)
779 })
780 .shared()
781 })
782 .clone();
783
784 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
785 }
786
787 async fn open_diff_internal(
788 this: WeakEntity<Self>,
789 kind: DiffKind,
790 texts: Result<DiffBasesChange>,
791 buffer_entity: Entity<Buffer>,
792 cx: &mut AsyncApp,
793 ) -> Result<Entity<BufferDiff>> {
794 let diff_bases_change = match texts {
795 Err(e) => {
796 this.update(cx, |this, cx| {
797 let buffer = buffer_entity.read(cx);
798 let buffer_id = buffer.remote_id();
799 this.loading_diffs.remove(&(buffer_id, kind));
800 })?;
801 return Err(e);
802 }
803 Ok(change) => change,
804 };
805
806 this.update(cx, |this, cx| {
807 let buffer = buffer_entity.read(cx);
808 let buffer_id = buffer.remote_id();
809 let language = buffer.language().cloned();
810 let language_registry = buffer.language_registry();
811 let text_snapshot = buffer.text_snapshot();
812 this.loading_diffs.remove(&(buffer_id, kind));
813
814 let git_store = cx.weak_entity();
815 let diff_state = this
816 .diffs
817 .entry(buffer_id)
818 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
819
820 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
821
822 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
823 diff_state.update(cx, |diff_state, cx| {
824 diff_state.language = language;
825 diff_state.language_registry = language_registry;
826
827 match kind {
828 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
829 DiffKind::Uncommitted => {
830 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
831 diff
832 } else {
833 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
834 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
835 unstaged_diff
836 };
837
838 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
839 diff_state.uncommitted_diff = Some(diff.downgrade())
840 }
841 }
842
843 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
844 let rx = diff_state.wait_for_recalculation();
845
846 anyhow::Ok(async move {
847 if let Some(rx) = rx {
848 rx.await;
849 }
850 Ok(diff)
851 })
852 })
853 })??
854 .await
855 }
856
857 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
858 let diff_state = self.diffs.get(&buffer_id)?;
859 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
860 }
861
862 pub fn get_uncommitted_diff(
863 &self,
864 buffer_id: BufferId,
865 cx: &App,
866 ) -> Option<Entity<BufferDiff>> {
867 let diff_state = self.diffs.get(&buffer_id)?;
868 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
869 }
870
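    /// Returns the conflict set for the given buffer, creating it and scheduling
    /// a reparse of the buffer's conflict markers if necessary.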
871 pub fn open_conflict_set(
872 &mut self,
873 buffer: Entity<Buffer>,
874 cx: &mut Context<Self>,
875 ) -> Entity<ConflictSet> {
876 log::debug!("open conflict set");
877 let buffer_id = buffer.read(cx).remote_id();
878
879 if let Some(git_state) = self.diffs.get(&buffer_id)
880 && let Some(conflict_set) = git_state
881 .read(cx)
882 .conflict_set
883 .as_ref()
884 .and_then(|weak| weak.upgrade())
885 {
887 let buffer_snapshot = buffer.read(cx).text_snapshot();
888
889 git_state.update(cx, |state, cx| {
890 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
891 });
892
893 return conflict_set;
894 }
895
896 let is_unmerged = self
897 .repository_and_path_for_buffer_id(buffer_id, cx)
898 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
899 let git_store = cx.weak_entity();
900 let buffer_git_state = self
901 .diffs
902 .entry(buffer_id)
903 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
904 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
905
906 self._subscriptions
907 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
908 cx.emit(GitStoreEvent::ConflictsUpdated);
909 }));
910
911 buffer_git_state.update(cx, |state, cx| {
912 state.conflict_set = Some(conflict_set.downgrade());
913 let buffer_snapshot = buffer.read(cx).text_snapshot();
914 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
915 });
916
917 conflict_set
918 }
919
920 pub fn project_path_git_status(
921 &self,
922 project_path: &ProjectPath,
923 cx: &App,
924 ) -> Option<FileStatus> {
925 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
926 Some(repo.read(cx).status_for_path(&repo_path)?.status)
927 }
928
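    /// Captures a checkpoint of every repository in the store, suitable for
    /// passing to [`Self::restore_checkpoint`] or [`Self::compare_checkpoints`].
    ///
    /// A rough usage sketch (not compiled; assumes a `GitStore` reference and a
    /// `cx: &mut App`, with the returned tasks awaited in an async context):
    ///
    /// ```ignore
    /// let checkpoint = git_store.checkpoint(cx).await?;
    /// // ...mutate the working trees...
    /// git_store.restore_checkpoint(checkpoint, cx).await?;
    /// ```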
929 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
930 let mut work_directory_abs_paths = Vec::new();
931 let mut checkpoints = Vec::new();
932 for repository in self.repositories.values() {
933 repository.update(cx, |repository, _| {
934 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
935 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
936 });
937 }
938
939 cx.background_executor().spawn(async move {
940 let checkpoints = future::try_join_all(checkpoints).await?;
941 Ok(GitStoreCheckpoint {
942 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
943 .into_iter()
944 .zip(checkpoints)
945 .collect(),
946 })
947 })
948 }
949
950 pub fn restore_checkpoint(
951 &self,
952 checkpoint: GitStoreCheckpoint,
953 cx: &mut App,
954 ) -> Task<Result<()>> {
955 let repositories_by_work_dir_abs_path = self
956 .repositories
957 .values()
958 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
959 .collect::<HashMap<_, _>>();
960
961 let mut tasks = Vec::new();
962 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
963 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
964 let restore = repository.update(cx, |repository, _| {
965 repository.restore_checkpoint(checkpoint)
966 });
967 tasks.push(async move { restore.await? });
968 }
969 }
970 cx.background_spawn(async move {
971 future::try_join_all(tasks).await?;
972 Ok(())
973 })
974 }
975
976 /// Compares two checkpoints, returning true if they are equal.
977 pub fn compare_checkpoints(
978 &self,
979 left: GitStoreCheckpoint,
980 mut right: GitStoreCheckpoint,
981 cx: &mut App,
982 ) -> Task<Result<bool>> {
983 let repositories_by_work_dir_abs_path = self
984 .repositories
985 .values()
986 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
987 .collect::<HashMap<_, _>>();
988
989 let mut tasks = Vec::new();
990 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
991 if let Some(right_checkpoint) = right
992 .checkpoints_by_work_dir_abs_path
993 .remove(&work_dir_abs_path)
994 {
995 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
996 {
997 let compare = repository.update(cx, |repository, _| {
998 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
999 });
1000
1001 tasks.push(async move { compare.await? });
1002 }
1003 } else {
1004 return Task::ready(Ok(false));
1005 }
1006 }
1007 cx.background_spawn(async move {
1008 Ok(future::try_join_all(tasks)
1009 .await?
1010 .into_iter()
1011 .all(|result| result))
1012 })
1013 }
1014
1015 /// Blames a buffer.
1016 pub fn blame_buffer(
1017 &self,
1018 buffer: &Entity<Buffer>,
1019 version: Option<clock::Global>,
1020 cx: &mut Context<Self>,
1021 ) -> Task<Result<Option<Blame>>> {
1022 let buffer = buffer.read(cx);
1023 let Some((repo, repo_path)) =
1024 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1025 else {
1026 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1027 };
1028 let content = match &version {
1029 Some(version) => buffer.rope_for_version(version),
1030 None => buffer.as_rope().clone(),
1031 };
1032 let version = version.unwrap_or(buffer.version());
1033 let buffer_id = buffer.remote_id();
1034
1035 let repo = repo.downgrade();
1036 cx.spawn(async move |_, cx| {
1037 let repository_state = repo
1038 .update(cx, |repo, _| repo.repository_state.clone())?
1039 .await
1040 .map_err(|err| anyhow::anyhow!(err))?;
1041 match repository_state {
1042 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1043 .blame(repo_path.clone(), content)
1044 .await
1045 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1046 .map(Some),
1047 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1048 let response = client
1049 .request(proto::BlameBuffer {
1050 project_id: project_id.to_proto(),
1051 buffer_id: buffer_id.into(),
1052 version: serialize_version(&version),
1053 })
1054 .await?;
1055 Ok(deserialize_blame_buffer_response(response))
1056 }
1057 }
1058 })
1059 }
1060
1061 pub fn file_history(
1062 &self,
1063 repo: &Entity<Repository>,
1064 path: RepoPath,
1065 cx: &mut App,
1066 ) -> Task<Result<git::repository::FileHistory>> {
1067 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1068
1069 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1070 }
1071
1072 pub fn file_history_paginated(
1073 &self,
1074 repo: &Entity<Repository>,
1075 path: RepoPath,
1076 skip: usize,
1077 limit: Option<usize>,
1078 cx: &mut App,
1079 ) -> Task<Result<git::repository::FileHistory>> {
1080 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1081
1082 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1083 }
1084
1085 pub fn get_permalink_to_line(
1086 &self,
1087 buffer: &Entity<Buffer>,
1088 selection: Range<u32>,
1089 cx: &mut App,
1090 ) -> Task<Result<url::Url>> {
1091 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1092 return Task::ready(Err(anyhow!("buffer has no file")));
1093 };
1094
1095 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1096 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1097 cx,
1098 ) else {
1099 // If we're not in a Git repo, check whether this is a Rust source
1100 // file in the Cargo registry (presumably opened with go-to-definition
1101 // from a normal Rust file). If so, we can put together a permalink
1102 // using crate metadata.
1103 if buffer
1104 .read(cx)
1105 .language()
1106 .is_none_or(|lang| lang.name() != "Rust".into())
1107 {
1108 return Task::ready(Err(anyhow!("no permalink available")));
1109 }
1110 let file_path = file.worktree.read(cx).absolutize(&file.path);
1111 return cx.spawn(async move |cx| {
1112 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1113 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1114 .context("no permalink available")
1115 });
1116 };
1117
1118 let buffer_id = buffer.read(cx).remote_id();
1119 let branch = repo.read(cx).branch.clone();
1120 let remote = branch
1121 .as_ref()
1122 .and_then(|b| b.upstream.as_ref())
1123 .and_then(|b| b.remote_name())
1124 .unwrap_or("origin")
1125 .to_string();
1126
1127 let rx = repo.update(cx, |repo, _| {
1128 repo.send_job(None, move |state, cx| async move {
1129 match state {
1130 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1131 let origin_url = backend
1132 .remote_url(&remote)
1133 .await
1134 .with_context(|| format!("remote \"{remote}\" not found"))?;
1135
1136 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1137
1138 let provider_registry =
1139 cx.update(GitHostingProviderRegistry::default_global)?;
1140
1141 let (provider, remote) =
1142 parse_git_remote_url(provider_registry, &origin_url)
1143 .context("parsing Git remote URL")?;
1144
1145 Ok(provider.build_permalink(
1146 remote,
1147 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1148 ))
1149 }
1150 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1151 let response = client
1152 .request(proto::GetPermalinkToLine {
1153 project_id: project_id.to_proto(),
1154 buffer_id: buffer_id.into(),
1155 selection: Some(proto::Range {
1156 start: selection.start as u64,
1157 end: selection.end as u64,
1158 }),
1159 })
1160 .await?;
1161
1162 url::Url::parse(&response.permalink).context("failed to parse permalink")
1163 }
1164 }
1165 })
1166 });
1167 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1168 }
1169
1170 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1171 match &self.state {
1172 GitStoreState::Local {
1173 downstream: downstream_client,
1174 ..
1175 } => downstream_client
1176 .as_ref()
1177 .map(|state| (state.client.clone(), state.project_id)),
1178 GitStoreState::Remote {
1179 downstream: downstream_client,
1180 ..
1181 } => downstream_client.clone(),
1182 }
1183 }
1184
1185 fn upstream_client(&self) -> Option<AnyProtoClient> {
1186 match &self.state {
1187 GitStoreState::Local { .. } => None,
1188 GitStoreState::Remote {
1189 upstream_client, ..
1190 } => Some(upstream_client.clone()),
1191 }
1192 }
1193
1194 fn on_worktree_store_event(
1195 &mut self,
1196 worktree_store: Entity<WorktreeStore>,
1197 event: &WorktreeStoreEvent,
1198 cx: &mut Context<Self>,
1199 ) {
1200 let GitStoreState::Local {
1201 project_environment,
1202 downstream,
1203 next_repository_id,
1204 fs,
1205 } = &self.state
1206 else {
1207 return;
1208 };
1209
1210 match event {
1211 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1212 if let Some(worktree) = self
1213 .worktree_store
1214 .read(cx)
1215 .worktree_for_id(*worktree_id, cx)
1216 {
1217 let paths_by_git_repo =
1218 self.process_updated_entries(&worktree, updated_entries, cx);
1219 let downstream = downstream
1220 .as_ref()
1221 .map(|downstream| downstream.updates_tx.clone());
1222 cx.spawn(async move |_, cx| {
1223 let paths_by_git_repo = paths_by_git_repo.await;
1224 for (repo, paths) in paths_by_git_repo {
1225 repo.update(cx, |repo, cx| {
1226 repo.paths_changed(paths, downstream.clone(), cx);
1227 })
1228 .ok();
1229 }
1230 })
1231 .detach();
1232 }
1233 }
1234 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1235 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1236 else {
1237 return;
1238 };
1239 if !worktree.read(cx).is_visible() {
1240 log::debug!(
1241 "not adding repositories for local worktree {:?} because it's not visible",
1242 worktree.read(cx).abs_path()
1243 );
1244 return;
1245 }
1246 self.update_repositories_from_worktree(
1247 *worktree_id,
1248 project_environment.clone(),
1249 next_repository_id.clone(),
1250 downstream
1251 .as_ref()
1252 .map(|downstream| downstream.updates_tx.clone()),
1253 changed_repos.clone(),
1254 fs.clone(),
1255 cx,
1256 );
1257 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1258 }
1259 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1260 let repos_without_worktree: Vec<RepositoryId> = self
1261 .worktree_ids
1262 .iter_mut()
1263 .filter_map(|(repo_id, worktree_ids)| {
1264 worktree_ids.remove(worktree_id);
1265 if worktree_ids.is_empty() {
1266 Some(*repo_id)
1267 } else {
1268 None
1269 }
1270 })
1271 .collect();
1272 let is_active_repo_removed = repos_without_worktree
1273 .iter()
1274 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1275
1276 for repo_id in repos_without_worktree {
1277 self.repositories.remove(&repo_id);
1278 self.worktree_ids.remove(&repo_id);
1279 if let Some(updates_tx) =
1280 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1281 {
1282 updates_tx
1283 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1284 .ok();
1285 }
1286 }
1287
1288 if is_active_repo_removed {
1289 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1290 self.active_repo_id = Some(repo_id);
1291 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1292 } else {
1293 self.active_repo_id = None;
1294 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1295 }
1296 }
1297 }
1298 _ => {}
1299 }
    }

    fn on_repository_event(
1302 &mut self,
1303 repo: Entity<Repository>,
1304 event: &RepositoryEvent,
1305 cx: &mut Context<Self>,
1306 ) {
1307 let id = repo.read(cx).id;
1308 let repo_snapshot = repo.read(cx).snapshot.clone();
1309 for (buffer_id, diff) in self.diffs.iter() {
1310 if let Some((buffer_repo, repo_path)) =
1311 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1312 && buffer_repo == repo
1313 {
1314 diff.update(cx, |diff, cx| {
1315 if let Some(conflict_set) = &diff.conflict_set {
1316 let conflict_status_changed =
1317 conflict_set.update(cx, |conflict_set, cx| {
1318 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1319 conflict_set.set_has_conflict(has_conflict, cx)
1320 })?;
1321 if conflict_status_changed {
1322 let buffer_store = self.buffer_store.read(cx);
1323 if let Some(buffer) = buffer_store.get(*buffer_id) {
1324 let _ = diff
1325 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1326 }
1327 }
1328 }
1329 anyhow::Ok(())
1330 })
1331 .ok();
1332 }
1333 }
1334 cx.emit(GitStoreEvent::RepositoryUpdated(
1335 id,
1336 event.clone(),
1337 self.active_repo_id == Some(id),
1338 ))
1339 }
1340
1341 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1342 cx.emit(GitStoreEvent::JobsUpdated)
1343 }
1344
    /// Update our list of repositories and schedule git scans in response to a
    /// notification from a worktree.
1346 fn update_repositories_from_worktree(
1347 &mut self,
1348 worktree_id: WorktreeId,
1349 project_environment: Entity<ProjectEnvironment>,
1350 next_repository_id: Arc<AtomicU64>,
1351 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1352 updated_git_repositories: UpdatedGitRepositoriesSet,
1353 fs: Arc<dyn Fs>,
1354 cx: &mut Context<Self>,
1355 ) {
1356 let mut removed_ids = Vec::new();
1357 for update in updated_git_repositories.iter() {
1358 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1359 let existing_work_directory_abs_path =
1360 repo.read(cx).work_directory_abs_path.clone();
1361 Some(&existing_work_directory_abs_path)
1362 == update.old_work_directory_abs_path.as_ref()
1363 || Some(&existing_work_directory_abs_path)
1364 == update.new_work_directory_abs_path.as_ref()
1365 }) {
1366 let repo_id = *id;
1367 if let Some(new_work_directory_abs_path) =
1368 update.new_work_directory_abs_path.clone()
1369 {
1370 self.worktree_ids
1371 .entry(repo_id)
1372 .or_insert_with(HashSet::new)
1373 .insert(worktree_id);
1374 existing.update(cx, |existing, cx| {
1375 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1376 existing.schedule_scan(updates_tx.clone(), cx);
1377 });
                } else if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                    worktree_ids.remove(&worktree_id);
                    if worktree_ids.is_empty() {
                        removed_ids.push(repo_id);
                    }
                }
1386 } else if let UpdatedGitRepository {
1387 new_work_directory_abs_path: Some(work_directory_abs_path),
1388 dot_git_abs_path: Some(dot_git_abs_path),
1389 repository_dir_abs_path: Some(_repository_dir_abs_path),
1390 common_dir_abs_path: Some(_common_dir_abs_path),
1391 ..
1392 } = update
1393 {
1394 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1395 let git_store = cx.weak_entity();
1396 let repo = cx.new(|cx| {
1397 let mut repo = Repository::local(
1398 id,
1399 work_directory_abs_path.clone(),
1400 dot_git_abs_path.clone(),
1401 project_environment.downgrade(),
1402 fs.clone(),
1403 git_store,
1404 cx,
1405 );
1406 if let Some(updates_tx) = updates_tx.as_ref() {
                    // Trigger an empty `UpdateRepository` so that the remote
                    // peer's `active_repo_id` is set correctly.
1408 updates_tx
1409 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1410 .ok();
1411 }
1412 repo.schedule_scan(updates_tx.clone(), cx);
1413 repo
1414 });
1415 self._subscriptions
1416 .push(cx.subscribe(&repo, Self::on_repository_event));
1417 self._subscriptions
1418 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1419 self.repositories.insert(id, repo);
1420 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1421 cx.emit(GitStoreEvent::RepositoryAdded);
1422 self.active_repo_id.get_or_insert_with(|| {
1423 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1424 id
1425 });
1426 }
1427 }
1428
1429 for id in removed_ids {
1430 if self.active_repo_id == Some(id) {
1431 self.active_repo_id = None;
1432 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1433 }
1434 self.repositories.remove(&id);
1435 if let Some(updates_tx) = updates_tx.as_ref() {
1436 updates_tx
1437 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1438 .ok();
1439 }
1440 }
1441 }
1442
1443 fn on_buffer_store_event(
1444 &mut self,
1445 _: Entity<BufferStore>,
1446 event: &BufferStoreEvent,
1447 cx: &mut Context<Self>,
1448 ) {
1449 match event {
1450 BufferStoreEvent::BufferAdded(buffer) => {
1451 cx.subscribe(buffer, |this, buffer, event, cx| {
1452 if let BufferEvent::LanguageChanged = event {
1453 let buffer_id = buffer.read(cx).remote_id();
1454 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1455 diff_state.update(cx, |diff_state, cx| {
1456 diff_state.buffer_language_changed(buffer, cx);
1457 });
1458 }
1459 }
1460 })
1461 .detach();
1462 }
1463 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1464 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1465 diffs.remove(buffer_id);
1466 }
1467 }
1468 BufferStoreEvent::BufferDropped(buffer_id) => {
1469 self.diffs.remove(buffer_id);
1470 for diffs in self.shared_diffs.values_mut() {
1471 diffs.remove(buffer_id);
1472 }
1473 }
1474 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, the new path may now be
                // tracked by a git repository. In that case, refresh the buffer's
                // `BufferGitState`, if it already has one.
1479 let buffer_id = buffer.read(cx).remote_id();
1480 let diff_state = self.diffs.get(&buffer_id);
1481 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1482
1483 if let Some(diff_state) = diff_state
1484 && let Some((repo, repo_path)) = repo
1485 {
1486 let buffer = buffer.clone();
1487 let diff_state = diff_state.clone();
1488
1489 cx.spawn(async move |_git_store, cx| {
1490 async {
1491 let diff_bases_change = repo
1492 .update(cx, |repo, cx| {
1493 repo.load_committed_text(buffer_id, repo_path, cx)
1494 })?
1495 .await?;
1496
1497 diff_state.update(cx, |diff_state, cx| {
1498 let buffer_snapshot = buffer.read(cx).text_snapshot();
1499 diff_state.diff_bases_changed(
1500 buffer_snapshot,
1501 Some(diff_bases_change),
1502 cx,
1503 );
1504 })
1505 }
1506 .await
1507 .log_err();
1508 })
1509 .detach();
1510 }
1511 }
1512 _ => {}
1513 }
1514 }
1515
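    /// Recalculates diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all of the recalculations finish.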
1516 pub fn recalculate_buffer_diffs(
1517 &mut self,
1518 buffers: Vec<Entity<Buffer>>,
1519 cx: &mut Context<Self>,
1520 ) -> impl Future<Output = ()> + use<> {
1521 let mut futures = Vec::new();
1522 for buffer in buffers {
1523 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1524 let buffer = buffer.read(cx).text_snapshot();
1525 diff_state.update(cx, |diff_state, cx| {
1526 diff_state.recalculate_diffs(buffer.clone(), cx);
1527 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1528 });
1529 futures.push(diff_state.update(cx, |diff_state, cx| {
1530 diff_state
1531 .reparse_conflict_markers(buffer, cx)
1532 .map(|_| {})
1533 .boxed()
1534 }));
1535 }
1536 }
1537 async move {
1538 futures::future::join_all(futures).await;
1539 }
1540 }
1541
1542 fn on_buffer_diff_event(
1543 &mut self,
1544 diff: Entity<buffer_diff::BufferDiff>,
1545 event: &BufferDiffEvent,
1546 cx: &mut Context<Self>,
1547 ) {
1548 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1549 let buffer_id = diff.read(cx).buffer_id;
1550 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1551 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1552 diff_state.hunk_staging_operation_count += 1;
1553 diff_state.hunk_staging_operation_count
1554 });
1555 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1556 let recv = repo.update(cx, |repo, cx| {
1557 log::debug!("hunks changed for {}", path.as_unix_str());
1558 repo.spawn_set_index_text_job(
1559 path,
1560 new_index_text.as_ref().map(|rope| rope.to_string()),
1561 Some(hunk_staging_operation_count),
1562 cx,
1563 )
1564 });
1565 let diff = diff.downgrade();
1566 cx.spawn(async move |this, cx| {
1567 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1568 diff.update(cx, |diff, cx| {
1569 diff.clear_pending_hunks(cx);
1570 })
1571 .ok();
1572 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1573 .ok();
1574 }
1575 })
1576 .detach();
1577 }
1578 }
1579 }
1580 }
1581
1582 fn local_worktree_git_repos_changed(
1583 &mut self,
1584 worktree: Entity<Worktree>,
1585 changed_repos: &UpdatedGitRepositoriesSet,
1586 cx: &mut Context<Self>,
1587 ) {
1588 log::debug!("local worktree repos changed");
1589 debug_assert!(worktree.read(cx).is_local());
1590
1591 for repository in self.repositories.values() {
1592 repository.update(cx, |repository, cx| {
1593 let repo_abs_path = &repository.work_directory_abs_path;
1594 if changed_repos.iter().any(|update| {
1595 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1596 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1597 }) {
1598 repository.reload_buffer_diff_bases(cx);
1599 }
1600 });
1601 }
1602 }
1603
1604 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1605 &self.repositories
1606 }
1607
1608 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1609 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1610 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1611 Some(status.status)
1612 }
1613
1614 pub fn repository_and_path_for_buffer_id(
1615 &self,
1616 buffer_id: BufferId,
1617 cx: &App,
1618 ) -> Option<(Entity<Repository>, RepoPath)> {
1619 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1620 let project_path = buffer.read(cx).project_path(cx)?;
1621 self.repository_and_path_for_project_path(&project_path, cx)
1622 }
1623
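    /// Returns the repository whose working directory contains the given project
    /// path, along with that path relative to the repository root. When
    /// repositories are nested, the one with the longest working directory path
    /// wins.
    ///
    /// A rough usage sketch (not compiled):
    ///
    /// ```ignore
    /// if let Some((repo, repo_path)) =
    ///     git_store.repository_and_path_for_project_path(&project_path, cx)
    /// {
    ///     let status = repo.read(cx).status_for_path(&repo_path);
    /// }
    /// ```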
1624 pub fn repository_and_path_for_project_path(
1625 &self,
1626 path: &ProjectPath,
1627 cx: &App,
1628 ) -> Option<(Entity<Repository>, RepoPath)> {
1629 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1630 self.repositories
1631 .values()
1632 .filter_map(|repo| {
1633 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1634 Some((repo.clone(), repo_path))
1635 })
1636 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1637 }
1638
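    /// Initializes a new git repository at `path`, either directly on the local
    /// filesystem or by forwarding the request to the upstream project.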
1639 pub fn git_init(
1640 &self,
1641 path: Arc<Path>,
1642 fallback_branch_name: String,
1643 cx: &App,
1644 ) -> Task<Result<()>> {
1645 match &self.state {
1646 GitStoreState::Local { fs, .. } => {
1647 let fs = fs.clone();
1648 cx.background_executor()
1649 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1650 }
1651 GitStoreState::Remote {
1652 upstream_client,
1653 upstream_project_id: project_id,
1654 ..
1655 } => {
1656 let client = upstream_client.clone();
1657 let project_id = *project_id;
1658 cx.background_executor().spawn(async move {
1659 client
1660 .request(proto::GitInit {
                            project_id,
1662 abs_path: path.to_string_lossy().into_owned(),
1663 fallback_branch_name,
1664 })
1665 .await?;
1666 Ok(())
1667 })
1668 }
1669 }
1670 }
1671
1672 pub fn git_clone(
1673 &self,
1674 repo: String,
1675 path: impl Into<Arc<std::path::Path>>,
1676 cx: &App,
1677 ) -> Task<Result<()>> {
1678 let path = path.into();
1679 match &self.state {
1680 GitStoreState::Local { fs, .. } => {
1681 let fs = fs.clone();
1682 cx.background_executor()
1683 .spawn(async move { fs.git_clone(&repo, &path).await })
1684 }
1685 GitStoreState::Remote {
1686 upstream_client,
1687 upstream_project_id,
1688 ..
1689 } => {
1690 if upstream_client.is_via_collab() {
1691 return Task::ready(Err(anyhow!(
1692 "Git Clone isn't supported for project guests"
1693 )));
1694 }
1695 let request = upstream_client.request(proto::GitClone {
1696 project_id: *upstream_project_id,
1697 abs_path: path.to_string_lossy().into_owned(),
1698 remote_repo: repo,
1699 });
1700
1701 cx.background_spawn(async move {
1702 let result = request.await?;
1703
1704 match result.success {
1705 true => Ok(()),
1706 false => Err(anyhow!("Git Clone failed")),
1707 }
1708 })
1709 }
1710 }
1711 }
1712
1713 async fn handle_update_repository(
1714 this: Entity<Self>,
1715 envelope: TypedEnvelope<proto::UpdateRepository>,
1716 mut cx: AsyncApp,
1717 ) -> Result<()> {
1718 this.update(&mut cx, |this, cx| {
1719 let path_style = this.worktree_store.read(cx).path_style();
1720 let mut update = envelope.payload;
1721
1722 let id = RepositoryId::from_proto(update.id);
1723 let client = this.upstream_client().context("no upstream client")?;
1724
1725 let mut repo_subscription = None;
1726 let repo = this.repositories.entry(id).or_insert_with(|| {
1727 let git_store = cx.weak_entity();
1728 let repo = cx.new(|cx| {
1729 Repository::remote(
1730 id,
1731 Path::new(&update.abs_path).into(),
1732 path_style,
1733 ProjectId(update.project_id),
1734 client,
1735 git_store,
1736 cx,
1737 )
1738 });
1739 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1740 cx.emit(GitStoreEvent::RepositoryAdded);
1741 repo
1742 });
1743 this._subscriptions.extend(repo_subscription);
1744
1745 repo.update(cx, {
1746 let update = update.clone();
1747 |repo, cx| repo.apply_remote_update(update, cx)
1748 })?;
1749
1750 this.active_repo_id.get_or_insert_with(|| {
1751 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1752 id
1753 });
1754
1755 if let Some((client, project_id)) = this.downstream_client() {
1756 update.project_id = project_id.to_proto();
1757 client.send(update).log_err();
1758 }
1759 Ok(())
1760 })?
1761 }
1762
1763 async fn handle_remove_repository(
1764 this: Entity<Self>,
1765 envelope: TypedEnvelope<proto::RemoveRepository>,
1766 mut cx: AsyncApp,
1767 ) -> Result<()> {
1768 this.update(&mut cx, |this, cx| {
1769 let mut update = envelope.payload;
1770 let id = RepositoryId::from_proto(update.id);
1771 this.repositories.remove(&id);
1772 if let Some((client, project_id)) = this.downstream_client() {
1773 update.project_id = project_id.to_proto();
1774 client.send(update).log_err();
1775 }
1776 if this.active_repo_id == Some(id) {
1777 this.active_repo_id = None;
1778 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1779 }
1780 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1781 })
1782 }
1783
1784 async fn handle_git_init(
1785 this: Entity<Self>,
1786 envelope: TypedEnvelope<proto::GitInit>,
1787 cx: AsyncApp,
1788 ) -> Result<proto::Ack> {
1789 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1790 let name = envelope.payload.fallback_branch_name;
1791 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1792 .await?;
1793
1794 Ok(proto::Ack {})
1795 }
1796
1797 async fn handle_git_clone(
1798 this: Entity<Self>,
1799 envelope: TypedEnvelope<proto::GitClone>,
1800 cx: AsyncApp,
1801 ) -> Result<proto::GitCloneResponse> {
1802 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1803 let repo_name = envelope.payload.remote_repo;
1804 let result = cx
1805 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1806 .await;
1807
1808 Ok(proto::GitCloneResponse {
1809 success: result.is_ok(),
1810 })
1811 }
1812
1813 async fn handle_fetch(
1814 this: Entity<Self>,
1815 envelope: TypedEnvelope<proto::Fetch>,
1816 mut cx: AsyncApp,
1817 ) -> Result<proto::RemoteMessageResponse> {
1818 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1819 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1820 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1821 let askpass_id = envelope.payload.askpass_id;
1822
1823 let askpass = make_remote_delegate(
1824 this,
1825 envelope.payload.project_id,
1826 repository_id,
1827 askpass_id,
1828 &mut cx,
1829 );
1830
1831 let remote_output = repository_handle
1832 .update(&mut cx, |repository_handle, cx| {
1833 repository_handle.fetch(fetch_options, askpass, cx)
1834 })?
1835 .await??;
1836
1837 Ok(proto::RemoteMessageResponse {
1838 stdout: remote_output.stdout,
1839 stderr: remote_output.stderr,
1840 })
1841 }
1842
1843 async fn handle_push(
1844 this: Entity<Self>,
1845 envelope: TypedEnvelope<proto::Push>,
1846 mut cx: AsyncApp,
1847 ) -> Result<proto::RemoteMessageResponse> {
1848 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1849 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1850
1851 let askpass_id = envelope.payload.askpass_id;
1852 let askpass = make_remote_delegate(
1853 this,
1854 envelope.payload.project_id,
1855 repository_id,
1856 askpass_id,
1857 &mut cx,
1858 );
1859
1860 let options = envelope
1861 .payload
1862 .options
1863 .as_ref()
1864 .map(|_| match envelope.payload.options() {
1865 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1866 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1867 });
1868
1869 let branch_name = envelope.payload.branch_name.into();
1870 let remote_name = envelope.payload.remote_name.into();
1871
1872 let remote_output = repository_handle
1873 .update(&mut cx, |repository_handle, cx| {
1874 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1875 })?
1876 .await??;
1877 Ok(proto::RemoteMessageResponse {
1878 stdout: remote_output.stdout,
1879 stderr: remote_output.stderr,
1880 })
1881 }
1882
1883 async fn handle_pull(
1884 this: Entity<Self>,
1885 envelope: TypedEnvelope<proto::Pull>,
1886 mut cx: AsyncApp,
1887 ) -> Result<proto::RemoteMessageResponse> {
1888 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1889 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1890 let askpass_id = envelope.payload.askpass_id;
1891 let askpass = make_remote_delegate(
1892 this,
1893 envelope.payload.project_id,
1894 repository_id,
1895 askpass_id,
1896 &mut cx,
1897 );
1898
1899 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1900 let remote_name = envelope.payload.remote_name.into();
1901 let rebase = envelope.payload.rebase;
1902
1903 let remote_message = repository_handle
1904 .update(&mut cx, |repository_handle, cx| {
1905 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1906 })?
1907 .await??;
1908
1909 Ok(proto::RemoteMessageResponse {
1910 stdout: remote_message.stdout,
1911 stderr: remote_message.stderr,
1912 })
1913 }
1914
1915 async fn handle_stage(
1916 this: Entity<Self>,
1917 envelope: TypedEnvelope<proto::Stage>,
1918 mut cx: AsyncApp,
1919 ) -> Result<proto::Ack> {
1920 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1921 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1922
1923 let entries = envelope
1924 .payload
1925 .paths
1926 .into_iter()
1927 .map(|path| RepoPath::new(&path))
1928 .collect::<Result<Vec<_>>>()?;
1929
1930 repository_handle
1931 .update(&mut cx, |repository_handle, cx| {
1932 repository_handle.stage_entries(entries, cx)
1933 })?
1934 .await?;
1935 Ok(proto::Ack {})
1936 }
1937
1938 async fn handle_unstage(
1939 this: Entity<Self>,
1940 envelope: TypedEnvelope<proto::Unstage>,
1941 mut cx: AsyncApp,
1942 ) -> Result<proto::Ack> {
1943 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1944 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1945
1946 let entries = envelope
1947 .payload
1948 .paths
1949 .into_iter()
1950 .map(|path| RepoPath::new(&path))
1951 .collect::<Result<Vec<_>>>()?;
1952
1953 repository_handle
1954 .update(&mut cx, |repository_handle, cx| {
1955 repository_handle.unstage_entries(entries, cx)
1956 })?
1957 .await?;
1958
1959 Ok(proto::Ack {})
1960 }
1961
1962 async fn handle_stash(
1963 this: Entity<Self>,
1964 envelope: TypedEnvelope<proto::Stash>,
1965 mut cx: AsyncApp,
1966 ) -> Result<proto::Ack> {
1967 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1968 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1969
1970 let entries = envelope
1971 .payload
1972 .paths
1973 .into_iter()
1974 .map(|path| RepoPath::new(&path))
1975 .collect::<Result<Vec<_>>>()?;
1976
1977 repository_handle
1978 .update(&mut cx, |repository_handle, cx| {
1979 repository_handle.stash_entries(entries, cx)
1980 })?
1981 .await?;
1982
1983 Ok(proto::Ack {})
1984 }
1985
1986 async fn handle_stash_pop(
1987 this: Entity<Self>,
1988 envelope: TypedEnvelope<proto::StashPop>,
1989 mut cx: AsyncApp,
1990 ) -> Result<proto::Ack> {
1991 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1992 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1993 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1994
1995 repository_handle
1996 .update(&mut cx, |repository_handle, cx| {
1997 repository_handle.stash_pop(stash_index, cx)
1998 })?
1999 .await?;
2000
2001 Ok(proto::Ack {})
2002 }
2003
2004 async fn handle_stash_apply(
2005 this: Entity<Self>,
2006 envelope: TypedEnvelope<proto::StashApply>,
2007 mut cx: AsyncApp,
2008 ) -> Result<proto::Ack> {
2009 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2010 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2011 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2012
2013 repository_handle
2014 .update(&mut cx, |repository_handle, cx| {
2015 repository_handle.stash_apply(stash_index, cx)
2016 })?
2017 .await?;
2018
2019 Ok(proto::Ack {})
2020 }
2021
2022 async fn handle_stash_drop(
2023 this: Entity<Self>,
2024 envelope: TypedEnvelope<proto::StashDrop>,
2025 mut cx: AsyncApp,
2026 ) -> Result<proto::Ack> {
2027 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2028 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2029 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2030
2031 repository_handle
2032 .update(&mut cx, |repository_handle, cx| {
2033 repository_handle.stash_drop(stash_index, cx)
2034 })?
2035 .await??;
2036
2037 Ok(proto::Ack {})
2038 }
2039
2040 async fn handle_set_index_text(
2041 this: Entity<Self>,
2042 envelope: TypedEnvelope<proto::SetIndexText>,
2043 mut cx: AsyncApp,
2044 ) -> Result<proto::Ack> {
2045 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2046 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2047 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2048
2049 repository_handle
2050 .update(&mut cx, |repository_handle, cx| {
2051 repository_handle.spawn_set_index_text_job(
2052 repo_path,
2053 envelope.payload.text,
2054 None,
2055 cx,
2056 )
2057 })?
2058 .await??;
2059 Ok(proto::Ack {})
2060 }
2061
2062 async fn handle_run_hook(
2063 this: Entity<Self>,
2064 envelope: TypedEnvelope<proto::RunGitHook>,
2065 mut cx: AsyncApp,
2066 ) -> Result<proto::Ack> {
2067 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2068 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2069 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2070 repository_handle
2071 .update(&mut cx, |repository_handle, cx| {
2072 repository_handle.run_hook(hook, cx)
2073 })?
2074 .await??;
2075 Ok(proto::Ack {})
2076 }
2077
2078 async fn handle_commit(
2079 this: Entity<Self>,
2080 envelope: TypedEnvelope<proto::Commit>,
2081 mut cx: AsyncApp,
2082 ) -> Result<proto::Ack> {
2083 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2084 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2085 let askpass_id = envelope.payload.askpass_id;
2086
2087 let askpass = make_remote_delegate(
2088 this,
2089 envelope.payload.project_id,
2090 repository_id,
2091 askpass_id,
2092 &mut cx,
2093 );
2094
2095 let message = SharedString::from(envelope.payload.message);
2096 let name = envelope.payload.name.map(SharedString::from);
2097 let email = envelope.payload.email.map(SharedString::from);
2098 let options = envelope.payload.options.unwrap_or_default();
2099
2100 repository_handle
2101 .update(&mut cx, |repository_handle, cx| {
2102 repository_handle.commit(
2103 message,
2104 name.zip(email),
2105 CommitOptions {
2106 amend: options.amend,
2107 signoff: options.signoff,
2108 },
2109 askpass,
2110 cx,
2111 )
2112 })?
2113 .await??;
2114 Ok(proto::Ack {})
2115 }
2116
2117 async fn handle_get_remotes(
2118 this: Entity<Self>,
2119 envelope: TypedEnvelope<proto::GetRemotes>,
2120 mut cx: AsyncApp,
2121 ) -> Result<proto::GetRemotesResponse> {
2122 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2123 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2124
2125 let branch_name = envelope.payload.branch_name;
2126 let is_push = envelope.payload.is_push;
2127
2128 let remotes = repository_handle
2129 .update(&mut cx, |repository_handle, _| {
2130 repository_handle.get_remotes(branch_name, is_push)
2131 })?
2132 .await??;
2133
2134 Ok(proto::GetRemotesResponse {
2135 remotes: remotes
2136 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
                })
2140 .collect::<Vec<_>>(),
2141 })
2142 }
2143
2144 async fn handle_get_worktrees(
2145 this: Entity<Self>,
2146 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2147 mut cx: AsyncApp,
2148 ) -> Result<proto::GitWorktreesResponse> {
2149 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2150 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2151
2152 let worktrees = repository_handle
2153 .update(&mut cx, |repository_handle, _| {
2154 repository_handle.worktrees()
2155 })?
2156 .await??;
2157
2158 Ok(proto::GitWorktreesResponse {
2159 worktrees: worktrees
2160 .into_iter()
2161 .map(|worktree| worktree_to_proto(&worktree))
2162 .collect::<Vec<_>>(),
2163 })
2164 }
2165
2166 async fn handle_create_worktree(
2167 this: Entity<Self>,
2168 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2169 mut cx: AsyncApp,
2170 ) -> Result<proto::Ack> {
2171 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2172 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2173 let directory = PathBuf::from(envelope.payload.directory);
2174 let name = envelope.payload.name;
2175 let commit = envelope.payload.commit;
2176
2177 repository_handle
2178 .update(&mut cx, |repository_handle, _| {
2179 repository_handle.create_worktree(name, directory, commit)
2180 })?
2181 .await??;
2182
2183 Ok(proto::Ack {})
2184 }
2185
2186 async fn handle_get_branches(
2187 this: Entity<Self>,
2188 envelope: TypedEnvelope<proto::GitGetBranches>,
2189 mut cx: AsyncApp,
2190 ) -> Result<proto::GitBranchesResponse> {
2191 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2192 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2193
2194 let branches = repository_handle
2195 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2196 .await??;
2197
2198 Ok(proto::GitBranchesResponse {
2199 branches: branches
2200 .into_iter()
2201 .map(|branch| branch_to_proto(&branch))
2202 .collect::<Vec<_>>(),
2203 })
2204 }

    async fn handle_get_default_branch(
2206 this: Entity<Self>,
2207 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2208 mut cx: AsyncApp,
2209 ) -> Result<proto::GetDefaultBranchResponse> {
2210 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2211 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2212
2213 let branch = repository_handle
2214 .update(&mut cx, |repository_handle, _| {
2215 repository_handle.default_branch()
2216 })?
2217 .await??
2218 .map(Into::into);
2219
2220 Ok(proto::GetDefaultBranchResponse { branch })
2221 }

    async fn handle_create_branch(
2223 this: Entity<Self>,
2224 envelope: TypedEnvelope<proto::GitCreateBranch>,
2225 mut cx: AsyncApp,
2226 ) -> Result<proto::Ack> {
2227 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2228 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2229 let branch_name = envelope.payload.branch_name;
2230
2231 repository_handle
2232 .update(&mut cx, |repository_handle, _| {
2233 repository_handle.create_branch(branch_name, None)
2234 })?
2235 .await??;
2236
2237 Ok(proto::Ack {})
2238 }
2239
2240 async fn handle_change_branch(
2241 this: Entity<Self>,
2242 envelope: TypedEnvelope<proto::GitChangeBranch>,
2243 mut cx: AsyncApp,
2244 ) -> Result<proto::Ack> {
2245 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2246 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2247 let branch_name = envelope.payload.branch_name;
2248
2249 repository_handle
2250 .update(&mut cx, |repository_handle, _| {
2251 repository_handle.change_branch(branch_name)
2252 })?
2253 .await??;
2254
2255 Ok(proto::Ack {})
2256 }
2257
2258 async fn handle_rename_branch(
2259 this: Entity<Self>,
2260 envelope: TypedEnvelope<proto::GitRenameBranch>,
2261 mut cx: AsyncApp,
2262 ) -> Result<proto::Ack> {
2263 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2264 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2265 let branch = envelope.payload.branch;
2266 let new_name = envelope.payload.new_name;
2267
2268 repository_handle
2269 .update(&mut cx, |repository_handle, _| {
2270 repository_handle.rename_branch(branch, new_name)
2271 })?
2272 .await??;
2273
2274 Ok(proto::Ack {})
2275 }
2276
2277 async fn handle_delete_branch(
2278 this: Entity<Self>,
2279 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2280 mut cx: AsyncApp,
2281 ) -> Result<proto::Ack> {
2282 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2283 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2284 let branch_name = envelope.payload.branch_name;
2285
2286 repository_handle
2287 .update(&mut cx, |repository_handle, _| {
2288 repository_handle.delete_branch(branch_name)
2289 })?
2290 .await??;
2291
2292 Ok(proto::Ack {})
2293 }
2294
2295 async fn handle_show(
2296 this: Entity<Self>,
2297 envelope: TypedEnvelope<proto::GitShow>,
2298 mut cx: AsyncApp,
2299 ) -> Result<proto::GitCommitDetails> {
2300 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2301 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2302
2303 let commit = repository_handle
2304 .update(&mut cx, |repository_handle, _| {
2305 repository_handle.show(envelope.payload.commit)
2306 })?
2307 .await??;
2308 Ok(proto::GitCommitDetails {
2309 sha: commit.sha.into(),
2310 message: commit.message.into(),
2311 commit_timestamp: commit.commit_timestamp,
2312 author_email: commit.author_email.into(),
2313 author_name: commit.author_name.into(),
2314 })
2315 }
2316
2317 async fn handle_load_commit_diff(
2318 this: Entity<Self>,
2319 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2320 mut cx: AsyncApp,
2321 ) -> Result<proto::LoadCommitDiffResponse> {
2322 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2323 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2324
2325 let commit_diff = repository_handle
2326 .update(&mut cx, |repository_handle, _| {
2327 repository_handle.load_commit_diff(envelope.payload.commit)
2328 })?
2329 .await??;
2330 Ok(proto::LoadCommitDiffResponse {
2331 files: commit_diff
2332 .files
2333 .into_iter()
2334 .map(|file| proto::CommitFile {
2335 path: file.path.to_proto(),
2336 old_text: file.old_text,
2337 new_text: file.new_text,
2338 })
2339 .collect(),
2340 })
2341 }
2342
2343 async fn handle_file_history(
2344 this: Entity<Self>,
2345 envelope: TypedEnvelope<proto::GitFileHistory>,
2346 mut cx: AsyncApp,
2347 ) -> Result<proto::GitFileHistoryResponse> {
2348 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2349 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2350 let path = RepoPath::from_proto(&envelope.payload.path)?;
2351 let skip = envelope.payload.skip as usize;
2352 let limit = envelope.payload.limit.map(|l| l as usize);
2353
2354 let file_history = repository_handle
2355 .update(&mut cx, |repository_handle, _| {
2356 repository_handle.file_history_paginated(path, skip, limit)
2357 })?
2358 .await??;
2359
2360 Ok(proto::GitFileHistoryResponse {
2361 entries: file_history
2362 .entries
2363 .into_iter()
2364 .map(|entry| proto::FileHistoryEntry {
2365 sha: entry.sha.to_string(),
2366 subject: entry.subject.to_string(),
2367 message: entry.message.to_string(),
2368 commit_timestamp: entry.commit_timestamp,
2369 author_name: entry.author_name.to_string(),
2370 author_email: entry.author_email.to_string(),
2371 })
2372 .collect(),
2373 path: file_history.path.to_proto(),
2374 })
2375 }
2376
2377 async fn handle_reset(
2378 this: Entity<Self>,
2379 envelope: TypedEnvelope<proto::GitReset>,
2380 mut cx: AsyncApp,
2381 ) -> Result<proto::Ack> {
2382 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2383 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2384
2385 let mode = match envelope.payload.mode() {
2386 git_reset::ResetMode::Soft => ResetMode::Soft,
2387 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2388 };
2389
2390 repository_handle
2391 .update(&mut cx, |repository_handle, cx| {
2392 repository_handle.reset(envelope.payload.commit, mode, cx)
2393 })?
2394 .await??;
2395 Ok(proto::Ack {})
2396 }
2397
2398 async fn handle_checkout_files(
2399 this: Entity<Self>,
2400 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2401 mut cx: AsyncApp,
2402 ) -> Result<proto::Ack> {
2403 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2404 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2405 let paths = envelope
2406 .payload
2407 .paths
2408 .iter()
2409 .map(|s| RepoPath::from_proto(s))
2410 .collect::<Result<Vec<_>>>()?;
2411
2412 repository_handle
2413 .update(&mut cx, |repository_handle, cx| {
2414 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2415 })?
2416 .await?;
2417 Ok(proto::Ack {})
2418 }
2419
2420 async fn handle_open_commit_message_buffer(
2421 this: Entity<Self>,
2422 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2423 mut cx: AsyncApp,
2424 ) -> Result<proto::OpenBufferResponse> {
2425 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2426 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2427 let buffer = repository
2428 .update(&mut cx, |repository, cx| {
2429 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2430 })?
2431 .await?;
2432
2433 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2434 this.update(&mut cx, |this, cx| {
2435 this.buffer_store.update(cx, |buffer_store, cx| {
2436 buffer_store
2437 .create_buffer_for_peer(
2438 &buffer,
2439 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2440 cx,
2441 )
2442 .detach_and_log_err(cx);
2443 })
2444 })?;
2445
2446 Ok(proto::OpenBufferResponse {
2447 buffer_id: buffer_id.to_proto(),
2448 })
2449 }
2450
2451 async fn handle_askpass(
2452 this: Entity<Self>,
2453 envelope: TypedEnvelope<proto::AskPassRequest>,
2454 mut cx: AsyncApp,
2455 ) -> Result<proto::AskPassResponse> {
2456 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2457 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2458
2459 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2460 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2461 debug_panic!("no askpass found");
2462 anyhow::bail!("no askpass found");
2463 };
2464
2465 let response = askpass
2466 .ask_password(envelope.payload.prompt)
2467 .await
2468 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2469
2470 delegates
2471 .lock()
2472 .insert(envelope.payload.askpass_id, askpass);
2473
        // Note: despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker below, the askpass
        // response is decrypted here and sent over the wire in plain text.
2475 Ok(proto::AskPassResponse {
2476 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2477 })
2478 }
2479
2480 async fn handle_check_for_pushed_commits(
2481 this: Entity<Self>,
2482 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2483 mut cx: AsyncApp,
2484 ) -> Result<proto::CheckForPushedCommitsResponse> {
2485 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2486 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2487
2488 let branches = repository_handle
2489 .update(&mut cx, |repository_handle, _| {
2490 repository_handle.check_for_pushed_commits()
2491 })?
2492 .await??;
2493 Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|branch| branch.to_string())
                .collect(),
2498 })
2499 }
2500
2501 async fn handle_git_diff(
2502 this: Entity<Self>,
2503 envelope: TypedEnvelope<proto::GitDiff>,
2504 mut cx: AsyncApp,
2505 ) -> Result<proto::GitDiffResponse> {
2506 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2507 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2508 let diff_type = match envelope.payload.diff_type() {
2509 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2510 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2511 };
2512
2513 let mut diff = repository_handle
2514 .update(&mut cx, |repository_handle, cx| {
2515 repository_handle.diff(diff_type, cx)
2516 })?
2517 .await??;
2518 const ONE_MB: usize = 1_000_000;
2519 if diff.len() > ONE_MB {
2520 diff = diff.chars().take(ONE_MB).collect()
2521 }
2522
2523 Ok(proto::GitDiffResponse { diff })
2524 }
2525
2526 async fn handle_tree_diff(
2527 this: Entity<Self>,
2528 request: TypedEnvelope<proto::GetTreeDiff>,
2529 mut cx: AsyncApp,
2530 ) -> Result<proto::GetTreeDiffResponse> {
2531 let repository_id = RepositoryId(request.payload.repository_id);
2532 let diff_type = if request.payload.is_merge {
2533 DiffTreeType::MergeBase {
2534 base: request.payload.base.into(),
2535 head: request.payload.head.into(),
2536 }
2537 } else {
2538 DiffTreeType::Since {
2539 base: request.payload.base.into(),
2540 head: request.payload.head.into(),
2541 }
2542 };
2543
2544 let diff = this
2545 .update(&mut cx, |this, cx| {
2546 let repository = this.repositories().get(&repository_id)?;
2547 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2548 })?
2549 .context("missing repository")?
2550 .await??;
2551
2552 Ok(proto::GetTreeDiffResponse {
2553 entries: diff
2554 .entries
2555 .into_iter()
2556 .map(|(path, status)| proto::TreeDiffStatus {
2557 path: path.as_ref().to_proto(),
2558 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2560 TreeDiffStatus::Modified { .. } => {
2561 proto::tree_diff_status::Status::Modified.into()
2562 }
2563 TreeDiffStatus::Deleted { .. } => {
2564 proto::tree_diff_status::Status::Deleted.into()
2565 }
2566 },
2567 oid: match status {
2568 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2569 Some(old.to_string())
2570 }
2571 TreeDiffStatus::Added => None,
2572 },
2573 })
2574 .collect(),
2575 })
2576 }
2577
2578 async fn handle_get_blob_content(
2579 this: Entity<Self>,
2580 request: TypedEnvelope<proto::GetBlobContent>,
2581 mut cx: AsyncApp,
2582 ) -> Result<proto::GetBlobContentResponse> {
2583 let oid = git::Oid::from_str(&request.payload.oid)?;
2584 let repository_id = RepositoryId(request.payload.repository_id);
2585 let content = this
2586 .update(&mut cx, |this, cx| {
2587 let repository = this.repositories().get(&repository_id)?;
2588 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2589 })?
2590 .context("missing repository")?
2591 .await?;
2592 Ok(proto::GetBlobContentResponse { content })
2593 }
2594
2595 async fn handle_open_unstaged_diff(
2596 this: Entity<Self>,
2597 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2598 mut cx: AsyncApp,
2599 ) -> Result<proto::OpenUnstagedDiffResponse> {
2600 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2601 let diff = this
2602 .update(&mut cx, |this, cx| {
2603 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2604 Some(this.open_unstaged_diff(buffer, cx))
2605 })?
2606 .context("missing buffer")?
2607 .await?;
2608 this.update(&mut cx, |this, _| {
2609 let shared_diffs = this
2610 .shared_diffs
2611 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2612 .or_default();
2613 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2614 })?;
2615 let staged_text = diff.read_with(&cx, |diff, _| diff.base_text_string())?;
2616 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2617 }
2618
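    /// Handles a request to open a buffer's uncommitted diff, recording the shared diff for
    /// the requesting peer. The response encodes base texts compactly: when the index base
    /// text is the same snapshot as the HEAD base text, only the committed text is sent with
    /// `Mode::IndexMatchesHead`; otherwise `Mode::IndexAndHead` is sent with whichever texts
    /// are present.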
2619 async fn handle_open_uncommitted_diff(
2620 this: Entity<Self>,
2621 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2622 mut cx: AsyncApp,
2623 ) -> Result<proto::OpenUncommittedDiffResponse> {
2624 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2625 let diff = this
2626 .update(&mut cx, |this, cx| {
2627 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2628 Some(this.open_uncommitted_diff(buffer, cx))
2629 })?
2630 .context("missing buffer")?
2631 .await?;
2632 this.update(&mut cx, |this, _| {
2633 let shared_diffs = this
2634 .shared_diffs
2635 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2636 .or_default();
2637 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2638 })?;
2639 diff.read_with(&cx, |diff, cx| {
2640 use proto::open_uncommitted_diff_response::Mode;
2641
2642 let unstaged_diff = diff.secondary_diff();
2643 let index_snapshot = unstaged_diff.and_then(|diff| {
2644 let diff = diff.read(cx);
2645 diff.base_text_exists().then(|| diff.base_text())
2646 });
2647
2648 let mode;
2649 let staged_text;
2650 let committed_text;
2651 if diff.base_text_exists() {
2652 let committed_snapshot = diff.base_text();
2653 committed_text = Some(committed_snapshot.text());
2654 if let Some(index_text) = index_snapshot {
2655 if index_text.remote_id() == committed_snapshot.remote_id() {
2656 mode = Mode::IndexMatchesHead;
2657 staged_text = None;
2658 } else {
2659 mode = Mode::IndexAndHead;
2660 staged_text = Some(index_text.text());
2661 }
2662 } else {
2663 mode = Mode::IndexAndHead;
2664 staged_text = None;
2665 }
2666 } else {
2667 mode = Mode::IndexAndHead;
2668 committed_text = None;
2669 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2670 }
2671
2672 proto::OpenUncommittedDiffResponse {
2673 committed_text,
2674 staged_text,
2675 mode: mode.into(),
2676 }
2677 })
2678 }
2679
2680 async fn handle_update_diff_bases(
2681 this: Entity<Self>,
2682 request: TypedEnvelope<proto::UpdateDiffBases>,
2683 mut cx: AsyncApp,
2684 ) -> Result<()> {
2685 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2686 this.update(&mut cx, |this, cx| {
2687 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2688 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2689 {
2690 let buffer = buffer.read(cx).text_snapshot();
2691 diff_state.update(cx, |diff_state, cx| {
2692 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2693 })
2694 }
2695 })
2696 }
2697
2698 async fn handle_blame_buffer(
2699 this: Entity<Self>,
2700 envelope: TypedEnvelope<proto::BlameBuffer>,
2701 mut cx: AsyncApp,
2702 ) -> Result<proto::BlameBufferResponse> {
2703 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2704 let version = deserialize_version(&envelope.payload.version);
2705 let buffer = this.read_with(&cx, |this, cx| {
2706 this.buffer_store.read(cx).get_existing(buffer_id)
2707 })??;
2708 buffer
2709 .update(&mut cx, |buffer, _| {
2710 buffer.wait_for_version(version.clone())
2711 })?
2712 .await?;
2713 let blame = this
2714 .update(&mut cx, |this, cx| {
2715 this.blame_buffer(&buffer, Some(version), cx)
2716 })?
2717 .await?;
2718 Ok(serialize_blame_buffer_response(blame))
2719 }
2720
2721 async fn handle_get_permalink_to_line(
2722 this: Entity<Self>,
2723 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2724 mut cx: AsyncApp,
2725 ) -> Result<proto::GetPermalinkToLineResponse> {
2726 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2727 // let version = deserialize_version(&envelope.payload.version);
2728 let selection = {
2729 let proto_selection = envelope
2730 .payload
2731 .selection
                .context("no selection provided to get a permalink for")?;
2733 proto_selection.start as u32..proto_selection.end as u32
2734 };
2735 let buffer = this.read_with(&cx, |this, cx| {
2736 this.buffer_store.read(cx).get_existing(buffer_id)
2737 })??;
2738 let permalink = this
2739 .update(&mut cx, |this, cx| {
2740 this.get_permalink_to_line(&buffer, selection, cx)
2741 })?
2742 .await?;
2743 Ok(proto::GetPermalinkToLineResponse {
2744 permalink: permalink.to_string(),
2745 })
2746 }
2747
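    /// Resolves the repository entity referenced by an incoming RPC request, returning an
    /// error if it is no longer tracked by this store.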
2748 fn repository_for_request(
2749 this: &Entity<Self>,
2750 id: RepositoryId,
2751 cx: &mut AsyncApp,
2752 ) -> Result<Entity<Repository>> {
2753 this.read_with(cx, |this, _| {
2754 this.repositories
2755 .get(&id)
2756 .context("missing repository handle")
2757 .cloned()
2758 })?
2759 }
2760
2761 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2762 self.repositories
2763 .iter()
2764 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2765 .collect()
2766 }
2767
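    /// Partitions a batch of updated worktree paths among this store's repositories on the
    /// background executor. Each absolute path is attributed to the innermost repository
    /// whose working directory contains it; for example, with repositories rooted at `a/`
    /// and `a/b/`, an update to `a/b/c.rs` is reported only for the `a/b/` repository.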
2768 fn process_updated_entries(
2769 &self,
2770 worktree: &Entity<Worktree>,
2771 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2772 cx: &mut App,
2773 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2774 let path_style = worktree.read(cx).path_style();
2775 let mut repo_paths = self
2776 .repositories
2777 .values()
2778 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2779 .collect::<Vec<_>>();
2780 let mut entries: Vec<_> = updated_entries
2781 .iter()
2782 .map(|(path, _, _)| path.clone())
2783 .collect();
2784 entries.sort();
2785 let worktree = worktree.read(cx);
2786
2787 let entries = entries
2788 .into_iter()
2789 .map(|path| worktree.absolutize(&path))
2790 .collect::<Arc<[_]>>();
2791
2792 let executor = cx.background_executor().clone();
2793 cx.background_executor().spawn(async move {
2794 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2795 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2796 let mut tasks = FuturesOrdered::new();
2797 for (repo_path, repo) in repo_paths.into_iter().rev() {
2798 let entries = entries.clone();
2799 let task = executor.spawn(async move {
                // Find all updated paths that fall under this repository's working directory.
2801 let mut ix = entries.partition_point(|path| path < &*repo_path);
2802 if ix == entries.len() {
2803 return None;
2804 };
2805
2806 let mut paths = Vec::new();
                // All paths prefixed by a given repo constitute a contiguous range in the sorted list.
2808 while let Some(path) = entries.get(ix)
2809 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2810 &repo_path, path, path_style,
2811 )
2812 {
2813 paths.push((repo_path, ix));
2814 ix += 1;
2815 }
2816 if paths.is_empty() {
2817 None
2818 } else {
2819 Some((repo, paths))
2820 }
2821 });
2822 tasks.push_back(task);
2823 }
2824
2825 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2826 let mut path_was_used = vec![false; entries.len()];
2827 let tasks = tasks.collect::<Vec<_>>().await;
            // The repositories were sorted by path and reversed above, so the tasks yield
            // more deeply nested (more specific) repositories first. We always want to
            // assign a path to its innermost repository.
2830 for t in tasks {
2831 let Some((repo, paths)) = t else {
2832 continue;
2833 };
2834 let entry = paths_by_git_repo.entry(repo).or_default();
2835 for (repo_path, ix) in paths {
2836 if path_was_used[ix] {
2837 continue;
2838 }
2839 path_was_used[ix] = true;
2840 entry.push(repo_path);
2841 }
2842 }
2843
2844 paths_by_git_repo
2845 })
2846 }
2847}
2848
2849impl BufferGitState {
2850 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2851 Self {
2852 unstaged_diff: Default::default(),
2853 uncommitted_diff: Default::default(),
2854 recalculate_diff_task: Default::default(),
2855 language: Default::default(),
2856 language_registry: Default::default(),
2857 recalculating_tx: postage::watch::channel_with(false).0,
2858 hunk_staging_operation_count: 0,
2859 hunk_staging_operation_count_as_of_write: 0,
2860 head_text: Default::default(),
2861 index_text: Default::default(),
2862 head_changed: Default::default(),
2863 index_changed: Default::default(),
2864 language_changed: Default::default(),
2865 conflict_updated_futures: Default::default(),
2866 conflict_set: Default::default(),
2867 reparse_conflict_markers_task: Default::default(),
2868 }
2869 }
2870
2871 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2872 self.language = buffer.read(cx).language().cloned();
2873 self.language_changed = true;
2874 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2875 }
2876
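    /// Re-parses conflict markers for the given buffer snapshot on the background executor
    /// and applies the resulting snapshot and changed range to the tracked `ConflictSet`.
    /// The returned receiver resolves once the conflict set has been updated; it is dropped
    /// without firing if there is no conflict set or no existing conflict to re-parse.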
2877 fn reparse_conflict_markers(
2878 &mut self,
2879 buffer: text::BufferSnapshot,
2880 cx: &mut Context<Self>,
2881 ) -> oneshot::Receiver<()> {
2882 let (tx, rx) = oneshot::channel();
2883
2884 let Some(conflict_set) = self
2885 .conflict_set
2886 .as_ref()
2887 .and_then(|conflict_set| conflict_set.upgrade())
2888 else {
2889 return rx;
2890 };
2891
2892 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2893 if conflict_set.has_conflict {
2894 Some(conflict_set.snapshot())
2895 } else {
2896 None
2897 }
2898 });
2899
2900 if let Some(old_snapshot) = old_snapshot {
2901 self.conflict_updated_futures.push(tx);
2902 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2903 let (snapshot, changed_range) = cx
2904 .background_spawn(async move {
2905 let new_snapshot = ConflictSet::parse(&buffer);
2906 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2907 (new_snapshot, changed_range)
2908 })
2909 .await;
2910 this.update(cx, |this, cx| {
2911 if let Some(conflict_set) = &this.conflict_set {
2912 conflict_set
2913 .update(cx, |conflict_set, cx| {
2914 conflict_set.set_snapshot(snapshot, changed_range, cx);
2915 })
2916 .ok();
2917 }
2918 let futures = std::mem::take(&mut this.conflict_updated_futures);
2919 for tx in futures {
2920 tx.send(()).ok();
2921 }
2922 })
2923 }))
2924 }
2925
2926 rx
2927 }
2928
2929 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2930 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2931 }
2932
2933 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2934 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2935 }
2936
2937 fn handle_base_texts_updated(
2938 &mut self,
2939 buffer: text::BufferSnapshot,
2940 message: proto::UpdateDiffBases,
2941 cx: &mut Context<Self>,
2942 ) {
2943 use proto::update_diff_bases::Mode;
2944
2945 let Some(mode) = Mode::from_i32(message.mode) else {
2946 return;
2947 };
2948
2949 let diff_bases_change = match mode {
2950 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2951 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2952 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2953 Mode::IndexAndHead => DiffBasesChange::SetEach {
2954 index: message.staged_text,
2955 head: message.committed_text,
2956 },
2957 };
2958
2959 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2960 }
2961
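    /// Returns a future that resolves when the in-flight diff recalculation (if any)
    /// completes, or `None` if no recalculation is currently running.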
2962 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2963 if *self.recalculating_tx.borrow() {
2964 let mut rx = self.recalculating_tx.subscribe();
2965 Some(async move {
2966 loop {
2967 let is_recalculating = rx.recv().await;
2968 if is_recalculating != Some(true) {
2969 break;
2970 }
2971 }
2972 })
2973 } else {
2974 None
2975 }
2976 }
2977
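    /// Applies a change to the cached index and/or HEAD base texts (normalizing their line
    /// endings), marks which bases changed, and triggers a diff recalculation for the buffer.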
2978 fn diff_bases_changed(
2979 &mut self,
2980 buffer: text::BufferSnapshot,
2981 diff_bases_change: Option<DiffBasesChange>,
2982 cx: &mut Context<Self>,
2983 ) {
2984 match diff_bases_change {
2985 Some(DiffBasesChange::SetIndex(index)) => {
2986 self.index_text = index.map(|mut index| {
2987 text::LineEnding::normalize(&mut index);
2988 Arc::new(index)
2989 });
2990 self.index_changed = true;
2991 }
2992 Some(DiffBasesChange::SetHead(head)) => {
2993 self.head_text = head.map(|mut head| {
2994 text::LineEnding::normalize(&mut head);
2995 Arc::new(head)
2996 });
2997 self.head_changed = true;
2998 }
2999 Some(DiffBasesChange::SetBoth(text)) => {
3000 let text = text.map(|mut text| {
3001 text::LineEnding::normalize(&mut text);
3002 Arc::new(text)
3003 });
3004 self.head_text = text.clone();
3005 self.index_text = text;
3006 self.head_changed = true;
3007 self.index_changed = true;
3008 }
3009 Some(DiffBasesChange::SetEach { index, head }) => {
3010 self.index_text = index.map(|mut index| {
3011 text::LineEnding::normalize(&mut index);
3012 Arc::new(index)
3013 });
3014 self.index_changed = true;
3015 self.head_text = head.map(|mut head| {
3016 text::LineEnding::normalize(&mut head);
3017 Arc::new(head)
3018 });
3019 self.head_changed = true;
3020 }
3021 None => {}
3022 }
3023
3024 self.recalculate_diffs(buffer, cx)
3025 }
3026
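    /// Recomputes the unstaged diff (buffer vs. index) and the uncommitted diff (buffer vs.
    /// HEAD) on the background executor. When the index text and HEAD text are the same
    /// `Arc`, the unstaged diff is reused for the uncommitted diff instead of being computed
    /// twice. If hunk staging operations are still pending when the computation finishes,
    /// the result is abandoned so a later recalculation can publish consistent state once
    /// the index has settled.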
3027 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3028 *self.recalculating_tx.borrow_mut() = true;
3029
3030 let language = self.language.clone();
3031 let language_registry = self.language_registry.clone();
3032 let unstaged_diff = self.unstaged_diff();
3033 let uncommitted_diff = self.uncommitted_diff();
3034 let head = self.head_text.clone();
3035 let index = self.index_text.clone();
3036 let index_changed = self.index_changed;
3037 let head_changed = self.head_changed;
3038 let language_changed = self.language_changed;
3039 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3040 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3041 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3042 (None, None) => true,
3043 _ => false,
3044 };
3045 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3046 log::debug!(
3047 "start recalculating diffs for buffer {}",
3048 buffer.remote_id()
3049 );
3050
3051 let mut new_unstaged_diff = None;
3052 if let Some(unstaged_diff) = &unstaged_diff {
3053 new_unstaged_diff = Some(
3054 BufferDiff::update_diff(
3055 unstaged_diff.clone(),
3056 buffer.clone(),
3057 index,
3058 index_changed,
3059 language_changed,
3060 language.clone(),
3061 language_registry.clone(),
3062 cx,
3063 )
3064 .await?,
3065 );
3066 }
3067
3068 // Dropping BufferDiff can be expensive, so yield back to the event loop
3069 // for a bit
3070 yield_now().await;
3071
3072 let mut new_uncommitted_diff = None;
3073 if let Some(uncommitted_diff) = &uncommitted_diff {
3074 new_uncommitted_diff = if index_matches_head {
3075 new_unstaged_diff.clone()
3076 } else {
3077 Some(
3078 BufferDiff::update_diff(
3079 uncommitted_diff.clone(),
3080 buffer.clone(),
3081 head,
3082 head_changed,
3083 language_changed,
3084 language.clone(),
3085 language_registry.clone(),
3086 cx,
3087 )
3088 .await?,
3089 )
3090 }
3091 }
3092
3093 // Dropping BufferDiff can be expensive, so yield back to the event loop
3094 // for a bit
3095 yield_now().await;
3096
3097 let cancel = this.update(cx, |this, _| {
3098 // This checks whether all pending stage/unstage operations
3099 // have quiesced (i.e. both the corresponding write and the
3100 // read of that write have completed). If not, then we cancel
3101 // this recalculation attempt to avoid invalidating pending
3102 // state too quickly; another recalculation will come along
3103 // later and clear the pending state once the state of the index has settled.
3104 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3105 *this.recalculating_tx.borrow_mut() = false;
3106 true
3107 } else {
3108 false
3109 }
3110 })?;
3111 if cancel {
                log::debug!(
                    "aborting diff recalculation for buffer {} due to subsequent hunk staging operations",
                    buffer.remote_id()
                );
3119 return Ok(());
3120 }
3121
3122 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3123 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3124 {
3125 unstaged_diff.update(cx, |diff, cx| {
3126 if language_changed {
3127 diff.language_changed(cx);
3128 }
3129 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3130 })?
3131 } else {
3132 None
3133 };
3134
3135 yield_now().await;
3136
3137 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3138 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3139 {
3140 uncommitted_diff.update(cx, |diff, cx| {
3141 if language_changed {
3142 diff.language_changed(cx);
3143 }
3144 diff.set_snapshot_with_secondary(
3145 new_uncommitted_diff,
3146 &buffer,
3147 unstaged_changed_range,
3148 true,
3149 cx,
3150 );
3151 })?;
3152 }
3153
3154 log::debug!(
3155 "finished recalculating diffs for buffer {}",
3156 buffer.remote_id()
3157 );
3158
3159 if let Some(this) = this.upgrade() {
3160 this.update(cx, |this, _| {
3161 this.index_changed = false;
3162 this.head_changed = false;
3163 this.language_changed = false;
3164 *this.recalculating_tx.borrow_mut() = false;
3165 })?;
3166 }
3167
3168 Ok(())
3169 }));
3170 }
3171}
3172
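/// Builds an `AskPassDelegate` that forwards credential prompts from a git subprocess to the
/// downstream client as `proto::AskPassRequest`s, converting the response into an
/// `EncryptedPassword` for the waiting git command and zeroizing the plaintext afterwards.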
3173fn make_remote_delegate(
3174 this: Entity<GitStore>,
3175 project_id: u64,
3176 repository_id: RepositoryId,
3177 askpass_id: u64,
3178 cx: &mut AsyncApp,
3179) -> AskPassDelegate {
3180 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3181 this.update(cx, |this, cx| {
3182 let Some((client, _)) = this.downstream_client() else {
3183 return;
3184 };
3185 let response = client.request(proto::AskPassRequest {
3186 project_id,
3187 repository_id: repository_id.to_proto(),
3188 askpass_id,
3189 prompt,
3190 });
3191 cx.spawn(async move |_, _| {
3192 let mut response = response.await?.response;
3193 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3194 .ok();
3195 response.zeroize();
3196 anyhow::Ok(())
3197 })
3198 .detach_and_log_err(cx);
3199 })
3200 .log_err();
3201 })
3202}
3203
3204impl RepositoryId {
3205 pub fn to_proto(self) -> u64 {
3206 self.0
3207 }
3208
3209 pub fn from_proto(id: u64) -> Self {
3210 RepositoryId(id)
3211 }
3212}
3213
3214impl RepositorySnapshot {
3215 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3216 Self {
3217 id,
3218 statuses_by_path: Default::default(),
3219 work_directory_abs_path,
3220 branch: None,
3221 head_commit: None,
3222 scan_id: 0,
3223 merge: Default::default(),
3224 remote_origin_url: None,
3225 remote_upstream_url: None,
3226 stash_entries: Default::default(),
3227 path_style,
3228 }
3229 }
3230
3231 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3232 proto::UpdateRepository {
3233 branch_summary: self.branch.as_ref().map(branch_to_proto),
3234 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3235 updated_statuses: self
3236 .statuses_by_path
3237 .iter()
3238 .map(|entry| entry.to_proto())
3239 .collect(),
3240 removed_statuses: Default::default(),
3241 current_merge_conflicts: self
3242 .merge
3243 .conflicted_paths
3244 .iter()
3245 .map(|repo_path| repo_path.to_proto())
3246 .collect(),
3247 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3248 project_id,
3249 id: self.id.to_proto(),
3250 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3251 entry_ids: vec![self.id.to_proto()],
3252 scan_id: self.scan_id,
3253 is_last_update: true,
3254 stash_entries: self
3255 .stash_entries
3256 .entries
3257 .iter()
3258 .map(stash_to_proto)
3259 .collect(),
3260 }
3261 }
3262
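    /// Computes a delta between this snapshot and `old` by walking both sorted status lists
    /// in lockstep: entries only in `self` (or whose status changed) are reported as updated,
    /// and entries only in `old` are reported as removed. Branch, merge, and stash
    /// information is always sent in full.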
3263 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3264 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3265 let mut removed_statuses: Vec<String> = Vec::new();
3266
3267 let mut new_statuses = self.statuses_by_path.iter().peekable();
3268 let mut old_statuses = old.statuses_by_path.iter().peekable();
3269
3270 let mut current_new_entry = new_statuses.next();
3271 let mut current_old_entry = old_statuses.next();
3272 loop {
3273 match (current_new_entry, current_old_entry) {
3274 (Some(new_entry), Some(old_entry)) => {
3275 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3276 Ordering::Less => {
3277 updated_statuses.push(new_entry.to_proto());
3278 current_new_entry = new_statuses.next();
3279 }
3280 Ordering::Equal => {
3281 if new_entry.status != old_entry.status {
3282 updated_statuses.push(new_entry.to_proto());
3283 }
3284 current_old_entry = old_statuses.next();
3285 current_new_entry = new_statuses.next();
3286 }
3287 Ordering::Greater => {
3288 removed_statuses.push(old_entry.repo_path.to_proto());
3289 current_old_entry = old_statuses.next();
3290 }
3291 }
3292 }
3293 (None, Some(old_entry)) => {
3294 removed_statuses.push(old_entry.repo_path.to_proto());
3295 current_old_entry = old_statuses.next();
3296 }
3297 (Some(new_entry), None) => {
3298 updated_statuses.push(new_entry.to_proto());
3299 current_new_entry = new_statuses.next();
3300 }
3301 (None, None) => break,
3302 }
3303 }
3304
3305 proto::UpdateRepository {
3306 branch_summary: self.branch.as_ref().map(branch_to_proto),
3307 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3308 updated_statuses,
3309 removed_statuses,
3310 current_merge_conflicts: self
3311 .merge
3312 .conflicted_paths
3313 .iter()
3314 .map(|path| path.to_proto())
3315 .collect(),
3316 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3317 project_id,
3318 id: self.id.to_proto(),
3319 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3320 entry_ids: vec![],
3321 scan_id: self.scan_id,
3322 is_last_update: true,
3323 stash_entries: self
3324 .stash_entries
3325 .entries
3326 .iter()
3327 .map(stash_to_proto)
3328 .collect(),
3329 }
3330 }
3331
3332 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3333 self.statuses_by_path.iter().cloned()
3334 }
3335
3336 pub fn status_summary(&self) -> GitSummary {
3337 self.statuses_by_path.summary().item_summary
3338 }
3339
3340 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3341 self.statuses_by_path
3342 .get(&PathKey(path.as_ref().clone()), ())
3343 .cloned()
3344 }
3345
3346 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3347 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3348 }
3349
3350 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3351 self.path_style
3352 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3353 .unwrap()
3354 .into()
3355 }
3356
3357 #[inline]
3358 fn abs_path_to_repo_path_inner(
3359 work_directory_abs_path: &Path,
3360 abs_path: &Path,
3361 path_style: PathStyle,
3362 ) -> Option<RepoPath> {
3363 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3364 Some(RepoPath::from_rel_path(&rel_path))
3365 }
3366
3367 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3368 self.merge.conflicted_paths.contains(repo_path)
3369 }
3370
3371 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3372 let had_conflict_on_last_merge_head_change =
3373 self.merge.conflicted_paths.contains(repo_path);
3374 let has_conflict_currently = self
3375 .status_for_path(repo_path)
3376 .is_some_and(|entry| entry.status.is_conflicted());
3377 had_conflict_on_last_merge_head_change || has_conflict_currently
3378 }
3379
3380 /// This is the name that will be displayed in the repository selector for this repository.
3381 pub fn display_name(&self) -> SharedString {
3382 self.work_directory_abs_path
3383 .file_name()
3384 .unwrap_or_default()
3385 .to_string_lossy()
3386 .to_string()
3387 .into()
3388 }
3389}
3390
3391pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3392 proto::StashEntry {
3393 oid: entry.oid.as_bytes().to_vec(),
3394 message: entry.message.clone(),
3395 branch: entry.branch.clone(),
3396 index: entry.index as u64,
3397 timestamp: entry.timestamp,
3398 }
3399}
3400
3401pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3402 Ok(StashEntry {
3403 oid: Oid::from_bytes(&entry.oid)?,
3404 message: entry.message.clone(),
3405 index: entry.index as usize,
3406 branch: entry.branch.clone(),
3407 timestamp: entry.timestamp,
3408 })
3409}
3410
3411impl MergeDetails {
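    /// Loads the current merge state by batch-resolving MERGE_HEAD, CHERRY_PICK_HEAD,
    /// REBASE_HEAD, REVERT_HEAD, and APPLY_HEAD. Conflicted paths are recomputed only when
    /// the set of merge heads changes; returns the new details along with whether the merge
    /// heads changed.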
3412 async fn load(
3413 backend: &Arc<dyn GitRepository>,
3414 status: &SumTree<StatusEntry>,
3415 prev_snapshot: &RepositorySnapshot,
3416 ) -> Result<(MergeDetails, bool)> {
3417 log::debug!("load merge details");
3418 let message = backend.merge_message().await;
3419 let heads = backend
3420 .revparse_batch(vec![
3421 "MERGE_HEAD".into(),
3422 "CHERRY_PICK_HEAD".into(),
3423 "REBASE_HEAD".into(),
3424 "REVERT_HEAD".into(),
3425 "APPLY_HEAD".into(),
3426 ])
3427 .await
3428 .log_err()
3429 .unwrap_or_default()
3430 .into_iter()
3431 .map(|opt| opt.map(SharedString::from))
3432 .collect::<Vec<_>>();
3433 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3434 let conflicted_paths = if merge_heads_changed {
3435 let current_conflicted_paths = TreeSet::from_ordered_entries(
3436 status
3437 .iter()
3438 .filter(|entry| entry.status.is_conflicted())
3439 .map(|entry| entry.repo_path.clone()),
3440 );
3441
3442 // It can happen that we run a scan while a lengthy merge is in progress
3443 // that will eventually result in conflicts, but before those conflicts
3444 // are reported by `git status`. Since for the moment we only care about
3445 // the merge heads state for the purposes of tracking conflicts, don't update
3446 // this state until we see some conflicts.
3447 if heads.iter().any(Option::is_some)
3448 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3449 && current_conflicted_paths.is_empty()
3450 {
3451 log::debug!("not updating merge heads because no conflicts found");
3452 return Ok((
3453 MergeDetails {
3454 message: message.map(SharedString::from),
3455 ..prev_snapshot.merge.clone()
3456 },
3457 false,
3458 ));
3459 }
3460
3461 current_conflicted_paths
3462 } else {
3463 prev_snapshot.merge.conflicted_paths.clone()
3464 };
3465 let details = MergeDetails {
3466 conflicted_paths,
3467 message: message.map(SharedString::from),
3468 heads,
3469 };
3470 Ok((details, merge_heads_changed))
3471 }
3472}
3473
3474impl Repository {
3475 pub fn snapshot(&self) -> RepositorySnapshot {
3476 self.snapshot.clone()
3477 }
3478
3479 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3480 self.pending_ops.iter().cloned()
3481 }
3482
3483 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3484 self.pending_ops.summary().clone()
3485 }
3486
3487 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3488 self.pending_ops
3489 .get(&PathKey(path.as_ref().clone()), ())
3490 .cloned()
3491 }
3492
3493 fn local(
3494 id: RepositoryId,
3495 work_directory_abs_path: Arc<Path>,
3496 dot_git_abs_path: Arc<Path>,
3497 project_environment: WeakEntity<ProjectEnvironment>,
3498 fs: Arc<dyn Fs>,
3499 git_store: WeakEntity<GitStore>,
3500 cx: &mut Context<Self>,
3501 ) -> Self {
3502 let snapshot =
3503 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3504 let state = cx
3505 .spawn(async move |_, cx| {
3506 LocalRepositoryState::new(
3507 work_directory_abs_path,
3508 dot_git_abs_path,
3509 project_environment,
3510 fs,
3511 cx,
3512 )
3513 .await
3514 .map_err(|err| err.to_string())
3515 })
3516 .shared();
3517 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3518 let state = cx
3519 .spawn(async move |_, _| {
3520 let state = state.await?;
3521 Ok(RepositoryState::Local(state))
3522 })
3523 .shared();
3524
3525 Repository {
3526 this: cx.weak_entity(),
3527 git_store,
3528 snapshot,
3529 pending_ops: Default::default(),
3530 repository_state: state,
3531 commit_message_buffer: None,
3532 askpass_delegates: Default::default(),
3533 paths_needing_status_update: Default::default(),
3534 latest_askpass_id: 0,
3535 job_sender,
3536 job_id: 0,
3537 active_jobs: Default::default(),
3538 }
3539 }
3540
3541 fn remote(
3542 id: RepositoryId,
3543 work_directory_abs_path: Arc<Path>,
3544 path_style: PathStyle,
3545 project_id: ProjectId,
3546 client: AnyProtoClient,
3547 git_store: WeakEntity<GitStore>,
3548 cx: &mut Context<Self>,
3549 ) -> Self {
3550 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3551 let repository_state = RemoteRepositoryState { project_id, client };
3552 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3553 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3554 Self {
3555 this: cx.weak_entity(),
3556 snapshot,
3557 commit_message_buffer: None,
3558 git_store,
3559 pending_ops: Default::default(),
3560 paths_needing_status_update: Default::default(),
3561 job_sender,
3562 repository_state,
3563 askpass_delegates: Default::default(),
3564 latest_askpass_id: 0,
3565 active_jobs: Default::default(),
3566 job_id: 0,
3567 }
3568 }
3569
3570 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3571 self.git_store.upgrade()
3572 }
3573
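    /// Reloads the index and HEAD base texts for every open buffer that has a diff in this
    /// repository, compares them against the currently cached texts, forwards any changes to
    /// the downstream client, and updates the per-buffer diff state. Runs as a keyed job
    /// (`GitJobKey::ReloadBufferDiffBases`).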
3574 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3575 let this = cx.weak_entity();
3576 let git_store = self.git_store.clone();
3577 let _ = self.send_keyed_job(
3578 Some(GitJobKey::ReloadBufferDiffBases),
3579 None,
3580 |state, mut cx| async move {
3581 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3582 log::error!("tried to recompute diffs for a non-local repository");
3583 return Ok(());
3584 };
3585
3586 let Some(this) = this.upgrade() else {
3587 return Ok(());
3588 };
3589
3590 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3591 git_store.update(cx, |git_store, cx| {
3592 git_store
3593 .diffs
3594 .iter()
3595 .filter_map(|(buffer_id, diff_state)| {
3596 let buffer_store = git_store.buffer_store.read(cx);
3597 let buffer = buffer_store.get(*buffer_id)?;
3598 let file = File::from_dyn(buffer.read(cx).file())?;
3599 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3600 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3601 log::debug!(
3602 "start reload diff bases for repo path {}",
3603 repo_path.as_unix_str()
3604 );
3605 diff_state.update(cx, |diff_state, _| {
3606 let has_unstaged_diff = diff_state
3607 .unstaged_diff
3608 .as_ref()
3609 .is_some_and(|diff| diff.is_upgradable());
3610 let has_uncommitted_diff = diff_state
3611 .uncommitted_diff
3612 .as_ref()
3613 .is_some_and(|set| set.is_upgradable());
3614
3615 Some((
3616 buffer,
3617 repo_path,
3618 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3619 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3620 ))
3621 })
3622 })
3623 .collect::<Vec<_>>()
3624 })
3625 })??;
3626
3627 let buffer_diff_base_changes = cx
3628 .background_spawn(async move {
3629 let mut changes = Vec::new();
3630 for (buffer, repo_path, current_index_text, current_head_text) in
3631 &repo_diff_state_updates
3632 {
3633 let index_text = if current_index_text.is_some() {
3634 backend.load_index_text(repo_path.clone()).await
3635 } else {
3636 None
3637 };
3638 let head_text = if current_head_text.is_some() {
3639 backend.load_committed_text(repo_path.clone()).await
3640 } else {
3641 None
3642 };
3643
3644 let change =
3645 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3646 (Some(current_index), Some(current_head)) => {
3647 let index_changed =
3648 index_text.as_ref() != current_index.as_deref();
3649 let head_changed =
3650 head_text.as_ref() != current_head.as_deref();
3651 if index_changed && head_changed {
3652 if index_text == head_text {
3653 Some(DiffBasesChange::SetBoth(head_text))
3654 } else {
3655 Some(DiffBasesChange::SetEach {
3656 index: index_text,
3657 head: head_text,
3658 })
3659 }
3660 } else if index_changed {
3661 Some(DiffBasesChange::SetIndex(index_text))
3662 } else if head_changed {
3663 Some(DiffBasesChange::SetHead(head_text))
3664 } else {
3665 None
3666 }
3667 }
3668 (Some(current_index), None) => {
3669 let index_changed =
3670 index_text.as_ref() != current_index.as_deref();
3671 index_changed
3672 .then_some(DiffBasesChange::SetIndex(index_text))
3673 }
3674 (None, Some(current_head)) => {
3675 let head_changed =
3676 head_text.as_ref() != current_head.as_deref();
3677 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3678 }
3679 (None, None) => None,
3680 };
3681
3682 changes.push((buffer.clone(), change))
3683 }
3684 changes
3685 })
3686 .await;
3687
3688 git_store.update(&mut cx, |git_store, cx| {
3689 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3690 let buffer_snapshot = buffer.read(cx).text_snapshot();
3691 let buffer_id = buffer_snapshot.remote_id();
3692 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3693 continue;
3694 };
3695
3696 let downstream_client = git_store.downstream_client();
3697 diff_state.update(cx, |diff_state, cx| {
3698 use proto::update_diff_bases::Mode;
3699
3700 if let Some((diff_bases_change, (client, project_id))) =
3701 diff_bases_change.clone().zip(downstream_client)
3702 {
3703 let (staged_text, committed_text, mode) = match diff_bases_change {
3704 DiffBasesChange::SetIndex(index) => {
3705 (index, None, Mode::IndexOnly)
3706 }
3707 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3708 DiffBasesChange::SetEach { index, head } => {
3709 (index, head, Mode::IndexAndHead)
3710 }
3711 DiffBasesChange::SetBoth(text) => {
3712 (None, text, Mode::IndexMatchesHead)
3713 }
3714 };
3715 client
3716 .send(proto::UpdateDiffBases {
3717 project_id: project_id.to_proto(),
3718 buffer_id: buffer_id.to_proto(),
3719 staged_text,
3720 committed_text,
3721 mode: mode as i32,
3722 })
3723 .log_err();
3724 }
3725
3726 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3727 });
3728 }
3729 })
3730 },
3731 );
3732 }
3733
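    /// Enqueues `job` on this repository's background git worker and returns a
    /// oneshot receiver for its result. Jobs run one at a time, in the order
    /// they were enqueued. When `status` is provided, the job is recorded in
    /// `active_jobs` (and surfaced via `current_job`) while it runs.
    ///
    /// A minimal usage sketch (hypothetical call site; assumes an
    /// `Entity<Repository>` named `repository` and an async context):
    ///
    /// ```ignore
    /// let rx = repository.update(cx, |repo, _cx| {
    ///     repo.send_job(Some("git status".into()), |_state, _cx| async move {
    ///         // `_state` is either the local backend or a remote client.
    ///         anyhow::Ok(())
    ///     })
    /// });
    /// let result = rx.await?;
    /// ```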
3734 pub fn send_job<F, Fut, R>(
3735 &mut self,
3736 status: Option<SharedString>,
3737 job: F,
3738 ) -> oneshot::Receiver<R>
3739 where
3740 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3741 Fut: Future<Output = R> + 'static,
3742 R: Send + 'static,
3743 {
3744 self.send_keyed_job(None, status, job)
3745 }
3746
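    /// Like `send_job`, but tags the job with an optional `GitJobKey`. The
    /// worker skips a keyed job when a newer job with the same key is already
    /// waiting behind it in the queue, so repeated requests (for example,
    /// index writes for the same paths) coalesce into the most recent one.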
3747 fn send_keyed_job<F, Fut, R>(
3748 &mut self,
3749 key: Option<GitJobKey>,
3750 status: Option<SharedString>,
3751 job: F,
3752 ) -> oneshot::Receiver<R>
3753 where
3754 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3755 Fut: Future<Output = R> + 'static,
3756 R: Send + 'static,
3757 {
3758 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3759 let job_id = post_inc(&mut self.job_id);
3760 let this = self.this.clone();
3761 self.job_sender
3762 .unbounded_send(GitJob {
3763 key,
3764 job: Box::new(move |state, cx: &mut AsyncApp| {
3765 let job = job(state, cx.clone());
3766 cx.spawn(async move |cx| {
3767 if let Some(s) = status.clone() {
3768 this.update(cx, |this, cx| {
3769 this.active_jobs.insert(
3770 job_id,
3771 JobInfo {
3772 start: Instant::now(),
3773 message: s.clone(),
3774 },
3775 );
3776
3777 cx.notify();
3778 })
3779 .ok();
3780 }
3781 let result = job.await;
3782
3783 this.update(cx, |this, cx| {
3784 this.active_jobs.remove(&job_id);
3785 cx.notify();
3786 })
3787 .ok();
3788
3789 result_tx.send(result).ok();
3790 })
3791 }),
3792 })
3793 .ok();
3794 result_rx
3795 }
3796
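    /// Marks this repository as the active one in its `GitStore` and emits
    /// `GitStoreEvent::ActiveRepositoryChanged`.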
3797 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3798 let Some(git_store) = self.git_store.upgrade() else {
3799 return;
3800 };
3801 let entity = cx.entity();
3802 git_store.update(cx, |git_store, cx| {
3803 let Some((&id, _)) = git_store
3804 .repositories
3805 .iter()
3806 .find(|(_, handle)| *handle == &entity)
3807 else {
3808 return;
3809 };
3810 git_store.active_repo_id = Some(id);
3811 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3812 });
3813 }
3814
3815 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3816 self.snapshot.status()
3817 }
3818
3819 pub fn cached_stash(&self) -> GitStash {
3820 self.snapshot.stash_entries.clone()
3821 }
3822
3823 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3824 let git_store = self.git_store.upgrade()?;
3825 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3826 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3827 let abs_path = SanitizedPath::new(&abs_path);
3828 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3829 Some(ProjectPath {
3830 worktree_id: worktree.read(cx).id(),
3831 path: relative_path,
3832 })
3833 }
3834
3835 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3836 let git_store = self.git_store.upgrade()?;
3837 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3838 let abs_path = worktree_store.absolutize(path, cx)?;
3839 self.snapshot.abs_path_to_repo_path(&abs_path)
3840 }
3841
3842 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3843 other
3844 .read(cx)
3845 .snapshot
3846 .work_directory_abs_path
3847 .starts_with(&self.snapshot.work_directory_abs_path)
3848 }
3849
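    /// Returns the buffer used to compose commit messages for this repository,
    /// creating it on first use. Local repositories create a fresh buffer;
    /// remote repositories request the buffer from the host and wait for it to
    /// arrive. When a language registry is available, the "Git Commit"
    /// language is applied to the buffer.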
3850 pub fn open_commit_buffer(
3851 &mut self,
3852 languages: Option<Arc<LanguageRegistry>>,
3853 buffer_store: Entity<BufferStore>,
3854 cx: &mut Context<Self>,
3855 ) -> Task<Result<Entity<Buffer>>> {
3856 let id = self.id;
3857 if let Some(buffer) = self.commit_message_buffer.clone() {
3858 return Task::ready(Ok(buffer));
3859 }
3860 let this = cx.weak_entity();
3861
3862 let rx = self.send_job(None, move |state, mut cx| async move {
3863 let Some(this) = this.upgrade() else {
3864 bail!("git store was dropped");
3865 };
3866 match state {
3867 RepositoryState::Local(..) => {
3868 this.update(&mut cx, |_, cx| {
3869 Self::open_local_commit_buffer(languages, buffer_store, cx)
3870 })?
3871 .await
3872 }
3873 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3874 let request = client.request(proto::OpenCommitMessageBuffer {
3875 project_id: project_id.0,
3876 repository_id: id.to_proto(),
3877 });
3878 let response = request.await.context("requesting to open commit buffer")?;
3879 let buffer_id = BufferId::new(response.buffer_id)?;
3880 let buffer = buffer_store
3881 .update(&mut cx, |buffer_store, cx| {
3882 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3883 })?
3884 .await?;
3885 if let Some(language_registry) = languages {
3886 let git_commit_language =
3887 language_registry.language_for_name("Git Commit").await?;
3888 buffer.update(&mut cx, |buffer, cx| {
3889 buffer.set_language(Some(git_commit_language), cx);
3890 })?;
3891 }
3892 this.update(&mut cx, |this, _| {
3893 this.commit_message_buffer = Some(buffer.clone());
3894 })?;
3895 Ok(buffer)
3896 }
3897 }
3898 });
3899
3900 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3901 }
3902
3903 fn open_local_commit_buffer(
3904 language_registry: Option<Arc<LanguageRegistry>>,
3905 buffer_store: Entity<BufferStore>,
3906 cx: &mut Context<Self>,
3907 ) -> Task<Result<Entity<Buffer>>> {
3908 cx.spawn(async move |repository, cx| {
3909 let buffer = buffer_store
3910 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3911 .await?;
3912
3913 if let Some(language_registry) = language_registry {
3914 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3915 buffer.update(cx, |buffer, cx| {
3916 buffer.set_language(Some(git_commit_language), cx);
3917 })?;
3918 }
3919
3920 repository.update(cx, |repository, _| {
3921 repository.commit_message_buffer = Some(buffer.clone());
3922 })?;
3923 Ok(buffer)
3924 })
3925 }
3926
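    /// Restores the given paths to their contents at `commit`, tracking each
    /// path as a pending "Reverted" op while the checkout runs.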
3927 pub fn checkout_files(
3928 &mut self,
3929 commit: &str,
3930 paths: Vec<RepoPath>,
3931 cx: &mut Context<Self>,
3932 ) -> Task<Result<()>> {
3933 let commit = commit.to_string();
3934 let id = self.id;
3935
3936 self.spawn_job_with_tracking(
3937 paths.clone(),
3938 pending_op::GitStatus::Reverted,
3939 cx,
3940 async move |this, cx| {
3941 this.update(cx, |this, _cx| {
3942 this.send_job(
3943 Some(format!("git checkout {}", commit).into()),
3944 move |git_repo, _| async move {
3945 match git_repo {
3946 RepositoryState::Local(LocalRepositoryState {
3947 backend,
3948 environment,
3949 ..
3950 }) => {
3951 backend
3952 .checkout_files(commit, paths, environment.clone())
3953 .await
3954 }
3955 RepositoryState::Remote(RemoteRepositoryState {
3956 project_id,
3957 client,
3958 }) => {
3959 client
3960 .request(proto::GitCheckoutFiles {
3961 project_id: project_id.0,
3962 repository_id: id.to_proto(),
3963 commit,
3964 paths: paths
3965 .into_iter()
3966 .map(|p| p.to_proto())
3967 .collect(),
3968 })
3969 .await?;
3970
3971 Ok(())
3972 }
3973 }
3974 },
3975 )
3976 })?
3977 .await?
3978 },
3979 )
3980 }
3981
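    /// Resets HEAD to `commit` using the given `ResetMode`, either via the
    /// local backend or by forwarding a `GitReset` request to the host.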
3982 pub fn reset(
3983 &mut self,
3984 commit: String,
3985 reset_mode: ResetMode,
3986 _cx: &mut App,
3987 ) -> oneshot::Receiver<Result<()>> {
3988 let id = self.id;
3989
3990 self.send_job(None, move |git_repo, _| async move {
3991 match git_repo {
3992 RepositoryState::Local(LocalRepositoryState {
3993 backend,
3994 environment,
3995 ..
3996 }) => backend.reset(commit, reset_mode, environment).await,
3997 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3998 client
3999 .request(proto::GitReset {
4000 project_id: project_id.0,
4001 repository_id: id.to_proto(),
4002 commit,
4003 mode: match reset_mode {
4004 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4005 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4006 },
4007 })
4008 .await?;
4009
4010 Ok(())
4011 }
4012 }
4013 })
4014 }
4015
4016 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4017 let id = self.id;
4018 self.send_job(None, move |git_repo, _cx| async move {
4019 match git_repo {
4020 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4021 backend.show(commit).await
4022 }
4023 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4024 let resp = client
4025 .request(proto::GitShow {
4026 project_id: project_id.0,
4027 repository_id: id.to_proto(),
4028 commit,
4029 })
4030 .await?;
4031
4032 Ok(CommitDetails {
4033 sha: resp.sha.into(),
4034 message: resp.message.into(),
4035 commit_timestamp: resp.commit_timestamp,
4036 author_email: resp.author_email.into(),
4037 author_name: resp.author_name.into(),
4038 })
4039 }
4040 }
4041 })
4042 }
4043
4044 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4045 let id = self.id;
4046 self.send_job(None, move |git_repo, cx| async move {
4047 match git_repo {
4048 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4049 backend.load_commit(commit, cx).await
4050 }
4051 RepositoryState::Remote(RemoteRepositoryState {
4052 client, project_id, ..
4053 }) => {
4054 let response = client
4055 .request(proto::LoadCommitDiff {
4056 project_id: project_id.0,
4057 repository_id: id.to_proto(),
4058 commit,
4059 })
4060 .await?;
4061 Ok(CommitDiff {
4062 files: response
4063 .files
4064 .into_iter()
4065 .map(|file| {
4066 Ok(CommitFile {
4067 path: RepoPath::from_proto(&file.path)?,
4068 old_text: file.old_text,
4069 new_text: file.new_text,
4070 })
4071 })
4072 .collect::<Result<Vec<_>>>()?,
4073 })
4074 }
4075 }
4076 })
4077 }
4078
4079 pub fn file_history(
4080 &mut self,
4081 path: RepoPath,
4082 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4083 self.file_history_paginated(path, 0, None)
4084 }
4085
4086 pub fn file_history_paginated(
4087 &mut self,
4088 path: RepoPath,
4089 skip: usize,
4090 limit: Option<usize>,
4091 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4092 let id = self.id;
4093 self.send_job(None, move |git_repo, _cx| async move {
4094 match git_repo {
4095 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4096 backend.file_history_paginated(path, skip, limit).await
4097 }
4098 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4099 let response = client
4100 .request(proto::GitFileHistory {
4101 project_id: project_id.0,
4102 repository_id: id.to_proto(),
4103 path: path.to_proto(),
4104 skip: skip as u64,
4105 limit: limit.map(|l| l as u64),
4106 })
4107 .await?;
4108 Ok(git::repository::FileHistory {
4109 entries: response
4110 .entries
4111 .into_iter()
4112 .map(|entry| git::repository::FileHistoryEntry {
4113 sha: entry.sha.into(),
4114 subject: entry.subject.into(),
4115 message: entry.message.into(),
4116 commit_timestamp: entry.commit_timestamp,
4117 author_name: entry.author_name.into(),
4118 author_email: entry.author_email.into(),
4119 })
4120 .collect(),
4121 path: RepoPath::from_proto(&response.path)?,
4122 })
4123 }
4124 }
4125 })
4126 }
4127
4128 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4129 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4130 }
4131
4132 fn save_buffers<'a>(
4133 &self,
4134 entries: impl IntoIterator<Item = &'a RepoPath>,
4135 cx: &mut Context<Self>,
4136 ) -> Vec<Task<anyhow::Result<()>>> {
4137 let mut save_futures = Vec::new();
4138 if let Some(buffer_store) = self.buffer_store(cx) {
4139 buffer_store.update(cx, |buffer_store, cx| {
4140 for path in entries {
4141 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4142 continue;
4143 };
4144 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4145 && buffer
4146 .read(cx)
4147 .file()
4148 .is_some_and(|file| file.disk_state().exists())
4149 && buffer.read(cx).has_unsaved_edits()
4150 {
4151 save_futures.push(buffer_store.save_buffer(buffer, cx));
4152 }
4153 }
4154 })
4155 }
4156 save_futures
4157 }
4158
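    /// Stages the given paths, saving any buffers with unsaved edits for those
    /// paths first. The job is keyed by the set of paths so redundant index
    /// writes coalesce, and each path is tracked as a pending "Staged" op
    /// until the job completes.
    ///
    /// A usage sketch (hypothetical call site; assumes an `Entity<Repository>`
    /// named `repository` and a `RepoPath` named `repo_path`):
    ///
    /// ```ignore
    /// let task = repository.update(cx, |repo, cx| {
    ///     repo.stage_entries(vec![repo_path.clone()], cx)
    /// });
    /// task.await?;
    /// ```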
4159 pub fn stage_entries(
4160 &mut self,
4161 entries: Vec<RepoPath>,
4162 cx: &mut Context<Self>,
4163 ) -> Task<anyhow::Result<()>> {
4164 if entries.is_empty() {
4165 return Task::ready(Ok(()));
4166 }
4167 let id = self.id;
4168 let save_tasks = self.save_buffers(&entries, cx);
4169 let paths = entries
4170 .iter()
4171 .map(|p| p.as_unix_str())
4172 .collect::<Vec<_>>()
4173 .join(" ");
4174 let status = format!("git add {paths}");
4175 let job_key = GitJobKey::WriteIndex(entries.clone());
4176
4177 self.spawn_job_with_tracking(
4178 entries.clone(),
4179 pending_op::GitStatus::Staged,
4180 cx,
4181 async move |this, cx| {
4182 for save_task in save_tasks {
4183 save_task.await?;
4184 }
4185
4186 this.update(cx, |this, _| {
4187 this.send_keyed_job(
4188 Some(job_key),
4189 Some(status.into()),
4190 move |git_repo, _cx| async move {
4191 match git_repo {
4192 RepositoryState::Local(LocalRepositoryState {
4193 backend,
4194 environment,
4195 ..
4196 }) => backend.stage_paths(entries, environment.clone()).await,
4197 RepositoryState::Remote(RemoteRepositoryState {
4198 project_id,
4199 client,
4200 }) => {
4201 client
4202 .request(proto::Stage {
4203 project_id: project_id.0,
4204 repository_id: id.to_proto(),
4205 paths: entries
4206 .into_iter()
4207 .map(|repo_path| repo_path.to_proto())
4208 .collect(),
4209 })
4210 .await
4211 .context("sending stage request")?;
4212
4213 Ok(())
4214 }
4215 }
4216 },
4217 )
4218 })?
4219 .await?
4220 },
4221 )
4222 }
4223
4224 pub fn unstage_entries(
4225 &mut self,
4226 entries: Vec<RepoPath>,
4227 cx: &mut Context<Self>,
4228 ) -> Task<anyhow::Result<()>> {
4229 if entries.is_empty() {
4230 return Task::ready(Ok(()));
4231 }
4232 let id = self.id;
4233 let save_tasks = self.save_buffers(&entries, cx);
4234 let paths = entries
4235 .iter()
4236 .map(|p| p.as_unix_str())
4237 .collect::<Vec<_>>()
4238 .join(" ");
4239 let status = format!("git reset {paths}");
4240 let job_key = GitJobKey::WriteIndex(entries.clone());
4241
4242 self.spawn_job_with_tracking(
4243 entries.clone(),
4244 pending_op::GitStatus::Unstaged,
4245 cx,
4246 async move |this, cx| {
4247 for save_task in save_tasks {
4248 save_task.await?;
4249 }
4250
4251 this.update(cx, |this, _| {
4252 this.send_keyed_job(
4253 Some(job_key),
4254 Some(status.into()),
4255 move |git_repo, _cx| async move {
4256 match git_repo {
4257 RepositoryState::Local(LocalRepositoryState {
4258 backend,
4259 environment,
4260 ..
4261 }) => backend.unstage_paths(entries, environment).await,
4262 RepositoryState::Remote(RemoteRepositoryState {
4263 project_id,
4264 client,
4265 }) => {
4266 client
4267 .request(proto::Unstage {
4268 project_id: project_id.0,
4269 repository_id: id.to_proto(),
4270 paths: entries
4271 .into_iter()
4272 .map(|repo_path| repo_path.to_proto())
4273 .collect(),
4274 })
4275 .await
4276 .context("sending unstage request")?;
4277
4278 Ok(())
4279 }
4280 }
4281 },
4282 )
4283 })?
4284 .await?
4285 },
4286 )
4287 }
4288
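    /// Stages every entry in the cached status that is not already staged or
    /// currently being staged.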
4289 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4290 let to_stage = self
4291 .cached_status()
4292 .filter_map(|entry| {
4293 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4294 if ops.staging() || ops.staged() {
4295 None
4296 } else {
4297 Some(entry.repo_path)
4298 }
4299 } else if entry.status.staging().is_fully_staged() {
4300 None
4301 } else {
4302 Some(entry.repo_path)
4303 }
4304 })
4305 .collect();
4306 self.stage_entries(to_stage, cx)
4307 }
4308
4309 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4310 let to_unstage = self
4311 .cached_status()
4312 .filter_map(|entry| {
4313 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4314 if !ops.staging() && !ops.staged() {
4315 None
4316 } else {
4317 Some(entry.repo_path)
4318 }
4319 } else if entry.status.staging().is_fully_unstaged() {
4320 None
4321 } else {
4322 Some(entry.repo_path)
4323 }
4324 })
4325 .collect();
4326 self.unstage_entries(to_unstage, cx)
4327 }
4328
4329 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4330 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4331
4332 self.stash_entries(to_stash, cx)
4333 }
4334
4335 pub fn stash_entries(
4336 &mut self,
4337 entries: Vec<RepoPath>,
4338 cx: &mut Context<Self>,
4339 ) -> Task<anyhow::Result<()>> {
4340 let id = self.id;
4341
4342 cx.spawn(async move |this, cx| {
4343 this.update(cx, |this, _| {
4344 this.send_job(None, move |git_repo, _cx| async move {
4345 match git_repo {
4346 RepositoryState::Local(LocalRepositoryState {
4347 backend,
4348 environment,
4349 ..
4350 }) => backend.stash_paths(entries, environment).await,
4351 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4352 client
4353 .request(proto::Stash {
4354 project_id: project_id.0,
4355 repository_id: id.to_proto(),
4356 paths: entries
4357 .into_iter()
4358 .map(|repo_path| repo_path.to_proto())
4359 .collect(),
4360 })
4361 .await
4362 .context("sending stash request")?;
4363 Ok(())
4364 }
4365 }
4366 })
4367 })?
4368 .await??;
4369 Ok(())
4370 })
4371 }
4372
4373 pub fn stash_pop(
4374 &mut self,
4375 index: Option<usize>,
4376 cx: &mut Context<Self>,
4377 ) -> Task<anyhow::Result<()>> {
4378 let id = self.id;
4379 cx.spawn(async move |this, cx| {
4380 this.update(cx, |this, _| {
4381 this.send_job(None, move |git_repo, _cx| async move {
4382 match git_repo {
4383 RepositoryState::Local(LocalRepositoryState {
4384 backend,
4385 environment,
4386 ..
4387 }) => backend.stash_pop(index, environment).await,
4388 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4389 client
4390 .request(proto::StashPop {
4391 project_id: project_id.0,
4392 repository_id: id.to_proto(),
4393 stash_index: index.map(|i| i as u64),
4394 })
4395 .await
4396 .context("sending stash pop request")?;
4397 Ok(())
4398 }
4399 }
4400 })
4401 })?
4402 .await??;
4403 Ok(())
4404 })
4405 }
4406
4407 pub fn stash_apply(
4408 &mut self,
4409 index: Option<usize>,
4410 cx: &mut Context<Self>,
4411 ) -> Task<anyhow::Result<()>> {
4412 let id = self.id;
4413 cx.spawn(async move |this, cx| {
4414 this.update(cx, |this, _| {
4415 this.send_job(None, move |git_repo, _cx| async move {
4416 match git_repo {
4417 RepositoryState::Local(LocalRepositoryState {
4418 backend,
4419 environment,
4420 ..
4421 }) => backend.stash_apply(index, environment).await,
4422 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4423 client
4424 .request(proto::StashApply {
4425 project_id: project_id.0,
4426 repository_id: id.to_proto(),
4427 stash_index: index.map(|i| i as u64),
4428 })
4429 .await
4430 .context("sending stash apply request")?;
4431 Ok(())
4432 }
4433 }
4434 })
4435 })?
4436 .await??;
4437 Ok(())
4438 })
4439 }
4440
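    /// Drops a stash entry. For local repositories, the cached stash entries
    /// are reloaded on success and an updated snapshot is sent to any
    /// downstream clients.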
4441 pub fn stash_drop(
4442 &mut self,
4443 index: Option<usize>,
4444 cx: &mut Context<Self>,
4445 ) -> oneshot::Receiver<anyhow::Result<()>> {
4446 let id = self.id;
4447 let updates_tx = self
4448 .git_store()
4449 .and_then(|git_store| match &git_store.read(cx).state {
4450 GitStoreState::Local { downstream, .. } => downstream
4451 .as_ref()
4452 .map(|downstream| downstream.updates_tx.clone()),
4453 _ => None,
4454 });
4455 let this = cx.weak_entity();
4456 self.send_job(None, move |git_repo, mut cx| async move {
4457 match git_repo {
4458 RepositoryState::Local(LocalRepositoryState {
4459 backend,
4460 environment,
4461 ..
4462 }) => {
4463 // TODO would be nice to not have to do this manually
4464 let result = backend.stash_drop(index, environment).await;
4465 if result.is_ok()
4466 && let Ok(stash_entries) = backend.stash_entries().await
4467 {
4468 let snapshot = this.update(&mut cx, |this, cx| {
4469 this.snapshot.stash_entries = stash_entries;
4470 cx.emit(RepositoryEvent::StashEntriesChanged);
4471 this.snapshot.clone()
4472 })?;
4473 if let Some(updates_tx) = updates_tx {
4474 updates_tx
4475 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4476 .ok();
4477 }
4478 }
4479
4480 result
4481 }
4482 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4483 client
4484 .request(proto::StashDrop {
4485 project_id: project_id.0,
4486 repository_id: id.to_proto(),
4487 stash_index: index.map(|i| i as u64),
4488 })
4489 .await
                        .context("sending stash drop request")?;
4491 Ok(())
4492 }
4493 }
4494 })
4495 }
4496
4497 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4498 let id = self.id;
4499 self.send_job(
4500 Some(format!("git hook {}", hook.as_str()).into()),
4501 move |git_repo, _cx| async move {
4502 match git_repo {
4503 RepositoryState::Local(LocalRepositoryState {
4504 backend,
4505 environment,
4506 ..
4507 }) => backend.run_hook(hook, environment.clone()).await,
4508 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4509 client
4510 .request(proto::RunGitHook {
4511 project_id: project_id.0,
4512 repository_id: id.to_proto(),
4513 hook: hook.to_proto(),
4514 })
4515 .await?;
4516
4517 Ok(())
4518 }
4519 }
4520 },
4521 )
4522 }
4523
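    /// Runs the pre-commit hook, then creates a commit with the given message
    /// and options. For remote repositories the askpass delegate is registered
    /// under a fresh id that is sent along with the `Commit` request.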
4524 pub fn commit(
4525 &mut self,
4526 message: SharedString,
4527 name_and_email: Option<(SharedString, SharedString)>,
4528 options: CommitOptions,
4529 askpass: AskPassDelegate,
4530 cx: &mut App,
4531 ) -> oneshot::Receiver<Result<()>> {
4532 let id = self.id;
4533 let askpass_delegates = self.askpass_delegates.clone();
4534 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4535
4536 let rx = self.run_hook(RunHook::PreCommit, cx);
4537
4538 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4539 rx.await??;
4540
4541 match git_repo {
4542 RepositoryState::Local(LocalRepositoryState {
4543 backend,
4544 environment,
4545 ..
4546 }) => {
4547 backend
4548 .commit(message, name_and_email, options, askpass, environment)
4549 .await
4550 }
4551 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4552 askpass_delegates.lock().insert(askpass_id, askpass);
4553 let _defer = util::defer(|| {
4554 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4555 debug_assert!(askpass_delegate.is_some());
4556 });
4557 let (name, email) = name_and_email.unzip();
4558 client
4559 .request(proto::Commit {
4560 project_id: project_id.0,
4561 repository_id: id.to_proto(),
4562 message: String::from(message),
4563 name: name.map(String::from),
4564 email: email.map(String::from),
4565 options: Some(proto::commit::CommitOptions {
4566 amend: options.amend,
4567 signoff: options.signoff,
4568 }),
4569 askpass_id,
4570 })
4571 .await
4572 .context("sending commit request")?;
4573
4574 Ok(())
4575 }
4576 }
4577 })
4578 }
4579
4580 pub fn fetch(
4581 &mut self,
4582 fetch_options: FetchOptions,
4583 askpass: AskPassDelegate,
4584 _cx: &mut App,
4585 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4586 let askpass_delegates = self.askpass_delegates.clone();
4587 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4588 let id = self.id;
4589
4590 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4591 match git_repo {
4592 RepositoryState::Local(LocalRepositoryState {
4593 backend,
4594 environment,
4595 ..
4596 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4597 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4598 askpass_delegates.lock().insert(askpass_id, askpass);
4599 let _defer = util::defer(|| {
4600 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4601 debug_assert!(askpass_delegate.is_some());
4602 });
4603
4604 let response = client
4605 .request(proto::Fetch {
4606 project_id: project_id.0,
4607 repository_id: id.to_proto(),
4608 askpass_id,
4609 remote: fetch_options.to_proto(),
4610 })
4611 .await
4612 .context("sending fetch request")?;
4613
4614 Ok(RemoteCommandOutput {
4615 stdout: response.stdout,
4616 stderr: response.stderr,
4617 })
4618 }
4619 }
4620 })
4621 }
4622
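    /// Runs the pre-push hook, then pushes `branch` to `remote`. After a
    /// successful local push, the branch list is re-read so the cached head
    /// branch stays current, and downstream clients receive the updated
    /// snapshot.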
4623 pub fn push(
4624 &mut self,
4625 branch: SharedString,
4626 remote: SharedString,
4627 options: Option<PushOptions>,
4628 askpass: AskPassDelegate,
4629 cx: &mut Context<Self>,
4630 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4631 let askpass_delegates = self.askpass_delegates.clone();
4632 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4633 let id = self.id;
4634
4635 let args = options
4636 .map(|option| match option {
4637 PushOptions::SetUpstream => " --set-upstream",
4638 PushOptions::Force => " --force-with-lease",
4639 })
4640 .unwrap_or("");
4641
4642 let updates_tx = self
4643 .git_store()
4644 .and_then(|git_store| match &git_store.read(cx).state {
4645 GitStoreState::Local { downstream, .. } => downstream
4646 .as_ref()
4647 .map(|downstream| downstream.updates_tx.clone()),
4648 _ => None,
4649 });
4650
4651 let this = cx.weak_entity();
4652 let rx = self.run_hook(RunHook::PrePush, cx);
4653 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4655 move |git_repo, mut cx| async move {
4656 rx.await??;
4657 match git_repo {
4658 RepositoryState::Local(LocalRepositoryState {
4659 backend,
4660 environment,
4661 ..
4662 }) => {
4663 let result = backend
4664 .push(
4665 branch.to_string(),
4666 remote.to_string(),
4667 options,
4668 askpass,
4669 environment.clone(),
4670 cx.clone(),
4671 )
4672 .await;
4673 // TODO would be nice to not have to do this manually
4674 if result.is_ok() {
4675 let branches = backend.branches().await?;
4676 let branch = branches.into_iter().find(|branch| branch.is_head);
4677 log::info!("head branch after scan is {branch:?}");
4678 let snapshot = this.update(&mut cx, |this, cx| {
4679 this.snapshot.branch = branch;
4680 cx.emit(RepositoryEvent::BranchChanged);
4681 this.snapshot.clone()
4682 })?;
4683 if let Some(updates_tx) = updates_tx {
4684 updates_tx
4685 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4686 .ok();
4687 }
4688 }
4689 result
4690 }
4691 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4692 askpass_delegates.lock().insert(askpass_id, askpass);
4693 let _defer = util::defer(|| {
4694 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4695 debug_assert!(askpass_delegate.is_some());
4696 });
4697 let response = client
4698 .request(proto::Push {
4699 project_id: project_id.0,
4700 repository_id: id.to_proto(),
4701 askpass_id,
4702 branch_name: branch.to_string(),
4703 remote_name: remote.to_string(),
4704 options: options.map(|options| match options {
4705 PushOptions::Force => proto::push::PushOptions::Force,
4706 PushOptions::SetUpstream => {
4707 proto::push::PushOptions::SetUpstream
4708 }
4709 }
4710 as i32),
4711 })
4712 .await
4713 .context("sending push request")?;
4714
4715 Ok(RemoteCommandOutput {
4716 stdout: response.stdout,
4717 stderr: response.stderr,
4718 })
4719 }
4720 }
4721 },
4722 )
4723 }
4724
4725 pub fn pull(
4726 &mut self,
4727 branch: Option<SharedString>,
4728 remote: SharedString,
4729 rebase: bool,
4730 askpass: AskPassDelegate,
4731 _cx: &mut App,
4732 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4733 let askpass_delegates = self.askpass_delegates.clone();
4734 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4735 let id = self.id;
4736
4737 let mut status = "git pull".to_string();
4738 if rebase {
4739 status.push_str(" --rebase");
4740 }
4741 status.push_str(&format!(" {}", remote));
4742 if let Some(b) = &branch {
4743 status.push_str(&format!(" {}", b));
4744 }
4745
4746 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4747 match git_repo {
4748 RepositoryState::Local(LocalRepositoryState {
4749 backend,
4750 environment,
4751 ..
4752 }) => {
4753 backend
4754 .pull(
4755 branch.as_ref().map(|b| b.to_string()),
4756 remote.to_string(),
4757 rebase,
4758 askpass,
4759 environment.clone(),
4760 cx,
4761 )
4762 .await
4763 }
4764 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4765 askpass_delegates.lock().insert(askpass_id, askpass);
4766 let _defer = util::defer(|| {
4767 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4768 debug_assert!(askpass_delegate.is_some());
4769 });
4770 let response = client
4771 .request(proto::Pull {
4772 project_id: project_id.0,
4773 repository_id: id.to_proto(),
4774 askpass_id,
4775 rebase,
4776 branch_name: branch.as_ref().map(|b| b.to_string()),
4777 remote_name: remote.to_string(),
4778 })
4779 .await
4780 .context("sending pull request")?;
4781
4782 Ok(RemoteCommandOutput {
4783 stdout: response.stdout,
4784 stderr: response.stderr,
4785 })
4786 }
4787 }
4788 })
4789 }
4790
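    /// Writes `content` into the index entry for `path`, keyed so that
    /// successive writes to the same path coalesce. When a hunk-staging
    /// operation count is provided, it is recorded on the buffer's diff state
    /// as the count in effect at the time of this write.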
4791 fn spawn_set_index_text_job(
4792 &mut self,
4793 path: RepoPath,
4794 content: Option<String>,
4795 hunk_staging_operation_count: Option<usize>,
4796 cx: &mut Context<Self>,
4797 ) -> oneshot::Receiver<anyhow::Result<()>> {
4798 let id = self.id;
4799 let this = cx.weak_entity();
4800 let git_store = self.git_store.clone();
4801 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4802 self.send_keyed_job(
4803 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4804 None,
4805 move |git_repo, mut cx| async move {
4806 log::debug!(
4807 "start updating index text for buffer {}",
4808 path.as_unix_str()
4809 );
4810
4811 match git_repo {
4812 RepositoryState::Local(LocalRepositoryState {
4813 fs,
4814 backend,
4815 environment,
4816 ..
4817 }) => {
4818 let executable = match fs.metadata(&abs_path).await {
4819 Ok(Some(meta)) => meta.is_executable,
4820 Ok(None) => false,
4821 Err(_err) => false,
4822 };
4823 backend
4824 .set_index_text(path.clone(), content, environment.clone(), executable)
4825 .await?;
4826 }
4827 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4828 client
4829 .request(proto::SetIndexText {
4830 project_id: project_id.0,
4831 repository_id: id.to_proto(),
4832 path: path.to_proto(),
4833 text: content,
4834 })
4835 .await?;
4836 }
4837 }
4838 log::debug!(
4839 "finish updating index text for buffer {}",
4840 path.as_unix_str()
4841 );
4842
4843 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4844 let project_path = this
4845 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4846 .ok()
4847 .flatten();
4848 git_store.update(&mut cx, |git_store, cx| {
4849 let buffer_id = git_store
4850 .buffer_store
4851 .read(cx)
4852 .get_by_path(&project_path?)?
4853 .read(cx)
4854 .remote_id();
4855 let diff_state = git_store.diffs.get(&buffer_id)?;
4856 diff_state.update(cx, |diff_state, _| {
4857 diff_state.hunk_staging_operation_count_as_of_write =
4858 hunk_staging_operation_count;
4859 });
4860 Some(())
4861 })?;
4862 }
4863 Ok(())
4864 },
4865 )
4866 }
4867
4868 pub fn get_remotes(
4869 &mut self,
4870 branch_name: Option<String>,
4871 is_push: bool,
4872 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4873 let id = self.id;
4874 self.send_job(None, move |repo, _cx| async move {
4875 match repo {
4876 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4877 let remote = if let Some(branch_name) = branch_name {
4878 if is_push {
4879 backend.get_push_remote(branch_name).await?
4880 } else {
4881 backend.get_branch_remote(branch_name).await?
4882 }
4883 } else {
4884 None
4885 };
4886
4887 match remote {
4888 Some(remote) => Ok(vec![remote]),
4889 None => backend.get_all_remotes().await,
4890 }
4891 }
4892 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4893 let response = client
4894 .request(proto::GetRemotes {
4895 project_id: project_id.0,
4896 repository_id: id.to_proto(),
4897 branch_name,
4898 is_push,
4899 })
4900 .await?;
4901
4902 let remotes = response
4903 .remotes
4904 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
4907 })
4908 .collect();
4909
4910 Ok(remotes)
4911 }
4912 }
4913 })
4914 }
4915
4916 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4917 let id = self.id;
4918 self.send_job(None, move |repo, _| async move {
4919 match repo {
4920 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4921 backend.branches().await
4922 }
4923 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4924 let response = client
4925 .request(proto::GitGetBranches {
4926 project_id: project_id.0,
4927 repository_id: id.to_proto(),
4928 })
4929 .await?;
4930
4931 let branches = response
4932 .branches
4933 .into_iter()
4934 .map(|branch| proto_to_branch(&branch))
4935 .collect();
4936
4937 Ok(branches)
4938 }
4939 }
4940 })
4941 }
4942
4943 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4944 let id = self.id;
4945 self.send_job(None, move |repo, _| async move {
4946 match repo {
4947 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4948 backend.worktrees().await
4949 }
4950 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4951 let response = client
4952 .request(proto::GitGetWorktrees {
4953 project_id: project_id.0,
4954 repository_id: id.to_proto(),
4955 })
4956 .await?;
4957
4958 let worktrees = response
4959 .worktrees
4960 .into_iter()
4961 .map(|worktree| proto_to_worktree(&worktree))
4962 .collect();
4963
4964 Ok(worktrees)
4965 }
4966 }
4967 })
4968 }
4969
4970 pub fn create_worktree(
4971 &mut self,
4972 name: String,
4973 path: PathBuf,
4974 commit: Option<String>,
4975 ) -> oneshot::Receiver<Result<()>> {
4976 let id = self.id;
4977 self.send_job(
4978 Some("git worktree add".into()),
4979 move |repo, _cx| async move {
4980 match repo {
4981 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4982 backend.create_worktree(name, path, commit).await
4983 }
4984 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4985 client
4986 .request(proto::GitCreateWorktree {
4987 project_id: project_id.0,
4988 repository_id: id.to_proto(),
4989 name,
4990 directory: path.to_string_lossy().to_string(),
4991 commit,
4992 })
4993 .await?;
4994
4995 Ok(())
4996 }
4997 }
4998 },
4999 )
5000 }
5001
5002 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
5003 let id = self.id;
5004 self.send_job(None, move |repo, _| async move {
5005 match repo {
5006 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5007 backend.default_branch().await
5008 }
5009 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5010 let response = client
5011 .request(proto::GetDefaultBranch {
5012 project_id: project_id.0,
5013 repository_id: id.to_proto(),
5014 })
5015 .await?;
5016
5017 anyhow::Ok(response.branch.map(SharedString::from))
5018 }
5019 }
5020 })
5021 }
5022
5023 pub fn diff_tree(
5024 &mut self,
5025 diff_type: DiffTreeType,
5026 _cx: &App,
5027 ) -> oneshot::Receiver<Result<TreeDiff>> {
5028 let repository_id = self.snapshot.id;
5029 self.send_job(None, move |repo, _cx| async move {
5030 match repo {
5031 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5032 backend.diff_tree(diff_type).await
5033 }
5034 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5035 let response = client
5036 .request(proto::GetTreeDiff {
5037 project_id: project_id.0,
5038 repository_id: repository_id.0,
5039 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5040 base: diff_type.base().to_string(),
5041 head: diff_type.head().to_string(),
5042 })
5043 .await?;
5044
5045 let entries = response
5046 .entries
5047 .into_iter()
5048 .filter_map(|entry| {
5049 let status = match entry.status() {
5050 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5051 proto::tree_diff_status::Status::Modified => {
5052 TreeDiffStatus::Modified {
5053 old: git::Oid::from_str(
5054 &entry.oid.context("missing oid").log_err()?,
5055 )
5056 .log_err()?,
5057 }
5058 }
5059 proto::tree_diff_status::Status::Deleted => {
5060 TreeDiffStatus::Deleted {
5061 old: git::Oid::from_str(
5062 &entry.oid.context("missing oid").log_err()?,
5063 )
5064 .log_err()?,
5065 }
5066 }
5067 };
5068 Some((
5069 RepoPath::from_rel_path(
5070 &RelPath::from_proto(&entry.path).log_err()?,
5071 ),
5072 status,
5073 ))
5074 })
5075 .collect();
5076
5077 Ok(TreeDiff { entries })
5078 }
5079 }
5080 })
5081 }
5082
5083 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5084 let id = self.id;
5085 self.send_job(None, move |repo, _cx| async move {
5086 match repo {
5087 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5088 backend.diff(diff_type).await
5089 }
5090 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5091 let response = client
5092 .request(proto::GitDiff {
5093 project_id: project_id.0,
5094 repository_id: id.to_proto(),
5095 diff_type: match diff_type {
5096 DiffType::HeadToIndex => {
5097 proto::git_diff::DiffType::HeadToIndex.into()
5098 }
5099 DiffType::HeadToWorktree => {
5100 proto::git_diff::DiffType::HeadToWorktree.into()
5101 }
5102 },
5103 })
5104 .await?;
5105
5106 Ok(response.diff)
5107 }
5108 }
5109 })
5110 }
5111
5112 pub fn create_branch(
5113 &mut self,
5114 branch_name: String,
5115 base_branch: Option<String>,
5116 ) -> oneshot::Receiver<Result<()>> {
5117 let id = self.id;
5118 let status_msg = if let Some(ref base) = base_branch {
5119 format!("git switch -c {branch_name} {base}").into()
5120 } else {
5121 format!("git switch -c {branch_name}").into()
5122 };
5123 self.send_job(Some(status_msg), move |repo, _cx| async move {
5124 match repo {
5125 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5126 backend.create_branch(branch_name, base_branch).await
5127 }
5128 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5129 client
5130 .request(proto::GitCreateBranch {
5131 project_id: project_id.0,
5132 repository_id: id.to_proto(),
5133 branch_name,
5134 })
5135 .await?;
5136
5137 Ok(())
5138 }
5139 }
5140 })
5141 }
5142
5143 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5144 let id = self.id;
5145 self.send_job(
5146 Some(format!("git switch {branch_name}").into()),
5147 move |repo, _cx| async move {
5148 match repo {
5149 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5150 backend.change_branch(branch_name).await
5151 }
5152 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5153 client
5154 .request(proto::GitChangeBranch {
5155 project_id: project_id.0,
5156 repository_id: id.to_proto(),
5157 branch_name,
5158 })
5159 .await?;
5160
5161 Ok(())
5162 }
5163 }
5164 },
5165 )
5166 }
5167
5168 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5169 let id = self.id;
5170 self.send_job(
5171 Some(format!("git branch -d {branch_name}").into()),
5172 move |repo, _cx| async move {
5173 match repo {
5174 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5175 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5176 client
5177 .request(proto::GitDeleteBranch {
5178 project_id: project_id.0,
5179 repository_id: id.to_proto(),
5180 branch_name,
5181 })
5182 .await?;
5183
5184 Ok(())
5185 }
5186 }
5187 },
5188 )
5189 }
5190
5191 pub fn rename_branch(
5192 &mut self,
5193 branch: String,
5194 new_name: String,
5195 ) -> oneshot::Receiver<Result<()>> {
5196 let id = self.id;
5197 self.send_job(
5198 Some(format!("git branch -m {branch} {new_name}").into()),
5199 move |repo, _cx| async move {
5200 match repo {
5201 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5202 backend.rename_branch(branch, new_name).await
5203 }
5204 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5205 client
5206 .request(proto::GitRenameBranch {
5207 project_id: project_id.0,
5208 repository_id: id.to_proto(),
5209 branch,
5210 new_name,
5211 })
5212 .await?;
5213
5214 Ok(())
5215 }
5216 }
5217 },
5218 )
5219 }
5220
5221 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5222 let id = self.id;
5223 self.send_job(None, move |repo, _cx| async move {
5224 match repo {
5225 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5226 backend.check_for_pushed_commit().await
5227 }
5228 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5229 let response = client
5230 .request(proto::CheckForPushedCommits {
5231 project_id: project_id.0,
5232 repository_id: id.to_proto(),
5233 })
5234 .await?;
5235
5236 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5237
5238 Ok(branches)
5239 }
5240 }
5241 })
5242 }
5243
5244 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5245 self.send_job(None, |repo, _cx| async move {
5246 match repo {
5247 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5248 backend.checkpoint().await
5249 }
5250 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5251 }
5252 })
5253 }
5254
5255 pub fn restore_checkpoint(
5256 &mut self,
5257 checkpoint: GitRepositoryCheckpoint,
5258 ) -> oneshot::Receiver<Result<()>> {
5259 self.send_job(None, move |repo, _cx| async move {
5260 match repo {
5261 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5262 backend.restore_checkpoint(checkpoint).await
5263 }
5264 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5265 }
5266 })
5267 }
5268
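    /// Applies an `UpdateRepository` message from the host to the local
    /// snapshot, emitting events for branch, stash, and status changes, and
    /// then drops any pending ops that are no longer running.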
5269 pub(crate) fn apply_remote_update(
5270 &mut self,
5271 update: proto::UpdateRepository,
5272 cx: &mut Context<Self>,
5273 ) -> Result<()> {
5274 let conflicted_paths = TreeSet::from_ordered_entries(
5275 update
5276 .current_merge_conflicts
5277 .into_iter()
5278 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5279 );
5280 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5281 let new_head_commit = update
5282 .head_commit_details
5283 .as_ref()
5284 .map(proto_to_commit_details);
5285 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5286 cx.emit(RepositoryEvent::BranchChanged)
5287 }
5288 self.snapshot.branch = new_branch;
5289 self.snapshot.head_commit = new_head_commit;
5290
5291 self.snapshot.merge.conflicted_paths = conflicted_paths;
5292 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5293 let new_stash_entries = GitStash {
5294 entries: update
5295 .stash_entries
5296 .iter()
5297 .filter_map(|entry| proto_to_stash(entry).ok())
5298 .collect(),
5299 };
5300 if self.snapshot.stash_entries != new_stash_entries {
5301 cx.emit(RepositoryEvent::StashEntriesChanged)
5302 }
5303 self.snapshot.stash_entries = new_stash_entries;
5304
5305 let edits = update
5306 .removed_statuses
5307 .into_iter()
5308 .filter_map(|path| {
5309 Some(sum_tree::Edit::Remove(PathKey(
5310 RelPath::from_proto(&path).log_err()?,
5311 )))
5312 })
5313 .chain(
5314 update
5315 .updated_statuses
5316 .into_iter()
5317 .filter_map(|updated_status| {
5318 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5319 }),
5320 )
5321 .collect::<Vec<_>>();
5322 if !edits.is_empty() {
5323 cx.emit(RepositoryEvent::StatusesChanged);
5324 }
5325 self.snapshot.statuses_by_path.edit(edits, ());
5326 if update.is_last_update {
5327 self.snapshot.scan_id = update.scan_id;
5328 }
5329 self.clear_pending_ops(cx);
5330 Ok(())
5331 }
5332
5333 pub fn compare_checkpoints(
5334 &mut self,
5335 left: GitRepositoryCheckpoint,
5336 right: GitRepositoryCheckpoint,
5337 ) -> oneshot::Receiver<Result<bool>> {
5338 self.send_job(None, move |repo, _cx| async move {
5339 match repo {
5340 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5341 backend.compare_checkpoints(left, right).await
5342 }
5343 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5344 }
5345 })
5346 }
5347
5348 pub fn diff_checkpoints(
5349 &mut self,
5350 base_checkpoint: GitRepositoryCheckpoint,
5351 target_checkpoint: GitRepositoryCheckpoint,
5352 ) -> oneshot::Receiver<Result<String>> {
5353 self.send_job(None, move |repo, _cx| async move {
5354 match repo {
5355 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5356 backend
5357 .diff_checkpoints(base_checkpoint, target_checkpoint)
5358 .await
5359 }
5360 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5361 }
5362 })
5363 }
5364
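    /// Drops pending ops that are no longer running, emitting
    /// `RepositoryEvent::PendingOpsChanged` when the set changes.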
5365 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5366 let updated = SumTree::from_iter(
5367 self.pending_ops.iter().filter_map(|ops| {
5368 let inner_ops: Vec<PendingOp> =
5369 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5370 if inner_ops.is_empty() {
5371 None
5372 } else {
5373 Some(PendingOps {
5374 repo_path: ops.repo_path.clone(),
5375 ops: inner_ops,
5376 })
5377 }
5378 }),
5379 (),
5380 );
5381
5382 if updated != self.pending_ops {
5383 cx.emit(RepositoryEvent::PendingOpsChanged {
5384 pending_ops: self.pending_ops.clone(),
5385 })
5386 }
5387
5388 self.pending_ops = updated;
5389 }
5390
5391 fn schedule_scan(
5392 &mut self,
5393 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5394 cx: &mut Context<Self>,
5395 ) {
5396 let this = cx.weak_entity();
5397 let _ = self.send_keyed_job(
5398 Some(GitJobKey::ReloadGitState),
5399 None,
5400 |state, mut cx| async move {
5401 log::debug!("run scheduled git status scan");
5402
5403 let Some(this) = this.upgrade() else {
5404 return Ok(());
5405 };
5406 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5407 bail!("not a local repository")
5408 };
5409 let (snapshot, events) = this
5410 .update(&mut cx, |this, _| {
5411 this.paths_needing_status_update.clear();
5412 compute_snapshot(
5413 this.id,
5414 this.work_directory_abs_path.clone(),
5415 this.snapshot.clone(),
5416 backend.clone(),
5417 )
5418 })?
5419 .await?;
5420 this.update(&mut cx, |this, cx| {
5421 this.snapshot = snapshot.clone();
5422 this.clear_pending_ops(cx);
5423 for event in events {
5424 cx.emit(event);
5425 }
5426 })?;
5427 if let Some(updates_tx) = updates_tx {
5428 updates_tx
5429 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5430 .ok();
5431 }
5432 Ok(())
5433 },
5434 );
5435 }
5436
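    /// Spawns the background loop that executes queued `GitJob`s against the
    /// local backend, one at a time. Once the repository state resolves,
    /// additional git hosting providers are registered before jobs start
    /// running. A keyed job is skipped when a newer job with the same key is
    /// already queued behind it.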
5437 fn spawn_local_git_worker(
5438 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5439 cx: &mut Context<Self>,
5440 ) -> mpsc::UnboundedSender<GitJob> {
5441 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5442
5443 cx.spawn(async move |_, cx| {
5444 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5445 if let Some(git_hosting_provider_registry) =
5446 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5447 {
5448 git_hosting_providers::register_additional_providers(
5449 git_hosting_provider_registry,
5450 state.backend.clone(),
5451 )
5452 .await;
5453 }
5454 let state = RepositoryState::Local(state);
5455 let mut jobs = VecDeque::new();
5456 loop {
5457 while let Ok(Some(next_job)) = job_rx.try_next() {
5458 jobs.push_back(next_job);
5459 }
5460
5461 if let Some(job) = jobs.pop_front() {
5462 if let Some(current_key) = &job.key
5463 && jobs
5464 .iter()
5465 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5466 {
5467 continue;
5468 }
5469 (job.job)(state.clone(), cx).await;
5470 } else if let Some(job) = job_rx.next().await {
5471 jobs.push_back(job);
5472 } else {
5473 break;
5474 }
5475 }
5476 anyhow::Ok(())
5477 })
5478 .detach_and_log_err(cx);
5479
5480 job_tx
5481 }
5482
5483 fn spawn_remote_git_worker(
5484 state: RemoteRepositoryState,
5485 cx: &mut Context<Self>,
5486 ) -> mpsc::UnboundedSender<GitJob> {
5487 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5488
5489 cx.spawn(async move |_, cx| {
5490 let state = RepositoryState::Remote(state);
5491 let mut jobs = VecDeque::new();
5492 loop {
5493 while let Ok(Some(next_job)) = job_rx.try_next() {
5494 jobs.push_back(next_job);
5495 }
5496
5497 if let Some(job) = jobs.pop_front() {
5498 if let Some(current_key) = &job.key
5499 && jobs
5500 .iter()
5501 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5502 {
5503 continue;
5504 }
5505 (job.job)(state.clone(), cx).await;
5506 } else if let Some(job) = job_rx.next().await {
5507 jobs.push_back(job);
5508 } else {
5509 break;
5510 }
5511 }
5512 anyhow::Ok(())
5513 })
5514 .detach_and_log_err(cx);
5515
5516 job_tx
5517 }
5518
5519 fn load_staged_text(
5520 &mut self,
5521 buffer_id: BufferId,
5522 repo_path: RepoPath,
5523 cx: &App,
5524 ) -> Task<Result<Option<String>>> {
5525 let rx = self.send_job(None, move |state, _| async move {
5526 match state {
5527 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5528 anyhow::Ok(backend.load_index_text(repo_path).await)
5529 }
5530 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5531 let response = client
5532 .request(proto::OpenUnstagedDiff {
5533 project_id: project_id.to_proto(),
5534 buffer_id: buffer_id.to_proto(),
5535 })
5536 .await?;
5537 Ok(response.staged_text)
5538 }
5539 }
5540 });
5541 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5542 }
5543
5544 fn load_committed_text(
5545 &mut self,
5546 buffer_id: BufferId,
5547 repo_path: RepoPath,
5548 cx: &App,
5549 ) -> Task<Result<DiffBasesChange>> {
5550 let rx = self.send_job(None, move |state, _| async move {
5551 match state {
5552 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5553 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5554 let staged_text = backend.load_index_text(repo_path).await;
5555 let diff_bases_change = if committed_text == staged_text {
5556 DiffBasesChange::SetBoth(committed_text)
5557 } else {
5558 DiffBasesChange::SetEach {
5559 index: staged_text,
5560 head: committed_text,
5561 }
5562 };
5563 anyhow::Ok(diff_bases_change)
5564 }
5565 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5566 use proto::open_uncommitted_diff_response::Mode;
5567
5568 let response = client
5569 .request(proto::OpenUncommittedDiff {
5570 project_id: project_id.to_proto(),
5571 buffer_id: buffer_id.to_proto(),
5572 })
5573 .await?;
5574 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5575 let bases = match mode {
5576 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5577 Mode::IndexAndHead => DiffBasesChange::SetEach {
5578 head: response.committed_text,
5579 index: response.staged_text,
5580 },
5581 };
5582 Ok(bases)
5583 }
5584 }
5585 });
5586
5587 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5588 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5590 let repository_id = self.snapshot.id;
5591 let rx = self.send_job(None, move |state, _| async move {
5592 match state {
5593 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5594 backend.load_blob_content(oid).await
5595 }
5596 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5597 let response = client
5598 .request(proto::GetBlobContent {
5599 project_id: project_id.to_proto(),
5600 repository_id: repository_id.0,
5601 oid: oid.to_string(),
5602 })
5603 .await?;
5604 Ok(response.content)
5605 }
5606 }
5607 });
5608 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5609 }
5610
5611 fn paths_changed(
5612 &mut self,
5613 paths: Vec<RepoPath>,
5614 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5615 cx: &mut Context<Self>,
5616 ) {
5617 self.paths_needing_status_update.extend(paths);
5618
5619 let this = cx.weak_entity();
5620 let _ = self.send_keyed_job(
5621 Some(GitJobKey::RefreshStatuses),
5622 None,
5623 |state, mut cx| async move {
5624 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5625 (
5626 this.snapshot.clone(),
5627 mem::take(&mut this.paths_needing_status_update),
5628 )
5629 })?;
5630 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5631 bail!("not a local repository")
5632 };
5633
5634 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5635 if paths.is_empty() {
5636 return Ok(());
5637 }
5638 let statuses = backend.status(&paths).await?;
5639 let stash_entries = backend.stash_entries().await?;
5640
5641 let changed_path_statuses = cx
5642 .background_spawn(async move {
5643 let mut changed_path_statuses = Vec::new();
5644 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5645 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5646
5647 for (repo_path, status) in &*statuses.entries {
5648 changed_paths.remove(repo_path);
5649 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5650 && cursor.item().is_some_and(|entry| entry.status == *status)
5651 {
5652 continue;
5653 }
5654
5655 changed_path_statuses.push(Edit::Insert(StatusEntry {
5656 repo_path: repo_path.clone(),
5657 status: *status,
5658 }));
5659 }
5660 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5661 for path in changed_paths.into_iter() {
5662 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5663 changed_path_statuses
5664 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5665 }
5666 }
5667 changed_path_statuses
5668 })
5669 .await;
5670
5671 this.update(&mut cx, |this, cx| {
5672 if this.snapshot.stash_entries != stash_entries {
5673 cx.emit(RepositoryEvent::StashEntriesChanged);
5674 this.snapshot.stash_entries = stash_entries;
5675 }
5676
5677 if !changed_path_statuses.is_empty() {
5678 cx.emit(RepositoryEvent::StatusesChanged);
5679 this.snapshot
5680 .statuses_by_path
5681 .edit(changed_path_statuses, ());
5682 this.snapshot.scan_id += 1;
5683 }
5684
5685 if let Some(updates_tx) = updates_tx {
5686 updates_tx
5687 .unbounded_send(DownstreamUpdate::UpdateRepository(
5688 this.snapshot.clone(),
5689 ))
5690 .ok();
5691 }
5692 })
5693 },
5694 );
5695 }
5696
    /// Returns the currently running git command, if any, and when it started.
5698 pub fn current_job(&self) -> Option<JobInfo> {
5699 self.active_jobs.values().next().cloned()
5700 }
5701
5702 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5703 self.send_job(None, |_, _| async {})
5704 }
5705
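    /// Records a pending op for each of `paths` with the given target git
    /// status, runs `f`, and then marks those ops as finished, skipped (when
    /// the job was canceled), or errored according to the result.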
5706 fn spawn_job_with_tracking<AsyncFn>(
5707 &mut self,
5708 paths: Vec<RepoPath>,
5709 git_status: pending_op::GitStatus,
5710 cx: &mut Context<Self>,
5711 f: AsyncFn,
5712 ) -> Task<Result<()>>
5713 where
5714 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5715 {
5716 let ids = self.new_pending_ops_for_paths(paths, git_status);
5717
5718 cx.spawn(async move |this, cx| {
5719 let (job_status, result) = match f(this.clone(), cx).await {
5720 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5721 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5722 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5723 };
5724
5725 this.update(cx, |this, _| {
5726 let mut edits = Vec::with_capacity(ids.len());
5727 for (id, entry) in ids {
5728 if let Some(mut ops) = this
5729 .pending_ops
5730 .get(&PathKey(entry.as_ref().clone()), ())
5731 .cloned()
5732 {
5733 if let Some(op) = ops.op_by_id_mut(id) {
5734 op.job_status = job_status;
5735 }
5736 edits.push(sum_tree::Edit::Insert(ops));
5737 }
5738 }
5739 this.pending_ops.edit(edits, ());
5740 })?;
5741
5742 result
5743 })
5744 }
5745
5746 fn new_pending_ops_for_paths(
5747 &mut self,
5748 paths: Vec<RepoPath>,
5749 git_status: pending_op::GitStatus,
5750 ) -> Vec<(PendingOpId, RepoPath)> {
5751 let mut edits = Vec::with_capacity(paths.len());
5752 let mut ids = Vec::with_capacity(paths.len());
5753 for path in paths {
5754 let mut ops = self
5755 .pending_ops
5756 .get(&PathKey(path.as_ref().clone()), ())
5757 .cloned()
5758 .unwrap_or_else(|| PendingOps::new(&path));
5759 let id = ops.max_id() + 1;
5760 ops.ops.push(PendingOp {
5761 id,
5762 git_status,
5763 job_status: pending_op::JobStatus::Running,
5764 });
5765 edits.push(sum_tree::Edit::Insert(ops));
5766 ids.push((id, path));
5767 }
5768 self.pending_ops.edit(edits, ());
5769 ids
5770 }
5771}
5772
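/// Builds a permalink for a file vendored in the Cargo registry source cache by
/// reading the crate's `.cargo_vcs_info.json` (which records the commit the
/// package was published from) and the `repository` field of its `Cargo.toml`.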
5773fn get_permalink_in_rust_registry_src(
5774 provider_registry: Arc<GitHostingProviderRegistry>,
5775 path: PathBuf,
5776 selection: Range<u32>,
5777) -> Result<url::Url> {
5778 #[derive(Deserialize)]
5779 struct CargoVcsGit {
5780 sha1: String,
5781 }
5782
5783 #[derive(Deserialize)]
5784 struct CargoVcsInfo {
5785 git: CargoVcsGit,
5786 path_in_vcs: String,
5787 }
5788
5789 #[derive(Deserialize)]
5790 struct CargoPackage {
5791 repository: String,
5792 }
5793
5794 #[derive(Deserialize)]
5795 struct CargoToml {
5796 package: CargoPackage,
5797 }
5798
5799 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
5800 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
5801 Some((dir, json))
5802 }) else {
5803 bail!("No .cargo_vcs_info.json found in parent directories")
5804 };
5805 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
5806 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
5807 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
5808 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
5809 .context("parsing package.repository field of manifest")?;
5810 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
5811 let permalink = provider.build_permalink(
5812 remote,
5813 BuildPermalinkParams::new(
5814 &cargo_vcs_info.git.sha1,
5815 &RepoPath::from_rel_path(
5816 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
5817 ),
5818 Some(selection),
5819 ),
5820 );
5821 Ok(permalink)
5822}
5823
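/// Converts a `git::blame::Blame` into its protobuf representation; a `None`
/// blame becomes an empty response.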
5824fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
5825 let Some(blame) = blame else {
5826 return proto::BlameBufferResponse {
5827 blame_response: None,
5828 };
5829 };
5830
5831 let entries = blame
5832 .entries
5833 .into_iter()
5834 .map(|entry| proto::BlameEntry {
5835 sha: entry.sha.as_bytes().into(),
5836 start_line: entry.range.start,
5837 end_line: entry.range.end,
5838 original_line_number: entry.original_line_number,
5839 author: entry.author,
5840 author_mail: entry.author_mail,
5841 author_time: entry.author_time,
5842 author_tz: entry.author_tz,
5843 committer: entry.committer_name,
5844 committer_mail: entry.committer_email,
5845 committer_time: entry.committer_time,
5846 committer_tz: entry.committer_tz,
5847 summary: entry.summary,
5848 previous: entry.previous,
5849 filename: entry.filename,
5850 })
5851 .collect::<Vec<_>>();
5852
5853 let messages = blame
5854 .messages
5855 .into_iter()
5856 .map(|(oid, message)| proto::CommitMessage {
5857 oid: oid.as_bytes().into(),
5858 message,
5859 })
5860 .collect::<Vec<_>>();
5861
5862 proto::BlameBufferResponse {
5863 blame_response: Some(proto::blame_buffer_response::BlameResponse {
5864 entries,
5865 messages,
5866 remote_url: blame.remote_url,
5867 }),
5868 }
5869}
5870
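/// Reconstructs a `git::blame::Blame` from its protobuf representation,
/// skipping entries and commit messages whose object ids fail to parse.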
5871fn deserialize_blame_buffer_response(
5872 response: proto::BlameBufferResponse,
5873) -> Option<git::blame::Blame> {
5874 let response = response.blame_response?;
5875 let entries = response
5876 .entries
5877 .into_iter()
5878 .filter_map(|entry| {
5879 Some(git::blame::BlameEntry {
5880 sha: git::Oid::from_bytes(&entry.sha).ok()?,
5881 range: entry.start_line..entry.end_line,
5882 original_line_number: entry.original_line_number,
5883 committer_name: entry.committer,
5884 committer_time: entry.committer_time,
5885 committer_tz: entry.committer_tz,
5886 committer_email: entry.committer_mail,
5887 author: entry.author,
5888 author_mail: entry.author_mail,
5889 author_time: entry.author_time,
5890 author_tz: entry.author_tz,
5891 summary: entry.summary,
5892 previous: entry.previous,
5893 filename: entry.filename,
5894 })
5895 })
5896 .collect::<Vec<_>>();
5897
5898 let messages = response
5899 .messages
5900 .into_iter()
5901 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
5902 .collect::<HashMap<_, _>>();
5903
5904 Some(Blame {
5905 entries,
5906 messages,
5907 remote_url: response.remote_url,
5908 })
5909}
5910
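/// Converts a branch into its protobuf representation; `unix_timestamp` is
/// taken from the branch's most recent commit, when one is known.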
5911fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
5912 proto::Branch {
5913 is_head: branch.is_head,
5914 ref_name: branch.ref_name.to_string(),
5915 unix_timestamp: branch
5916 .most_recent_commit
5917 .as_ref()
5918 .map(|commit| commit.commit_timestamp as u64),
5919 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
5920 ref_name: upstream.ref_name.to_string(),
5921 tracking: upstream
5922 .tracking
5923 .status()
5924 .map(|upstream| proto::UpstreamTracking {
5925 ahead: upstream.ahead as u64,
5926 behind: upstream.behind as u64,
5927 }),
5928 }),
5929 most_recent_commit: branch
5930 .most_recent_commit
5931 .as_ref()
5932 .map(|commit| proto::CommitSummary {
5933 sha: commit.sha.to_string(),
5934 subject: commit.subject.to_string(),
5935 commit_timestamp: commit.commit_timestamp,
5936 author_name: commit.author_name.to_string(),
5937 }),
5938 }
5939}
5940
5941fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
5942 proto::Worktree {
5943 path: worktree.path.to_string_lossy().to_string(),
5944 ref_name: worktree.ref_name.to_string(),
5945 sha: worktree.sha.to_string(),
5946 }
5947}
5948
5949fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
5950 git::repository::Worktree {
5951 path: PathBuf::from(proto.path.clone()),
5952 ref_name: proto.ref_name.clone().into(),
5953 sha: proto.sha.clone().into(),
5954 }
5955}
5956
5957fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
5958 git::repository::Branch {
5959 is_head: proto.is_head,
5960 ref_name: proto.ref_name.clone().into(),
5961 upstream: proto
5962 .upstream
5963 .as_ref()
5964 .map(|upstream| git::repository::Upstream {
5965 ref_name: upstream.ref_name.to_string().into(),
5966 tracking: upstream
5967 .tracking
5968 .as_ref()
5969 .map(|tracking| {
5970 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
5971 ahead: tracking.ahead as u32,
5972 behind: tracking.behind as u32,
5973 })
5974 })
5975 .unwrap_or(git::repository::UpstreamTracking::Gone),
5976 }),
5977 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
5978 git::repository::CommitSummary {
5979 sha: commit.sha.to_string().into(),
5980 subject: commit.subject.to_string().into(),
5981 commit_timestamp: commit.commit_timestamp,
5982 author_name: commit.author_name.to_string().into(),
5983 has_parent: true,
5984 }
5985 }),
5986 }
5987}
5988
5989fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
5990 proto::GitCommitDetails {
5991 sha: commit.sha.to_string(),
5992 message: commit.message.to_string(),
5993 commit_timestamp: commit.commit_timestamp,
5994 author_email: commit.author_email.to_string(),
5995 author_name: commit.author_name.to_string(),
5996 }
5997}
5998
5999fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6000 CommitDetails {
6001 sha: proto.sha.clone().into(),
6002 message: proto.message.clone().into(),
6003 commit_timestamp: proto.commit_timestamp,
6004 author_email: proto.author_email.clone().into(),
6005 author_name: proto.author_name.clone().into(),
6006 }
6007}
6008
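/// Queries the git backend for branches, statuses, stash entries, and merge
/// details, compares the results against `prev_snapshot`, and returns the new
/// snapshot together with the events describing what changed.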
6009async fn compute_snapshot(
6010 id: RepositoryId,
6011 work_directory_abs_path: Arc<Path>,
6012 prev_snapshot: RepositorySnapshot,
6013 backend: Arc<dyn GitRepository>,
6014) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
6015 let mut events = Vec::new();
6016 let branches = backend.branches().await?;
6017 let branch = branches.into_iter().find(|branch| branch.is_head);
6018 let statuses = backend
6019 .status(&[RepoPath::from_rel_path(
6020 &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
6021 )])
6022 .await?;
6023 let stash_entries = backend.stash_entries().await?;
6024 let statuses_by_path = SumTree::from_iter(
6025 statuses
6026 .entries
6027 .iter()
6028 .map(|(repo_path, status)| StatusEntry {
6029 repo_path: repo_path.clone(),
6030 status: *status,
6031 }),
6032 (),
6033 );
6034 let (merge_details, merge_heads_changed) =
6035 MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
6036 log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");
6037
6038 if merge_heads_changed {
6039 events.push(RepositoryEvent::MergeHeadsChanged);
6040 }
6041
6042 if statuses_by_path != prev_snapshot.statuses_by_path {
6043 events.push(RepositoryEvent::StatusesChanged)
6044 }
6045
6046 // The head commit is useful when `branch` is `None`, e.g. in a detached HEAD state.
6047 let head_commit = match backend.head_sha().await {
6048 Some(head_sha) => backend.show(head_sha).await.log_err(),
6049 None => None,
6050 };
6051
6052 if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
6053 events.push(RepositoryEvent::BranchChanged);
6054 }
6055
6056 // These remote URLs are used by edit prediction data collection.
6057 let remote_origin_url = backend.remote_url("origin").await;
6058 let remote_upstream_url = backend.remote_url("upstream").await;
6059
6060 let snapshot = RepositorySnapshot {
6061 id,
6062 statuses_by_path,
6063 work_directory_abs_path,
6064 path_style: prev_snapshot.path_style,
6065 scan_id: prev_snapshot.scan_id + 1,
6066 branch,
6067 head_commit,
6068 merge: merge_details,
6069 remote_origin_url,
6070 remote_upstream_url,
6071 stash_entries,
6072 };
6073
6074 Ok((snapshot, events))
6075}
6076
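/// Converts a file status from its protobuf representation, falling back to
/// the flat `simple_status` code when the detailed variant is absent.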
6077fn status_from_proto(
6078 simple_status: i32,
6079 status: Option<proto::GitFileStatus>,
6080) -> anyhow::Result<FileStatus> {
6081 use proto::git_file_status::Variant;
6082
6083 let Some(variant) = status.and_then(|status| status.variant) else {
6084 let code = proto::GitStatus::from_i32(simple_status)
6085 .with_context(|| format!("Invalid git status code: {simple_status}"))?;
6086 let result = match code {
6087 proto::GitStatus::Added => TrackedStatus {
6088 worktree_status: StatusCode::Added,
6089 index_status: StatusCode::Unmodified,
6090 }
6091 .into(),
6092 proto::GitStatus::Modified => TrackedStatus {
6093 worktree_status: StatusCode::Modified,
6094 index_status: StatusCode::Unmodified,
6095 }
6096 .into(),
6097 proto::GitStatus::Conflict => UnmergedStatus {
6098 first_head: UnmergedStatusCode::Updated,
6099 second_head: UnmergedStatusCode::Updated,
6100 }
6101 .into(),
6102 proto::GitStatus::Deleted => TrackedStatus {
6103 worktree_status: StatusCode::Deleted,
6104 index_status: StatusCode::Unmodified,
6105 }
6106 .into(),
6107 _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
6108 };
6109 return Ok(result);
6110 };
6111
6112 let result = match variant {
6113 Variant::Untracked(_) => FileStatus::Untracked,
6114 Variant::Ignored(_) => FileStatus::Ignored,
6115 Variant::Unmerged(unmerged) => {
6116 let [first_head, second_head] =
6117 [unmerged.first_head, unmerged.second_head].map(|head| {
6118 let code = proto::GitStatus::from_i32(head)
6119 .with_context(|| format!("Invalid git status code: {head}"))?;
6120 let result = match code {
6121 proto::GitStatus::Added => UnmergedStatusCode::Added,
6122 proto::GitStatus::Updated => UnmergedStatusCode::Updated,
6123 proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
6124 _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
6125 };
6126 Ok(result)
6127 });
6128 let [first_head, second_head] = [first_head?, second_head?];
6129 UnmergedStatus {
6130 first_head,
6131 second_head,
6132 }
6133 .into()
6134 }
6135 Variant::Tracked(tracked) => {
6136 let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
6137 .map(|status| {
6138 let code = proto::GitStatus::from_i32(status)
6139 .with_context(|| format!("Invalid git status code: {status}"))?;
6140 let result = match code {
6141 proto::GitStatus::Modified => StatusCode::Modified,
6142 proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
6143 proto::GitStatus::Added => StatusCode::Added,
6144 proto::GitStatus::Deleted => StatusCode::Deleted,
6145 proto::GitStatus::Renamed => StatusCode::Renamed,
6146 proto::GitStatus::Copied => StatusCode::Copied,
6147 proto::GitStatus::Unmodified => StatusCode::Unmodified,
6148 _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
6149 };
6150 Ok(result)
6151 });
6152 let [index_status, worktree_status] = [index_status?, worktree_status?];
6153 TrackedStatus {
6154 index_status,
6155 worktree_status,
6156 }
6157 .into()
6158 }
6159 };
6160 Ok(result)
6161}
6162
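/// Converts a `FileStatus` into its protobuf representation.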
6163fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
6164 use proto::git_file_status::{Tracked, Unmerged, Variant};
6165
6166 let variant = match status {
6167 FileStatus::Untracked => Variant::Untracked(Default::default()),
6168 FileStatus::Ignored => Variant::Ignored(Default::default()),
6169 FileStatus::Unmerged(UnmergedStatus {
6170 first_head,
6171 second_head,
6172 }) => Variant::Unmerged(Unmerged {
6173 first_head: unmerged_status_to_proto(first_head),
6174 second_head: unmerged_status_to_proto(second_head),
6175 }),
6176 FileStatus::Tracked(TrackedStatus {
6177 index_status,
6178 worktree_status,
6179 }) => Variant::Tracked(Tracked {
6180 index_status: tracked_status_to_proto(index_status),
6181 worktree_status: tracked_status_to_proto(worktree_status),
6182 }),
6183 };
6184 proto::GitFileStatus {
6185 variant: Some(variant),
6186 }
6187}
6188
6189fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
6190 match code {
6191 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
6192 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
6193 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
6194 }
6195}
6196
6197fn tracked_status_to_proto(code: StatusCode) -> i32 {
6198 match code {
6199 StatusCode::Added => proto::GitStatus::Added as _,
6200 StatusCode::Deleted => proto::GitStatus::Deleted as _,
6201 StatusCode::Modified => proto::GitStatus::Modified as _,
6202 StatusCode::Renamed => proto::GitStatus::Renamed as _,
6203 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
6204 StatusCode::Copied => proto::GitStatus::Copied as _,
6205 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
6206 }
6207}