1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 worktree_store::{WorktreeStore, WorktreeStoreEvent},
10};
11use anyhow::{Context as _, Result, anyhow, bail};
12use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
13use buffer_diff::{BufferDiff, BufferDiffEvent};
14use client::ProjectId;
15use collections::HashMap;
16pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
17use fs::Fs;
18use futures::{
19 FutureExt, StreamExt,
20 channel::{
21 mpsc,
22 oneshot::{self, Canceled},
23 },
24 future::{self, Shared},
25 stream::FuturesOrdered,
26};
27use git::{
28 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
29 blame::Blame,
30 parse_git_remote_url,
31 repository::{
32 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
33 GitRepository, GitRepositoryCheckpoint, PushOptions, Remote, RemoteCommandOutput, RepoPath,
34 ResetMode, UpstreamTrackingStatus, Worktree as GitWorktree,
35 },
36 stash::{GitStash, StashEntry},
37 status::{
38 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
39 UnmergedStatus, UnmergedStatusCode,
40 },
41};
42use gpui::{
43 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
44 WeakEntity,
45};
46use language::{
47 Buffer, BufferEvent, Language, LanguageRegistry,
48 proto::{deserialize_version, serialize_version},
49};
50use parking_lot::Mutex;
51use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
52use postage::stream::Stream as _;
53use rpc::{
54 AnyProtoClient, TypedEnvelope,
55 proto::{self, git_reset, split_repository_update},
56};
57use serde::Deserialize;
58use settings::WorktreeId;
59use smol::future::yield_now;
60use std::{
61 cmp::Ordering,
62 collections::{BTreeSet, HashSet, VecDeque},
63 future::Future,
64 mem,
65 ops::Range,
66 path::{Path, PathBuf},
67 str::FromStr,
68 sync::{
69 Arc,
70 atomic::{self, AtomicU64},
71 },
72 time::Instant,
73};
74use sum_tree::{Edit, SumTree, TreeSet};
75use task::Shell;
76use text::{Bias, BufferId};
77use util::{
78 ResultExt, debug_panic,
79 paths::{PathStyle, SanitizedPath},
80 post_inc,
81 rel_path::RelPath,
82};
83use worktree::{
84 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
85 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
86};
87use zeroize::Zeroize;
88
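/// Tracks the Git repositories discovered in a project's worktrees, along with
/// per-buffer diff and merge-conflict state, and relays repository updates
/// between local and remote (collaboration) peers.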
89pub struct GitStore {
90 state: GitStoreState,
91 buffer_store: Entity<BufferStore>,
92 worktree_store: Entity<WorktreeStore>,
93 repositories: HashMap<RepositoryId, Entity<Repository>>,
94 worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
95 active_repo_id: Option<RepositoryId>,
96 #[allow(clippy::type_complexity)]
97 loading_diffs:
98 HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
99 diffs: HashMap<BufferId, Entity<BufferGitState>>,
100 shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
101 _subscriptions: Vec<Subscription>,
102}
103
104#[derive(Default)]
105struct SharedDiffs {
106 unstaged: Option<Entity<BufferDiff>>,
107 uncommitted: Option<Entity<BufferDiff>>,
108}
109
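/// Per-buffer Git state: weak handles to the buffer's unstaged and uncommitted
/// diffs and its conflict set, plus the bookkeeping needed to keep them current
/// as the buffer and repository change.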
110struct BufferGitState {
111 unstaged_diff: Option<WeakEntity<BufferDiff>>,
112 uncommitted_diff: Option<WeakEntity<BufferDiff>>,
113 conflict_set: Option<WeakEntity<ConflictSet>>,
114 recalculate_diff_task: Option<Task<Result<()>>>,
115 reparse_conflict_markers_task: Option<Task<Result<()>>>,
116 language: Option<Arc<Language>>,
117 language_registry: Option<Arc<LanguageRegistry>>,
118 conflict_updated_futures: Vec<oneshot::Sender<()>>,
119 recalculating_tx: postage::watch::Sender<bool>,
120
121 /// These operation counts are used to ensure that head and index text
122 /// values read from the git repository are up-to-date with any hunk staging
123 /// operations that have been performed on the BufferDiff.
124 ///
125 /// The operation count is incremented immediately when the user initiates a
126 /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
128 /// the operation count that prompted the write.
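    ///
    /// For example: staging a hunk bumps the count to N; once the resulting
    /// index text has been written to disk, the "as of write" count becomes N.
    /// If the two counts differ, a write for a newer operation is still pending,
    /// so index text freshly read from the repository may not reflect it yet.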
129 hunk_staging_operation_count: usize,
130 hunk_staging_operation_count_as_of_write: usize,
131
132 head_text: Option<Arc<str>>,
133 index_text: Option<Arc<str>>,
134 head_changed: bool,
135 index_changed: bool,
136 language_changed: bool,
137}
138
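/// Describes which diff base texts changed for a buffer: the index, the HEAD
/// text, each independently, or both set to the same contents.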
139#[derive(Clone, Debug)]
140enum DiffBasesChange {
141 SetIndex(Option<String>),
142 SetHead(Option<String>),
143 SetEach {
144 index: Option<String>,
145 head: Option<String>,
146 },
147 SetBoth(Option<String>),
148}
149
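/// The kind of diff maintained for a buffer: relative to the index (unstaged)
/// or relative to HEAD (uncommitted).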
150#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
151enum DiffKind {
152 Unstaged,
153 Uncommitted,
154}
155
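/// Whether this store backs a local project or mirrors an upstream (remote)
/// project; either side may also forward updates to a downstream client.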
156enum GitStoreState {
157 Local {
158 next_repository_id: Arc<AtomicU64>,
159 downstream: Option<LocalDownstreamState>,
160 project_environment: Entity<ProjectEnvironment>,
161 fs: Arc<dyn Fs>,
162 },
163 Remote {
164 upstream_client: AnyProtoClient,
165 upstream_project_id: u64,
166 downstream: Option<(AnyProtoClient, ProjectId)>,
167 },
168}
169
170enum DownstreamUpdate {
171 UpdateRepository(RepositorySnapshot),
172 RemoveRepository(RepositoryId),
173}
174
175struct LocalDownstreamState {
176 client: AnyProtoClient,
177 project_id: ProjectId,
178 updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
179 _task: Task<Result<()>>,
180}
181
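/// A checkpoint of every repository in the store, keyed by the repository's
/// working directory path.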
182#[derive(Clone, Debug)]
183pub struct GitStoreCheckpoint {
184 checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
185}
186
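/// A path within a repository together with its Git status.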
187#[derive(Clone, Debug, PartialEq, Eq)]
188pub struct StatusEntry {
189 pub repo_path: RepoPath,
190 pub status: FileStatus,
191}
192
193impl StatusEntry {
194 fn to_proto(&self) -> proto::StatusEntry {
195 let simple_status = match self.status {
196 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
197 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
198 FileStatus::Tracked(TrackedStatus {
199 index_status,
200 worktree_status,
201 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
202 worktree_status
203 } else {
204 index_status
205 }),
206 };
207
208 proto::StatusEntry {
209 repo_path: self.repo_path.to_proto(),
210 simple_status,
211 status: Some(status_to_proto(self.status)),
212 }
213 }
214}
215
216impl TryFrom<proto::StatusEntry> for StatusEntry {
217 type Error = anyhow::Error;
218
219 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
220 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
221 let status = status_from_proto(value.simple_status, value.status)?;
222 Ok(Self { repo_path, status })
223 }
224}
225
226impl sum_tree::Item for StatusEntry {
227 type Summary = PathSummary<GitSummary>;
228
229 fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
230 PathSummary {
231 max_path: self.repo_path.as_ref().clone(),
232 item_summary: self.status.summary(),
233 }
234 }
235}
236
237impl sum_tree::KeyedItem for StatusEntry {
238 type Key = PathKey;
239
240 fn key(&self) -> Self::Key {
241 PathKey(self.repo_path.as_ref().clone())
242 }
243}
244
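/// Identifies a repository within a [`GitStore`].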
245#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
246pub struct RepositoryId(pub u64);
247
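/// State of an in-progress merge: the conflicted paths, the merge message, and
/// the merge heads.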
248#[derive(Clone, Debug, Default, PartialEq, Eq)]
249pub struct MergeDetails {
250 pub conflicted_paths: TreeSet<RepoPath>,
251 pub message: Option<SharedString>,
252 pub heads: Vec<Option<SharedString>>,
253}
254
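/// An immutable view of a repository's state: per-path statuses, the current
/// branch and head commit, merge and stash details, and remote URLs.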
255#[derive(Clone, Debug, PartialEq, Eq)]
256pub struct RepositorySnapshot {
257 pub id: RepositoryId,
258 pub statuses_by_path: SumTree<StatusEntry>,
259 pub work_directory_abs_path: Arc<Path>,
260 pub path_style: PathStyle,
261 pub branch: Option<Branch>,
262 pub head_commit: Option<CommitDetails>,
263 pub scan_id: u64,
264 pub merge: MergeDetails,
265 pub remote_origin_url: Option<String>,
266 pub remote_upstream_url: Option<String>,
267 pub stash_entries: GitStash,
268}
269
270type JobId = u64;
271
272#[derive(Clone, Debug, PartialEq, Eq)]
273pub struct JobInfo {
274 pub start: Instant,
275 pub message: SharedString,
276}
277
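/// A handle to a single Git repository, combining a [`RepositorySnapshot`] with
/// a queue of jobs that run serially against either a local Git backend or a
/// remote peer.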
278pub struct Repository {
279 this: WeakEntity<Self>,
280 snapshot: RepositorySnapshot,
281 commit_message_buffer: Option<Entity<Buffer>>,
282 git_store: WeakEntity<GitStore>,
283 // For a local repository, holds paths that have had worktree events since the last status scan completed,
284 // and that should be examined during the next status scan.
285 paths_needing_status_update: BTreeSet<RepoPath>,
286 job_sender: mpsc::UnboundedSender<GitJob>,
287 active_jobs: HashMap<JobId, JobInfo>,
288 pending_ops: SumTree<PendingOps>,
289 job_id: JobId,
290 askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
291 latest_askpass_id: u64,
292 repository_state: Shared<Task<Result<RepositoryState, String>>>,
293}
294
295impl std::ops::Deref for Repository {
296 type Target = RepositorySnapshot;
297
298 fn deref(&self) -> &Self::Target {
299 &self.snapshot
300 }
301}
302
303#[derive(Clone)]
304pub struct LocalRepositoryState {
305 pub fs: Arc<dyn Fs>,
306 pub backend: Arc<dyn GitRepository>,
307 pub environment: Arc<HashMap<String, String>>,
308}
309
310impl LocalRepositoryState {
311 async fn new(
312 work_directory_abs_path: Arc<Path>,
313 dot_git_abs_path: Arc<Path>,
314 project_environment: WeakEntity<ProjectEnvironment>,
315 fs: Arc<dyn Fs>,
316 cx: &mut AsyncApp,
317 ) -> anyhow::Result<Self> {
318 let environment = project_environment
319 .update(cx, |project_environment, cx| {
320 project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
321 })?
322 .await
323 .unwrap_or_else(|| {
324 log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
325 HashMap::default()
326 });
327 let search_paths = environment.get("PATH").map(|val| val.to_owned());
328 let backend = cx
329 .background_spawn({
330 let fs = fs.clone();
331 async move {
332 let system_git_binary_path = search_paths
333 .and_then(|search_paths| {
334 which::which_in("git", Some(search_paths), &work_directory_abs_path)
335 .ok()
336 })
337 .or_else(|| which::which("git").ok());
338 fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
339 .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
340 }
341 })
342 .await?;
343 Ok(LocalRepositoryState {
344 backend,
345 environment: Arc::new(environment),
346 fs,
347 })
348 }
349}
350
351#[derive(Clone)]
352pub struct RemoteRepositoryState {
353 pub project_id: ProjectId,
354 pub client: AnyProtoClient,
355}
356
357#[derive(Clone)]
358pub enum RepositoryState {
359 Local(LocalRepositoryState),
360 Remote(RemoteRepositoryState),
361}
362
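/// Events emitted by a [`Repository`] when part of its snapshot changes.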
363#[derive(Clone, Debug, PartialEq, Eq)]
364pub enum RepositoryEvent {
365 StatusesChanged,
366 MergeHeadsChanged,
367 BranchChanged,
368 StashEntriesChanged,
369 PendingOpsChanged { pending_ops: SumTree<PendingOps> },
370}
371
372#[derive(Clone, Debug)]
373pub struct JobsUpdated;
374
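/// Events emitted by the [`GitStore`] as repositories are added, removed, or
/// updated, and as Git jobs and index writes report changes or errors.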
375#[derive(Debug)]
376pub enum GitStoreEvent {
377 ActiveRepositoryChanged(Option<RepositoryId>),
378 RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
379 RepositoryAdded,
380 RepositoryRemoved(RepositoryId),
381 IndexWriteError(anyhow::Error),
382 JobsUpdated,
383 ConflictsUpdated,
384}
385
386impl EventEmitter<RepositoryEvent> for Repository {}
387impl EventEmitter<JobsUpdated> for Repository {}
388impl EventEmitter<GitStoreEvent> for GitStore {}
389
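/// A unit of Git work queued on a repository. The optional key identifies the
/// kind of job so that related queued work can be recognized (for example, to
/// avoid redundant index writes or status refreshes).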
390pub struct GitJob {
391 job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
392 key: Option<GitJobKey>,
393}
394
395#[derive(PartialEq, Eq)]
396enum GitJobKey {
397 WriteIndex(Vec<RepoPath>),
398 ReloadBufferDiffBases,
399 RefreshStatuses,
400 ReloadGitState,
401}
402
403impl GitStore {
404 pub fn local(
405 worktree_store: &Entity<WorktreeStore>,
406 buffer_store: Entity<BufferStore>,
407 environment: Entity<ProjectEnvironment>,
408 fs: Arc<dyn Fs>,
409 cx: &mut Context<Self>,
410 ) -> Self {
411 Self::new(
412 worktree_store.clone(),
413 buffer_store,
414 GitStoreState::Local {
415 next_repository_id: Arc::new(AtomicU64::new(1)),
416 downstream: None,
417 project_environment: environment,
418 fs,
419 },
420 cx,
421 )
422 }
423
424 pub fn remote(
425 worktree_store: &Entity<WorktreeStore>,
426 buffer_store: Entity<BufferStore>,
427 upstream_client: AnyProtoClient,
428 project_id: u64,
429 cx: &mut Context<Self>,
430 ) -> Self {
431 Self::new(
432 worktree_store.clone(),
433 buffer_store,
434 GitStoreState::Remote {
435 upstream_client,
436 upstream_project_id: project_id,
437 downstream: None,
438 },
439 cx,
440 )
441 }
442
443 fn new(
444 worktree_store: Entity<WorktreeStore>,
445 buffer_store: Entity<BufferStore>,
446 state: GitStoreState,
447 cx: &mut Context<Self>,
448 ) -> Self {
449 let _subscriptions = vec![
450 cx.subscribe(&worktree_store, Self::on_worktree_store_event),
451 cx.subscribe(&buffer_store, Self::on_buffer_store_event),
452 ];
453
454 GitStore {
455 state,
456 buffer_store,
457 worktree_store,
458 repositories: HashMap::default(),
459 worktree_ids: HashMap::default(),
460 active_repo_id: None,
461 _subscriptions,
462 loading_diffs: HashMap::default(),
463 shared_diffs: HashMap::default(),
464 diffs: HashMap::default(),
465 }
466 }
467
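    /// Registers the RPC handlers for Git requests and messages on the given
    /// client, routing them to the corresponding `GitStore` entity.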
468 pub fn init(client: &AnyProtoClient) {
469 client.add_entity_request_handler(Self::handle_get_remotes);
470 client.add_entity_request_handler(Self::handle_get_branches);
471 client.add_entity_request_handler(Self::handle_get_default_branch);
472 client.add_entity_request_handler(Self::handle_change_branch);
473 client.add_entity_request_handler(Self::handle_create_branch);
474 client.add_entity_request_handler(Self::handle_rename_branch);
475 client.add_entity_request_handler(Self::handle_delete_branch);
476 client.add_entity_request_handler(Self::handle_git_init);
477 client.add_entity_request_handler(Self::handle_push);
478 client.add_entity_request_handler(Self::handle_pull);
479 client.add_entity_request_handler(Self::handle_fetch);
480 client.add_entity_request_handler(Self::handle_stage);
481 client.add_entity_request_handler(Self::handle_unstage);
482 client.add_entity_request_handler(Self::handle_stash);
483 client.add_entity_request_handler(Self::handle_stash_pop);
484 client.add_entity_request_handler(Self::handle_stash_apply);
485 client.add_entity_request_handler(Self::handle_stash_drop);
486 client.add_entity_request_handler(Self::handle_commit);
487 client.add_entity_request_handler(Self::handle_run_hook);
488 client.add_entity_request_handler(Self::handle_reset);
489 client.add_entity_request_handler(Self::handle_show);
490 client.add_entity_request_handler(Self::handle_load_commit_diff);
491 client.add_entity_request_handler(Self::handle_file_history);
492 client.add_entity_request_handler(Self::handle_checkout_files);
493 client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
494 client.add_entity_request_handler(Self::handle_set_index_text);
495 client.add_entity_request_handler(Self::handle_askpass);
496 client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
497 client.add_entity_request_handler(Self::handle_git_diff);
498 client.add_entity_request_handler(Self::handle_tree_diff);
499 client.add_entity_request_handler(Self::handle_get_blob_content);
500 client.add_entity_request_handler(Self::handle_open_unstaged_diff);
501 client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
502 client.add_entity_message_handler(Self::handle_update_diff_bases);
503 client.add_entity_request_handler(Self::handle_get_permalink_to_line);
504 client.add_entity_request_handler(Self::handle_blame_buffer);
505 client.add_entity_message_handler(Self::handle_update_repository);
506 client.add_entity_message_handler(Self::handle_remove_repository);
507 client.add_entity_request_handler(Self::handle_git_clone);
508 client.add_entity_request_handler(Self::handle_get_worktrees);
509 client.add_entity_request_handler(Self::handle_create_worktree);
510 }
511
512 pub fn is_local(&self) -> bool {
513 matches!(self.state, GitStoreState::Local { .. })
514 }
515 pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
516 if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
517 let id = repo.read(cx).id;
518 if self.active_repo_id != Some(id) {
519 self.active_repo_id = Some(id);
520 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
521 }
522 }
523 }
524
525 pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
526 match &mut self.state {
527 GitStoreState::Remote {
528 downstream: downstream_client,
529 ..
530 } => {
531 for repo in self.repositories.values() {
532 let update = repo.read(cx).snapshot.initial_update(project_id);
533 for update in split_repository_update(update) {
534 client.send(update).log_err();
535 }
536 }
537 *downstream_client = Some((client, ProjectId(project_id)));
538 }
539 GitStoreState::Local {
540 downstream: downstream_client,
541 ..
542 } => {
543 let mut snapshots = HashMap::default();
544 let (updates_tx, mut updates_rx) = mpsc::unbounded();
545 for repo in self.repositories.values() {
546 updates_tx
547 .unbounded_send(DownstreamUpdate::UpdateRepository(
548 repo.read(cx).snapshot.clone(),
549 ))
550 .ok();
551 }
552 *downstream_client = Some(LocalDownstreamState {
553 client: client.clone(),
554 project_id: ProjectId(project_id),
555 updates_tx,
556 _task: cx.spawn(async move |this, cx| {
557 cx.background_spawn(async move {
558 while let Some(update) = updates_rx.next().await {
559 match update {
560 DownstreamUpdate::UpdateRepository(snapshot) => {
561 if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
562 {
563 let update =
564 snapshot.build_update(old_snapshot, project_id);
565 *old_snapshot = snapshot;
566 for update in split_repository_update(update) {
567 client.send(update)?;
568 }
569 } else {
570 let update = snapshot.initial_update(project_id);
571 for update in split_repository_update(update) {
572 client.send(update)?;
573 }
574 snapshots.insert(snapshot.id, snapshot);
575 }
576 }
577 DownstreamUpdate::RemoveRepository(id) => {
578 client.send(proto::RemoveRepository {
579 project_id,
580 id: id.to_proto(),
581 })?;
582 }
583 }
584 }
585 anyhow::Ok(())
586 })
587 .await
588 .ok();
589 this.update(cx, |this, _| {
590 if let GitStoreState::Local {
591 downstream: downstream_client,
592 ..
593 } = &mut this.state
594 {
595 downstream_client.take();
596 } else {
597 unreachable!("unshared called on remote store");
598 }
599 })
600 }),
601 });
602 }
603 }
604 }
605
606 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
607 match &mut self.state {
608 GitStoreState::Local {
609 downstream: downstream_client,
610 ..
611 } => {
612 downstream_client.take();
613 }
614 GitStoreState::Remote {
615 downstream: downstream_client,
616 ..
617 } => {
618 downstream_client.take();
619 }
620 }
621 self.shared_diffs.clear();
622 }
623
624 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
625 self.shared_diffs.remove(peer_id);
626 }
627
628 pub fn active_repository(&self) -> Option<Entity<Repository>> {
629 self.active_repo_id
630 .as_ref()
631 .map(|id| self.repositories[id].clone())
632 }
633
634 pub fn open_unstaged_diff(
635 &mut self,
636 buffer: Entity<Buffer>,
637 cx: &mut Context<Self>,
638 ) -> Task<Result<Entity<BufferDiff>>> {
639 let buffer_id = buffer.read(cx).remote_id();
640 if let Some(diff_state) = self.diffs.get(&buffer_id)
641 && let Some(unstaged_diff) = diff_state
642 .read(cx)
643 .unstaged_diff
644 .as_ref()
645 .and_then(|weak| weak.upgrade())
646 {
647 if let Some(task) =
648 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
649 {
650 return cx.background_executor().spawn(async move {
651 task.await;
652 Ok(unstaged_diff)
653 });
654 }
655 return Task::ready(Ok(unstaged_diff));
656 }
657
658 let Some((repo, repo_path)) =
659 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
660 else {
661 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
662 };
663
664 let task = self
665 .loading_diffs
666 .entry((buffer_id, DiffKind::Unstaged))
667 .or_insert_with(|| {
668 let staged_text = repo.update(cx, |repo, cx| {
669 repo.load_staged_text(buffer_id, repo_path, cx)
670 });
671 cx.spawn(async move |this, cx| {
672 Self::open_diff_internal(
673 this,
674 DiffKind::Unstaged,
675 staged_text.await.map(DiffBasesChange::SetIndex),
676 buffer,
677 cx,
678 )
679 .await
680 .map_err(Arc::new)
681 })
682 .shared()
683 })
684 .clone();
685
686 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
687 }
688
689 pub fn open_diff_since(
690 &mut self,
691 oid: Option<git::Oid>,
692 buffer: Entity<Buffer>,
693 repo: Entity<Repository>,
694 cx: &mut Context<Self>,
695 ) -> Task<Result<Entity<BufferDiff>>> {
696 cx.spawn(async move |this, cx| {
697 let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot())?;
698 let content = match oid {
699 None => None,
700 Some(oid) => Some(
701 repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))?
702 .await?,
703 ),
704 };
705 let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx))?;
706
707 buffer_diff
708 .update(cx, |buffer_diff, cx| {
709 buffer_diff.set_base_text(
710 content.map(|s| s.as_str().into()),
711 buffer_snapshot.language().cloned(),
712 buffer_snapshot.text,
713 cx,
714 )
715 })?
716 .await?;
717 let unstaged_diff = this
718 .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
719 .await?;
720 buffer_diff.update(cx, |buffer_diff, _| {
721 buffer_diff.set_secondary_diff(unstaged_diff);
722 })?;
723
724 this.update(cx, |_, cx| {
725 cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
726 .detach();
727 })?;
728
729 Ok(buffer_diff)
730 })
731 }
732
733 pub fn open_uncommitted_diff(
734 &mut self,
735 buffer: Entity<Buffer>,
736 cx: &mut Context<Self>,
737 ) -> Task<Result<Entity<BufferDiff>>> {
738 let buffer_id = buffer.read(cx).remote_id();
739
740 if let Some(diff_state) = self.diffs.get(&buffer_id)
741 && let Some(uncommitted_diff) = diff_state
742 .read(cx)
743 .uncommitted_diff
744 .as_ref()
745 .and_then(|weak| weak.upgrade())
746 {
747 if let Some(task) =
748 diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
749 {
750 return cx.background_executor().spawn(async move {
751 task.await;
752 Ok(uncommitted_diff)
753 });
754 }
755 return Task::ready(Ok(uncommitted_diff));
756 }
757
758 let Some((repo, repo_path)) =
759 self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
760 else {
761 return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
762 };
763
764 let task = self
765 .loading_diffs
766 .entry((buffer_id, DiffKind::Uncommitted))
767 .or_insert_with(|| {
768 let changes = repo.update(cx, |repo, cx| {
769 repo.load_committed_text(buffer_id, repo_path, cx)
770 });
771
772 // todo(lw): hot foreground spawn
773 cx.spawn(async move |this, cx| {
774 Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
775 .await
776 .map_err(Arc::new)
777 })
778 .shared()
779 })
780 .clone();
781
782 cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
783 }
784
785 async fn open_diff_internal(
786 this: WeakEntity<Self>,
787 kind: DiffKind,
788 texts: Result<DiffBasesChange>,
789 buffer_entity: Entity<Buffer>,
790 cx: &mut AsyncApp,
791 ) -> Result<Entity<BufferDiff>> {
792 let diff_bases_change = match texts {
793 Err(e) => {
794 this.update(cx, |this, cx| {
795 let buffer = buffer_entity.read(cx);
796 let buffer_id = buffer.remote_id();
797 this.loading_diffs.remove(&(buffer_id, kind));
798 })?;
799 return Err(e);
800 }
801 Ok(change) => change,
802 };
803
804 this.update(cx, |this, cx| {
805 let buffer = buffer_entity.read(cx);
806 let buffer_id = buffer.remote_id();
807 let language = buffer.language().cloned();
808 let language_registry = buffer.language_registry();
809 let text_snapshot = buffer.text_snapshot();
810 this.loading_diffs.remove(&(buffer_id, kind));
811
812 let git_store = cx.weak_entity();
813 let diff_state = this
814 .diffs
815 .entry(buffer_id)
816 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
817
818 let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
819
820 cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
821 diff_state.update(cx, |diff_state, cx| {
822 diff_state.language = language;
823 diff_state.language_registry = language_registry;
824
825 match kind {
826 DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
827 DiffKind::Uncommitted => {
828 let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
829 diff
830 } else {
831 let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
832 diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
833 unstaged_diff
834 };
835
836 diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
837 diff_state.uncommitted_diff = Some(diff.downgrade())
838 }
839 }
840
841 diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
842 let rx = diff_state.wait_for_recalculation();
843
844 anyhow::Ok(async move {
845 if let Some(rx) = rx {
846 rx.await;
847 }
848 Ok(diff)
849 })
850 })
851 })??
852 .await
853 }
854
855 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
856 let diff_state = self.diffs.get(&buffer_id)?;
857 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
858 }
859
860 pub fn get_uncommitted_diff(
861 &self,
862 buffer_id: BufferId,
863 cx: &App,
864 ) -> Option<Entity<BufferDiff>> {
865 let diff_state = self.diffs.get(&buffer_id)?;
866 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
867 }
868
869 pub fn open_conflict_set(
870 &mut self,
871 buffer: Entity<Buffer>,
872 cx: &mut Context<Self>,
873 ) -> Entity<ConflictSet> {
874 log::debug!("open conflict set");
875 let buffer_id = buffer.read(cx).remote_id();
876
877 if let Some(git_state) = self.diffs.get(&buffer_id)
878 && let Some(conflict_set) = git_state
879 .read(cx)
880 .conflict_set
881 .as_ref()
882 .and_then(|weak| weak.upgrade())
883 {
884 let conflict_set = conflict_set;
885 let buffer_snapshot = buffer.read(cx).text_snapshot();
886
887 git_state.update(cx, |state, cx| {
888 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
889 });
890
891 return conflict_set;
892 }
893
894 let is_unmerged = self
895 .repository_and_path_for_buffer_id(buffer_id, cx)
896 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
897 let git_store = cx.weak_entity();
898 let buffer_git_state = self
899 .diffs
900 .entry(buffer_id)
901 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
902 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
903
904 self._subscriptions
905 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
906 cx.emit(GitStoreEvent::ConflictsUpdated);
907 }));
908
909 buffer_git_state.update(cx, |state, cx| {
910 state.conflict_set = Some(conflict_set.downgrade());
911 let buffer_snapshot = buffer.read(cx).text_snapshot();
912 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
913 });
914
915 conflict_set
916 }
917
918 pub fn project_path_git_status(
919 &self,
920 project_path: &ProjectPath,
921 cx: &App,
922 ) -> Option<FileStatus> {
923 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
924 Some(repo.read(cx).status_for_path(&repo_path)?.status)
925 }
926
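    /// Captures a Git checkpoint of every repository currently in the store.
    ///
    /// A rough sketch of how this pairs with [`Self::restore_checkpoint`]
    /// (not a doctest; assumes an async context in which `Entity::update`
    /// returns a `Result`):
    ///
    /// ```ignore
    /// let checkpoint = git_store
    ///     .update(cx, |store, cx| store.checkpoint(cx))?
    ///     .await?;
    /// // ...modify the working trees...
    /// git_store
    ///     .update(cx, |store, cx| store.restore_checkpoint(checkpoint, cx))?
    ///     .await?;
    /// ```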
927 pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
928 let mut work_directory_abs_paths = Vec::new();
929 let mut checkpoints = Vec::new();
930 for repository in self.repositories.values() {
931 repository.update(cx, |repository, _| {
932 work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
933 checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
934 });
935 }
936
937 cx.background_executor().spawn(async move {
938 let checkpoints = future::try_join_all(checkpoints).await?;
939 Ok(GitStoreCheckpoint {
940 checkpoints_by_work_dir_abs_path: work_directory_abs_paths
941 .into_iter()
942 .zip(checkpoints)
943 .collect(),
944 })
945 })
946 }
947
948 pub fn restore_checkpoint(
949 &self,
950 checkpoint: GitStoreCheckpoint,
951 cx: &mut App,
952 ) -> Task<Result<()>> {
953 let repositories_by_work_dir_abs_path = self
954 .repositories
955 .values()
956 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
957 .collect::<HashMap<_, _>>();
958
959 let mut tasks = Vec::new();
960 for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
961 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
962 let restore = repository.update(cx, |repository, _| {
963 repository.restore_checkpoint(checkpoint)
964 });
965 tasks.push(async move { restore.await? });
966 }
967 }
968 cx.background_spawn(async move {
969 future::try_join_all(tasks).await?;
970 Ok(())
971 })
972 }
973
974 /// Compares two checkpoints, returning true if they are equal.
975 pub fn compare_checkpoints(
976 &self,
977 left: GitStoreCheckpoint,
978 mut right: GitStoreCheckpoint,
979 cx: &mut App,
980 ) -> Task<Result<bool>> {
981 let repositories_by_work_dir_abs_path = self
982 .repositories
983 .values()
984 .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
985 .collect::<HashMap<_, _>>();
986
987 let mut tasks = Vec::new();
988 for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
989 if let Some(right_checkpoint) = right
990 .checkpoints_by_work_dir_abs_path
991 .remove(&work_dir_abs_path)
992 {
993 if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
994 {
995 let compare = repository.update(cx, |repository, _| {
996 repository.compare_checkpoints(left_checkpoint, right_checkpoint)
997 });
998
999 tasks.push(async move { compare.await? });
1000 }
1001 } else {
1002 return Task::ready(Ok(false));
1003 }
1004 }
1005 cx.background_spawn(async move {
1006 Ok(future::try_join_all(tasks)
1007 .await?
1008 .into_iter()
1009 .all(|result| result))
1010 })
1011 }
1012
1013 /// Blames a buffer.
1014 pub fn blame_buffer(
1015 &self,
1016 buffer: &Entity<Buffer>,
1017 version: Option<clock::Global>,
1018 cx: &mut Context<Self>,
1019 ) -> Task<Result<Option<Blame>>> {
1020 let buffer = buffer.read(cx);
1021 let Some((repo, repo_path)) =
1022 self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
1023 else {
1024 return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
1025 };
1026 let content = match &version {
1027 Some(version) => buffer.rope_for_version(version),
1028 None => buffer.as_rope().clone(),
1029 };
1030 let version = version.unwrap_or(buffer.version());
1031 let buffer_id = buffer.remote_id();
1032
1033 let repo = repo.downgrade();
1034 cx.spawn(async move |_, cx| {
1035 let repository_state = repo
1036 .update(cx, |repo, _| repo.repository_state.clone())?
1037 .await
1038 .map_err(|err| anyhow::anyhow!(err))?;
1039 match repository_state {
1040 RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
1041 .blame(repo_path.clone(), content)
1042 .await
1043 .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
1044 .map(Some),
1045 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1046 let response = client
1047 .request(proto::BlameBuffer {
1048 project_id: project_id.to_proto(),
1049 buffer_id: buffer_id.into(),
1050 version: serialize_version(&version),
1051 })
1052 .await?;
1053 Ok(deserialize_blame_buffer_response(response))
1054 }
1055 }
1056 })
1057 }
1058
1059 pub fn file_history(
1060 &self,
1061 repo: &Entity<Repository>,
1062 path: RepoPath,
1063 cx: &mut App,
1064 ) -> Task<Result<git::repository::FileHistory>> {
1065 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1066
1067 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1068 }
1069
1070 pub fn file_history_paginated(
1071 &self,
1072 repo: &Entity<Repository>,
1073 path: RepoPath,
1074 skip: usize,
1075 limit: Option<usize>,
1076 cx: &mut App,
1077 ) -> Task<Result<git::repository::FileHistory>> {
1078 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1079
1080 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1081 }
1082
1083 pub fn get_permalink_to_line(
1084 &self,
1085 buffer: &Entity<Buffer>,
1086 selection: Range<u32>,
1087 cx: &mut App,
1088 ) -> Task<Result<url::Url>> {
1089 let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
1090 return Task::ready(Err(anyhow!("buffer has no file")));
1091 };
1092
1093 let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
1094 &(file.worktree.read(cx).id(), file.path.clone()).into(),
1095 cx,
1096 ) else {
1097 // If we're not in a Git repo, check whether this is a Rust source
1098 // file in the Cargo registry (presumably opened with go-to-definition
1099 // from a normal Rust file). If so, we can put together a permalink
1100 // using crate metadata.
1101 if buffer
1102 .read(cx)
1103 .language()
1104 .is_none_or(|lang| lang.name() != "Rust".into())
1105 {
1106 return Task::ready(Err(anyhow!("no permalink available")));
1107 }
1108 let file_path = file.worktree.read(cx).absolutize(&file.path);
1109 return cx.spawn(async move |cx| {
1110 let provider_registry = cx.update(GitHostingProviderRegistry::default_global)?;
1111 get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
1112 .context("no permalink available")
1113 });
1114 };
1115
1116 let buffer_id = buffer.read(cx).remote_id();
1117 let branch = repo.read(cx).branch.clone();
1118 let remote = branch
1119 .as_ref()
1120 .and_then(|b| b.upstream.as_ref())
1121 .and_then(|b| b.remote_name())
1122 .unwrap_or("origin")
1123 .to_string();
1124
1125 let rx = repo.update(cx, |repo, _| {
1126 repo.send_job(None, move |state, cx| async move {
1127 match state {
1128 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
1129 let origin_url = backend
1130 .remote_url(&remote)
1131 .with_context(|| format!("remote \"{remote}\" not found"))?;
1132
1133 let sha = backend.head_sha().await.context("reading HEAD SHA")?;
1134
1135 let provider_registry =
1136 cx.update(GitHostingProviderRegistry::default_global)?;
1137
1138 let (provider, remote) =
1139 parse_git_remote_url(provider_registry, &origin_url)
1140 .context("parsing Git remote URL")?;
1141
1142 Ok(provider.build_permalink(
1143 remote,
1144 BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
1145 ))
1146 }
1147 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
1148 let response = client
1149 .request(proto::GetPermalinkToLine {
1150 project_id: project_id.to_proto(),
1151 buffer_id: buffer_id.into(),
1152 selection: Some(proto::Range {
1153 start: selection.start as u64,
1154 end: selection.end as u64,
1155 }),
1156 })
1157 .await?;
1158
1159 url::Url::parse(&response.permalink).context("failed to parse permalink")
1160 }
1161 }
1162 })
1163 });
1164 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1165 }
1166
1167 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1168 match &self.state {
1169 GitStoreState::Local {
1170 downstream: downstream_client,
1171 ..
1172 } => downstream_client
1173 .as_ref()
1174 .map(|state| (state.client.clone(), state.project_id)),
1175 GitStoreState::Remote {
1176 downstream: downstream_client,
1177 ..
1178 } => downstream_client.clone(),
1179 }
1180 }
1181
1182 fn upstream_client(&self) -> Option<AnyProtoClient> {
1183 match &self.state {
1184 GitStoreState::Local { .. } => None,
1185 GitStoreState::Remote {
1186 upstream_client, ..
1187 } => Some(upstream_client.clone()),
1188 }
1189 }
1190
1191 fn on_worktree_store_event(
1192 &mut self,
1193 worktree_store: Entity<WorktreeStore>,
1194 event: &WorktreeStoreEvent,
1195 cx: &mut Context<Self>,
1196 ) {
1197 let GitStoreState::Local {
1198 project_environment,
1199 downstream,
1200 next_repository_id,
1201 fs,
1202 } = &self.state
1203 else {
1204 return;
1205 };
1206
1207 match event {
1208 WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
1209 if let Some(worktree) = self
1210 .worktree_store
1211 .read(cx)
1212 .worktree_for_id(*worktree_id, cx)
1213 {
1214 let paths_by_git_repo =
1215 self.process_updated_entries(&worktree, updated_entries, cx);
1216 let downstream = downstream
1217 .as_ref()
1218 .map(|downstream| downstream.updates_tx.clone());
1219 cx.spawn(async move |_, cx| {
1220 let paths_by_git_repo = paths_by_git_repo.await;
1221 for (repo, paths) in paths_by_git_repo {
1222 repo.update(cx, |repo, cx| {
1223 repo.paths_changed(paths, downstream.clone(), cx);
1224 })
1225 .ok();
1226 }
1227 })
1228 .detach();
1229 }
1230 }
1231 WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
1232 let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
1233 else {
1234 return;
1235 };
1236 if !worktree.read(cx).is_visible() {
1237 log::debug!(
1238 "not adding repositories for local worktree {:?} because it's not visible",
1239 worktree.read(cx).abs_path()
1240 );
1241 return;
1242 }
1243 self.update_repositories_from_worktree(
1244 *worktree_id,
1245 project_environment.clone(),
1246 next_repository_id.clone(),
1247 downstream
1248 .as_ref()
1249 .map(|downstream| downstream.updates_tx.clone()),
1250 changed_repos.clone(),
1251 fs.clone(),
1252 cx,
1253 );
1254 self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
1255 }
1256 WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
1257 let repos_without_worktree: Vec<RepositoryId> = self
1258 .worktree_ids
1259 .iter_mut()
1260 .filter_map(|(repo_id, worktree_ids)| {
1261 worktree_ids.remove(worktree_id);
1262 if worktree_ids.is_empty() {
1263 Some(*repo_id)
1264 } else {
1265 None
1266 }
1267 })
1268 .collect();
1269 let is_active_repo_removed = repos_without_worktree
1270 .iter()
1271 .any(|repo_id| self.active_repo_id == Some(*repo_id));
1272
1273 for repo_id in repos_without_worktree {
1274 self.repositories.remove(&repo_id);
1275 self.worktree_ids.remove(&repo_id);
1276 if let Some(updates_tx) =
1277 downstream.as_ref().map(|downstream| &downstream.updates_tx)
1278 {
1279 updates_tx
1280 .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
1281 .ok();
1282 }
1283 }
1284
1285 if is_active_repo_removed {
1286 if let Some((&repo_id, _)) = self.repositories.iter().next() {
1287 self.active_repo_id = Some(repo_id);
1288 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
1289 } else {
1290 self.active_repo_id = None;
1291 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1292 }
1293 }
1294 }
1295 _ => {}
1296 }
1297 }
1298 fn on_repository_event(
1299 &mut self,
1300 repo: Entity<Repository>,
1301 event: &RepositoryEvent,
1302 cx: &mut Context<Self>,
1303 ) {
1304 let id = repo.read(cx).id;
1305 let repo_snapshot = repo.read(cx).snapshot.clone();
1306 for (buffer_id, diff) in self.diffs.iter() {
1307 if let Some((buffer_repo, repo_path)) =
1308 self.repository_and_path_for_buffer_id(*buffer_id, cx)
1309 && buffer_repo == repo
1310 {
1311 diff.update(cx, |diff, cx| {
1312 if let Some(conflict_set) = &diff.conflict_set {
1313 let conflict_status_changed =
1314 conflict_set.update(cx, |conflict_set, cx| {
1315 let has_conflict = repo_snapshot.has_conflict(&repo_path);
1316 conflict_set.set_has_conflict(has_conflict, cx)
1317 })?;
1318 if conflict_status_changed {
1319 let buffer_store = self.buffer_store.read(cx);
1320 if let Some(buffer) = buffer_store.get(*buffer_id) {
1321 let _ = diff
1322 .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
1323 }
1324 }
1325 }
1326 anyhow::Ok(())
1327 })
1328 .ok();
1329 }
1330 }
1331 cx.emit(GitStoreEvent::RepositoryUpdated(
1332 id,
1333 event.clone(),
1334 self.active_repo_id == Some(id),
1335 ))
1336 }
1337
1338 fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
1339 cx.emit(GitStoreEvent::JobsUpdated)
1340 }
1341
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree.
1343 fn update_repositories_from_worktree(
1344 &mut self,
1345 worktree_id: WorktreeId,
1346 project_environment: Entity<ProjectEnvironment>,
1347 next_repository_id: Arc<AtomicU64>,
1348 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
1349 updated_git_repositories: UpdatedGitRepositoriesSet,
1350 fs: Arc<dyn Fs>,
1351 cx: &mut Context<Self>,
1352 ) {
1353 let mut removed_ids = Vec::new();
1354 for update in updated_git_repositories.iter() {
1355 if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
1356 let existing_work_directory_abs_path =
1357 repo.read(cx).work_directory_abs_path.clone();
1358 Some(&existing_work_directory_abs_path)
1359 == update.old_work_directory_abs_path.as_ref()
1360 || Some(&existing_work_directory_abs_path)
1361 == update.new_work_directory_abs_path.as_ref()
1362 }) {
1363 let repo_id = *id;
1364 if let Some(new_work_directory_abs_path) =
1365 update.new_work_directory_abs_path.clone()
1366 {
1367 self.worktree_ids
1368 .entry(repo_id)
1369 .or_insert_with(HashSet::new)
1370 .insert(worktree_id);
1371 existing.update(cx, |existing, cx| {
1372 existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
1373 existing.schedule_scan(updates_tx.clone(), cx);
1374 });
1375 } else {
1376 if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
1377 worktree_ids.remove(&worktree_id);
1378 if worktree_ids.is_empty() {
1379 removed_ids.push(repo_id);
1380 }
1381 }
1382 }
1383 } else if let UpdatedGitRepository {
1384 new_work_directory_abs_path: Some(work_directory_abs_path),
1385 dot_git_abs_path: Some(dot_git_abs_path),
1386 repository_dir_abs_path: Some(_repository_dir_abs_path),
1387 common_dir_abs_path: Some(_common_dir_abs_path),
1388 ..
1389 } = update
1390 {
1391 let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
1392 let git_store = cx.weak_entity();
1393 let repo = cx.new(|cx| {
1394 let mut repo = Repository::local(
1395 id,
1396 work_directory_abs_path.clone(),
1397 dot_git_abs_path.clone(),
1398 project_environment.downgrade(),
1399 fs.clone(),
1400 git_store,
1401 cx,
1402 );
1403 if let Some(updates_tx) = updates_tx.as_ref() {
1404 // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
1405 updates_tx
1406 .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
1407 .ok();
1408 }
1409 repo.schedule_scan(updates_tx.clone(), cx);
1410 repo
1411 });
1412 self._subscriptions
1413 .push(cx.subscribe(&repo, Self::on_repository_event));
1414 self._subscriptions
1415 .push(cx.subscribe(&repo, Self::on_jobs_updated));
1416 self.repositories.insert(id, repo);
1417 self.worktree_ids.insert(id, HashSet::from([worktree_id]));
1418 cx.emit(GitStoreEvent::RepositoryAdded);
1419 self.active_repo_id.get_or_insert_with(|| {
1420 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1421 id
1422 });
1423 }
1424 }
1425
1426 for id in removed_ids {
1427 if self.active_repo_id == Some(id) {
1428 self.active_repo_id = None;
1429 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1430 }
1431 self.repositories.remove(&id);
1432 if let Some(updates_tx) = updates_tx.as_ref() {
1433 updates_tx
1434 .unbounded_send(DownstreamUpdate::RemoveRepository(id))
1435 .ok();
1436 }
1437 }
1438 }
1439
1440 fn on_buffer_store_event(
1441 &mut self,
1442 _: Entity<BufferStore>,
1443 event: &BufferStoreEvent,
1444 cx: &mut Context<Self>,
1445 ) {
1446 match event {
1447 BufferStoreEvent::BufferAdded(buffer) => {
1448 cx.subscribe(buffer, |this, buffer, event, cx| {
1449 if let BufferEvent::LanguageChanged = event {
1450 let buffer_id = buffer.read(cx).remote_id();
1451 if let Some(diff_state) = this.diffs.get(&buffer_id) {
1452 diff_state.update(cx, |diff_state, cx| {
1453 diff_state.buffer_language_changed(buffer, cx);
1454 });
1455 }
1456 }
1457 })
1458 .detach();
1459 }
1460 BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
1461 if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
1462 diffs.remove(buffer_id);
1463 }
1464 }
1465 BufferStoreEvent::BufferDropped(buffer_id) => {
1466 self.diffs.remove(buffer_id);
1467 for diffs in self.shared_diffs.values_mut() {
1468 diffs.remove(buffer_id);
1469 }
1470 }
1471 BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
1472 // Whenever a buffer's file path changes, it's possible that the
1473 // new path is actually a path that is being tracked by a git
1474 // repository. In that case, we'll want to update the buffer's
                // `BufferGitState`, in case it already has one.
1476 let buffer_id = buffer.read(cx).remote_id();
1477 let diff_state = self.diffs.get(&buffer_id);
1478 let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);
1479
1480 if let Some(diff_state) = diff_state
1481 && let Some((repo, repo_path)) = repo
1482 {
1483 let buffer = buffer.clone();
1484 let diff_state = diff_state.clone();
1485
1486 cx.spawn(async move |_git_store, cx| {
1487 async {
1488 let diff_bases_change = repo
1489 .update(cx, |repo, cx| {
1490 repo.load_committed_text(buffer_id, repo_path, cx)
1491 })?
1492 .await?;
1493
1494 diff_state.update(cx, |diff_state, cx| {
1495 let buffer_snapshot = buffer.read(cx).text_snapshot();
1496 diff_state.diff_bases_changed(
1497 buffer_snapshot,
1498 Some(diff_bases_change),
1499 cx,
1500 );
1501 })
1502 }
1503 .await
1504 .log_err();
1505 })
1506 .detach();
1507 }
1508 }
1509 _ => {}
1510 }
1511 }
1512
1513 pub fn recalculate_buffer_diffs(
1514 &mut self,
1515 buffers: Vec<Entity<Buffer>>,
1516 cx: &mut Context<Self>,
1517 ) -> impl Future<Output = ()> + use<> {
1518 let mut futures = Vec::new();
1519 for buffer in buffers {
1520 if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
1521 let buffer = buffer.read(cx).text_snapshot();
1522 diff_state.update(cx, |diff_state, cx| {
1523 diff_state.recalculate_diffs(buffer.clone(), cx);
1524 futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
1525 });
1526 futures.push(diff_state.update(cx, |diff_state, cx| {
1527 diff_state
1528 .reparse_conflict_markers(buffer, cx)
1529 .map(|_| {})
1530 .boxed()
1531 }));
1532 }
1533 }
1534 async move {
1535 futures::future::join_all(futures).await;
1536 }
1537 }
1538
1539 fn on_buffer_diff_event(
1540 &mut self,
1541 diff: Entity<buffer_diff::BufferDiff>,
1542 event: &BufferDiffEvent,
1543 cx: &mut Context<Self>,
1544 ) {
1545 if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
1546 let buffer_id = diff.read(cx).buffer_id;
1547 if let Some(diff_state) = self.diffs.get(&buffer_id) {
1548 let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
1549 diff_state.hunk_staging_operation_count += 1;
1550 diff_state.hunk_staging_operation_count
1551 });
1552 if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
1553 let recv = repo.update(cx, |repo, cx| {
1554 log::debug!("hunks changed for {}", path.as_unix_str());
1555 repo.spawn_set_index_text_job(
1556 path,
1557 new_index_text.as_ref().map(|rope| rope.to_string()),
1558 Some(hunk_staging_operation_count),
1559 cx,
1560 )
1561 });
1562 let diff = diff.downgrade();
1563 cx.spawn(async move |this, cx| {
1564 if let Ok(Err(error)) = cx.background_spawn(recv).await {
1565 diff.update(cx, |diff, cx| {
1566 diff.clear_pending_hunks(cx);
1567 })
1568 .ok();
1569 this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
1570 .ok();
1571 }
1572 })
1573 .detach();
1574 }
1575 }
1576 }
1577 }
1578
1579 fn local_worktree_git_repos_changed(
1580 &mut self,
1581 worktree: Entity<Worktree>,
1582 changed_repos: &UpdatedGitRepositoriesSet,
1583 cx: &mut Context<Self>,
1584 ) {
1585 log::debug!("local worktree repos changed");
1586 debug_assert!(worktree.read(cx).is_local());
1587
1588 for repository in self.repositories.values() {
1589 repository.update(cx, |repository, cx| {
1590 let repo_abs_path = &repository.work_directory_abs_path;
1591 if changed_repos.iter().any(|update| {
1592 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1593 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1594 }) {
1595 repository.reload_buffer_diff_bases(cx);
1596 }
1597 });
1598 }
1599 }
1600
1601 pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
1602 &self.repositories
1603 }
1604
1605 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1606 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1607 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1608 Some(status.status)
1609 }
1610
1611 pub fn repository_and_path_for_buffer_id(
1612 &self,
1613 buffer_id: BufferId,
1614 cx: &App,
1615 ) -> Option<(Entity<Repository>, RepoPath)> {
1616 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1617 let project_path = buffer.read(cx).project_path(cx)?;
1618 self.repository_and_path_for_project_path(&project_path, cx)
1619 }
1620
1621 pub fn repository_and_path_for_project_path(
1622 &self,
1623 path: &ProjectPath,
1624 cx: &App,
1625 ) -> Option<(Entity<Repository>, RepoPath)> {
1626 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1627 self.repositories
1628 .values()
1629 .filter_map(|repo| {
1630 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1631 Some((repo.clone(), repo_path))
1632 })
1633 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1634 }
1635
1636 pub fn git_init(
1637 &self,
1638 path: Arc<Path>,
1639 fallback_branch_name: String,
1640 cx: &App,
1641 ) -> Task<Result<()>> {
1642 match &self.state {
1643 GitStoreState::Local { fs, .. } => {
1644 let fs = fs.clone();
1645 cx.background_executor()
1646 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1647 }
1648 GitStoreState::Remote {
1649 upstream_client,
1650 upstream_project_id: project_id,
1651 ..
1652 } => {
1653 let client = upstream_client.clone();
1654 let project_id = *project_id;
1655 cx.background_executor().spawn(async move {
1656 client
1657 .request(proto::GitInit {
                            project_id,
1659 abs_path: path.to_string_lossy().into_owned(),
1660 fallback_branch_name,
1661 })
1662 .await?;
1663 Ok(())
1664 })
1665 }
1666 }
1667 }
1668
1669 pub fn git_clone(
1670 &self,
1671 repo: String,
1672 path: impl Into<Arc<std::path::Path>>,
1673 cx: &App,
1674 ) -> Task<Result<()>> {
1675 let path = path.into();
1676 match &self.state {
1677 GitStoreState::Local { fs, .. } => {
1678 let fs = fs.clone();
1679 cx.background_executor()
1680 .spawn(async move { fs.git_clone(&repo, &path).await })
1681 }
1682 GitStoreState::Remote {
1683 upstream_client,
1684 upstream_project_id,
1685 ..
1686 } => {
1687 if upstream_client.is_via_collab() {
1688 return Task::ready(Err(anyhow!(
1689 "Git Clone isn't supported for project guests"
1690 )));
1691 }
1692 let request = upstream_client.request(proto::GitClone {
1693 project_id: *upstream_project_id,
1694 abs_path: path.to_string_lossy().into_owned(),
1695 remote_repo: repo,
1696 });
1697
1698 cx.background_spawn(async move {
1699 let result = request.await?;
1700
1701 match result.success {
1702 true => Ok(()),
1703 false => Err(anyhow!("Git Clone failed")),
1704 }
1705 })
1706 }
1707 }
1708 }
1709
1710 async fn handle_update_repository(
1711 this: Entity<Self>,
1712 envelope: TypedEnvelope<proto::UpdateRepository>,
1713 mut cx: AsyncApp,
1714 ) -> Result<()> {
1715 this.update(&mut cx, |this, cx| {
1716 let path_style = this.worktree_store.read(cx).path_style();
1717 let mut update = envelope.payload;
1718
1719 let id = RepositoryId::from_proto(update.id);
1720 let client = this.upstream_client().context("no upstream client")?;
1721
1722 let mut repo_subscription = None;
1723 let repo = this.repositories.entry(id).or_insert_with(|| {
1724 let git_store = cx.weak_entity();
1725 let repo = cx.new(|cx| {
1726 Repository::remote(
1727 id,
1728 Path::new(&update.abs_path).into(),
1729 path_style,
1730 ProjectId(update.project_id),
1731 client,
1732 git_store,
1733 cx,
1734 )
1735 });
1736 repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
1737 cx.emit(GitStoreEvent::RepositoryAdded);
1738 repo
1739 });
1740 this._subscriptions.extend(repo_subscription);
1741
1742 repo.update(cx, {
1743 let update = update.clone();
1744 |repo, cx| repo.apply_remote_update(update, cx)
1745 })?;
1746
1747 this.active_repo_id.get_or_insert_with(|| {
1748 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
1749 id
1750 });
1751
1752 if let Some((client, project_id)) = this.downstream_client() {
1753 update.project_id = project_id.to_proto();
1754 client.send(update).log_err();
1755 }
1756 Ok(())
1757 })?
1758 }
1759
1760 async fn handle_remove_repository(
1761 this: Entity<Self>,
1762 envelope: TypedEnvelope<proto::RemoveRepository>,
1763 mut cx: AsyncApp,
1764 ) -> Result<()> {
1765 this.update(&mut cx, |this, cx| {
1766 let mut update = envelope.payload;
1767 let id = RepositoryId::from_proto(update.id);
1768 this.repositories.remove(&id);
1769 if let Some((client, project_id)) = this.downstream_client() {
1770 update.project_id = project_id.to_proto();
1771 client.send(update).log_err();
1772 }
1773 if this.active_repo_id == Some(id) {
1774 this.active_repo_id = None;
1775 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1776 }
1777 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1778 })
1779 }
1780
1781 async fn handle_git_init(
1782 this: Entity<Self>,
1783 envelope: TypedEnvelope<proto::GitInit>,
1784 cx: AsyncApp,
1785 ) -> Result<proto::Ack> {
1786 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1787 let name = envelope.payload.fallback_branch_name;
1788 cx.update(|cx| this.read(cx).git_init(path, name, cx))?
1789 .await?;
1790
1791 Ok(proto::Ack {})
1792 }
1793
1794 async fn handle_git_clone(
1795 this: Entity<Self>,
1796 envelope: TypedEnvelope<proto::GitClone>,
1797 cx: AsyncApp,
1798 ) -> Result<proto::GitCloneResponse> {
1799 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1800 let repo_name = envelope.payload.remote_repo;
1801 let result = cx
1802 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))?
1803 .await;
1804
1805 Ok(proto::GitCloneResponse {
1806 success: result.is_ok(),
1807 })
1808 }
1809
1810 async fn handle_fetch(
1811 this: Entity<Self>,
1812 envelope: TypedEnvelope<proto::Fetch>,
1813 mut cx: AsyncApp,
1814 ) -> Result<proto::RemoteMessageResponse> {
1815 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1816 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1817 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
1818 let askpass_id = envelope.payload.askpass_id;
1819
1820 let askpass = make_remote_delegate(
1821 this,
1822 envelope.payload.project_id,
1823 repository_id,
1824 askpass_id,
1825 &mut cx,
1826 );
1827
1828 let remote_output = repository_handle
1829 .update(&mut cx, |repository_handle, cx| {
1830 repository_handle.fetch(fetch_options, askpass, cx)
1831 })?
1832 .await??;
1833
1834 Ok(proto::RemoteMessageResponse {
1835 stdout: remote_output.stdout,
1836 stderr: remote_output.stderr,
1837 })
1838 }
1839
1840 async fn handle_push(
1841 this: Entity<Self>,
1842 envelope: TypedEnvelope<proto::Push>,
1843 mut cx: AsyncApp,
1844 ) -> Result<proto::RemoteMessageResponse> {
1845 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1846 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1847
1848 let askpass_id = envelope.payload.askpass_id;
1849 let askpass = make_remote_delegate(
1850 this,
1851 envelope.payload.project_id,
1852 repository_id,
1853 askpass_id,
1854 &mut cx,
1855 );
1856
1857 let options = envelope
1858 .payload
1859 .options
1860 .as_ref()
1861 .map(|_| match envelope.payload.options() {
1862 proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
1863 proto::push::PushOptions::Force => git::repository::PushOptions::Force,
1864 });
1865
1866 let branch_name = envelope.payload.branch_name.into();
1867 let remote_name = envelope.payload.remote_name.into();
1868
1869 let remote_output = repository_handle
1870 .update(&mut cx, |repository_handle, cx| {
1871 repository_handle.push(branch_name, remote_name, options, askpass, cx)
1872 })?
1873 .await??;
1874 Ok(proto::RemoteMessageResponse {
1875 stdout: remote_output.stdout,
1876 stderr: remote_output.stderr,
1877 })
1878 }
1879
1880 async fn handle_pull(
1881 this: Entity<Self>,
1882 envelope: TypedEnvelope<proto::Pull>,
1883 mut cx: AsyncApp,
1884 ) -> Result<proto::RemoteMessageResponse> {
1885 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1886 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1887 let askpass_id = envelope.payload.askpass_id;
1888 let askpass = make_remote_delegate(
1889 this,
1890 envelope.payload.project_id,
1891 repository_id,
1892 askpass_id,
1893 &mut cx,
1894 );
1895
1896 let branch_name = envelope.payload.branch_name.map(|name| name.into());
1897 let remote_name = envelope.payload.remote_name.into();
1898 let rebase = envelope.payload.rebase;
1899
1900 let remote_message = repository_handle
1901 .update(&mut cx, |repository_handle, cx| {
1902 repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
1903 })?
1904 .await??;
1905
1906 Ok(proto::RemoteMessageResponse {
1907 stdout: remote_message.stdout,
1908 stderr: remote_message.stderr,
1909 })
1910 }
1911
1912 async fn handle_stage(
1913 this: Entity<Self>,
1914 envelope: TypedEnvelope<proto::Stage>,
1915 mut cx: AsyncApp,
1916 ) -> Result<proto::Ack> {
1917 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1918 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1919
1920 let entries = envelope
1921 .payload
1922 .paths
1923 .into_iter()
1924 .map(|path| RepoPath::new(&path))
1925 .collect::<Result<Vec<_>>>()?;
1926
1927 repository_handle
1928 .update(&mut cx, |repository_handle, cx| {
1929 repository_handle.stage_entries(entries, cx)
1930 })?
1931 .await?;
1932 Ok(proto::Ack {})
1933 }
1934
1935 async fn handle_unstage(
1936 this: Entity<Self>,
1937 envelope: TypedEnvelope<proto::Unstage>,
1938 mut cx: AsyncApp,
1939 ) -> Result<proto::Ack> {
1940 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1941 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1942
1943 let entries = envelope
1944 .payload
1945 .paths
1946 .into_iter()
1947 .map(|path| RepoPath::new(&path))
1948 .collect::<Result<Vec<_>>>()?;
1949
1950 repository_handle
1951 .update(&mut cx, |repository_handle, cx| {
1952 repository_handle.unstage_entries(entries, cx)
1953 })?
1954 .await?;
1955
1956 Ok(proto::Ack {})
1957 }
1958
1959 async fn handle_stash(
1960 this: Entity<Self>,
1961 envelope: TypedEnvelope<proto::Stash>,
1962 mut cx: AsyncApp,
1963 ) -> Result<proto::Ack> {
1964 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1965 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1966
1967 let entries = envelope
1968 .payload
1969 .paths
1970 .into_iter()
1971 .map(|path| RepoPath::new(&path))
1972 .collect::<Result<Vec<_>>>()?;
1973
1974 repository_handle
1975 .update(&mut cx, |repository_handle, cx| {
1976 repository_handle.stash_entries(entries, cx)
1977 })?
1978 .await?;
1979
1980 Ok(proto::Ack {})
1981 }
1982
1983 async fn handle_stash_pop(
1984 this: Entity<Self>,
1985 envelope: TypedEnvelope<proto::StashPop>,
1986 mut cx: AsyncApp,
1987 ) -> Result<proto::Ack> {
1988 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
1989 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
1990 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
1991
1992 repository_handle
1993 .update(&mut cx, |repository_handle, cx| {
1994 repository_handle.stash_pop(stash_index, cx)
1995 })?
1996 .await?;
1997
1998 Ok(proto::Ack {})
1999 }
2000
2001 async fn handle_stash_apply(
2002 this: Entity<Self>,
2003 envelope: TypedEnvelope<proto::StashApply>,
2004 mut cx: AsyncApp,
2005 ) -> Result<proto::Ack> {
2006 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2007 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2008 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2009
2010 repository_handle
2011 .update(&mut cx, |repository_handle, cx| {
2012 repository_handle.stash_apply(stash_index, cx)
2013 })?
2014 .await?;
2015
2016 Ok(proto::Ack {})
2017 }
2018
2019 async fn handle_stash_drop(
2020 this: Entity<Self>,
2021 envelope: TypedEnvelope<proto::StashDrop>,
2022 mut cx: AsyncApp,
2023 ) -> Result<proto::Ack> {
2024 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2025 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2026 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2027
2028 repository_handle
2029 .update(&mut cx, |repository_handle, cx| {
2030 repository_handle.stash_drop(stash_index, cx)
2031 })?
2032 .await??;
2033
2034 Ok(proto::Ack {})
2035 }
2036
2037 async fn handle_set_index_text(
2038 this: Entity<Self>,
2039 envelope: TypedEnvelope<proto::SetIndexText>,
2040 mut cx: AsyncApp,
2041 ) -> Result<proto::Ack> {
2042 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2043 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2044 let repo_path = RepoPath::from_proto(&envelope.payload.path)?;
2045
2046 repository_handle
2047 .update(&mut cx, |repository_handle, cx| {
2048 repository_handle.spawn_set_index_text_job(
2049 repo_path,
2050 envelope.payload.text,
2051 None,
2052 cx,
2053 )
2054 })?
2055 .await??;
2056 Ok(proto::Ack {})
2057 }
2058
2059 async fn handle_run_hook(
2060 this: Entity<Self>,
2061 envelope: TypedEnvelope<proto::RunGitHook>,
2062 mut cx: AsyncApp,
2063 ) -> Result<proto::Ack> {
2064 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2065 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2066 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2067 repository_handle
2068 .update(&mut cx, |repository_handle, cx| {
2069 repository_handle.run_hook(hook, cx)
2070 })?
2071 .await??;
2072 Ok(proto::Ack {})
2073 }
2074
2075 async fn handle_commit(
2076 this: Entity<Self>,
2077 envelope: TypedEnvelope<proto::Commit>,
2078 mut cx: AsyncApp,
2079 ) -> Result<proto::Ack> {
2080 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2081 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2082 let askpass_id = envelope.payload.askpass_id;
2083
2084 let askpass = make_remote_delegate(
2085 this,
2086 envelope.payload.project_id,
2087 repository_id,
2088 askpass_id,
2089 &mut cx,
2090 );
2091
2092 let message = SharedString::from(envelope.payload.message);
2093 let name = envelope.payload.name.map(SharedString::from);
2094 let email = envelope.payload.email.map(SharedString::from);
2095 let options = envelope.payload.options.unwrap_or_default();
2096
2097 repository_handle
2098 .update(&mut cx, |repository_handle, cx| {
2099 repository_handle.commit(
2100 message,
2101 name.zip(email),
2102 CommitOptions {
2103 amend: options.amend,
2104 signoff: options.signoff,
2105 },
2106 askpass,
2107 cx,
2108 )
2109 })?
2110 .await??;
2111 Ok(proto::Ack {})
2112 }
2113
2114 async fn handle_get_remotes(
2115 this: Entity<Self>,
2116 envelope: TypedEnvelope<proto::GetRemotes>,
2117 mut cx: AsyncApp,
2118 ) -> Result<proto::GetRemotesResponse> {
2119 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2120 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2121
2122 let branch_name = envelope.payload.branch_name;
2123 let is_push = envelope.payload.is_push;
2124
2125 let remotes = repository_handle
2126 .update(&mut cx, |repository_handle, _| {
2127 repository_handle.get_remotes(branch_name, is_push)
2128 })?
2129 .await??;
2130
2131 Ok(proto::GetRemotesResponse {
2132 remotes: remotes
2133 .into_iter()
                .map(|remote| proto::get_remotes_response::Remote {
                    name: remote.name.to_string(),
2136 })
2137 .collect::<Vec<_>>(),
2138 })
2139 }
2140
2141 async fn handle_get_worktrees(
2142 this: Entity<Self>,
2143 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2144 mut cx: AsyncApp,
2145 ) -> Result<proto::GitWorktreesResponse> {
2146 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2147 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2148
2149 let worktrees = repository_handle
2150 .update(&mut cx, |repository_handle, _| {
2151 repository_handle.worktrees()
2152 })?
2153 .await??;
2154
2155 Ok(proto::GitWorktreesResponse {
2156 worktrees: worktrees
2157 .into_iter()
2158 .map(|worktree| worktree_to_proto(&worktree))
2159 .collect::<Vec<_>>(),
2160 })
2161 }
2162
2163 async fn handle_create_worktree(
2164 this: Entity<Self>,
2165 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2166 mut cx: AsyncApp,
2167 ) -> Result<proto::Ack> {
2168 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2169 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2170 let directory = PathBuf::from(envelope.payload.directory);
2171 let name = envelope.payload.name;
2172 let commit = envelope.payload.commit;
2173
2174 repository_handle
2175 .update(&mut cx, |repository_handle, _| {
2176 repository_handle.create_worktree(name, directory, commit)
2177 })?
2178 .await??;
2179
2180 Ok(proto::Ack {})
2181 }
2182
2183 async fn handle_get_branches(
2184 this: Entity<Self>,
2185 envelope: TypedEnvelope<proto::GitGetBranches>,
2186 mut cx: AsyncApp,
2187 ) -> Result<proto::GitBranchesResponse> {
2188 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2189 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2190
2191 let branches = repository_handle
2192 .update(&mut cx, |repository_handle, _| repository_handle.branches())?
2193 .await??;
2194
2195 Ok(proto::GitBranchesResponse {
2196 branches: branches
2197 .into_iter()
2198 .map(|branch| branch_to_proto(&branch))
2199 .collect::<Vec<_>>(),
2200 })
2201 }

    async fn handle_get_default_branch(
2203 this: Entity<Self>,
2204 envelope: TypedEnvelope<proto::GetDefaultBranch>,
2205 mut cx: AsyncApp,
2206 ) -> Result<proto::GetDefaultBranchResponse> {
2207 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2208 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2209
2210 let branch = repository_handle
2211 .update(&mut cx, |repository_handle, _| {
2212 repository_handle.default_branch()
2213 })?
2214 .await??
2215 .map(Into::into);
2216
2217 Ok(proto::GetDefaultBranchResponse { branch })
2218 }

    async fn handle_create_branch(
2220 this: Entity<Self>,
2221 envelope: TypedEnvelope<proto::GitCreateBranch>,
2222 mut cx: AsyncApp,
2223 ) -> Result<proto::Ack> {
2224 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2225 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2226 let branch_name = envelope.payload.branch_name;
2227
2228 repository_handle
2229 .update(&mut cx, |repository_handle, _| {
2230 repository_handle.create_branch(branch_name, None)
2231 })?
2232 .await??;
2233
2234 Ok(proto::Ack {})
2235 }
2236
2237 async fn handle_change_branch(
2238 this: Entity<Self>,
2239 envelope: TypedEnvelope<proto::GitChangeBranch>,
2240 mut cx: AsyncApp,
2241 ) -> Result<proto::Ack> {
2242 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2243 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2244 let branch_name = envelope.payload.branch_name;
2245
2246 repository_handle
2247 .update(&mut cx, |repository_handle, _| {
2248 repository_handle.change_branch(branch_name)
2249 })?
2250 .await??;
2251
2252 Ok(proto::Ack {})
2253 }
2254
2255 async fn handle_rename_branch(
2256 this: Entity<Self>,
2257 envelope: TypedEnvelope<proto::GitRenameBranch>,
2258 mut cx: AsyncApp,
2259 ) -> Result<proto::Ack> {
2260 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2261 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2262 let branch = envelope.payload.branch;
2263 let new_name = envelope.payload.new_name;
2264
2265 repository_handle
2266 .update(&mut cx, |repository_handle, _| {
2267 repository_handle.rename_branch(branch, new_name)
2268 })?
2269 .await??;
2270
2271 Ok(proto::Ack {})
2272 }
2273
2274 async fn handle_delete_branch(
2275 this: Entity<Self>,
2276 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2277 mut cx: AsyncApp,
2278 ) -> Result<proto::Ack> {
2279 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2280 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2281 let branch_name = envelope.payload.branch_name;
2282
2283 repository_handle
2284 .update(&mut cx, |repository_handle, _| {
2285 repository_handle.delete_branch(branch_name)
2286 })?
2287 .await??;
2288
2289 Ok(proto::Ack {})
2290 }
2291
2292 async fn handle_show(
2293 this: Entity<Self>,
2294 envelope: TypedEnvelope<proto::GitShow>,
2295 mut cx: AsyncApp,
2296 ) -> Result<proto::GitCommitDetails> {
2297 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2298 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2299
2300 let commit = repository_handle
2301 .update(&mut cx, |repository_handle, _| {
2302 repository_handle.show(envelope.payload.commit)
2303 })?
2304 .await??;
2305 Ok(proto::GitCommitDetails {
2306 sha: commit.sha.into(),
2307 message: commit.message.into(),
2308 commit_timestamp: commit.commit_timestamp,
2309 author_email: commit.author_email.into(),
2310 author_name: commit.author_name.into(),
2311 })
2312 }
2313
2314 async fn handle_load_commit_diff(
2315 this: Entity<Self>,
2316 envelope: TypedEnvelope<proto::LoadCommitDiff>,
2317 mut cx: AsyncApp,
2318 ) -> Result<proto::LoadCommitDiffResponse> {
2319 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2320 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2321
2322 let commit_diff = repository_handle
2323 .update(&mut cx, |repository_handle, _| {
2324 repository_handle.load_commit_diff(envelope.payload.commit)
2325 })?
2326 .await??;
2327 Ok(proto::LoadCommitDiffResponse {
2328 files: commit_diff
2329 .files
2330 .into_iter()
2331 .map(|file| proto::CommitFile {
2332 path: file.path.to_proto(),
2333 old_text: file.old_text,
2334 new_text: file.new_text,
2335 })
2336 .collect(),
2337 })
2338 }
2339
2340 async fn handle_file_history(
2341 this: Entity<Self>,
2342 envelope: TypedEnvelope<proto::GitFileHistory>,
2343 mut cx: AsyncApp,
2344 ) -> Result<proto::GitFileHistoryResponse> {
2345 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2346 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2347 let path = RepoPath::from_proto(&envelope.payload.path)?;
2348 let skip = envelope.payload.skip as usize;
2349 let limit = envelope.payload.limit.map(|l| l as usize);
2350
2351 let file_history = repository_handle
2352 .update(&mut cx, |repository_handle, _| {
2353 repository_handle.file_history_paginated(path, skip, limit)
2354 })?
2355 .await??;
2356
2357 Ok(proto::GitFileHistoryResponse {
2358 entries: file_history
2359 .entries
2360 .into_iter()
2361 .map(|entry| proto::FileHistoryEntry {
2362 sha: entry.sha.to_string(),
2363 subject: entry.subject.to_string(),
2364 message: entry.message.to_string(),
2365 commit_timestamp: entry.commit_timestamp,
2366 author_name: entry.author_name.to_string(),
2367 author_email: entry.author_email.to_string(),
2368 })
2369 .collect(),
2370 path: file_history.path.to_proto(),
2371 })
2372 }
2373
2374 async fn handle_reset(
2375 this: Entity<Self>,
2376 envelope: TypedEnvelope<proto::GitReset>,
2377 mut cx: AsyncApp,
2378 ) -> Result<proto::Ack> {
2379 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2380 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2381
2382 let mode = match envelope.payload.mode() {
2383 git_reset::ResetMode::Soft => ResetMode::Soft,
2384 git_reset::ResetMode::Mixed => ResetMode::Mixed,
2385 };
2386
2387 repository_handle
2388 .update(&mut cx, |repository_handle, cx| {
2389 repository_handle.reset(envelope.payload.commit, mode, cx)
2390 })?
2391 .await??;
2392 Ok(proto::Ack {})
2393 }
2394
2395 async fn handle_checkout_files(
2396 this: Entity<Self>,
2397 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2398 mut cx: AsyncApp,
2399 ) -> Result<proto::Ack> {
2400 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2401 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2402 let paths = envelope
2403 .payload
2404 .paths
2405 .iter()
2406 .map(|s| RepoPath::from_proto(s))
2407 .collect::<Result<Vec<_>>>()?;
2408
2409 repository_handle
2410 .update(&mut cx, |repository_handle, cx| {
2411 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2412 })?
2413 .await?;
2414 Ok(proto::Ack {})
2415 }
2416
2417 async fn handle_open_commit_message_buffer(
2418 this: Entity<Self>,
2419 envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
2420 mut cx: AsyncApp,
2421 ) -> Result<proto::OpenBufferResponse> {
2422 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2423 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2424 let buffer = repository
2425 .update(&mut cx, |repository, cx| {
2426 repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
2427 })?
2428 .await?;
2429
2430 let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id())?;
2431 this.update(&mut cx, |this, cx| {
2432 this.buffer_store.update(cx, |buffer_store, cx| {
2433 buffer_store
2434 .create_buffer_for_peer(
2435 &buffer,
2436 envelope.original_sender_id.unwrap_or(envelope.sender_id),
2437 cx,
2438 )
2439 .detach_and_log_err(cx);
2440 })
2441 })?;
2442
2443 Ok(proto::OpenBufferResponse {
2444 buffer_id: buffer_id.to_proto(),
2445 })
2446 }
2447
2448 async fn handle_askpass(
2449 this: Entity<Self>,
2450 envelope: TypedEnvelope<proto::AskPassRequest>,
2451 mut cx: AsyncApp,
2452 ) -> Result<proto::AskPassResponse> {
2453 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2454 let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
2455
2456 let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone())?;
2457 let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
2458 debug_panic!("no askpass found");
2459 anyhow::bail!("no askpass found");
2460 };
2461
2462 let response = askpass
2463 .ask_password(envelope.payload.prompt)
2464 .await
2465 .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;
2466
2467 delegates
2468 .lock()
2469 .insert(envelope.payload.askpass_id, askpass);
2470
        // Note: despite the `IKnowWhatIAmDoingAndIHaveReadTheDocs` marker, the decrypted
        // askpass response is sent back over the wire unencrypted here.
2472 Ok(proto::AskPassResponse {
2473 response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
2474 })
2475 }
2476
2477 async fn handle_check_for_pushed_commits(
2478 this: Entity<Self>,
2479 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2480 mut cx: AsyncApp,
2481 ) -> Result<proto::CheckForPushedCommitsResponse> {
2482 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2483 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2484
2485 let branches = repository_handle
2486 .update(&mut cx, |repository_handle, _| {
2487 repository_handle.check_for_pushed_commits()
2488 })?
2489 .await??;
2490 Ok(proto::CheckForPushedCommitsResponse {
            pushed_to: branches
                .into_iter()
                .map(|branch| branch.to_string())
                .collect(),
2495 })
2496 }
2497
2498 async fn handle_git_diff(
2499 this: Entity<Self>,
2500 envelope: TypedEnvelope<proto::GitDiff>,
2501 mut cx: AsyncApp,
2502 ) -> Result<proto::GitDiffResponse> {
2503 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2504 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2505 let diff_type = match envelope.payload.diff_type() {
2506 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2507 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2508 };
2509
2510 let mut diff = repository_handle
2511 .update(&mut cx, |repository_handle, cx| {
2512 repository_handle.diff(diff_type, cx)
2513 })?
2514 .await??;
2515 const ONE_MB: usize = 1_000_000;
2516 if diff.len() > ONE_MB {
2517 diff = diff.chars().take(ONE_MB).collect()
2518 }
2519
2520 Ok(proto::GitDiffResponse { diff })
2521 }
2522
2523 async fn handle_tree_diff(
2524 this: Entity<Self>,
2525 request: TypedEnvelope<proto::GetTreeDiff>,
2526 mut cx: AsyncApp,
2527 ) -> Result<proto::GetTreeDiffResponse> {
2528 let repository_id = RepositoryId(request.payload.repository_id);
2529 let diff_type = if request.payload.is_merge {
2530 DiffTreeType::MergeBase {
2531 base: request.payload.base.into(),
2532 head: request.payload.head.into(),
2533 }
2534 } else {
2535 DiffTreeType::Since {
2536 base: request.payload.base.into(),
2537 head: request.payload.head.into(),
2538 }
2539 };
2540
2541 let diff = this
2542 .update(&mut cx, |this, cx| {
2543 let repository = this.repositories().get(&repository_id)?;
2544 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2545 })?
2546 .context("missing repository")?
2547 .await??;
2548
2549 Ok(proto::GetTreeDiffResponse {
2550 entries: diff
2551 .entries
2552 .into_iter()
2553 .map(|(path, status)| proto::TreeDiffStatus {
2554 path: path.as_ref().to_proto(),
2555 status: match status {
                        TreeDiffStatus::Added => proto::tree_diff_status::Status::Added.into(),
2557 TreeDiffStatus::Modified { .. } => {
2558 proto::tree_diff_status::Status::Modified.into()
2559 }
2560 TreeDiffStatus::Deleted { .. } => {
2561 proto::tree_diff_status::Status::Deleted.into()
2562 }
2563 },
2564 oid: match status {
2565 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2566 Some(old.to_string())
2567 }
2568 TreeDiffStatus::Added => None,
2569 },
2570 })
2571 .collect(),
2572 })
2573 }
2574
2575 async fn handle_get_blob_content(
2576 this: Entity<Self>,
2577 request: TypedEnvelope<proto::GetBlobContent>,
2578 mut cx: AsyncApp,
2579 ) -> Result<proto::GetBlobContentResponse> {
2580 let oid = git::Oid::from_str(&request.payload.oid)?;
2581 let repository_id = RepositoryId(request.payload.repository_id);
2582 let content = this
2583 .update(&mut cx, |this, cx| {
2584 let repository = this.repositories().get(&repository_id)?;
2585 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2586 })?
2587 .context("missing repository")?
2588 .await?;
2589 Ok(proto::GetBlobContentResponse { content })
2590 }
2591
2592 async fn handle_open_unstaged_diff(
2593 this: Entity<Self>,
2594 request: TypedEnvelope<proto::OpenUnstagedDiff>,
2595 mut cx: AsyncApp,
2596 ) -> Result<proto::OpenUnstagedDiffResponse> {
2597 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2598 let diff = this
2599 .update(&mut cx, |this, cx| {
2600 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2601 Some(this.open_unstaged_diff(buffer, cx))
2602 })?
2603 .context("missing buffer")?
2604 .await?;
2605 this.update(&mut cx, |this, _| {
2606 let shared_diffs = this
2607 .shared_diffs
2608 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2609 .or_default();
2610 shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
2611 })?;
2612 let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx))?;
2613 Ok(proto::OpenUnstagedDiffResponse { staged_text })
2614 }
2615
2616 async fn handle_open_uncommitted_diff(
2617 this: Entity<Self>,
2618 request: TypedEnvelope<proto::OpenUncommittedDiff>,
2619 mut cx: AsyncApp,
2620 ) -> Result<proto::OpenUncommittedDiffResponse> {
2621 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2622 let diff = this
2623 .update(&mut cx, |this, cx| {
2624 let buffer = this.buffer_store.read(cx).get(buffer_id)?;
2625 Some(this.open_uncommitted_diff(buffer, cx))
2626 })?
2627 .context("missing buffer")?
2628 .await?;
2629 this.update(&mut cx, |this, _| {
2630 let shared_diffs = this
2631 .shared_diffs
2632 .entry(request.original_sender_id.unwrap_or(request.sender_id))
2633 .or_default();
2634 shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
2635 })?;
2636 diff.read_with(&cx, |diff, cx| {
2637 use proto::open_uncommitted_diff_response::Mode;
2638
2639 let unstaged_diff = diff.secondary_diff();
2640 let index_snapshot = unstaged_diff.and_then(|diff| {
2641 let diff = diff.read(cx);
2642 diff.base_text_exists().then(|| diff.base_text(cx))
2643 });
2644
2645 let mode;
2646 let staged_text;
2647 let committed_text;
2648 if diff.base_text_exists() {
2649 let committed_snapshot = diff.base_text(cx);
2650 committed_text = Some(committed_snapshot.text());
2651 if let Some(index_text) = index_snapshot {
2652 if index_text.remote_id() == committed_snapshot.remote_id() {
2653 mode = Mode::IndexMatchesHead;
2654 staged_text = None;
2655 } else {
2656 mode = Mode::IndexAndHead;
2657 staged_text = Some(index_text.text());
2658 }
2659 } else {
2660 mode = Mode::IndexAndHead;
2661 staged_text = None;
2662 }
2663 } else {
2664 mode = Mode::IndexAndHead;
2665 committed_text = None;
2666 staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
2667 }
2668
2669 proto::OpenUncommittedDiffResponse {
2670 committed_text,
2671 staged_text,
2672 mode: mode.into(),
2673 }
2674 })
2675 }
2676
2677 async fn handle_update_diff_bases(
2678 this: Entity<Self>,
2679 request: TypedEnvelope<proto::UpdateDiffBases>,
2680 mut cx: AsyncApp,
2681 ) -> Result<()> {
2682 let buffer_id = BufferId::new(request.payload.buffer_id)?;
2683 this.update(&mut cx, |this, cx| {
2684 if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
2685 && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
2686 {
2687 let buffer = buffer.read(cx).text_snapshot();
2688 diff_state.update(cx, |diff_state, cx| {
2689 diff_state.handle_base_texts_updated(buffer, request.payload, cx);
2690 })
2691 }
2692 })
2693 }
2694
2695 async fn handle_blame_buffer(
2696 this: Entity<Self>,
2697 envelope: TypedEnvelope<proto::BlameBuffer>,
2698 mut cx: AsyncApp,
2699 ) -> Result<proto::BlameBufferResponse> {
2700 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2701 let version = deserialize_version(&envelope.payload.version);
2702 let buffer = this.read_with(&cx, |this, cx| {
2703 this.buffer_store.read(cx).get_existing(buffer_id)
2704 })??;
2705 buffer
2706 .update(&mut cx, |buffer, _| {
2707 buffer.wait_for_version(version.clone())
2708 })?
2709 .await?;
2710 let blame = this
2711 .update(&mut cx, |this, cx| {
2712 this.blame_buffer(&buffer, Some(version), cx)
2713 })?
2714 .await?;
2715 Ok(serialize_blame_buffer_response(blame))
2716 }
2717
2718 async fn handle_get_permalink_to_line(
2719 this: Entity<Self>,
2720 envelope: TypedEnvelope<proto::GetPermalinkToLine>,
2721 mut cx: AsyncApp,
2722 ) -> Result<proto::GetPermalinkToLineResponse> {
2723 let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
2724 // let version = deserialize_version(&envelope.payload.version);
2725 let selection = {
2726 let proto_selection = envelope
2727 .payload
2728 .selection
                .context("no selection provided to get a permalink for")?;
2730 proto_selection.start as u32..proto_selection.end as u32
2731 };
2732 let buffer = this.read_with(&cx, |this, cx| {
2733 this.buffer_store.read(cx).get_existing(buffer_id)
2734 })??;
2735 let permalink = this
2736 .update(&mut cx, |this, cx| {
2737 this.get_permalink_to_line(&buffer, selection, cx)
2738 })?
2739 .await?;
2740 Ok(proto::GetPermalinkToLineResponse {
2741 permalink: permalink.to_string(),
2742 })
2743 }
2744
2745 fn repository_for_request(
2746 this: &Entity<Self>,
2747 id: RepositoryId,
2748 cx: &mut AsyncApp,
2749 ) -> Result<Entity<Repository>> {
2750 this.read_with(cx, |this, _| {
2751 this.repositories
2752 .get(&id)
2753 .context("missing repository handle")
2754 .cloned()
2755 })?
2756 }
2757
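    /// Returns a point-in-time snapshot of every repository tracked by this store,
    /// keyed by its `RepositoryId`.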
2758 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
2759 self.repositories
2760 .iter()
2761 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
2762 .collect()
2763 }
2764
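    /// Groups the updated worktree entries by the repository whose working directory
    /// contains them, converting each absolute path into a repository-relative
    /// `RepoPath`. Paths that fall inside nested repositories are attributed only to
    /// the innermost repository. The grouping runs on the background executor.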
2765 fn process_updated_entries(
2766 &self,
2767 worktree: &Entity<Worktree>,
2768 updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
2769 cx: &mut App,
2770 ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
2771 let path_style = worktree.read(cx).path_style();
2772 let mut repo_paths = self
2773 .repositories
2774 .values()
2775 .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
2776 .collect::<Vec<_>>();
2777 let mut entries: Vec<_> = updated_entries
2778 .iter()
2779 .map(|(path, _, _)| path.clone())
2780 .collect();
2781 entries.sort();
2782 let worktree = worktree.read(cx);
2783
2784 let entries = entries
2785 .into_iter()
2786 .map(|path| worktree.absolutize(&path))
2787 .collect::<Arc<[_]>>();
2788
2789 let executor = cx.background_executor().clone();
2790 cx.background_executor().spawn(async move {
2791 repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
2792 let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
2793 let mut tasks = FuturesOrdered::new();
2794 for (repo_path, repo) in repo_paths.into_iter().rev() {
2795 let entries = entries.clone();
2796 let task = executor.spawn(async move {
2797 // Find all repository paths that belong to this repo
2798 let mut ix = entries.partition_point(|path| path < &*repo_path);
2799 if ix == entries.len() {
2800 return None;
2801 };
2802
2803 let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a contiguous range.
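                    // For example (hypothetical paths): in the sorted entries
                    // ["/a/x", "/repo/1.txt", "/repo/sub/2.txt", "/zzz"], the paths under
                    // "/repo" form one contiguous run starting at the partition point above.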
2805 while let Some(path) = entries.get(ix)
2806 && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
2807 &repo_path, path, path_style,
2808 )
2809 {
2810 paths.push((repo_path, ix));
2811 ix += 1;
2812 }
2813 if paths.is_empty() {
2814 None
2815 } else {
2816 Some((repo, paths))
2817 }
2818 });
2819 tasks.push_back(task);
2820 }
2821
2822 // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
2823 let mut path_was_used = vec![false; entries.len()];
2824 let tasks = tasks.collect::<Vec<_>>().await;
            // The tasks were built by visiting repositories in reverse-sorted order, so more
            // deeply nested (more specific) repositories are processed first.
            // We always want to assign a path to its innermost repository.
2827 for t in tasks {
2828 let Some((repo, paths)) = t else {
2829 continue;
2830 };
2831 let entry = paths_by_git_repo.entry(repo).or_default();
2832 for (repo_path, ix) in paths {
2833 if path_was_used[ix] {
2834 continue;
2835 }
2836 path_was_used[ix] = true;
2837 entry.push(repo_path);
2838 }
2839 }
2840
2841 paths_by_git_repo
2842 })
2843 }
2844}
2845
2846impl BufferGitState {
2847 fn new(_git_store: WeakEntity<GitStore>) -> Self {
2848 Self {
2849 unstaged_diff: Default::default(),
2850 uncommitted_diff: Default::default(),
2851 recalculate_diff_task: Default::default(),
2852 language: Default::default(),
2853 language_registry: Default::default(),
2854 recalculating_tx: postage::watch::channel_with(false).0,
2855 hunk_staging_operation_count: 0,
2856 hunk_staging_operation_count_as_of_write: 0,
2857 head_text: Default::default(),
2858 index_text: Default::default(),
2859 head_changed: Default::default(),
2860 index_changed: Default::default(),
2861 language_changed: Default::default(),
2862 conflict_updated_futures: Default::default(),
2863 conflict_set: Default::default(),
2864 reparse_conflict_markers_task: Default::default(),
2865 }
2866 }
2867
2868 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
2869 self.language = buffer.read(cx).language().cloned();
2870 self.language_changed = true;
2871 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
2872 }
2873
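    /// Re-parses git conflict markers in the buffer and updates the associated
    /// `ConflictSet`, if any. The returned receiver resolves once the conflict set has
    /// been updated; when there is no conflict set, or it currently has no conflict,
    /// no reparse is scheduled and the sender is simply dropped.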
2874 fn reparse_conflict_markers(
2875 &mut self,
2876 buffer: text::BufferSnapshot,
2877 cx: &mut Context<Self>,
2878 ) -> oneshot::Receiver<()> {
2879 let (tx, rx) = oneshot::channel();
2880
2881 let Some(conflict_set) = self
2882 .conflict_set
2883 .as_ref()
2884 .and_then(|conflict_set| conflict_set.upgrade())
2885 else {
2886 return rx;
2887 };
2888
2889 let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
2890 if conflict_set.has_conflict {
2891 Some(conflict_set.snapshot())
2892 } else {
2893 None
2894 }
2895 });
2896
2897 if let Some(old_snapshot) = old_snapshot {
2898 self.conflict_updated_futures.push(tx);
2899 self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
2900 let (snapshot, changed_range) = cx
2901 .background_spawn(async move {
2902 let new_snapshot = ConflictSet::parse(&buffer);
2903 let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
2904 (new_snapshot, changed_range)
2905 })
2906 .await;
2907 this.update(cx, |this, cx| {
2908 if let Some(conflict_set) = &this.conflict_set {
2909 conflict_set
2910 .update(cx, |conflict_set, cx| {
2911 conflict_set.set_snapshot(snapshot, changed_range, cx);
2912 })
2913 .ok();
2914 }
2915 let futures = std::mem::take(&mut this.conflict_updated_futures);
2916 for tx in futures {
2917 tx.send(()).ok();
2918 }
2919 })
2920 }))
2921 }
2922
2923 rx
2924 }
2925
2926 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
2927 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
2928 }
2929
2930 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
2931 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
2932 }
2933
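    /// Applies new index/HEAD base texts received via `proto::UpdateDiffBases` and
    /// triggers a diff recalculation against the given buffer snapshot.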
2934 fn handle_base_texts_updated(
2935 &mut self,
2936 buffer: text::BufferSnapshot,
2937 message: proto::UpdateDiffBases,
2938 cx: &mut Context<Self>,
2939 ) {
2940 use proto::update_diff_bases::Mode;
2941
2942 let Some(mode) = Mode::from_i32(message.mode) else {
2943 return;
2944 };
2945
2946 let diff_bases_change = match mode {
2947 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
2948 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
2949 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
2950 Mode::IndexAndHead => DiffBasesChange::SetEach {
2951 index: message.staged_text,
2952 head: message.committed_text,
2953 },
2954 };
2955
2956 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
2957 }
2958
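    /// If a diff recalculation is currently in flight, returns a future that resolves
    /// once it finishes; otherwise returns `None`.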
2959 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
2960 if *self.recalculating_tx.borrow() {
2961 let mut rx = self.recalculating_tx.subscribe();
2962 Some(async move {
2963 loop {
2964 let is_recalculating = rx.recv().await;
2965 if is_recalculating != Some(true) {
2966 break;
2967 }
2968 }
2969 })
2970 } else {
2971 None
2972 }
2973 }
2974
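    /// Stores new base texts for the index and/or HEAD, normalizing their line endings,
    /// and then recalculates the diffs against the given buffer snapshot.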
2975 fn diff_bases_changed(
2976 &mut self,
2977 buffer: text::BufferSnapshot,
2978 diff_bases_change: Option<DiffBasesChange>,
2979 cx: &mut Context<Self>,
2980 ) {
2981 match diff_bases_change {
2982 Some(DiffBasesChange::SetIndex(index)) => {
2983 self.index_text = index.map(|mut index| {
2984 text::LineEnding::normalize(&mut index);
2985 Arc::from(index.as_str())
2986 });
2987 self.index_changed = true;
2988 }
2989 Some(DiffBasesChange::SetHead(head)) => {
2990 self.head_text = head.map(|mut head| {
2991 text::LineEnding::normalize(&mut head);
2992 Arc::from(head.as_str())
2993 });
2994 self.head_changed = true;
2995 }
2996 Some(DiffBasesChange::SetBoth(text)) => {
2997 let text = text.map(|mut text| {
2998 text::LineEnding::normalize(&mut text);
2999 Arc::from(text.as_str())
3000 });
3001 self.head_text = text.clone();
3002 self.index_text = text;
3003 self.head_changed = true;
3004 self.index_changed = true;
3005 }
3006 Some(DiffBasesChange::SetEach { index, head }) => {
3007 self.index_text = index.map(|mut index| {
3008 text::LineEnding::normalize(&mut index);
3009 Arc::from(index.as_str())
3010 });
3011 self.index_changed = true;
3012 self.head_text = head.map(|mut head| {
3013 text::LineEnding::normalize(&mut head);
3014 Arc::from(head.as_str())
3015 });
3016 self.head_changed = true;
3017 }
3018 None => {}
3019 }
3020
3021 self.recalculate_diffs(buffer, cx)
3022 }
3023
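    /// Recomputes the unstaged and uncommitted diffs for `buffer` on a spawned task.
    /// When the index matches HEAD, the unstaged diff snapshot is reused for the
    /// uncommitted diff. The recalculation is abandoned if hunk staging operations are
    /// still settling, so that pending state isn't invalidated prematurely.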
3024 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3025 *self.recalculating_tx.borrow_mut() = true;
3026
3027 let language = self.language.clone();
3028 let language_registry = self.language_registry.clone();
3029 let unstaged_diff = self.unstaged_diff();
3030 let uncommitted_diff = self.uncommitted_diff();
3031 let head = self.head_text.clone();
3032 let index = self.index_text.clone();
3033 let index_changed = self.index_changed;
3034 let head_changed = self.head_changed;
3035 let language_changed = self.language_changed;
3036 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3037 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3038 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3039 (None, None) => true,
3040 _ => false,
3041 };
3042 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3043 log::debug!(
3044 "start recalculating diffs for buffer {}",
3045 buffer.remote_id()
3046 );
3047
3048 let mut new_unstaged_diff = None;
3049 if let Some(unstaged_diff) = &unstaged_diff {
3050 new_unstaged_diff = Some(
3051 cx.update(|cx| {
3052 unstaged_diff.read(cx).update_diff(
3053 buffer.clone(),
3054 index,
3055 index_changed,
3056 language.clone(),
3057 cx,
3058 )
3059 })?
3060 .await,
3061 );
3062 }
3063
3064 // Dropping BufferDiff can be expensive, so yield back to the event loop
3065 // for a bit
3066 yield_now().await;
3067
3068 let mut new_uncommitted_diff = None;
3069 if let Some(uncommitted_diff) = &uncommitted_diff {
3070 new_uncommitted_diff = if index_matches_head {
3071 new_unstaged_diff.clone()
3072 } else {
3073 Some(
3074 cx.update(|cx| {
3075 uncommitted_diff.read(cx).update_diff(
3076 buffer.clone(),
3077 head,
3078 head_changed,
3079 language.clone(),
3080 cx,
3081 )
3082 })?
3083 .await,
3084 )
3085 }
3086 }
3087
3088 // Dropping BufferDiff can be expensive, so yield back to the event loop
3089 // for a bit
3090 yield_now().await;
3091
3092 let cancel = this.update(cx, |this, _| {
3093 // This checks whether all pending stage/unstage operations
3094 // have quiesced (i.e. both the corresponding write and the
3095 // read of that write have completed). If not, then we cancel
3096 // this recalculation attempt to avoid invalidating pending
3097 // state too quickly; another recalculation will come along
3098 // later and clear the pending state once the state of the index has settled.
3099 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3100 *this.recalculating_tx.borrow_mut() = false;
3101 true
3102 } else {
3103 false
3104 }
3105 })?;
3106 if cancel {
3107 log::debug!(
3108 concat!(
3109 "aborting recalculating diffs for buffer {}",
3110 "due to subsequent hunk operations",
3111 ),
3112 buffer.remote_id()
3113 );
3114 return Ok(());
3115 }
3116
3117 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3118 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3119 {
3120 unstaged_diff.update(cx, |diff, cx| {
3121 if language_changed {
3122 diff.language_changed(cx);
3123 }
3124 diff.set_snapshot(new_unstaged_diff, &buffer, index_changed, cx)
3125 })?
3126 } else {
3127 None
3128 };
3129
3130 yield_now().await;
3131
3132 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3133 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3134 {
3135 uncommitted_diff.update(cx, |diff, cx| {
3136 if language_changed {
3137 diff.language_changed(cx);
3138 }
3139 diff.set_snapshot_with_secondary(
3140 new_uncommitted_diff,
3141 &buffer,
3142 unstaged_changed_range,
3143 head_changed,
3144 true,
3145 cx,
3146 );
3147 })?;
3148 }
3149
3150 log::debug!(
3151 "finished recalculating diffs for buffer {}",
3152 buffer.remote_id()
3153 );
3154
3155 if let Some(this) = this.upgrade() {
3156 this.update(cx, |this, _| {
3157 this.index_changed = false;
3158 this.head_changed = false;
3159 this.language_changed = false;
3160 *this.recalculating_tx.borrow_mut() = false;
3161 })?;
3162 }
3163
3164 Ok(())
3165 }));
3166 }
3167}
3168
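/// Builds an `AskPassDelegate` that forwards credential prompts to the downstream
/// client as `proto::AskPassRequest`s, wraps the returned secret in an
/// `EncryptedPassword`, and zeroizes the plaintext response afterwards.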
3169fn make_remote_delegate(
3170 this: Entity<GitStore>,
3171 project_id: u64,
3172 repository_id: RepositoryId,
3173 askpass_id: u64,
3174 cx: &mut AsyncApp,
3175) -> AskPassDelegate {
3176 AskPassDelegate::new(cx, move |prompt, tx, cx| {
3177 this.update(cx, |this, cx| {
3178 let Some((client, _)) = this.downstream_client() else {
3179 return;
3180 };
3181 let response = client.request(proto::AskPassRequest {
3182 project_id,
3183 repository_id: repository_id.to_proto(),
3184 askpass_id,
3185 prompt,
3186 });
3187 cx.spawn(async move |_, _| {
3188 let mut response = response.await?.response;
3189 tx.send(EncryptedPassword::try_from(response.as_ref())?)
3190 .ok();
3191 response.zeroize();
3192 anyhow::Ok(())
3193 })
3194 .detach_and_log_err(cx);
3195 })
3196 .log_err();
3197 })
3198}
3199
3200impl RepositoryId {
3201 pub fn to_proto(self) -> u64 {
3202 self.0
3203 }
3204
3205 pub fn from_proto(id: u64) -> Self {
3206 RepositoryId(id)
3207 }
3208}
3209
3210impl RepositorySnapshot {
3211 fn empty(id: RepositoryId, work_directory_abs_path: Arc<Path>, path_style: PathStyle) -> Self {
3212 Self {
3213 id,
3214 statuses_by_path: Default::default(),
3215 work_directory_abs_path,
3216 branch: None,
3217 head_commit: None,
3218 scan_id: 0,
3219 merge: Default::default(),
3220 remote_origin_url: None,
3221 remote_upstream_url: None,
3222 stash_entries: Default::default(),
3223 path_style,
3224 }
3225 }
3226
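    /// Builds the full `UpdateRepository` message sent when a downstream client first
    /// learns about this repository: every status entry is included as an update and
    /// nothing is marked as removed.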
3227 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3228 proto::UpdateRepository {
3229 branch_summary: self.branch.as_ref().map(branch_to_proto),
3230 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3231 updated_statuses: self
3232 .statuses_by_path
3233 .iter()
3234 .map(|entry| entry.to_proto())
3235 .collect(),
3236 removed_statuses: Default::default(),
3237 current_merge_conflicts: self
3238 .merge
3239 .conflicted_paths
3240 .iter()
3241 .map(|repo_path| repo_path.to_proto())
3242 .collect(),
3243 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3244 project_id,
3245 id: self.id.to_proto(),
3246 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3247 entry_ids: vec![self.id.to_proto()],
3248 scan_id: self.scan_id,
3249 is_last_update: true,
3250 stash_entries: self
3251 .stash_entries
3252 .entries
3253 .iter()
3254 .map(stash_to_proto)
3255 .collect(),
3256 }
3257 }
3258
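    /// Computes a delta between `old` and `self` by walking both status trees in
    /// parallel (they are ordered by repo path), emitting updated entries for added or
    /// changed paths and removals for paths that are no longer present.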
3259 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3260 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3261 let mut removed_statuses: Vec<String> = Vec::new();
3262
3263 let mut new_statuses = self.statuses_by_path.iter().peekable();
3264 let mut old_statuses = old.statuses_by_path.iter().peekable();
3265
3266 let mut current_new_entry = new_statuses.next();
3267 let mut current_old_entry = old_statuses.next();
3268 loop {
3269 match (current_new_entry, current_old_entry) {
3270 (Some(new_entry), Some(old_entry)) => {
3271 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3272 Ordering::Less => {
3273 updated_statuses.push(new_entry.to_proto());
3274 current_new_entry = new_statuses.next();
3275 }
3276 Ordering::Equal => {
3277 if new_entry.status != old_entry.status {
3278 updated_statuses.push(new_entry.to_proto());
3279 }
3280 current_old_entry = old_statuses.next();
3281 current_new_entry = new_statuses.next();
3282 }
3283 Ordering::Greater => {
3284 removed_statuses.push(old_entry.repo_path.to_proto());
3285 current_old_entry = old_statuses.next();
3286 }
3287 }
3288 }
3289 (None, Some(old_entry)) => {
3290 removed_statuses.push(old_entry.repo_path.to_proto());
3291 current_old_entry = old_statuses.next();
3292 }
3293 (Some(new_entry), None) => {
3294 updated_statuses.push(new_entry.to_proto());
3295 current_new_entry = new_statuses.next();
3296 }
3297 (None, None) => break,
3298 }
3299 }
3300
3301 proto::UpdateRepository {
3302 branch_summary: self.branch.as_ref().map(branch_to_proto),
3303 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3304 updated_statuses,
3305 removed_statuses,
3306 current_merge_conflicts: self
3307 .merge
3308 .conflicted_paths
3309 .iter()
3310 .map(|path| path.to_proto())
3311 .collect(),
3312 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3313 project_id,
3314 id: self.id.to_proto(),
3315 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3316 entry_ids: vec![],
3317 scan_id: self.scan_id,
3318 is_last_update: true,
3319 stash_entries: self
3320 .stash_entries
3321 .entries
3322 .iter()
3323 .map(stash_to_proto)
3324 .collect(),
3325 }
3326 }
3327
3328 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3329 self.statuses_by_path.iter().cloned()
3330 }
3331
3332 pub fn status_summary(&self) -> GitSummary {
3333 self.statuses_by_path.summary().item_summary
3334 }
3335
3336 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3337 self.statuses_by_path
3338 .get(&PathKey(path.as_ref().clone()), ())
3339 .cloned()
3340 }
3341
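    /// Converts an absolute path into a path relative to this repository's working
    /// directory, returning `None` if the path lies outside of it. For example
    /// (hypothetical paths), with a work directory of `/home/me/proj`, the absolute
    /// path `/home/me/proj/src/lib.rs` maps to the repo path `src/lib.rs`.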
3342 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3343 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3344 }
3345
3346 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3347 self.path_style
3348 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3349 .unwrap()
3350 .into()
3351 }
3352
3353 #[inline]
3354 fn abs_path_to_repo_path_inner(
3355 work_directory_abs_path: &Path,
3356 abs_path: &Path,
3357 path_style: PathStyle,
3358 ) -> Option<RepoPath> {
3359 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3360 Some(RepoPath::from_rel_path(&rel_path))
3361 }
3362
3363 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3364 self.merge.conflicted_paths.contains(repo_path)
3365 }
3366
3367 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3368 let had_conflict_on_last_merge_head_change =
3369 self.merge.conflicted_paths.contains(repo_path);
3370 let has_conflict_currently = self
3371 .status_for_path(repo_path)
3372 .is_some_and(|entry| entry.status.is_conflicted());
3373 had_conflict_on_last_merge_head_change || has_conflict_currently
3374 }
3375
3376 /// This is the name that will be displayed in the repository selector for this repository.
3377 pub fn display_name(&self) -> SharedString {
3378 self.work_directory_abs_path
3379 .file_name()
3380 .unwrap_or_default()
3381 .to_string_lossy()
3382 .to_string()
3383 .into()
3384 }
3385}
3386
3387pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3388 proto::StashEntry {
3389 oid: entry.oid.as_bytes().to_vec(),
3390 message: entry.message.clone(),
3391 branch: entry.branch.clone(),
3392 index: entry.index as u64,
3393 timestamp: entry.timestamp,
3394 }
3395}
3396
3397pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3398 Ok(StashEntry {
3399 oid: Oid::from_bytes(&entry.oid)?,
3400 message: entry.message.clone(),
3401 index: entry.index as usize,
3402 branch: entry.branch.clone(),
3403 timestamp: entry.timestamp,
3404 })
3405}
3406
3407impl MergeDetails {
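    /// Loads the repository's merge-related heads (MERGE_HEAD, CHERRY_PICK_HEAD,
    /// REBASE_HEAD, etc.) and, when those heads have changed, recomputes the set of
    /// conflicted paths from the current status. Returns the merge details together
    /// with a flag indicating whether the recorded merge heads changed.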
3408 async fn load(
3409 backend: &Arc<dyn GitRepository>,
3410 status: &SumTree<StatusEntry>,
3411 prev_snapshot: &RepositorySnapshot,
3412 ) -> Result<(MergeDetails, bool)> {
3413 log::debug!("load merge details");
3414 let message = backend.merge_message().await;
3415 let heads = backend
3416 .revparse_batch(vec![
3417 "MERGE_HEAD".into(),
3418 "CHERRY_PICK_HEAD".into(),
3419 "REBASE_HEAD".into(),
3420 "REVERT_HEAD".into(),
3421 "APPLY_HEAD".into(),
3422 ])
3423 .await
3424 .log_err()
3425 .unwrap_or_default()
3426 .into_iter()
3427 .map(|opt| opt.map(SharedString::from))
3428 .collect::<Vec<_>>();
3429 let merge_heads_changed = heads != prev_snapshot.merge.heads;
3430 let conflicted_paths = if merge_heads_changed {
3431 let current_conflicted_paths = TreeSet::from_ordered_entries(
3432 status
3433 .iter()
3434 .filter(|entry| entry.status.is_conflicted())
3435 .map(|entry| entry.repo_path.clone()),
3436 );
3437
3438 // It can happen that we run a scan while a lengthy merge is in progress
3439 // that will eventually result in conflicts, but before those conflicts
3440 // are reported by `git status`. Since for the moment we only care about
3441 // the merge heads state for the purposes of tracking conflicts, don't update
3442 // this state until we see some conflicts.
3443 if heads.iter().any(Option::is_some)
3444 && !prev_snapshot.merge.heads.iter().any(Option::is_some)
3445 && current_conflicted_paths.is_empty()
3446 {
3447 log::debug!("not updating merge heads because no conflicts found");
3448 return Ok((
3449 MergeDetails {
3450 message: message.map(SharedString::from),
3451 ..prev_snapshot.merge.clone()
3452 },
3453 false,
3454 ));
3455 }
3456
3457 current_conflicted_paths
3458 } else {
3459 prev_snapshot.merge.conflicted_paths.clone()
3460 };
3461 let details = MergeDetails {
3462 conflicted_paths,
3463 message: message.map(SharedString::from),
3464 heads,
3465 };
3466 Ok((details, merge_heads_changed))
3467 }
3468}
3469
3470impl Repository {
3471 pub fn snapshot(&self) -> RepositorySnapshot {
3472 self.snapshot.clone()
3473 }
3474
3475 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3476 self.pending_ops.iter().cloned()
3477 }
3478
3479 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3480 self.pending_ops.summary().clone()
3481 }
3482
3483 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3484 self.pending_ops
3485 .get(&PathKey(path.as_ref().clone()), ())
3486 .cloned()
3487 }
3488
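    /// Creates a repository backed by a local git working directory. The backend state
    /// is initialized asynchronously, and a local git worker is spawned to service
    /// queued jobs.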
3489 fn local(
3490 id: RepositoryId,
3491 work_directory_abs_path: Arc<Path>,
3492 dot_git_abs_path: Arc<Path>,
3493 project_environment: WeakEntity<ProjectEnvironment>,
3494 fs: Arc<dyn Fs>,
3495 git_store: WeakEntity<GitStore>,
3496 cx: &mut Context<Self>,
3497 ) -> Self {
3498 let snapshot =
3499 RepositorySnapshot::empty(id, work_directory_abs_path.clone(), PathStyle::local());
3500 let state = cx
3501 .spawn(async move |_, cx| {
3502 LocalRepositoryState::new(
3503 work_directory_abs_path,
3504 dot_git_abs_path,
3505 project_environment,
3506 fs,
3507 cx,
3508 )
3509 .await
3510 .map_err(|err| err.to_string())
3511 })
3512 .shared();
3513 let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
3514 let state = cx
3515 .spawn(async move |_, _| {
3516 let state = state.await?;
3517 Ok(RepositoryState::Local(state))
3518 })
3519 .shared();
3520
3521 Repository {
3522 this: cx.weak_entity(),
3523 git_store,
3524 snapshot,
3525 pending_ops: Default::default(),
3526 repository_state: state,
3527 commit_message_buffer: None,
3528 askpass_delegates: Default::default(),
3529 paths_needing_status_update: Default::default(),
3530 latest_askpass_id: 0,
3531 job_sender,
3532 job_id: 0,
3533 active_jobs: Default::default(),
3534 }
3535 }
3536
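    /// Creates a repository whose git operations are forwarded over `client` to the
    /// remote side that actually owns the working directory.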
3537 fn remote(
3538 id: RepositoryId,
3539 work_directory_abs_path: Arc<Path>,
3540 path_style: PathStyle,
3541 project_id: ProjectId,
3542 client: AnyProtoClient,
3543 git_store: WeakEntity<GitStore>,
3544 cx: &mut Context<Self>,
3545 ) -> Self {
3546 let snapshot = RepositorySnapshot::empty(id, work_directory_abs_path, path_style);
3547 let repository_state = RemoteRepositoryState { project_id, client };
3548 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3549 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3550 Self {
3551 this: cx.weak_entity(),
3552 snapshot,
3553 commit_message_buffer: None,
3554 git_store,
3555 pending_ops: Default::default(),
3556 paths_needing_status_update: Default::default(),
3557 job_sender,
3558 repository_state,
3559 askpass_delegates: Default::default(),
3560 latest_askpass_id: 0,
3561 active_jobs: Default::default(),
3562 job_id: 0,
3563 }
3564 }
3565
3566 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3567 self.git_store.upgrade()
3568 }
3569
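    /// For every open buffer associated with this repository, reloads the index and
    /// HEAD base texts from the backend, forwards any changes to the downstream client,
    /// and schedules diff recalculation for the affected buffers.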
3570 fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
3571 let this = cx.weak_entity();
3572 let git_store = self.git_store.clone();
3573 let _ = self.send_keyed_job(
3574 Some(GitJobKey::ReloadBufferDiffBases),
3575 None,
3576 |state, mut cx| async move {
3577 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
3578 log::error!("tried to recompute diffs for a non-local repository");
3579 return Ok(());
3580 };
3581
3582 let Some(this) = this.upgrade() else {
3583 return Ok(());
3584 };
3585
3586 let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
3587 git_store.update(cx, |git_store, cx| {
3588 git_store
3589 .diffs
3590 .iter()
3591 .filter_map(|(buffer_id, diff_state)| {
3592 let buffer_store = git_store.buffer_store.read(cx);
3593 let buffer = buffer_store.get(*buffer_id)?;
3594 let file = File::from_dyn(buffer.read(cx).file())?;
3595 let abs_path = file.worktree.read(cx).absolutize(&file.path);
3596 let repo_path = this.abs_path_to_repo_path(&abs_path)?;
3597 log::debug!(
3598 "start reload diff bases for repo path {}",
3599 repo_path.as_unix_str()
3600 );
3601 diff_state.update(cx, |diff_state, _| {
3602 let has_unstaged_diff = diff_state
3603 .unstaged_diff
3604 .as_ref()
3605 .is_some_and(|diff| diff.is_upgradable());
3606 let has_uncommitted_diff = diff_state
3607 .uncommitted_diff
3608 .as_ref()
3609 .is_some_and(|set| set.is_upgradable());
3610
3611 Some((
3612 buffer,
3613 repo_path,
3614 has_unstaged_diff.then(|| diff_state.index_text.clone()),
3615 has_uncommitted_diff.then(|| diff_state.head_text.clone()),
3616 ))
3617 })
3618 })
3619 .collect::<Vec<_>>()
3620 })
3621 })??;
3622
3623 let buffer_diff_base_changes = cx
3624 .background_spawn(async move {
3625 let mut changes = Vec::new();
3626 for (buffer, repo_path, current_index_text, current_head_text) in
3627 &repo_diff_state_updates
3628 {
3629 let index_text = if current_index_text.is_some() {
3630 backend.load_index_text(repo_path.clone()).await
3631 } else {
3632 None
3633 };
3634 let head_text = if current_head_text.is_some() {
3635 backend.load_committed_text(repo_path.clone()).await
3636 } else {
3637 None
3638 };
3639
3640 let change =
3641 match (current_index_text.as_ref(), current_head_text.as_ref()) {
3642 (Some(current_index), Some(current_head)) => {
3643 let index_changed =
3644 index_text.as_deref() != current_index.as_deref();
3645 let head_changed =
3646 head_text.as_deref() != current_head.as_deref();
3647 if index_changed && head_changed {
3648 if index_text == head_text {
3649 Some(DiffBasesChange::SetBoth(head_text))
3650 } else {
3651 Some(DiffBasesChange::SetEach {
3652 index: index_text,
3653 head: head_text,
3654 })
3655 }
3656 } else if index_changed {
3657 Some(DiffBasesChange::SetIndex(index_text))
3658 } else if head_changed {
3659 Some(DiffBasesChange::SetHead(head_text))
3660 } else {
3661 None
3662 }
3663 }
3664 (Some(current_index), None) => {
3665 let index_changed =
3666 index_text.as_deref() != current_index.as_deref();
3667 index_changed
3668 .then_some(DiffBasesChange::SetIndex(index_text))
3669 }
3670 (None, Some(current_head)) => {
3671 let head_changed =
3672 head_text.as_deref() != current_head.as_deref();
3673 head_changed.then_some(DiffBasesChange::SetHead(head_text))
3674 }
3675 (None, None) => None,
3676 };
3677
3678 changes.push((buffer.clone(), change))
3679 }
3680 changes
3681 })
3682 .await;
3683
3684 git_store.update(&mut cx, |git_store, cx| {
3685 for (buffer, diff_bases_change) in buffer_diff_base_changes {
3686 let buffer_snapshot = buffer.read(cx).text_snapshot();
3687 let buffer_id = buffer_snapshot.remote_id();
3688 let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
3689 continue;
3690 };
3691
3692 let downstream_client = git_store.downstream_client();
3693 diff_state.update(cx, |diff_state, cx| {
3694 use proto::update_diff_bases::Mode;
3695
3696 if let Some((diff_bases_change, (client, project_id))) =
3697 diff_bases_change.clone().zip(downstream_client)
3698 {
3699 let (staged_text, committed_text, mode) = match diff_bases_change {
3700 DiffBasesChange::SetIndex(index) => {
3701 (index, None, Mode::IndexOnly)
3702 }
3703 DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
3704 DiffBasesChange::SetEach { index, head } => {
3705 (index, head, Mode::IndexAndHead)
3706 }
3707 DiffBasesChange::SetBoth(text) => {
3708 (None, text, Mode::IndexMatchesHead)
3709 }
3710 };
3711 client
3712 .send(proto::UpdateDiffBases {
3713 project_id: project_id.to_proto(),
3714 buffer_id: buffer_id.to_proto(),
3715 staged_text,
3716 committed_text,
3717 mode: mode as i32,
3718 })
3719 .log_err();
3720 }
3721
3722 diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
3723 });
3724 }
3725 })
3726 },
3727 );
3728 }
3729
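    /// Enqueues a job on this repository's git worker and returns a receiver for
    /// its result. When `status` is provided, it is displayed as the active job's
    /// message for as long as the job runs.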
3730 pub fn send_job<F, Fut, R>(
3731 &mut self,
3732 status: Option<SharedString>,
3733 job: F,
3734 ) -> oneshot::Receiver<R>
3735 where
3736 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3737 Fut: Future<Output = R> + 'static,
3738 R: Send + 'static,
3739 {
3740 self.send_keyed_job(None, status, job)
3741 }
3742
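    /// Like [`Self::send_job`], but with an optional key. When the worker pops a
    /// keyed job off its queue and a newer job with the same key is already
    /// queued behind it, the older job is skipped.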
3743 fn send_keyed_job<F, Fut, R>(
3744 &mut self,
3745 key: Option<GitJobKey>,
3746 status: Option<SharedString>,
3747 job: F,
3748 ) -> oneshot::Receiver<R>
3749 where
3750 F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
3751 Fut: Future<Output = R> + 'static,
3752 R: Send + 'static,
3753 {
3754 let (result_tx, result_rx) = futures::channel::oneshot::channel();
3755 let job_id = post_inc(&mut self.job_id);
3756 let this = self.this.clone();
3757 self.job_sender
3758 .unbounded_send(GitJob {
3759 key,
3760 job: Box::new(move |state, cx: &mut AsyncApp| {
3761 let job = job(state, cx.clone());
3762 cx.spawn(async move |cx| {
3763 if let Some(s) = status.clone() {
3764 this.update(cx, |this, cx| {
3765 this.active_jobs.insert(
3766 job_id,
3767 JobInfo {
3768 start: Instant::now(),
3769 message: s.clone(),
3770 },
3771 );
3772
3773 cx.notify();
3774 })
3775 .ok();
3776 }
3777 let result = job.await;
3778
3779 this.update(cx, |this, cx| {
3780 this.active_jobs.remove(&job_id);
3781 cx.notify();
3782 })
3783 .ok();
3784
3785 result_tx.send(result).ok();
3786 })
3787 }),
3788 })
3789 .ok();
3790 result_rx
3791 }
3792
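    /// Marks this repository as the active one in its parent `GitStore` and emits
    /// `GitStoreEvent::ActiveRepositoryChanged`.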
3793 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
3794 let Some(git_store) = self.git_store.upgrade() else {
3795 return;
3796 };
3797 let entity = cx.entity();
3798 git_store.update(cx, |git_store, cx| {
3799 let Some((&id, _)) = git_store
3800 .repositories
3801 .iter()
3802 .find(|(_, handle)| *handle == &entity)
3803 else {
3804 return;
3805 };
3806 git_store.active_repo_id = Some(id);
3807 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
3808 });
3809 }
3810
3811 pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
3812 self.snapshot.status()
3813 }
3814
3815 pub fn cached_stash(&self) -> GitStash {
3816 self.snapshot.stash_entries.clone()
3817 }
3818
3819 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
3820 let git_store = self.git_store.upgrade()?;
3821 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3822 let abs_path = self.snapshot.repo_path_to_abs_path(path);
3823 let abs_path = SanitizedPath::new(&abs_path);
3824 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
3825 Some(ProjectPath {
3826 worktree_id: worktree.read(cx).id(),
3827 path: relative_path,
3828 })
3829 }
3830
3831 pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
3832 let git_store = self.git_store.upgrade()?;
3833 let worktree_store = git_store.read(cx).worktree_store.read(cx);
3834 let abs_path = worktree_store.absolutize(path, cx)?;
3835 self.snapshot.abs_path_to_repo_path(&abs_path)
3836 }
3837
3838 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
3839 other
3840 .read(cx)
3841 .snapshot
3842 .work_directory_abs_path
3843 .starts_with(&self.snapshot.work_directory_abs_path)
3844 }
3845
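    /// Returns the buffer used for composing commit messages, creating it locally
    /// or requesting it from the remote host on first use and assigning it the
    /// "Git Commit" language when a language registry is available.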
3846 pub fn open_commit_buffer(
3847 &mut self,
3848 languages: Option<Arc<LanguageRegistry>>,
3849 buffer_store: Entity<BufferStore>,
3850 cx: &mut Context<Self>,
3851 ) -> Task<Result<Entity<Buffer>>> {
3852 let id = self.id;
3853 if let Some(buffer) = self.commit_message_buffer.clone() {
3854 return Task::ready(Ok(buffer));
3855 }
3856 let this = cx.weak_entity();
3857
3858 let rx = self.send_job(None, move |state, mut cx| async move {
3859 let Some(this) = this.upgrade() else {
3860 bail!("git store was dropped");
3861 };
3862 match state {
3863 RepositoryState::Local(..) => {
3864 this.update(&mut cx, |_, cx| {
3865 Self::open_local_commit_buffer(languages, buffer_store, cx)
3866 })?
3867 .await
3868 }
3869 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3870 let request = client.request(proto::OpenCommitMessageBuffer {
3871 project_id: project_id.0,
3872 repository_id: id.to_proto(),
3873 });
3874 let response = request.await.context("requesting to open commit buffer")?;
3875 let buffer_id = BufferId::new(response.buffer_id)?;
3876 let buffer = buffer_store
3877 .update(&mut cx, |buffer_store, cx| {
3878 buffer_store.wait_for_remote_buffer(buffer_id, cx)
3879 })?
3880 .await?;
3881 if let Some(language_registry) = languages {
3882 let git_commit_language =
3883 language_registry.language_for_name("Git Commit").await?;
3884 buffer.update(&mut cx, |buffer, cx| {
3885 buffer.set_language(Some(git_commit_language), cx);
3886 })?;
3887 }
3888 this.update(&mut cx, |this, _| {
3889 this.commit_message_buffer = Some(buffer.clone());
3890 })?;
3891 Ok(buffer)
3892 }
3893 }
3894 });
3895
3896 cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
3897 }
3898
3899 fn open_local_commit_buffer(
3900 language_registry: Option<Arc<LanguageRegistry>>,
3901 buffer_store: Entity<BufferStore>,
3902 cx: &mut Context<Self>,
3903 ) -> Task<Result<Entity<Buffer>>> {
3904 cx.spawn(async move |repository, cx| {
3905 let buffer = buffer_store
3906 .update(cx, |buffer_store, cx| buffer_store.create_buffer(false, cx))?
3907 .await?;
3908
3909 if let Some(language_registry) = language_registry {
3910 let git_commit_language = language_registry.language_for_name("Git Commit").await?;
3911 buffer.update(cx, |buffer, cx| {
3912 buffer.set_language(Some(git_commit_language), cx);
3913 })?;
3914 }
3915
3916 repository.update(cx, |repository, _| {
3917 repository.commit_message_buffer = Some(buffer.clone());
3918 })?;
3919 Ok(buffer)
3920 })
3921 }
3922
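    /// Restores the given paths to their contents at `commit`, tracking the work
    /// as a pending revert operation for each path.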
3923 pub fn checkout_files(
3924 &mut self,
3925 commit: &str,
3926 paths: Vec<RepoPath>,
3927 cx: &mut Context<Self>,
3928 ) -> Task<Result<()>> {
3929 let commit = commit.to_string();
3930 let id = self.id;
3931
3932 self.spawn_job_with_tracking(
3933 paths.clone(),
3934 pending_op::GitStatus::Reverted,
3935 cx,
3936 async move |this, cx| {
3937 this.update(cx, |this, _cx| {
3938 this.send_job(
3939 Some(format!("git checkout {}", commit).into()),
3940 move |git_repo, _| async move {
3941 match git_repo {
3942 RepositoryState::Local(LocalRepositoryState {
3943 backend,
3944 environment,
3945 ..
3946 }) => {
3947 backend
3948 .checkout_files(commit, paths, environment.clone())
3949 .await
3950 }
3951 RepositoryState::Remote(RemoteRepositoryState {
3952 project_id,
3953 client,
3954 }) => {
3955 client
3956 .request(proto::GitCheckoutFiles {
3957 project_id: project_id.0,
3958 repository_id: id.to_proto(),
3959 commit,
3960 paths: paths
3961 .into_iter()
3962 .map(|p| p.to_proto())
3963 .collect(),
3964 })
3965 .await?;
3966
3967 Ok(())
3968 }
3969 }
3970 },
3971 )
3972 })?
3973 .await?
3974 },
3975 )
3976 }
3977
3978 pub fn reset(
3979 &mut self,
3980 commit: String,
3981 reset_mode: ResetMode,
3982 _cx: &mut App,
3983 ) -> oneshot::Receiver<Result<()>> {
3984 let id = self.id;
3985
3986 self.send_job(None, move |git_repo, _| async move {
3987 match git_repo {
3988 RepositoryState::Local(LocalRepositoryState {
3989 backend,
3990 environment,
3991 ..
3992 }) => backend.reset(commit, reset_mode, environment).await,
3993 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
3994 client
3995 .request(proto::GitReset {
3996 project_id: project_id.0,
3997 repository_id: id.to_proto(),
3998 commit,
3999 mode: match reset_mode {
4000 ResetMode::Soft => git_reset::ResetMode::Soft.into(),
4001 ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
4002 },
4003 })
4004 .await?;
4005
4006 Ok(())
4007 }
4008 }
4009 })
4010 }
4011
4012 pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
4013 let id = self.id;
4014 self.send_job(None, move |git_repo, _cx| async move {
4015 match git_repo {
4016 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4017 backend.show(commit).await
4018 }
4019 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4020 let resp = client
4021 .request(proto::GitShow {
4022 project_id: project_id.0,
4023 repository_id: id.to_proto(),
4024 commit,
4025 })
4026 .await?;
4027
4028 Ok(CommitDetails {
4029 sha: resp.sha.into(),
4030 message: resp.message.into(),
4031 commit_timestamp: resp.commit_timestamp,
4032 author_email: resp.author_email.into(),
4033 author_name: resp.author_name.into(),
4034 })
4035 }
4036 }
4037 })
4038 }
4039
4040 pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
4041 let id = self.id;
4042 self.send_job(None, move |git_repo, cx| async move {
4043 match git_repo {
4044 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4045 backend.load_commit(commit, cx).await
4046 }
4047 RepositoryState::Remote(RemoteRepositoryState {
4048 client, project_id, ..
4049 }) => {
4050 let response = client
4051 .request(proto::LoadCommitDiff {
4052 project_id: project_id.0,
4053 repository_id: id.to_proto(),
4054 commit,
4055 })
4056 .await?;
4057 Ok(CommitDiff {
4058 files: response
4059 .files
4060 .into_iter()
4061 .map(|file| {
4062 Ok(CommitFile {
4063 path: RepoPath::from_proto(&file.path)?,
4064 old_text: file.old_text,
4065 new_text: file.new_text,
4066 })
4067 })
4068 .collect::<Result<Vec<_>>>()?,
4069 })
4070 }
4071 }
4072 })
4073 }
4074
4075 pub fn file_history(
4076 &mut self,
4077 path: RepoPath,
4078 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4079 self.file_history_paginated(path, 0, None)
4080 }
4081
4082 pub fn file_history_paginated(
4083 &mut self,
4084 path: RepoPath,
4085 skip: usize,
4086 limit: Option<usize>,
4087 ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
4088 let id = self.id;
4089 self.send_job(None, move |git_repo, _cx| async move {
4090 match git_repo {
4091 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4092 backend.file_history_paginated(path, skip, limit).await
4093 }
4094 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
4095 let response = client
4096 .request(proto::GitFileHistory {
4097 project_id: project_id.0,
4098 repository_id: id.to_proto(),
4099 path: path.to_proto(),
4100 skip: skip as u64,
4101 limit: limit.map(|l| l as u64),
4102 })
4103 .await?;
4104 Ok(git::repository::FileHistory {
4105 entries: response
4106 .entries
4107 .into_iter()
4108 .map(|entry| git::repository::FileHistoryEntry {
4109 sha: entry.sha.into(),
4110 subject: entry.subject.into(),
4111 message: entry.message.into(),
4112 commit_timestamp: entry.commit_timestamp,
4113 author_name: entry.author_name.into(),
4114 author_email: entry.author_email.into(),
4115 })
4116 .collect(),
4117 path: RepoPath::from_proto(&response.path)?,
4118 })
4119 }
4120 }
4121 })
4122 }
4123
4124 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4125 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4126 }
4127
4128 fn save_buffers<'a>(
4129 &self,
4130 entries: impl IntoIterator<Item = &'a RepoPath>,
4131 cx: &mut Context<Self>,
4132 ) -> Vec<Task<anyhow::Result<()>>> {
4133 let mut save_futures = Vec::new();
4134 if let Some(buffer_store) = self.buffer_store(cx) {
4135 buffer_store.update(cx, |buffer_store, cx| {
4136 for path in entries {
4137 let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
4138 continue;
4139 };
4140 if let Some(buffer) = buffer_store.get_by_path(&project_path)
4141 && buffer
4142 .read(cx)
4143 .file()
4144 .is_some_and(|file| file.disk_state().exists())
4145 && buffer.read(cx).has_unsaved_edits()
4146 {
4147 save_futures.push(buffer_store.save_buffer(buffer, cx));
4148 }
4149 }
4150 })
4151 }
4152 save_futures
4153 }
4154
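    /// Stages the given paths, first saving any open buffers for them that have
    /// unsaved edits. The index write is keyed by the affected paths so that
    /// superseded writes for the same paths are skipped.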
4155 pub fn stage_entries(
4156 &mut self,
4157 entries: Vec<RepoPath>,
4158 cx: &mut Context<Self>,
4159 ) -> Task<anyhow::Result<()>> {
4160 if entries.is_empty() {
4161 return Task::ready(Ok(()));
4162 }
4163 let id = self.id;
4164 let save_tasks = self.save_buffers(&entries, cx);
4165 let paths = entries
4166 .iter()
4167 .map(|p| p.as_unix_str())
4168 .collect::<Vec<_>>()
4169 .join(" ");
4170 let status = format!("git add {paths}");
4171 let job_key = GitJobKey::WriteIndex(entries.clone());
4172
4173 self.spawn_job_with_tracking(
4174 entries.clone(),
4175 pending_op::GitStatus::Staged,
4176 cx,
4177 async move |this, cx| {
4178 for save_task in save_tasks {
4179 save_task.await?;
4180 }
4181
4182 this.update(cx, |this, _| {
4183 this.send_keyed_job(
4184 Some(job_key),
4185 Some(status.into()),
4186 move |git_repo, _cx| async move {
4187 match git_repo {
4188 RepositoryState::Local(LocalRepositoryState {
4189 backend,
4190 environment,
4191 ..
4192 }) => backend.stage_paths(entries, environment.clone()).await,
4193 RepositoryState::Remote(RemoteRepositoryState {
4194 project_id,
4195 client,
4196 }) => {
4197 client
4198 .request(proto::Stage {
4199 project_id: project_id.0,
4200 repository_id: id.to_proto(),
4201 paths: entries
4202 .into_iter()
4203 .map(|repo_path| repo_path.to_proto())
4204 .collect(),
4205 })
4206 .await
4207 .context("sending stage request")?;
4208
4209 Ok(())
4210 }
4211 }
4212 },
4213 )
4214 })?
4215 .await?
4216 },
4217 )
4218 }
4219
4220 pub fn unstage_entries(
4221 &mut self,
4222 entries: Vec<RepoPath>,
4223 cx: &mut Context<Self>,
4224 ) -> Task<anyhow::Result<()>> {
4225 if entries.is_empty() {
4226 return Task::ready(Ok(()));
4227 }
4228 let id = self.id;
4229 let save_tasks = self.save_buffers(&entries, cx);
4230 let paths = entries
4231 .iter()
4232 .map(|p| p.as_unix_str())
4233 .collect::<Vec<_>>()
4234 .join(" ");
4235 let status = format!("git reset {paths}");
4236 let job_key = GitJobKey::WriteIndex(entries.clone());
4237
4238 self.spawn_job_with_tracking(
4239 entries.clone(),
4240 pending_op::GitStatus::Unstaged,
4241 cx,
4242 async move |this, cx| {
4243 for save_task in save_tasks {
4244 save_task.await?;
4245 }
4246
4247 this.update(cx, |this, _| {
4248 this.send_keyed_job(
4249 Some(job_key),
4250 Some(status.into()),
4251 move |git_repo, _cx| async move {
4252 match git_repo {
4253 RepositoryState::Local(LocalRepositoryState {
4254 backend,
4255 environment,
4256 ..
4257 }) => backend.unstage_paths(entries, environment).await,
4258 RepositoryState::Remote(RemoteRepositoryState {
4259 project_id,
4260 client,
4261 }) => {
4262 client
4263 .request(proto::Unstage {
4264 project_id: project_id.0,
4265 repository_id: id.to_proto(),
4266 paths: entries
4267 .into_iter()
4268 .map(|repo_path| repo_path.to_proto())
4269 .collect(),
4270 })
4271 .await
4272 .context("sending unstage request")?;
4273
4274 Ok(())
4275 }
4276 }
4277 },
4278 )
4279 })?
4280 .await?
4281 },
4282 )
4283 }
4284
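    /// Stages every cached status entry that is not already staged or in the
    /// process of being staged.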
4285 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4286 let to_stage = self
4287 .cached_status()
4288 .filter_map(|entry| {
4289 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4290 if ops.staging() || ops.staged() {
4291 None
4292 } else {
4293 Some(entry.repo_path)
4294 }
4295 } else if entry.status.staging().is_fully_staged() {
4296 None
4297 } else {
4298 Some(entry.repo_path)
4299 }
4300 })
4301 .collect();
4302 self.stage_entries(to_stage, cx)
4303 }
4304
4305 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4306 let to_unstage = self
4307 .cached_status()
4308 .filter_map(|entry| {
4309 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4310 if !ops.staging() && !ops.staged() {
4311 None
4312 } else {
4313 Some(entry.repo_path)
4314 }
4315 } else if entry.status.staging().is_fully_unstaged() {
4316 None
4317 } else {
4318 Some(entry.repo_path)
4319 }
4320 })
4321 .collect();
4322 self.unstage_entries(to_unstage, cx)
4323 }
4324
4325 pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4326 let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();
4327
4328 self.stash_entries(to_stash, cx)
4329 }
4330
4331 pub fn stash_entries(
4332 &mut self,
4333 entries: Vec<RepoPath>,
4334 cx: &mut Context<Self>,
4335 ) -> Task<anyhow::Result<()>> {
4336 let id = self.id;
4337
4338 cx.spawn(async move |this, cx| {
4339 this.update(cx, |this, _| {
4340 this.send_job(None, move |git_repo, _cx| async move {
4341 match git_repo {
4342 RepositoryState::Local(LocalRepositoryState {
4343 backend,
4344 environment,
4345 ..
4346 }) => backend.stash_paths(entries, environment).await,
4347 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4348 client
4349 .request(proto::Stash {
4350 project_id: project_id.0,
4351 repository_id: id.to_proto(),
4352 paths: entries
4353 .into_iter()
4354 .map(|repo_path| repo_path.to_proto())
4355 .collect(),
4356 })
4357 .await
4358 .context("sending stash request")?;
4359 Ok(())
4360 }
4361 }
4362 })
4363 })?
4364 .await??;
4365 Ok(())
4366 })
4367 }
4368
4369 pub fn stash_pop(
4370 &mut self,
4371 index: Option<usize>,
4372 cx: &mut Context<Self>,
4373 ) -> Task<anyhow::Result<()>> {
4374 let id = self.id;
4375 cx.spawn(async move |this, cx| {
4376 this.update(cx, |this, _| {
4377 this.send_job(None, move |git_repo, _cx| async move {
4378 match git_repo {
4379 RepositoryState::Local(LocalRepositoryState {
4380 backend,
4381 environment,
4382 ..
4383 }) => backend.stash_pop(index, environment).await,
4384 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4385 client
4386 .request(proto::StashPop {
4387 project_id: project_id.0,
4388 repository_id: id.to_proto(),
4389 stash_index: index.map(|i| i as u64),
4390 })
4391 .await
4392 .context("sending stash pop request")?;
4393 Ok(())
4394 }
4395 }
4396 })
4397 })?
4398 .await??;
4399 Ok(())
4400 })
4401 }
4402
4403 pub fn stash_apply(
4404 &mut self,
4405 index: Option<usize>,
4406 cx: &mut Context<Self>,
4407 ) -> Task<anyhow::Result<()>> {
4408 let id = self.id;
4409 cx.spawn(async move |this, cx| {
4410 this.update(cx, |this, _| {
4411 this.send_job(None, move |git_repo, _cx| async move {
4412 match git_repo {
4413 RepositoryState::Local(LocalRepositoryState {
4414 backend,
4415 environment,
4416 ..
4417 }) => backend.stash_apply(index, environment).await,
4418 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4419 client
4420 .request(proto::StashApply {
4421 project_id: project_id.0,
4422 repository_id: id.to_proto(),
4423 stash_index: index.map(|i| i as u64),
4424 })
4425 .await
4426 .context("sending stash apply request")?;
4427 Ok(())
4428 }
4429 }
4430 })
4431 })?
4432 .await??;
4433 Ok(())
4434 })
4435 }
4436
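    /// Drops the given stash entry. For local repositories the stash list is
    /// reloaded afterwards, `StashEntriesChanged` is emitted, and the updated
    /// snapshot is forwarded to any downstream client.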
4437 pub fn stash_drop(
4438 &mut self,
4439 index: Option<usize>,
4440 cx: &mut Context<Self>,
4441 ) -> oneshot::Receiver<anyhow::Result<()>> {
4442 let id = self.id;
4443 let updates_tx = self
4444 .git_store()
4445 .and_then(|git_store| match &git_store.read(cx).state {
4446 GitStoreState::Local { downstream, .. } => downstream
4447 .as_ref()
4448 .map(|downstream| downstream.updates_tx.clone()),
4449 _ => None,
4450 });
4451 let this = cx.weak_entity();
4452 self.send_job(None, move |git_repo, mut cx| async move {
4453 match git_repo {
4454 RepositoryState::Local(LocalRepositoryState {
4455 backend,
4456 environment,
4457 ..
4458 }) => {
4459 // TODO would be nice to not have to do this manually
4460 let result = backend.stash_drop(index, environment).await;
4461 if result.is_ok()
4462 && let Ok(stash_entries) = backend.stash_entries().await
4463 {
4464 let snapshot = this.update(&mut cx, |this, cx| {
4465 this.snapshot.stash_entries = stash_entries;
4466 cx.emit(RepositoryEvent::StashEntriesChanged);
4467 this.snapshot.clone()
4468 })?;
4469 if let Some(updates_tx) = updates_tx {
4470 updates_tx
4471 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4472 .ok();
4473 }
4474 }
4475
4476 result
4477 }
4478 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4479 client
4480 .request(proto::StashDrop {
4481 project_id: project_id.0,
4482 repository_id: id.to_proto(),
4483 stash_index: index.map(|i| i as u64),
4484 })
4485 .await
                        .context("sending stash drop request")?;
4487 Ok(())
4488 }
4489 }
4490 })
4491 }
4492
4493 pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
4494 let id = self.id;
4495 self.send_job(
4496 Some(format!("git hook {}", hook.as_str()).into()),
4497 move |git_repo, _cx| async move {
4498 match git_repo {
4499 RepositoryState::Local(LocalRepositoryState {
4500 backend,
4501 environment,
4502 ..
4503 }) => backend.run_hook(hook, environment.clone()).await,
4504 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4505 client
4506 .request(proto::RunGitHook {
4507 project_id: project_id.0,
4508 repository_id: id.to_proto(),
4509 hook: hook.to_proto(),
4510 })
4511 .await?;
4512
4513 Ok(())
4514 }
4515 }
4516 },
4517 )
4518 }
4519
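    /// Runs the pre-commit hook and then creates a commit with the given message,
    /// author, and options. For remote repositories the askpass delegate is
    /// registered so that credential prompts can be proxied back to this client.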
4520 pub fn commit(
4521 &mut self,
4522 message: SharedString,
4523 name_and_email: Option<(SharedString, SharedString)>,
4524 options: CommitOptions,
4525 askpass: AskPassDelegate,
4526 cx: &mut App,
4527 ) -> oneshot::Receiver<Result<()>> {
4528 let id = self.id;
4529 let askpass_delegates = self.askpass_delegates.clone();
4530 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4531
4532 let rx = self.run_hook(RunHook::PreCommit, cx);
4533
4534 self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
4535 rx.await??;
4536
4537 match git_repo {
4538 RepositoryState::Local(LocalRepositoryState {
4539 backend,
4540 environment,
4541 ..
4542 }) => {
4543 backend
4544 .commit(message, name_and_email, options, askpass, environment)
4545 .await
4546 }
4547 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4548 askpass_delegates.lock().insert(askpass_id, askpass);
4549 let _defer = util::defer(|| {
4550 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4551 debug_assert!(askpass_delegate.is_some());
4552 });
4553 let (name, email) = name_and_email.unzip();
4554 client
4555 .request(proto::Commit {
4556 project_id: project_id.0,
4557 repository_id: id.to_proto(),
4558 message: String::from(message),
4559 name: name.map(String::from),
4560 email: email.map(String::from),
4561 options: Some(proto::commit::CommitOptions {
4562 amend: options.amend,
4563 signoff: options.signoff,
4564 }),
4565 askpass_id,
4566 })
4567 .await
4568 .context("sending commit request")?;
4569
4570 Ok(())
4571 }
4572 }
4573 })
4574 }
4575
4576 pub fn fetch(
4577 &mut self,
4578 fetch_options: FetchOptions,
4579 askpass: AskPassDelegate,
4580 _cx: &mut App,
4581 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4582 let askpass_delegates = self.askpass_delegates.clone();
4583 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4584 let id = self.id;
4585
4586 self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
4587 match git_repo {
4588 RepositoryState::Local(LocalRepositoryState {
4589 backend,
4590 environment,
4591 ..
4592 }) => backend.fetch(fetch_options, askpass, environment, cx).await,
4593 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4594 askpass_delegates.lock().insert(askpass_id, askpass);
4595 let _defer = util::defer(|| {
4596 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4597 debug_assert!(askpass_delegate.is_some());
4598 });
4599
4600 let response = client
4601 .request(proto::Fetch {
4602 project_id: project_id.0,
4603 repository_id: id.to_proto(),
4604 askpass_id,
4605 remote: fetch_options.to_proto(),
4606 })
4607 .await
4608 .context("sending fetch request")?;
4609
4610 Ok(RemoteCommandOutput {
4611 stdout: response.stdout,
4612 stderr: response.stderr,
4613 })
4614 }
4615 }
4616 })
4617 }
4618
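    /// Runs the pre-push hook and pushes `branch` to `remote`. For local
    /// repositories the head branch is re-read after a successful push so the
    /// snapshot's upstream tracking information stays current.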
4619 pub fn push(
4620 &mut self,
4621 branch: SharedString,
4622 remote: SharedString,
4623 options: Option<PushOptions>,
4624 askpass: AskPassDelegate,
4625 cx: &mut Context<Self>,
4626 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4627 let askpass_delegates = self.askpass_delegates.clone();
4628 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4629 let id = self.id;
4630
4631 let args = options
4632 .map(|option| match option {
4633 PushOptions::SetUpstream => " --set-upstream",
4634 PushOptions::Force => " --force-with-lease",
4635 })
4636 .unwrap_or("");
4637
4638 let updates_tx = self
4639 .git_store()
4640 .and_then(|git_store| match &git_store.read(cx).state {
4641 GitStoreState::Local { downstream, .. } => downstream
4642 .as_ref()
4643 .map(|downstream| downstream.updates_tx.clone()),
4644 _ => None,
4645 });
4646
4647 let this = cx.weak_entity();
4648 let rx = self.run_hook(RunHook::PrePush, cx);
4649 self.send_job(
            Some(format!("git push{} {} {}", args, remote, branch).into()),
4651 move |git_repo, mut cx| async move {
4652 rx.await??;
4653 match git_repo {
4654 RepositoryState::Local(LocalRepositoryState {
4655 backend,
4656 environment,
4657 ..
4658 }) => {
4659 let result = backend
4660 .push(
4661 branch.to_string(),
4662 remote.to_string(),
4663 options,
4664 askpass,
4665 environment.clone(),
4666 cx.clone(),
4667 )
4668 .await;
4669 // TODO would be nice to not have to do this manually
4670 if result.is_ok() {
4671 let branches = backend.branches().await?;
4672 let branch = branches.into_iter().find(|branch| branch.is_head);
4673 log::info!("head branch after scan is {branch:?}");
4674 let snapshot = this.update(&mut cx, |this, cx| {
4675 this.snapshot.branch = branch;
4676 cx.emit(RepositoryEvent::BranchChanged);
4677 this.snapshot.clone()
4678 })?;
4679 if let Some(updates_tx) = updates_tx {
4680 updates_tx
4681 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
4682 .ok();
4683 }
4684 }
4685 result
4686 }
4687 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4688 askpass_delegates.lock().insert(askpass_id, askpass);
4689 let _defer = util::defer(|| {
4690 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4691 debug_assert!(askpass_delegate.is_some());
4692 });
4693 let response = client
4694 .request(proto::Push {
4695 project_id: project_id.0,
4696 repository_id: id.to_proto(),
4697 askpass_id,
4698 branch_name: branch.to_string(),
4699 remote_name: remote.to_string(),
4700 options: options.map(|options| match options {
4701 PushOptions::Force => proto::push::PushOptions::Force,
4702 PushOptions::SetUpstream => {
4703 proto::push::PushOptions::SetUpstream
4704 }
4705 }
4706 as i32),
4707 })
4708 .await
4709 .context("sending push request")?;
4710
4711 Ok(RemoteCommandOutput {
4712 stdout: response.stdout,
4713 stderr: response.stderr,
4714 })
4715 }
4716 }
4717 },
4718 )
4719 }
4720
4721 pub fn pull(
4722 &mut self,
4723 branch: Option<SharedString>,
4724 remote: SharedString,
4725 rebase: bool,
4726 askpass: AskPassDelegate,
4727 _cx: &mut App,
4728 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
4729 let askpass_delegates = self.askpass_delegates.clone();
4730 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
4731 let id = self.id;
4732
4733 let mut status = "git pull".to_string();
4734 if rebase {
4735 status.push_str(" --rebase");
4736 }
4737 status.push_str(&format!(" {}", remote));
4738 if let Some(b) = &branch {
4739 status.push_str(&format!(" {}", b));
4740 }
4741
4742 self.send_job(Some(status.into()), move |git_repo, cx| async move {
4743 match git_repo {
4744 RepositoryState::Local(LocalRepositoryState {
4745 backend,
4746 environment,
4747 ..
4748 }) => {
4749 backend
4750 .pull(
4751 branch.as_ref().map(|b| b.to_string()),
4752 remote.to_string(),
4753 rebase,
4754 askpass,
4755 environment.clone(),
4756 cx,
4757 )
4758 .await
4759 }
4760 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4761 askpass_delegates.lock().insert(askpass_id, askpass);
4762 let _defer = util::defer(|| {
4763 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
4764 debug_assert!(askpass_delegate.is_some());
4765 });
4766 let response = client
4767 .request(proto::Pull {
4768 project_id: project_id.0,
4769 repository_id: id.to_proto(),
4770 askpass_id,
4771 rebase,
4772 branch_name: branch.as_ref().map(|b| b.to_string()),
4773 remote_name: remote.to_string(),
4774 })
4775 .await
4776 .context("sending pull request")?;
4777
4778 Ok(RemoteCommandOutput {
4779 stdout: response.stdout,
4780 stderr: response.stderr,
4781 })
4782 }
4783 }
4784 })
4785 }
4786
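    /// Queues a write of `content` into the index entry for `path`, preserving
    /// the file's executable bit for local repositories. The job is keyed by path
    /// so that superseded index writes for the same file are skipped; when a hunk
    /// staging operation count is provided, it is recorded on the buffer's diff
    /// state once the write completes.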
4787 fn spawn_set_index_text_job(
4788 &mut self,
4789 path: RepoPath,
4790 content: Option<String>,
4791 hunk_staging_operation_count: Option<usize>,
4792 cx: &mut Context<Self>,
4793 ) -> oneshot::Receiver<anyhow::Result<()>> {
4794 let id = self.id;
4795 let this = cx.weak_entity();
4796 let git_store = self.git_store.clone();
4797 let abs_path = self.snapshot.repo_path_to_abs_path(&path);
4798 self.send_keyed_job(
4799 Some(GitJobKey::WriteIndex(vec![path.clone()])),
4800 None,
4801 move |git_repo, mut cx| async move {
4802 log::debug!(
4803 "start updating index text for buffer {}",
4804 path.as_unix_str()
4805 );
4806
4807 match git_repo {
4808 RepositoryState::Local(LocalRepositoryState {
4809 fs,
4810 backend,
4811 environment,
4812 ..
4813 }) => {
4814 let executable = match fs.metadata(&abs_path).await {
4815 Ok(Some(meta)) => meta.is_executable,
4816 Ok(None) => false,
4817 Err(_err) => false,
4818 };
4819 backend
4820 .set_index_text(path.clone(), content, environment.clone(), executable)
4821 .await?;
4822 }
4823 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4824 client
4825 .request(proto::SetIndexText {
4826 project_id: project_id.0,
4827 repository_id: id.to_proto(),
4828 path: path.to_proto(),
4829 text: content,
4830 })
4831 .await?;
4832 }
4833 }
4834 log::debug!(
4835 "finish updating index text for buffer {}",
4836 path.as_unix_str()
4837 );
4838
4839 if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
4840 let project_path = this
4841 .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
4842 .ok()
4843 .flatten();
4844 git_store.update(&mut cx, |git_store, cx| {
4845 let buffer_id = git_store
4846 .buffer_store
4847 .read(cx)
4848 .get_by_path(&project_path?)?
4849 .read(cx)
4850 .remote_id();
4851 let diff_state = git_store.diffs.get(&buffer_id)?;
4852 diff_state.update(cx, |diff_state, _| {
4853 diff_state.hunk_staging_operation_count_as_of_write =
4854 hunk_staging_operation_count;
4855 });
4856 Some(())
4857 })?;
4858 }
4859 Ok(())
4860 },
4861 )
4862 }
4863
4864 pub fn get_remotes(
4865 &mut self,
4866 branch_name: Option<String>,
4867 is_push: bool,
4868 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
4869 let id = self.id;
4870 self.send_job(None, move |repo, _cx| async move {
4871 match repo {
4872 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4873 let remote = if let Some(branch_name) = branch_name {
4874 if is_push {
4875 backend.get_push_remote(branch_name).await?
4876 } else {
4877 backend.get_branch_remote(branch_name).await?
4878 }
4879 } else {
4880 None
4881 };
4882
4883 match remote {
4884 Some(remote) => Ok(vec![remote]),
4885 None => backend.get_all_remotes().await,
4886 }
4887 }
4888 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4889 let response = client
4890 .request(proto::GetRemotes {
4891 project_id: project_id.0,
4892 repository_id: id.to_proto(),
4893 branch_name,
4894 is_push,
4895 })
4896 .await?;
4897
4898 let remotes = response
4899 .remotes
4900 .into_iter()
                        .map(|remote| git::repository::Remote {
                            name: remote.name.into(),
4903 })
4904 .collect();
4905
4906 Ok(remotes)
4907 }
4908 }
4909 })
4910 }
4911
4912 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
4913 let id = self.id;
4914 self.send_job(None, move |repo, _| async move {
4915 match repo {
4916 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4917 backend.branches().await
4918 }
4919 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4920 let response = client
4921 .request(proto::GitGetBranches {
4922 project_id: project_id.0,
4923 repository_id: id.to_proto(),
4924 })
4925 .await?;
4926
4927 let branches = response
4928 .branches
4929 .into_iter()
4930 .map(|branch| proto_to_branch(&branch))
4931 .collect();
4932
4933 Ok(branches)
4934 }
4935 }
4936 })
4937 }
4938
4939 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
4940 let id = self.id;
4941 self.send_job(None, move |repo, _| async move {
4942 match repo {
4943 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4944 backend.worktrees().await
4945 }
4946 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4947 let response = client
4948 .request(proto::GitGetWorktrees {
4949 project_id: project_id.0,
4950 repository_id: id.to_proto(),
4951 })
4952 .await?;
4953
4954 let worktrees = response
4955 .worktrees
4956 .into_iter()
4957 .map(|worktree| proto_to_worktree(&worktree))
4958 .collect();
4959
4960 Ok(worktrees)
4961 }
4962 }
4963 })
4964 }
4965
4966 pub fn create_worktree(
4967 &mut self,
4968 name: String,
4969 path: PathBuf,
4970 commit: Option<String>,
4971 ) -> oneshot::Receiver<Result<()>> {
4972 let id = self.id;
4973 self.send_job(
4974 Some("git worktree add".into()),
4975 move |repo, _cx| async move {
4976 match repo {
4977 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
4978 backend.create_worktree(name, path, commit).await
4979 }
4980 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
4981 client
4982 .request(proto::GitCreateWorktree {
4983 project_id: project_id.0,
4984 repository_id: id.to_proto(),
4985 name,
4986 directory: path.to_string_lossy().to_string(),
4987 commit,
4988 })
4989 .await?;
4990
4991 Ok(())
4992 }
4993 }
4994 },
4995 )
4996 }
4997
4998 pub fn default_branch(&mut self) -> oneshot::Receiver<Result<Option<SharedString>>> {
4999 let id = self.id;
5000 self.send_job(None, move |repo, _| async move {
5001 match repo {
5002 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5003 backend.default_branch().await
5004 }
5005 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5006 let response = client
5007 .request(proto::GetDefaultBranch {
5008 project_id: project_id.0,
5009 repository_id: id.to_proto(),
5010 })
5011 .await?;
5012
5013 anyhow::Ok(response.branch.map(SharedString::from))
5014 }
5015 }
5016 })
5017 }
5018
5019 pub fn diff_tree(
5020 &mut self,
5021 diff_type: DiffTreeType,
5022 _cx: &App,
5023 ) -> oneshot::Receiver<Result<TreeDiff>> {
5024 let repository_id = self.snapshot.id;
5025 self.send_job(None, move |repo, _cx| async move {
5026 match repo {
5027 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5028 backend.diff_tree(diff_type).await
5029 }
5030 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5031 let response = client
5032 .request(proto::GetTreeDiff {
5033 project_id: project_id.0,
5034 repository_id: repository_id.0,
5035 is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
5036 base: diff_type.base().to_string(),
5037 head: diff_type.head().to_string(),
5038 })
5039 .await?;
5040
5041 let entries = response
5042 .entries
5043 .into_iter()
5044 .filter_map(|entry| {
5045 let status = match entry.status() {
5046 proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
5047 proto::tree_diff_status::Status::Modified => {
5048 TreeDiffStatus::Modified {
5049 old: git::Oid::from_str(
5050 &entry.oid.context("missing oid").log_err()?,
5051 )
5052 .log_err()?,
5053 }
5054 }
5055 proto::tree_diff_status::Status::Deleted => {
5056 TreeDiffStatus::Deleted {
5057 old: git::Oid::from_str(
5058 &entry.oid.context("missing oid").log_err()?,
5059 )
5060 .log_err()?,
5061 }
5062 }
5063 };
5064 Some((
5065 RepoPath::from_rel_path(
5066 &RelPath::from_proto(&entry.path).log_err()?,
5067 ),
5068 status,
5069 ))
5070 })
5071 .collect();
5072
5073 Ok(TreeDiff { entries })
5074 }
5075 }
5076 })
5077 }
5078
5079 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5080 let id = self.id;
5081 self.send_job(None, move |repo, _cx| async move {
5082 match repo {
5083 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5084 backend.diff(diff_type).await
5085 }
5086 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5087 let response = client
5088 .request(proto::GitDiff {
5089 project_id: project_id.0,
5090 repository_id: id.to_proto(),
5091 diff_type: match diff_type {
5092 DiffType::HeadToIndex => {
5093 proto::git_diff::DiffType::HeadToIndex.into()
5094 }
5095 DiffType::HeadToWorktree => {
5096 proto::git_diff::DiffType::HeadToWorktree.into()
5097 }
5098 },
5099 })
5100 .await?;
5101
5102 Ok(response.diff)
5103 }
5104 }
5105 })
5106 }
5107
5108 pub fn create_branch(
5109 &mut self,
5110 branch_name: String,
5111 base_branch: Option<String>,
5112 ) -> oneshot::Receiver<Result<()>> {
5113 let id = self.id;
5114 let status_msg = if let Some(ref base) = base_branch {
5115 format!("git switch -c {branch_name} {base}").into()
5116 } else {
5117 format!("git switch -c {branch_name}").into()
5118 };
5119 self.send_job(Some(status_msg), move |repo, _cx| async move {
5120 match repo {
5121 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5122 backend.create_branch(branch_name, base_branch).await
5123 }
5124 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5125 client
5126 .request(proto::GitCreateBranch {
5127 project_id: project_id.0,
5128 repository_id: id.to_proto(),
5129 branch_name,
5130 })
5131 .await?;
5132
5133 Ok(())
5134 }
5135 }
5136 })
5137 }
5138
5139 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5140 let id = self.id;
5141 self.send_job(
5142 Some(format!("git switch {branch_name}").into()),
5143 move |repo, _cx| async move {
5144 match repo {
5145 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5146 backend.change_branch(branch_name).await
5147 }
5148 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5149 client
5150 .request(proto::GitChangeBranch {
5151 project_id: project_id.0,
5152 repository_id: id.to_proto(),
5153 branch_name,
5154 })
5155 .await?;
5156
5157 Ok(())
5158 }
5159 }
5160 },
5161 )
5162 }
5163
5164 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5165 let id = self.id;
5166 self.send_job(
5167 Some(format!("git branch -d {branch_name}").into()),
5168 move |repo, _cx| async move {
5169 match repo {
5170 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
5171 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5172 client
5173 .request(proto::GitDeleteBranch {
5174 project_id: project_id.0,
5175 repository_id: id.to_proto(),
5176 branch_name,
5177 })
5178 .await?;
5179
5180 Ok(())
5181 }
5182 }
5183 },
5184 )
5185 }
5186
5187 pub fn rename_branch(
5188 &mut self,
5189 branch: String,
5190 new_name: String,
5191 ) -> oneshot::Receiver<Result<()>> {
5192 let id = self.id;
5193 self.send_job(
5194 Some(format!("git branch -m {branch} {new_name}").into()),
5195 move |repo, _cx| async move {
5196 match repo {
5197 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5198 backend.rename_branch(branch, new_name).await
5199 }
5200 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5201 client
5202 .request(proto::GitRenameBranch {
5203 project_id: project_id.0,
5204 repository_id: id.to_proto(),
5205 branch,
5206 new_name,
5207 })
5208 .await?;
5209
5210 Ok(())
5211 }
5212 }
5213 },
5214 )
5215 }
5216
5217 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
5218 let id = self.id;
5219 self.send_job(None, move |repo, _cx| async move {
5220 match repo {
5221 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5222 backend.check_for_pushed_commit().await
5223 }
5224 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5225 let response = client
5226 .request(proto::CheckForPushedCommits {
5227 project_id: project_id.0,
5228 repository_id: id.to_proto(),
5229 })
5230 .await?;
5231
5232 let branches = response.pushed_to.into_iter().map(Into::into).collect();
5233
5234 Ok(branches)
5235 }
5236 }
5237 })
5238 }
5239
5240 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
5241 self.send_job(None, |repo, _cx| async move {
5242 match repo {
5243 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5244 backend.checkpoint().await
5245 }
5246 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
5247 }
5248 })
5249 }
5250
5251 pub fn restore_checkpoint(
5252 &mut self,
5253 checkpoint: GitRepositoryCheckpoint,
5254 ) -> oneshot::Receiver<Result<()>> {
5255 self.send_job(None, move |repo, _cx| async move {
5256 match repo {
5257 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5258 backend.restore_checkpoint(checkpoint).await
5259 }
5260 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5261 }
5262 })
5263 }
5264
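    /// Applies a `proto::UpdateRepository` message received from the host,
    /// updating the branch, head commit, merge state, stash entries, and statuses
    /// in the local snapshot and emitting the corresponding repository events.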
5265 pub(crate) fn apply_remote_update(
5266 &mut self,
5267 update: proto::UpdateRepository,
5268 cx: &mut Context<Self>,
5269 ) -> Result<()> {
5270 let conflicted_paths = TreeSet::from_ordered_entries(
5271 update
5272 .current_merge_conflicts
5273 .into_iter()
5274 .filter_map(|path| RepoPath::from_proto(&path).log_err()),
5275 );
5276 let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
5277 let new_head_commit = update
5278 .head_commit_details
5279 .as_ref()
5280 .map(proto_to_commit_details);
5281 if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
5282 cx.emit(RepositoryEvent::BranchChanged)
5283 }
5284 self.snapshot.branch = new_branch;
5285 self.snapshot.head_commit = new_head_commit;
5286
5287 self.snapshot.merge.conflicted_paths = conflicted_paths;
5288 self.snapshot.merge.message = update.merge_message.map(SharedString::from);
5289 let new_stash_entries = GitStash {
5290 entries: update
5291 .stash_entries
5292 .iter()
5293 .filter_map(|entry| proto_to_stash(entry).ok())
5294 .collect(),
5295 };
5296 if self.snapshot.stash_entries != new_stash_entries {
5297 cx.emit(RepositoryEvent::StashEntriesChanged)
5298 }
5299 self.snapshot.stash_entries = new_stash_entries;
5300
5301 let edits = update
5302 .removed_statuses
5303 .into_iter()
5304 .filter_map(|path| {
5305 Some(sum_tree::Edit::Remove(PathKey(
5306 RelPath::from_proto(&path).log_err()?,
5307 )))
5308 })
5309 .chain(
5310 update
5311 .updated_statuses
5312 .into_iter()
5313 .filter_map(|updated_status| {
5314 Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
5315 }),
5316 )
5317 .collect::<Vec<_>>();
5318 if !edits.is_empty() {
5319 cx.emit(RepositoryEvent::StatusesChanged);
5320 }
5321 self.snapshot.statuses_by_path.edit(edits, ());
5322 if update.is_last_update {
5323 self.snapshot.scan_id = update.scan_id;
5324 }
5325 self.clear_pending_ops(cx);
5326 Ok(())
5327 }
5328
5329 pub fn compare_checkpoints(
5330 &mut self,
5331 left: GitRepositoryCheckpoint,
5332 right: GitRepositoryCheckpoint,
5333 ) -> oneshot::Receiver<Result<bool>> {
5334 self.send_job(None, move |repo, _cx| async move {
5335 match repo {
5336 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5337 backend.compare_checkpoints(left, right).await
5338 }
5339 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5340 }
5341 })
5342 }
5343
5344 pub fn diff_checkpoints(
5345 &mut self,
5346 base_checkpoint: GitRepositoryCheckpoint,
5347 target_checkpoint: GitRepositoryCheckpoint,
5348 ) -> oneshot::Receiver<Result<String>> {
5349 self.send_job(None, move |repo, _cx| async move {
5350 match repo {
5351 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5352 backend
5353 .diff_checkpoints(base_checkpoint, target_checkpoint)
5354 .await
5355 }
5356 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
5357 }
5358 })
5359 }
5360
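    /// Retains only pending ops that are still running, dropping paths whose ops
    /// have all completed, and emits `PendingOpsChanged` when anything changes.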
5361 fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
5362 let updated = SumTree::from_iter(
5363 self.pending_ops.iter().filter_map(|ops| {
5364 let inner_ops: Vec<PendingOp> =
5365 ops.ops.iter().filter(|op| op.running()).cloned().collect();
5366 if inner_ops.is_empty() {
5367 None
5368 } else {
5369 Some(PendingOps {
5370 repo_path: ops.repo_path.clone(),
5371 ops: inner_ops,
5372 })
5373 }
5374 }),
5375 (),
5376 );
5377
5378 if updated != self.pending_ops {
5379 cx.emit(RepositoryEvent::PendingOpsChanged {
5380 pending_ops: self.pending_ops.clone(),
5381 })
5382 }
5383
5384 self.pending_ops = updated;
5385 }
5386
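    /// Schedules a full git status scan on the worker. The scan recomputes the
    /// snapshot from the backend, clears completed pending ops, emits the
    /// resulting events, and forwards the new snapshot to any downstream client.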
5387 fn schedule_scan(
5388 &mut self,
5389 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5390 cx: &mut Context<Self>,
5391 ) {
5392 let this = cx.weak_entity();
5393 let _ = self.send_keyed_job(
5394 Some(GitJobKey::ReloadGitState),
5395 None,
5396 |state, mut cx| async move {
5397 log::debug!("run scheduled git status scan");
5398
5399 let Some(this) = this.upgrade() else {
5400 return Ok(());
5401 };
5402 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5403 bail!("not a local repository")
5404 };
5405 let (snapshot, events) = this
5406 .update(&mut cx, |this, _| {
5407 this.paths_needing_status_update.clear();
5408 compute_snapshot(
5409 this.id,
5410 this.work_directory_abs_path.clone(),
5411 this.snapshot.clone(),
5412 backend.clone(),
5413 )
5414 })?
5415 .await?;
5416 this.update(&mut cx, |this, cx| {
5417 this.snapshot = snapshot.clone();
5418 this.clear_pending_ops(cx);
5419 for event in events {
5420 cx.emit(event);
5421 }
5422 })?;
5423 if let Some(updates_tx) = updates_tx {
5424 updates_tx
5425 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5426 .ok();
5427 }
5428 Ok(())
5429 },
5430 );
5431 }
5432
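    /// Spawns the background loop for a local repository: it waits for the
    /// backend to finish loading, registers additional git hosting providers,
    /// and then runs queued git jobs one at a time, skipping keyed jobs that
    /// have been superseded by a newer job with the same key.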
5433 fn spawn_local_git_worker(
5434 state: Shared<Task<Result<LocalRepositoryState, String>>>,
5435 cx: &mut Context<Self>,
5436 ) -> mpsc::UnboundedSender<GitJob> {
5437 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5438
5439 cx.spawn(async move |_, cx| {
5440 let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
5441 if let Some(git_hosting_provider_registry) =
5442 cx.update(|cx| GitHostingProviderRegistry::try_global(cx))?
5443 {
5444 git_hosting_providers::register_additional_providers(
5445 git_hosting_provider_registry,
5446 state.backend.clone(),
5447 );
5448 }
5449 let state = RepositoryState::Local(state);
5450 let mut jobs = VecDeque::new();
5451 loop {
5452 while let Ok(Some(next_job)) = job_rx.try_next() {
5453 jobs.push_back(next_job);
5454 }
5455
5456 if let Some(job) = jobs.pop_front() {
5457 if let Some(current_key) = &job.key
5458 && jobs
5459 .iter()
5460 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5461 {
5462 continue;
5463 }
5464 (job.job)(state.clone(), cx).await;
5465 } else if let Some(job) = job_rx.next().await {
5466 jobs.push_back(job);
5467 } else {
5468 break;
5469 }
5470 }
5471 anyhow::Ok(())
5472 })
5473 .detach_and_log_err(cx);
5474
5475 job_tx
5476 }
5477
5478 fn spawn_remote_git_worker(
5479 state: RemoteRepositoryState,
5480 cx: &mut Context<Self>,
5481 ) -> mpsc::UnboundedSender<GitJob> {
5482 let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();
5483
5484 cx.spawn(async move |_, cx| {
5485 let state = RepositoryState::Remote(state);
5486 let mut jobs = VecDeque::new();
5487 loop {
5488 while let Ok(Some(next_job)) = job_rx.try_next() {
5489 jobs.push_back(next_job);
5490 }
5491
5492 if let Some(job) = jobs.pop_front() {
5493 if let Some(current_key) = &job.key
5494 && jobs
5495 .iter()
5496 .any(|other_job| other_job.key.as_ref() == Some(current_key))
5497 {
5498 continue;
5499 }
5500 (job.job)(state.clone(), cx).await;
5501 } else if let Some(job) = job_rx.next().await {
5502 jobs.push_back(job);
5503 } else {
5504 break;
5505 }
5506 }
5507 anyhow::Ok(())
5508 })
5509 .detach_and_log_err(cx);
5510
5511 job_tx
5512 }
5513
5514 fn load_staged_text(
5515 &mut self,
5516 buffer_id: BufferId,
5517 repo_path: RepoPath,
5518 cx: &App,
5519 ) -> Task<Result<Option<String>>> {
5520 let rx = self.send_job(None, move |state, _| async move {
5521 match state {
5522 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5523 anyhow::Ok(backend.load_index_text(repo_path).await)
5524 }
5525 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5526 let response = client
5527 .request(proto::OpenUnstagedDiff {
5528 project_id: project_id.to_proto(),
5529 buffer_id: buffer_id.to_proto(),
5530 })
5531 .await?;
5532 Ok(response.staged_text)
5533 }
5534 }
5535 });
5536 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5537 }
5538
5539 fn load_committed_text(
5540 &mut self,
5541 buffer_id: BufferId,
5542 repo_path: RepoPath,
5543 cx: &App,
5544 ) -> Task<Result<DiffBasesChange>> {
5545 let rx = self.send_job(None, move |state, _| async move {
5546 match state {
5547 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5548 let committed_text = backend.load_committed_text(repo_path.clone()).await;
5549 let staged_text = backend.load_index_text(repo_path).await;
5550 let diff_bases_change = if committed_text == staged_text {
5551 DiffBasesChange::SetBoth(committed_text)
5552 } else {
5553 DiffBasesChange::SetEach {
5554 index: staged_text,
5555 head: committed_text,
5556 }
5557 };
5558 anyhow::Ok(diff_bases_change)
5559 }
5560 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5561 use proto::open_uncommitted_diff_response::Mode;
5562
5563 let response = client
5564 .request(proto::OpenUncommittedDiff {
5565 project_id: project_id.to_proto(),
5566 buffer_id: buffer_id.to_proto(),
5567 })
5568 .await?;
5569 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
5570 let bases = match mode {
5571 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
5572 Mode::IndexAndHead => DiffBasesChange::SetEach {
5573 head: response.committed_text,
5574 index: response.staged_text,
5575 },
5576 };
5577 Ok(bases)
5578 }
5579 }
5580 });
5581
5582 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5583 }

    fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
5585 let repository_id = self.snapshot.id;
5586 let rx = self.send_job(None, move |state, _| async move {
5587 match state {
5588 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5589 backend.load_blob_content(oid).await
5590 }
5591 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
5592 let response = client
5593 .request(proto::GetBlobContent {
5594 project_id: project_id.to_proto(),
5595 repository_id: repository_id.0,
5596 oid: oid.to_string(),
5597 })
5598 .await?;
5599 Ok(response.content)
5600 }
5601 }
5602 });
5603 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
5604 }
5605
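    /// Records that the given paths may have changed and schedules a keyed job
    /// that re-checks their git status, updating the snapshot's status entries
    /// and stash list and notifying any downstream client.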
5606 fn paths_changed(
5607 &mut self,
5608 paths: Vec<RepoPath>,
5609 updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
5610 cx: &mut Context<Self>,
5611 ) {
5612 self.paths_needing_status_update.extend(paths);
5613
5614 let this = cx.weak_entity();
5615 let _ = self.send_keyed_job(
5616 Some(GitJobKey::RefreshStatuses),
5617 None,
5618 |state, mut cx| async move {
5619 let (prev_snapshot, mut changed_paths) = this.update(&mut cx, |this, _| {
5620 (
5621 this.snapshot.clone(),
5622 mem::take(&mut this.paths_needing_status_update),
5623 )
5624 })?;
5625 let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
5626 bail!("not a local repository")
5627 };
5628
5629 let paths = changed_paths.iter().cloned().collect::<Vec<_>>();
5630 if paths.is_empty() {
5631 return Ok(());
5632 }
5633 let statuses = backend.status(&paths).await?;
5634 let stash_entries = backend.stash_entries().await?;
5635
5636 let changed_path_statuses = cx
5637 .background_spawn(async move {
5638 let mut changed_path_statuses = Vec::new();
5639 let prev_statuses = prev_snapshot.statuses_by_path.clone();
5640 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5641
5642 for (repo_path, status) in &*statuses.entries {
5643 changed_paths.remove(repo_path);
5644 if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
5645 && cursor.item().is_some_and(|entry| entry.status == *status)
5646 {
5647 continue;
5648 }
5649
5650 changed_path_statuses.push(Edit::Insert(StatusEntry {
5651 repo_path: repo_path.clone(),
5652 status: *status,
5653 }));
5654 }
5655 let mut cursor = prev_statuses.cursor::<PathProgress>(());
5656 for path in changed_paths.into_iter() {
5657 if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
5658 changed_path_statuses
5659 .push(Edit::Remove(PathKey(path.as_ref().clone())));
5660 }
5661 }
5662 changed_path_statuses
5663 })
5664 .await;
5665
5666 this.update(&mut cx, |this, cx| {
5667 if this.snapshot.stash_entries != stash_entries {
5668 cx.emit(RepositoryEvent::StashEntriesChanged);
5669 this.snapshot.stash_entries = stash_entries;
5670 }
5671
5672 if !changed_path_statuses.is_empty() {
5673 cx.emit(RepositoryEvent::StatusesChanged);
5674 this.snapshot
5675 .statuses_by_path
5676 .edit(changed_path_statuses, ());
5677 this.snapshot.scan_id += 1;
5678 }
5679
5680 if let Some(updates_tx) = updates_tx {
5681 updates_tx
5682 .unbounded_send(DownstreamUpdate::UpdateRepository(
5683 this.snapshot.clone(),
5684 ))
5685 .ok();
5686 }
5687 })
5688 },
5689 );
5690 }
5691
    /// Returns the status message and start time of a currently running git job, if any.
5693 pub fn current_job(&self) -> Option<JobInfo> {
5694 self.active_jobs.values().next().cloned()
5695 }
5696
5697 pub fn barrier(&mut self) -> oneshot::Receiver<()> {
5698 self.send_job(None, |_, _| async {})
5699 }
5700
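    /// Runs `f` while recording a pending op with the given git status for each
    /// path, then marks those ops as finished, skipped (if the underlying job was
    /// canceled), or errored according to the outcome.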
5701 fn spawn_job_with_tracking<AsyncFn>(
5702 &mut self,
5703 paths: Vec<RepoPath>,
5704 git_status: pending_op::GitStatus,
5705 cx: &mut Context<Self>,
5706 f: AsyncFn,
5707 ) -> Task<Result<()>>
5708 where
5709 AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
5710 {
5711 let ids = self.new_pending_ops_for_paths(paths, git_status);
5712
5713 cx.spawn(async move |this, cx| {
5714 let (job_status, result) = match f(this.clone(), cx).await {
5715 Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
5716 Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
5717 Err(err) => (pending_op::JobStatus::Error, Err(err)),
5718 };
5719
5720 this.update(cx, |this, _| {
5721 let mut edits = Vec::with_capacity(ids.len());
5722 for (id, entry) in ids {
5723 if let Some(mut ops) = this
5724 .pending_ops
5725 .get(&PathKey(entry.as_ref().clone()), ())
5726 .cloned()
5727 {
5728 if let Some(op) = ops.op_by_id_mut(id) {
5729 op.job_status = job_status;
5730 }
5731 edits.push(sum_tree::Edit::Insert(ops));
5732 }
5733 }
5734 this.pending_ops.edit(edits, ());
5735 })?;
5736
5737 result
5738 })
5739 }
5740
    fn new_pending_ops_for_paths(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
    ) -> Vec<(PendingOpId, RepoPath)> {
        let mut edits = Vec::with_capacity(paths.len());
        let mut ids = Vec::with_capacity(paths.len());
        for path in paths {
            let mut ops = self
                .pending_ops
                .get(&PathKey(path.as_ref().clone()), ())
                .cloned()
                .unwrap_or_else(|| PendingOps::new(&path));
            let id = ops.max_id() + 1;
            ops.ops.push(PendingOp {
                id,
                git_status,
                job_status: pending_op::JobStatus::Running,
            });
            edits.push(sum_tree::Edit::Insert(ops));
            ids.push((id, path));
        }
        self.pending_ops.edit(edits, ());
        ids
    }
}

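/// Builds a permalink to the upstream repository for a file vendored in the Rust
/// registry source directory, using the `.cargo_vcs_info.json` and `Cargo.toml` files
/// published with the crate to locate the repository URL, commit, and in-repo path.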
fn get_permalink_in_rust_registry_src(
    provider_registry: Arc<GitHostingProviderRegistry>,
    path: PathBuf,
    selection: Range<u32>,
) -> Result<url::Url> {
    #[derive(Deserialize)]
    struct CargoVcsGit {
        sha1: String,
    }

    #[derive(Deserialize)]
    struct CargoVcsInfo {
        git: CargoVcsGit,
        path_in_vcs: String,
    }

    #[derive(Deserialize)]
    struct CargoPackage {
        repository: String,
    }

    #[derive(Deserialize)]
    struct CargoToml {
        package: CargoPackage,
    }

    let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
        let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
        Some((dir, json))
    }) else {
        bail!("No .cargo_vcs_info.json found in parent directories")
    };
    let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
    let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
    let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
    let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
        .context("parsing package.repository field of manifest")?;
    let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
    let permalink = provider.build_permalink(
        remote,
        BuildPermalinkParams::new(
            &cargo_vcs_info.git.sha1,
            &RepoPath::from_rel_path(
                &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
            ),
            Some(selection),
        ),
    );
    Ok(permalink)
}

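/// Converts an optional blame result into the protobuf response sent to remote clients.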
fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
    let Some(blame) = blame else {
        return proto::BlameBufferResponse {
            blame_response: None,
        };
    };

    let entries = blame
        .entries
        .into_iter()
        .map(|entry| proto::BlameEntry {
            sha: entry.sha.as_bytes().into(),
            start_line: entry.range.start,
            end_line: entry.range.end,
            original_line_number: entry.original_line_number,
            author: entry.author,
            author_mail: entry.author_mail,
            author_time: entry.author_time,
            author_tz: entry.author_tz,
            committer: entry.committer_name,
            committer_mail: entry.committer_email,
            committer_time: entry.committer_time,
            committer_tz: entry.committer_tz,
            summary: entry.summary,
            previous: entry.previous,
            filename: entry.filename,
        })
        .collect::<Vec<_>>();

    let messages = blame
        .messages
        .into_iter()
        .map(|(oid, message)| proto::CommitMessage {
            oid: oid.as_bytes().into(),
            message,
        })
        .collect::<Vec<_>>();

    proto::BlameBufferResponse {
        blame_response: Some(proto::blame_buffer_response::BlameResponse {
            entries,
            messages,
            remote_url: blame.remote_url,
        }),
    }
}

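/// Reconstructs a blame result from its protobuf representation, dropping any entries
/// or commit messages whose object ids fail to parse.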
fn deserialize_blame_buffer_response(
    response: proto::BlameBufferResponse,
) -> Option<git::blame::Blame> {
    let response = response.blame_response?;
    let entries = response
        .entries
        .into_iter()
        .filter_map(|entry| {
            Some(git::blame::BlameEntry {
                sha: git::Oid::from_bytes(&entry.sha).ok()?,
                range: entry.start_line..entry.end_line,
                original_line_number: entry.original_line_number,
                committer_name: entry.committer,
                committer_time: entry.committer_time,
                committer_tz: entry.committer_tz,
                committer_email: entry.committer_mail,
                author: entry.author,
                author_mail: entry.author_mail,
                author_time: entry.author_time,
                author_tz: entry.author_tz,
                summary: entry.summary,
                previous: entry.previous,
                filename: entry.filename,
            })
        })
        .collect::<Vec<_>>();

    let messages = response
        .messages
        .into_iter()
        .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
        .collect::<HashMap<_, _>>();

    Some(Blame {
        entries,
        messages,
        remote_url: response.remote_url,
    })
}

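/// Converts a branch, including its upstream tracking information and most recent
/// commit summary, into its protobuf representation.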
fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
    proto::Branch {
        is_head: branch.is_head,
        ref_name: branch.ref_name.to_string(),
        unix_timestamp: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| commit.commit_timestamp as u64),
        upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
            ref_name: upstream.ref_name.to_string(),
            tracking: upstream
                .tracking
                .status()
                .map(|upstream| proto::UpstreamTracking {
                    ahead: upstream.ahead as u64,
                    behind: upstream.behind as u64,
                }),
        }),
        most_recent_commit: branch
            .most_recent_commit
            .as_ref()
            .map(|commit| proto::CommitSummary {
                sha: commit.sha.to_string(),
                subject: commit.subject.to_string(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string(),
            }),
    }
}

fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
    proto::Worktree {
        path: worktree.path.to_string_lossy().to_string(),
        ref_name: worktree.ref_name.to_string(),
        sha: worktree.sha.to_string(),
    }
}

fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
    git::repository::Worktree {
        path: PathBuf::from(proto.path.clone()),
        ref_name: proto.ref_name.clone().into(),
        sha: proto.sha.clone().into(),
    }
}

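/// Reconstructs a branch from its protobuf representation; an upstream without tracking
/// information is treated as gone, and the most recent commit is assumed to have a parent.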
fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
    git::repository::Branch {
        is_head: proto.is_head,
        ref_name: proto.ref_name.clone().into(),
        upstream: proto
            .upstream
            .as_ref()
            .map(|upstream| git::repository::Upstream {
                ref_name: upstream.ref_name.to_string().into(),
                tracking: upstream
                    .tracking
                    .as_ref()
                    .map(|tracking| {
                        git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
                            ahead: tracking.ahead as u32,
                            behind: tracking.behind as u32,
                        })
                    })
                    .unwrap_or(git::repository::UpstreamTracking::Gone),
            }),
        most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
            git::repository::CommitSummary {
                sha: commit.sha.to_string().into(),
                subject: commit.subject.to_string().into(),
                commit_timestamp: commit.commit_timestamp,
                author_name: commit.author_name.to_string().into(),
                has_parent: true,
            }
        }),
    }
}

fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
    proto::GitCommitDetails {
        sha: commit.sha.to_string(),
        message: commit.message.to_string(),
        commit_timestamp: commit.commit_timestamp,
        author_email: commit.author_email.to_string(),
        author_name: commit.author_name.to_string(),
    }
}

fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
    CommitDetails {
        sha: proto.sha.clone().into(),
        message: proto.message.clone().into(),
        commit_timestamp: proto.commit_timestamp,
        author_email: proto.author_email.clone().into(),
        author_name: proto.author_name.clone().into(),
    }
}

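/// Builds a fresh snapshot of the repository's branch, head commit, statuses, stash,
/// and merge state by querying the git backend, and reports which aspects changed
/// relative to the previous snapshot as a list of events.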
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let statuses_by_path = SumTree::from_iter(
        statuses
            .entries
            .iter()
            .map(|(repo_path, status)| StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }),
        (),
    );
    let (merge_details, merge_heads_changed) =
        MergeDetails::load(&backend, &statuses_by_path, &prev_snapshot).await?;
    log::debug!("new merge details (changed={merge_heads_changed:?}): {merge_details:?}");

    if merge_heads_changed {
        events.push(RepositoryEvent::MergeHeadsChanged);
    }

    if statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged);
    }

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    // Used by edit prediction data collection
    let remote_origin_url = backend.remote_url("origin");
    let remote_upstream_url = backend.remote_url("upstream");

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        path_style: prev_snapshot.path_style,
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}

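/// Decodes a file status from the wire format, falling back to the coarse
/// `simple_status` code when no detailed variant is present.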
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}

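/// Encodes a file status into its protobuf variant for transmission to remote clients.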
fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
    use proto::git_file_status::{Tracked, Unmerged, Variant};

    let variant = match status {
        FileStatus::Untracked => Variant::Untracked(Default::default()),
        FileStatus::Ignored => Variant::Ignored(Default::default()),
        FileStatus::Unmerged(UnmergedStatus {
            first_head,
            second_head,
        }) => Variant::Unmerged(Unmerged {
            first_head: unmerged_status_to_proto(first_head),
            second_head: unmerged_status_to_proto(second_head),
        }),
        FileStatus::Tracked(TrackedStatus {
            index_status,
            worktree_status,
        }) => Variant::Tracked(Tracked {
            index_status: tracked_status_to_proto(index_status),
            worktree_status: tracked_status_to_proto(worktree_status),
        }),
    };
    proto::GitFileStatus {
        variant: Some(variant),
    }
}

fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
    match code {
        UnmergedStatusCode::Added => proto::GitStatus::Added as _,
        UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
        UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
    }
}

fn tracked_status_to_proto(code: StatusCode) -> i32 {
    match code {
        StatusCode::Added => proto::GitStatus::Added as _,
        StatusCode::Deleted => proto::GitStatus::Deleted as _,
        StatusCode::Modified => proto::GitStatus::Modified as _,
        StatusCode::Renamed => proto::GitStatus::Renamed as _,
        StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
        StatusCode::Copied => proto::GitStatus::Copied as _,
        StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
    }
}