1pub mod branch_diff;
2mod conflict_set;
3pub mod git_traversal;
4pub mod pending_op;
5
6use crate::{
7 ProjectEnvironment, ProjectItem, ProjectPath,
8 buffer_store::{BufferStore, BufferStoreEvent},
9 trusted_worktrees::{
10 PathTrust, TrustedWorktrees, TrustedWorktreesEvent, TrustedWorktreesStore,
11 },
12 worktree_store::{WorktreeStore, WorktreeStoreEvent},
13};
14use anyhow::{Context as _, Result, anyhow, bail};
15use askpass::{AskPassDelegate, EncryptedPassword, IKnowWhatIAmDoingAndIHaveReadTheDocs};
16use buffer_diff::{BufferDiff, BufferDiffEvent};
17use client::ProjectId;
18use collections::HashMap;
19pub use conflict_set::{ConflictRegion, ConflictSet, ConflictSetSnapshot, ConflictSetUpdate};
20use fs::Fs;
21use futures::{
22 FutureExt, StreamExt,
23 channel::{
24 mpsc,
25 oneshot::{self, Canceled},
26 },
27 future::{self, Shared},
28 stream::FuturesOrdered,
29};
30use git::{
31 BuildPermalinkParams, GitHostingProviderRegistry, Oid, RunHook,
32 blame::Blame,
33 parse_git_remote_url,
34 repository::{
35 Branch, CommitDetails, CommitDiff, CommitFile, CommitOptions, DiffType, FetchOptions,
36 GitRepository, GitRepositoryCheckpoint, GraphCommitData, InitialGraphCommitData, LogOrder,
37 LogSource, PushOptions, Remote, RemoteCommandOutput, RepoPath, ResetMode,
38 UpstreamTrackingStatus, Worktree as GitWorktree,
39 },
40 stash::{GitStash, StashEntry},
41 status::{
42 DiffTreeType, FileStatus, GitSummary, StatusCode, TrackedStatus, TreeDiff, TreeDiffStatus,
43 UnmergedStatus, UnmergedStatusCode,
44 },
45};
46use gpui::{
47 App, AppContext, AsyncApp, Context, Entity, EventEmitter, SharedString, Subscription, Task,
48 WeakEntity,
49};
50use language::{
51 Buffer, BufferEvent, Language, LanguageRegistry,
52 proto::{deserialize_version, serialize_version},
53};
54use parking_lot::Mutex;
55use pending_op::{PendingOp, PendingOpId, PendingOps, PendingOpsSummary};
56use postage::stream::Stream as _;
57use rpc::{
58 AnyProtoClient, TypedEnvelope,
59 proto::{self, git_reset, split_repository_update},
60};
61use serde::Deserialize;
62use settings::WorktreeId;
63use smol::future::yield_now;
64use std::{
65 cmp::Ordering,
66 collections::{BTreeSet, HashSet, VecDeque, hash_map::Entry},
67 future::Future,
68 mem,
69 ops::Range,
70 path::{Path, PathBuf},
71 str::FromStr,
72 sync::{
73 Arc,
74 atomic::{self, AtomicU64},
75 },
76 time::Instant,
77};
78use sum_tree::{Edit, SumTree, TreeMap};
79use task::Shell;
80use text::{Bias, BufferId};
81use util::{
82 ResultExt, debug_panic,
83 paths::{PathStyle, SanitizedPath},
84 post_inc,
85 rel_path::RelPath,
86};
87use worktree::{
88 File, PathChange, PathKey, PathProgress, PathSummary, PathTarget, ProjectEntryId,
89 UpdatedGitRepositoriesSet, UpdatedGitRepository, Worktree,
90};
91use zeroize::Zeroize;
92
/// Central store for all git repositories associated with a project, along
/// with per-buffer diff state and collaboration (shared/remote) plumbing.
pub struct GitStore {
    /// Whether this store runs against local repositories or a remote upstream.
    state: GitStoreState,
    buffer_store: Entity<BufferStore>,
    worktree_store: Entity<WorktreeStore>,
    /// All known repositories, keyed by their store-assigned id.
    repositories: HashMap<RepositoryId, Entity<Repository>>,
    /// Which worktrees are associated with each repository.
    worktree_ids: HashMap<RepositoryId, HashSet<WorktreeId>>,
    /// The repository currently considered active, if any.
    active_repo_id: Option<RepositoryId>,
    /// In-flight diff-loading tasks, shared so that concurrent requests for
    /// the same (buffer, kind) deduplicate into a single load.
    #[allow(clippy::type_complexity)]
    loading_diffs:
        HashMap<(BufferId, DiffKind), Shared<Task<Result<Entity<BufferDiff>, Arc<anyhow::Error>>>>>,
    /// Per-buffer git state (diffs, conflict sets, cached base texts).
    diffs: HashMap<BufferId, Entity<BufferGitState>>,
    /// Diffs that have been shared with downstream collaborators, per peer.
    shared_diffs: HashMap<proto::PeerId, HashMap<BufferId, SharedDiffs>>,
    _subscriptions: Vec<Subscription>,
}
107
/// The diffs for a single buffer that have been shared with a downstream peer.
#[derive(Default)]
struct SharedDiffs {
    unstaged: Option<Entity<BufferDiff>>,
    uncommitted: Option<Entity<BufferDiff>>,
}
113
/// Per-buffer git state: weak handles to the buffer's diffs and conflict set,
/// cached base texts, and bookkeeping for in-flight recalculations.
struct BufferGitState {
    /// Diff of the buffer against the index (weak; dropped when unused).
    unstaged_diff: Option<WeakEntity<BufferDiff>>,
    /// Diff of the buffer against HEAD (weak; dropped when unused).
    uncommitted_diff: Option<WeakEntity<BufferDiff>>,
    /// Diffs against arbitrary commits, keyed by oid (`None` = no base text).
    oid_diffs: HashMap<Option<git::Oid>, WeakEntity<BufferDiff>>,
    /// Merge-conflict markers parsed from the buffer's contents.
    conflict_set: Option<WeakEntity<ConflictSet>>,
    recalculate_diff_task: Option<Task<Result<()>>>,
    reparse_conflict_markers_task: Option<Task<Result<()>>>,
    language: Option<Arc<Language>>,
    language_registry: Option<Arc<LanguageRegistry>>,
    /// Completion signals for callers awaiting a conflict-marker reparse.
    conflict_updated_futures: Vec<oneshot::Sender<()>>,
    /// Broadcasts whether a diff recalculation is currently in progress.
    recalculating_tx: postage::watch::Sender<bool>,

    /// These operation counts are used to ensure that head and index text
    /// values read from the git repository are up-to-date with any hunk staging
    /// operations that have been performed on the BufferDiff.
    ///
    /// The operation count is incremented immediately when the user initiates a
    /// hunk stage/unstage operation. Then, upon finishing writing the new index
    /// text to disk, the `operation count as of write` is updated to reflect
    /// the operation count that prompted the write.
    hunk_staging_operation_count: usize,
    hunk_staging_operation_count_as_of_write: usize,

    head_text: Option<Arc<str>>,
    index_text: Option<Arc<str>>,
    /// Cached base texts for oid diffs, keyed by commit oid.
    oid_texts: HashMap<git::Oid, Arc<str>>,
    // Dirty flags consumed by the next diff recalculation.
    head_changed: bool,
    index_changed: bool,
    language_changed: bool,
}
144
/// Describes which diff base texts (index and/or head) changed for a buffer.
#[derive(Clone, Debug)]
enum DiffBasesChange {
    /// Only the index (staged) text changed.
    SetIndex(Option<String>),
    /// Only the head (committed) text changed.
    SetHead(Option<String>),
    /// Index and head changed to distinct values.
    SetEach {
        index: Option<String>,
        head: Option<String>,
    },
    /// Index and head changed to the same value.
    SetBoth(Option<String>),
}

/// The kind of diff tracked for a buffer.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
enum DiffKind {
    /// Buffer contents vs. the git index (see `open_unstaged_diff`).
    Unstaged,
    /// Buffer contents vs. the committed text (see `open_uncommitted_diff`).
    Uncommitted,
    /// Buffer contents vs. an arbitrary commit; `None` means no base text.
    SinceOid(Option<git::Oid>),
}
162
/// Whether the [`GitStore`] operates on local repositories or proxies to a
/// remote (collaboration) upstream, plus any downstream-sharing state.
enum GitStoreState {
    Local {
        /// Source of ids for newly discovered repositories.
        next_repository_id: Arc<AtomicU64>,
        /// Present while this project is shared with collaborators.
        downstream: Option<LocalDownstreamState>,
        project_environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
    },
    Remote {
        upstream_client: AnyProtoClient,
        upstream_project_id: u64,
        /// Present while this remote project is re-shared downstream.
        downstream: Option<(AnyProtoClient, ProjectId)>,
    },
}

/// Messages sent to the background task that streams repository updates to
/// downstream collaborators (see `GitStore::shared`).
enum DownstreamUpdate {
    UpdateRepository(RepositorySnapshot),
    RemoveRepository(RepositoryId),
}

/// State kept while a local project is shared with collaborators.
struct LocalDownstreamState {
    client: AnyProtoClient,
    project_id: ProjectId,
    /// Queue feeding `_task`, which serializes updates into proto messages.
    updates_tx: mpsc::UnboundedSender<DownstreamUpdate>,
    _task: Task<Result<()>>,
}
188
/// A point-in-time capture of every repository's state, keyed by working
/// directory, used to snapshot and later restore project-wide git state.
#[derive(Clone, Debug)]
pub struct GitStoreCheckpoint {
    checkpoints_by_work_dir_abs_path: HashMap<Arc<Path>, GitRepositoryCheckpoint>,
}

/// The git status of a single path within a repository.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StatusEntry {
    pub repo_path: RepoPath,
    pub status: FileStatus,
}
199
200impl StatusEntry {
201 fn to_proto(&self) -> proto::StatusEntry {
202 let simple_status = match self.status {
203 FileStatus::Ignored | FileStatus::Untracked => proto::GitStatus::Added as i32,
204 FileStatus::Unmerged { .. } => proto::GitStatus::Conflict as i32,
205 FileStatus::Tracked(TrackedStatus {
206 index_status,
207 worktree_status,
208 }) => tracked_status_to_proto(if worktree_status != StatusCode::Unmodified {
209 worktree_status
210 } else {
211 index_status
212 }),
213 };
214
215 proto::StatusEntry {
216 repo_path: self.repo_path.to_proto(),
217 simple_status,
218 status: Some(status_to_proto(self.status)),
219 }
220 }
221}
222
223impl TryFrom<proto::StatusEntry> for StatusEntry {
224 type Error = anyhow::Error;
225
226 fn try_from(value: proto::StatusEntry) -> Result<Self, Self::Error> {
227 let repo_path = RepoPath::from_proto(&value.repo_path).context("invalid repo path")?;
228 let status = status_from_proto(value.simple_status, value.status)?;
229 Ok(Self { repo_path, status })
230 }
231}
232
impl sum_tree::Item for StatusEntry {
    type Summary = PathSummary<GitSummary>;

    /// Summarizes this entry for the status [`SumTree`]: the path itself as
    /// the ordering key, plus the aggregated status summary.
    fn summary(&self, _: <Self::Summary as sum_tree::Summary>::Context<'_>) -> Self::Summary {
        PathSummary {
            max_path: self.repo_path.as_ref().clone(),
            item_summary: self.status.summary(),
        }
    }
}

impl sum_tree::KeyedItem for StatusEntry {
    type Key = PathKey;

    /// Entries are keyed by their repository-relative path.
    fn key(&self) -> Self::Key {
        PathKey(self.repo_path.as_ref().clone())
    }
}
251
/// Store-assigned identifier for a repository.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct RepositoryId(pub u64);

/// Details about an in-progress merge, if any.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct MergeDetails {
    /// For each conflicted path, the merge heads involved.
    pub merge_heads_by_conflicted_path: TreeMap<RepoPath, Vec<Option<SharedString>>>,
    /// The pending merge message, if one exists.
    pub message: Option<SharedString>,
}

/// Commit data for the git graph, which may still be loading.
#[derive(Clone)]
pub enum CommitDataState {
    Loading,
    Loaded(Arc<GraphCommitData>),
}
266
/// An immutable snapshot of a repository's observable state: statuses,
/// branch, head commit, merge and stash information.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RepositorySnapshot {
    pub id: RepositoryId,
    /// Per-path git statuses, stored in a path-ordered sum tree.
    pub statuses_by_path: SumTree<StatusEntry>,
    pub work_directory_abs_path: Arc<Path>,
    /// The working directory of the original repository. For a normal
    /// checkout this equals `work_directory_abs_path`. For a git worktree
    /// checkout, this is the original repo's working directory — used to
    /// anchor new worktree creation so they don't nest.
    pub original_repo_abs_path: Arc<Path>,
    pub path_style: PathStyle,
    pub branch: Option<Branch>,
    pub head_commit: Option<CommitDetails>,
    // NOTE(review): presumably bumped once per status scan to order snapshot
    // updates — confirm at the scan/update sites.
    pub scan_id: u64,
    pub merge: MergeDetails,
    pub remote_origin_url: Option<String>,
    pub remote_upstream_url: Option<String>,
    pub stash_entries: GitStash,
}
286
/// Identifier for a git job running against a repository.
type JobId = u64;

/// Metadata about a running git job, used for progress display.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct JobInfo {
    pub start: Instant,
    pub message: SharedString,
}

/// Background handler that fetches full commit data on demand.
struct GraphCommitDataHandler {
    _task: Task<()>,
    /// Channel for requesting data about a specific commit.
    commit_data_request: smol::channel::Sender<Oid>,
}

/// Lifecycle of the graph commit data handler.
enum GraphCommitHandlerState {
    Starting,
    Open(GraphCommitDataHandler),
    Closed,
}

/// Incrementally loaded commit data for one git-graph query.
pub struct InitialGitGraphData {
    fetch_task: Task<()>,
    pub error: Option<SharedString>,
    pub commit_data: Vec<Arc<InitialGraphCommitData>>,
    /// Index into `commit_data` for each commit oid.
    pub commit_oid_to_index: HashMap<Oid, usize>,
}

/// Borrowed view of graph data handed back to callers.
pub struct GraphDataResponse<'a> {
    pub commits: &'a [Arc<InitialGraphCommitData>],
    pub is_loading: bool,
    pub error: Option<SharedString>,
}
318
/// A single git repository tracked by the [`GitStore`], combining the latest
/// [`RepositorySnapshot`] with job scheduling and lazily loaded commit data.
pub struct Repository {
    this: WeakEntity<Self>,
    snapshot: RepositorySnapshot,
    commit_message_buffer: Option<Entity<Buffer>>,
    git_store: WeakEntity<GitStore>,
    // For a local repository, holds paths that have had worktree events since the last status scan completed,
    // and that should be examined during the next status scan.
    paths_needing_status_update: Vec<Vec<RepoPath>>,
    /// Queue of git jobs to run against this repository.
    job_sender: mpsc::UnboundedSender<GitJob>,
    active_jobs: HashMap<JobId, JobInfo>,
    pending_ops: SumTree<PendingOps>,
    /// Counter used to allocate ids for new jobs.
    job_id: JobId,
    askpass_delegates: Arc<Mutex<HashMap<u64, AskPassDelegate>>>,
    latest_askpass_id: u64,
    /// Lazily resolved backend state (local git backend or remote client).
    repository_state: Shared<Task<Result<RepositoryState, String>>>,
    initial_graph_data: HashMap<(LogSource, LogOrder), InitialGitGraphData>,
    graph_commit_data_handler: GraphCommitHandlerState,
    commit_data: HashMap<Oid, CommitDataState>,
}

/// Dereferences to the underlying snapshot so snapshot accessors can be
/// called directly on a [`Repository`].
impl std::ops::Deref for Repository {
    type Target = RepositorySnapshot;

    fn deref(&self) -> &Self::Target {
        &self.snapshot
    }
}
346
/// Backend state for a repository that lives on the local machine.
#[derive(Clone)]
pub struct LocalRepositoryState {
    pub fs: Arc<dyn Fs>,
    pub backend: Arc<dyn GitRepository>,
    /// Environment captured from the working directory, used when locating
    /// and invoking git.
    pub environment: Arc<HashMap<String, String>>,
}

impl LocalRepositoryState {
    /// Opens the repository at `dot_git_abs_path`, resolving the working
    /// directory's environment first so that the `git` binary lookup honors
    /// the directory's `PATH`.
    async fn new(
        work_directory_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        cx: &mut AsyncApp,
    ) -> anyhow::Result<Self> {
        // Fall back to an empty environment rather than failing outright.
        let environment = project_environment
            .update(cx, |project_environment, cx| {
                project_environment.local_directory_environment(&Shell::System, work_directory_abs_path.clone(), cx)
            })?
            .await
            .unwrap_or_else(|| {
                log::error!("failed to get working directory environment for repository {work_directory_abs_path:?}");
                HashMap::default()
            });
        let search_paths = environment.get("PATH").map(|val| val.to_owned());
        let backend = cx
            .background_spawn({
                let fs = fs.clone();
                async move {
                    // Prefer a git binary found via the captured PATH; fall
                    // back to whatever is on the process's own PATH.
                    let system_git_binary_path = search_paths
                        .and_then(|search_paths| {
                            which::which_in("git", Some(search_paths), &work_directory_abs_path)
                                .ok()
                        })
                        .or_else(|| which::which("git").ok());
                    fs.open_repo(&dot_git_abs_path, system_git_binary_path.as_deref())
                        .with_context(|| format!("opening repository at {dot_git_abs_path:?}"))
                }
            })
            .await?;
        backend.set_trusted(is_trusted);
        Ok(LocalRepositoryState {
            backend,
            environment: Arc::new(environment),
            fs,
        })
    }
}
396
/// Backend state for a repository hosted by a remote collaborator.
#[derive(Clone)]
pub struct RemoteRepositoryState {
    pub project_id: ProjectId,
    pub client: AnyProtoClient,
}

/// Where git operations for a repository are actually executed.
#[derive(Clone)]
pub enum RepositoryState {
    Local(LocalRepositoryState),
    Remote(RemoteRepositoryState),
}

/// Progress events emitted while loading the git graph.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum GitGraphEvent {
    /// The number of known commits changed.
    CountUpdated(usize),
    FullyLoaded,
    LoadingError,
}

/// Events emitted by a [`Repository`] when its observable state changes.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum RepositoryEvent {
    StatusesChanged,
    BranchChanged,
    StashEntriesChanged,
    PendingOpsChanged { pending_ops: SumTree<PendingOps> },
    GraphEvent((LogSource, LogOrder), GitGraphEvent),
}

/// Marker event emitted when a repository's set of active jobs changes.
#[derive(Clone, Debug)]
pub struct JobsUpdated;
427
/// Events emitted by the [`GitStore`] itself.
#[derive(Debug)]
pub enum GitStoreEvent {
    ActiveRepositoryChanged(Option<RepositoryId>),
    /// Bool is true when the repository that's updated is the active repository
    RepositoryUpdated(RepositoryId, RepositoryEvent, bool),
    RepositoryAdded,
    RepositoryRemoved(RepositoryId),
    IndexWriteError(anyhow::Error),
    JobsUpdated,
    ConflictsUpdated,
}

impl EventEmitter<RepositoryEvent> for Repository {}
impl EventEmitter<JobsUpdated> for Repository {}
impl EventEmitter<GitStoreEvent> for GitStore {}

/// A unit of git work to run once the repository's backend state is ready.
pub struct GitJob {
    job: Box<dyn FnOnce(RepositoryState, &mut AsyncApp) -> Task<()>>,
    // NOTE(review): presumably used to coalesce/serialize equivalent jobs;
    // `None` means unkeyed — confirm at the job-queue consumer.
    key: Option<GitJobKey>,
}

/// Identifies classes of git jobs by what they touch.
#[derive(PartialEq, Eq)]
enum GitJobKey {
    WriteIndex(Vec<RepoPath>),
    ReloadBufferDiffBases,
    RefreshStatuses,
    ReloadGitState,
}
456
457impl GitStore {
    /// Creates a [`GitStore`] for a project whose repositories live on the
    /// local machine.
    pub fn local(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        environment: Entity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Local {
                // Repository ids are allocated starting from 1.
                next_repository_id: Arc::new(AtomicU64::new(1)),
                downstream: None,
                project_environment: environment,
                fs,
            },
            cx,
        )
    }

    /// Creates a [`GitStore`] that proxies git operations to a remote
    /// upstream project identified by `project_id`.
    pub fn remote(
        worktree_store: &Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        upstream_client: AnyProtoClient,
        project_id: u64,
        cx: &mut Context<Self>,
    ) -> Self {
        Self::new(
            worktree_store.clone(),
            buffer_store,
            GitStoreState::Remote {
                upstream_client,
                upstream_project_id: project_id,
                downstream: None,
            },
            cx,
        )
    }
496
    /// Shared constructor: wires up store-event subscriptions and starts with
    /// empty repository and diff maps.
    fn new(
        worktree_store: Entity<WorktreeStore>,
        buffer_store: Entity<BufferStore>,
        state: GitStoreState,
        cx: &mut Context<Self>,
    ) -> Self {
        let mut _subscriptions = vec![
            cx.subscribe(&worktree_store, Self::on_worktree_store_event),
            cx.subscribe(&buffer_store, Self::on_buffer_store_event),
        ];

        // Also track worktree trust changes when the global store exists.
        if let Some(trusted_worktrees) = TrustedWorktrees::try_get_global(cx) {
            _subscriptions.push(cx.subscribe(&trusted_worktrees, Self::on_trusted_worktrees_event));
        }

        GitStore {
            state,
            buffer_store,
            worktree_store,
            repositories: HashMap::default(),
            worktree_ids: HashMap::default(),
            active_repo_id: None,
            _subscriptions,
            loading_diffs: HashMap::default(),
            shared_diffs: HashMap::default(),
            diffs: HashMap::default(),
        }
    }
525
    /// Registers all git-related RPC handlers on `client` so remote peers can
    /// drive git operations on this store's entities.
    pub fn init(client: &AnyProtoClient) {
        client.add_entity_request_handler(Self::handle_get_remotes);
        client.add_entity_request_handler(Self::handle_get_branches);
        client.add_entity_request_handler(Self::handle_get_default_branch);
        client.add_entity_request_handler(Self::handle_change_branch);
        client.add_entity_request_handler(Self::handle_create_branch);
        client.add_entity_request_handler(Self::handle_rename_branch);
        client.add_entity_request_handler(Self::handle_create_remote);
        client.add_entity_request_handler(Self::handle_remove_remote);
        client.add_entity_request_handler(Self::handle_delete_branch);
        client.add_entity_request_handler(Self::handle_git_init);
        client.add_entity_request_handler(Self::handle_push);
        client.add_entity_request_handler(Self::handle_pull);
        client.add_entity_request_handler(Self::handle_fetch);
        client.add_entity_request_handler(Self::handle_stage);
        client.add_entity_request_handler(Self::handle_unstage);
        client.add_entity_request_handler(Self::handle_stash);
        client.add_entity_request_handler(Self::handle_stash_pop);
        client.add_entity_request_handler(Self::handle_stash_apply);
        client.add_entity_request_handler(Self::handle_stash_drop);
        client.add_entity_request_handler(Self::handle_commit);
        client.add_entity_request_handler(Self::handle_run_hook);
        client.add_entity_request_handler(Self::handle_reset);
        client.add_entity_request_handler(Self::handle_show);
        client.add_entity_request_handler(Self::handle_load_commit_diff);
        client.add_entity_request_handler(Self::handle_file_history);
        client.add_entity_request_handler(Self::handle_checkout_files);
        client.add_entity_request_handler(Self::handle_open_commit_message_buffer);
        client.add_entity_request_handler(Self::handle_set_index_text);
        client.add_entity_request_handler(Self::handle_askpass);
        client.add_entity_request_handler(Self::handle_check_for_pushed_commits);
        client.add_entity_request_handler(Self::handle_git_diff);
        client.add_entity_request_handler(Self::handle_git_diff_stat);
        client.add_entity_request_handler(Self::handle_tree_diff);
        client.add_entity_request_handler(Self::handle_get_blob_content);
        client.add_entity_request_handler(Self::handle_open_unstaged_diff);
        client.add_entity_request_handler(Self::handle_open_uncommitted_diff);
        client.add_entity_message_handler(Self::handle_update_diff_bases);
        client.add_entity_request_handler(Self::handle_get_permalink_to_line);
        client.add_entity_request_handler(Self::handle_blame_buffer);
        client.add_entity_message_handler(Self::handle_update_repository);
        client.add_entity_message_handler(Self::handle_remove_repository);
        client.add_entity_request_handler(Self::handle_git_clone);
        client.add_entity_request_handler(Self::handle_get_worktrees);
        client.add_entity_request_handler(Self::handle_create_worktree);
    }
572
    /// Whether this store operates on local repositories.
    pub fn is_local(&self) -> bool {
        matches!(self.state, GitStoreState::Local { .. })
    }

    /// Makes the repository containing `project_path` the active repository,
    /// emitting [`GitStoreEvent::ActiveRepositoryChanged`] if it changed.
    pub fn set_active_repo_for_path(&mut self, project_path: &ProjectPath, cx: &mut Context<Self>) {
        if let Some((repo, _)) = self.repository_and_path_for_project_path(project_path, cx) {
            let id = repo.read(cx).id;
            if self.active_repo_id != Some(id) {
                self.active_repo_id = Some(id);
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
            }
        }
    }
585
    /// Begins sharing repository state with a downstream collaborator.
    ///
    /// For remote stores, replays each repository's initial update to the new
    /// client immediately. For local stores, spawns a background task that
    /// turns queued [`DownstreamUpdate`]s into proto messages, diffing each
    /// new snapshot against the last one sent for that repository.
    pub fn shared(&mut self, project_id: u64, client: AnyProtoClient, cx: &mut Context<Self>) {
        match &mut self.state {
            GitStoreState::Remote {
                downstream: downstream_client,
                ..
            } => {
                for repo in self.repositories.values() {
                    let update = repo.read(cx).snapshot.initial_update(project_id);
                    // Updates are split into multiple messages before sending.
                    for update in split_repository_update(update) {
                        client.send(update).log_err();
                    }
                }
                *downstream_client = Some((client, ProjectId(project_id)));
            }
            GitStoreState::Local {
                downstream: downstream_client,
                ..
            } => {
                let mut snapshots = HashMap::default();
                let (updates_tx, mut updates_rx) = mpsc::unbounded();
                // Queue an initial update for every repository we already know.
                for repo in self.repositories.values() {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(
                            repo.read(cx).snapshot.clone(),
                        ))
                        .ok();
                }
                *downstream_client = Some(LocalDownstreamState {
                    client: client.clone(),
                    project_id: ProjectId(project_id),
                    updates_tx,
                    _task: cx.spawn(async move |this, cx| {
                        cx.background_spawn(async move {
                            while let Some(update) = updates_rx.next().await {
                                match update {
                                    DownstreamUpdate::UpdateRepository(snapshot) => {
                                        if let Some(old_snapshot) = snapshots.get_mut(&snapshot.id)
                                        {
                                            // Already sent once: send only the delta.
                                            let update =
                                                snapshot.build_update(old_snapshot, project_id);
                                            *old_snapshot = snapshot;
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                        } else {
                                            // First time: send the full initial state.
                                            let update = snapshot.initial_update(project_id);
                                            for update in split_repository_update(update) {
                                                client.send(update)?;
                                            }
                                            snapshots.insert(snapshot.id, snapshot);
                                        }
                                    }
                                    DownstreamUpdate::RemoveRepository(id) => {
                                        client.send(proto::RemoveRepository {
                                            project_id,
                                            id: id.to_proto(),
                                        })?;
                                    }
                                }
                            }
                            anyhow::Ok(())
                        })
                        .await
                        .ok();
                        // The send loop ended (sender dropped or a send
                        // failed): clear the downstream state.
                        this.update(cx, |this, _| {
                            if let GitStoreState::Local {
                                downstream: downstream_client,
                                ..
                            } = &mut this.state
                            {
                                downstream_client.take();
                            } else {
                                unreachable!("unshared called on remote store");
                            }
                        })
                    }),
                });
            }
        }
    }
666
667 pub fn unshared(&mut self, _cx: &mut Context<Self>) {
668 match &mut self.state {
669 GitStoreState::Local {
670 downstream: downstream_client,
671 ..
672 } => {
673 downstream_client.take();
674 }
675 GitStoreState::Remote {
676 downstream: downstream_client,
677 ..
678 } => {
679 downstream_client.take();
680 }
681 }
682 self.shared_diffs.clear();
683 }
684
685 pub(crate) fn forget_shared_diffs_for(&mut self, peer_id: &proto::PeerId) {
686 self.shared_diffs.remove(peer_id);
687 }
688
689 pub fn active_repository(&self) -> Option<Entity<Repository>> {
690 self.active_repo_id
691 .as_ref()
692 .map(|id| self.repositories[id].clone())
693 }
694
    /// Returns (creating if necessary) the unstaged diff for `buffer`: its
    /// contents compared against the staged (index) text.
    ///
    /// Concurrent calls for the same buffer share one loading task.
    pub fn open_unstaged_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();
        // Fast path: a live unstaged diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(unstaged_diff) = diff_state
                .read(cx)
                .unstaged_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, wait for it before returning.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(unstaged_diff)
                });
            }
            return Task::ready(Ok(unstaged_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        // Deduplicate concurrent loads through the shared loading_diffs map.
        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Unstaged))
            .or_insert_with(|| {
                let staged_text = repo.update(cx, |repo, cx| {
                    repo.load_staged_text(buffer_id, repo_path, cx)
                });
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(
                        this,
                        DiffKind::Unstaged,
                        staged_text.await.map(DiffBasesChange::SetIndex),
                        buffer,
                        cx,
                    )
                    .await
                    .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
749
    /// Returns (creating if necessary) a diff of `buffer` against the blob
    /// contents at `oid`, or with no base text when `oid` is `None`.
    ///
    /// The resulting diff's secondary diff is the buffer's unstaged diff.
    pub fn open_diff_since(
        &mut self,
        oid: Option<git::Oid>,
        buffer: Entity<Buffer>,
        repo: Entity<Repository>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live diff for this oid already exists.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(oid_diff) = diff_state.read(cx).oid_diff(oid)
        {
            // If a recalculation is in flight, wait for it before returning.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(oid_diff)
                });
            }
            return Task::ready(Ok(oid_diff));
        }

        // A load for this (buffer, oid) is already in flight; piggyback on it.
        let diff_kind = DiffKind::SinceOid(oid);
        if let Some(task) = self.loading_diffs.get(&(buffer_id, diff_kind)) {
            let task = task.clone();
            return cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) });
        }

        let task = cx
            .spawn(async move |this, cx| {
                let result: Result<Entity<BufferDiff>> = async {
                    let buffer_snapshot = buffer.update(cx, |buffer, _| buffer.snapshot());
                    let language_registry =
                        buffer.update(cx, |buffer, _| buffer.language_registry());
                    // Load the base text from the repository, unless there is no oid.
                    let content: Option<Arc<str>> = match oid {
                        None => None,
                        Some(oid) => Some(
                            repo.update(cx, |repo, cx| repo.load_blob_content(oid, cx))
                                .await?
                                .into(),
                        ),
                    };
                    let buffer_diff = cx.new(|cx| BufferDiff::new(&buffer_snapshot, cx));

                    buffer_diff
                        .update(cx, |buffer_diff, cx| {
                            buffer_diff.language_changed(
                                buffer_snapshot.language().cloned(),
                                language_registry,
                                cx,
                            );
                            buffer_diff.set_base_text(
                                content.clone(),
                                buffer_snapshot.language().cloned(),
                                buffer_snapshot.text,
                                cx,
                            )
                        })
                        .await?;
                    // Chain the unstaged diff as the secondary diff.
                    let unstaged_diff = this
                        .update(cx, |this, cx| this.open_unstaged_diff(buffer.clone(), cx))?
                        .await?;
                    buffer_diff.update(cx, |buffer_diff, _| {
                        buffer_diff.set_secondary_diff(unstaged_diff);
                    });

                    this.update(cx, |this, cx| {
                        cx.subscribe(&buffer_diff, Self::on_buffer_diff_event)
                            .detach();

                        this.loading_diffs.remove(&(buffer_id, diff_kind));

                        let git_store = cx.weak_entity();
                        let diff_state = this
                            .diffs
                            .entry(buffer_id)
                            .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

                        // Cache the base text and register the new diff.
                        diff_state.update(cx, |state, _| {
                            if let Some(oid) = oid {
                                if let Some(content) = content {
                                    state.oid_texts.insert(oid, content);
                                }
                            }
                            state.oid_diffs.insert(oid, buffer_diff.downgrade());
                        });
                    })?;

                    Ok(buffer_diff)
                }
                .await;
                result.map_err(Arc::new)
            })
            .shared();

        self.loading_diffs
            .insert((buffer_id, diff_kind), task.clone());
        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
850
    /// Returns (creating if necessary) the uncommitted diff for `buffer`: its
    /// contents compared against the committed text.
    ///
    /// Concurrent calls for the same buffer share one loading task.
    #[ztracing::instrument(skip_all)]
    pub fn open_uncommitted_diff(
        &mut self,
        buffer: Entity<Buffer>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<BufferDiff>>> {
        let buffer_id = buffer.read(cx).remote_id();

        // Fast path: a live uncommitted diff already exists for this buffer.
        if let Some(diff_state) = self.diffs.get(&buffer_id)
            && let Some(uncommitted_diff) = diff_state
                .read(cx)
                .uncommitted_diff
                .as_ref()
                .and_then(|weak| weak.upgrade())
        {
            // If a recalculation is in flight, wait for it before returning.
            if let Some(task) =
                diff_state.update(cx, |diff_state, _| diff_state.wait_for_recalculation())
            {
                return cx.background_executor().spawn(async move {
                    task.await;
                    Ok(uncommitted_diff)
                });
            }
            return Task::ready(Ok(uncommitted_diff));
        }

        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.read(cx).remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find git repository for buffer")));
        };

        let task = self
            .loading_diffs
            .entry((buffer_id, DiffKind::Uncommitted))
            .or_insert_with(|| {
                let changes = repo.update(cx, |repo, cx| {
                    repo.load_committed_text(buffer_id, repo_path, cx)
                });

                // todo(lw): hot foreground spawn
                cx.spawn(async move |this, cx| {
                    Self::open_diff_internal(this, DiffKind::Uncommitted, changes.await, buffer, cx)
                        .await
                        .map_err(Arc::new)
                })
                .shared()
            })
            .clone();

        cx.background_spawn(async move { task.await.map_err(|e| anyhow!("{e}")) })
    }
903
    /// Shared implementation for opening unstaged and uncommitted diffs:
    /// registers a new [`BufferDiff`] in the buffer's git state and applies
    /// the freshly loaded diff bases.
    #[ztracing::instrument(skip_all)]
    async fn open_diff_internal(
        this: WeakEntity<Self>,
        kind: DiffKind,
        texts: Result<DiffBasesChange>,
        buffer_entity: Entity<Buffer>,
        cx: &mut AsyncApp,
    ) -> Result<Entity<BufferDiff>> {
        // If loading the base text failed, clear the pending entry so a later
        // call can retry, then propagate the error.
        let diff_bases_change = match texts {
            Err(e) => {
                this.update(cx, |this, cx| {
                    let buffer = buffer_entity.read(cx);
                    let buffer_id = buffer.remote_id();
                    this.loading_diffs.remove(&(buffer_id, kind));
                })?;
                return Err(e);
            }
            Ok(change) => change,
        };

        this.update(cx, |this, cx| {
            let buffer = buffer_entity.read(cx);
            let buffer_id = buffer.remote_id();
            let language = buffer.language().cloned();
            let language_registry = buffer.language_registry();
            let text_snapshot = buffer.text_snapshot();
            this.loading_diffs.remove(&(buffer_id, kind));

            let git_store = cx.weak_entity();
            let diff_state = this
                .diffs
                .entry(buffer_id)
                .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));

            let diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));

            cx.subscribe(&diff, Self::on_buffer_diff_event).detach();
            diff_state.update(cx, |diff_state, cx| {
                diff_state.language_changed = true;
                diff_state.language = language;
                diff_state.language_registry = language_registry;

                match kind {
                    DiffKind::Unstaged => diff_state.unstaged_diff = Some(diff.downgrade()),
                    DiffKind::Uncommitted => {
                        // An uncommitted diff always carries an unstaged diff
                        // as its secondary; create one if none exists yet.
                        let unstaged_diff = if let Some(diff) = diff_state.unstaged_diff() {
                            diff
                        } else {
                            let unstaged_diff = cx.new(|cx| BufferDiff::new(&text_snapshot, cx));
                            diff_state.unstaged_diff = Some(unstaged_diff.downgrade());
                            unstaged_diff
                        };

                        diff.update(cx, |diff, _| diff.set_secondary_diff(unstaged_diff));
                        diff_state.uncommitted_diff = Some(diff.downgrade())
                    }
                    DiffKind::SinceOid(_) => {
                        unreachable!("open_diff_internal is not used for OID diffs")
                    }
                }

                diff_state.diff_bases_changed(text_snapshot, Some(diff_bases_change), cx);
                // Wait for the initial recalculation before handing the diff out.
                let rx = diff_state.wait_for_recalculation();

                anyhow::Ok(async move {
                    if let Some(rx) = rx {
                        rx.await;
                    }
                    Ok(diff)
                })
            })
        })??
        .await
    }
978
979 pub fn get_unstaged_diff(&self, buffer_id: BufferId, cx: &App) -> Option<Entity<BufferDiff>> {
980 let diff_state = self.diffs.get(&buffer_id)?;
981 diff_state.read(cx).unstaged_diff.as_ref()?.upgrade()
982 }
983
984 pub fn get_uncommitted_diff(
985 &self,
986 buffer_id: BufferId,
987 cx: &App,
988 ) -> Option<Entity<BufferDiff>> {
989 let diff_state = self.diffs.get(&buffer_id)?;
990 diff_state.read(cx).uncommitted_diff.as_ref()?.upgrade()
991 }
992
993 pub fn get_diff_since_oid(
994 &self,
995 buffer_id: BufferId,
996 oid: Option<git::Oid>,
997 cx: &App,
998 ) -> Option<Entity<BufferDiff>> {
999 let diff_state = self.diffs.get(&buffer_id)?;
1000 diff_state.read(cx).oid_diff(oid)
1001 }
1002
1003 pub fn open_conflict_set(
1004 &mut self,
1005 buffer: Entity<Buffer>,
1006 cx: &mut Context<Self>,
1007 ) -> Entity<ConflictSet> {
1008 log::debug!("open conflict set");
1009 let buffer_id = buffer.read(cx).remote_id();
1010
1011 if let Some(git_state) = self.diffs.get(&buffer_id)
1012 && let Some(conflict_set) = git_state
1013 .read(cx)
1014 .conflict_set
1015 .as_ref()
1016 .and_then(|weak| weak.upgrade())
1017 {
1018 let conflict_set = conflict_set;
1019 let buffer_snapshot = buffer.read(cx).text_snapshot();
1020
1021 git_state.update(cx, |state, cx| {
1022 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1023 });
1024
1025 return conflict_set;
1026 }
1027
1028 let is_unmerged = self
1029 .repository_and_path_for_buffer_id(buffer_id, cx)
1030 .is_some_and(|(repo, path)| repo.read(cx).snapshot.has_conflict(&path));
1031 let git_store = cx.weak_entity();
1032 let buffer_git_state = self
1033 .diffs
1034 .entry(buffer_id)
1035 .or_insert_with(|| cx.new(|_| BufferGitState::new(git_store)));
1036 let conflict_set = cx.new(|cx| ConflictSet::new(buffer_id, is_unmerged, cx));
1037
1038 self._subscriptions
1039 .push(cx.subscribe(&conflict_set, |_, _, _, cx| {
1040 cx.emit(GitStoreEvent::ConflictsUpdated);
1041 }));
1042
1043 buffer_git_state.update(cx, |state, cx| {
1044 state.conflict_set = Some(conflict_set.downgrade());
1045 let buffer_snapshot = buffer.read(cx).text_snapshot();
1046 let _ = state.reparse_conflict_markers(buffer_snapshot, cx);
1047 });
1048
1049 conflict_set
1050 }
1051
1052 pub fn project_path_git_status(
1053 &self,
1054 project_path: &ProjectPath,
1055 cx: &App,
1056 ) -> Option<FileStatus> {
1057 let (repo, repo_path) = self.repository_and_path_for_project_path(project_path, cx)?;
1058 Some(repo.read(cx).status_for_path(&repo_path)?.status)
1059 }
1060
    /// Captures a checkpoint of every repository in the store, keyed by each
    /// repository's working-directory path. Fails if any repository's
    /// checkpoint fails.
    pub fn checkpoint(&self, cx: &mut App) -> Task<Result<GitStoreCheckpoint>> {
        let mut work_directory_abs_paths = Vec::new();
        let mut checkpoints = Vec::new();
        for repository in self.repositories.values() {
            repository.update(cx, |repository, _| {
                work_directory_abs_paths.push(repository.snapshot.work_directory_abs_path.clone());
                // Flatten the job-channel result into the checkpoint result.
                checkpoints.push(repository.checkpoint().map(|checkpoint| checkpoint?));
            });
        }

        // Await all checkpoints off the main thread; `try_join_all`
        // short-circuits on the first failure.
        cx.background_executor().spawn(async move {
            let checkpoints = future::try_join_all(checkpoints).await?;
            Ok(GitStoreCheckpoint {
                checkpoints_by_work_dir_abs_path: work_directory_abs_paths
                    .into_iter()
                    .zip(checkpoints)
                    .collect(),
            })
        })
    }
1081
    /// Restores each repository to the state captured in `checkpoint`.
    /// Checkpoints whose work directory no longer matches any repository are
    /// silently skipped.
    pub fn restore_checkpoint(
        &self,
        checkpoint: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<()>> {
        // Index the current repositories by working-directory path so each
        // checkpoint can be routed to the matching repository.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, checkpoint) in checkpoint.checkpoints_by_work_dir_abs_path {
            if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path) {
                let restore = repository.update(cx, |repository, _| {
                    repository.restore_checkpoint(checkpoint)
                });
                // Flatten the job-channel result into the restore result.
                tasks.push(async move { restore.await? });
            }
        }
        cx.background_spawn(async move {
            future::try_join_all(tasks).await?;
            Ok(())
        })
    }
1107
    /// Compares two checkpoints, returning true if they are equal.
    ///
    /// Returns `false` immediately when `left` covers a work directory that
    /// `right` does not; otherwise all per-repository comparisons must agree.
    pub fn compare_checkpoints(
        &self,
        left: GitStoreCheckpoint,
        mut right: GitStoreCheckpoint,
        cx: &mut App,
    ) -> Task<Result<bool>> {
        // Index the current repositories by working-directory path.
        let repositories_by_work_dir_abs_path = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).snapshot.work_directory_abs_path.clone(), repo))
            .collect::<HashMap<_, _>>();

        let mut tasks = Vec::new();
        for (work_dir_abs_path, left_checkpoint) in left.checkpoints_by_work_dir_abs_path {
            if let Some(right_checkpoint) = right
                .checkpoints_by_work_dir_abs_path
                .remove(&work_dir_abs_path)
            {
                if let Some(repository) = repositories_by_work_dir_abs_path.get(&work_dir_abs_path)
                {
                    let compare = repository.update(cx, |repository, _| {
                        repository.compare_checkpoints(left_checkpoint, right_checkpoint)
                    });

                    // Flatten the job-channel result into the comparison result.
                    tasks.push(async move { compare.await? });
                }
            } else {
                // `left` has a work directory that `right` lacks: not equal.
                return Task::ready(Ok(false));
            }
        }
        cx.background_spawn(async move {
            Ok(future::try_join_all(tasks)
                .await?
                .into_iter()
                .all(|result| result))
        })
    }
1146
    /// Blames a buffer.
    ///
    /// When `version` is provided, blames the buffer's content at that
    /// version; otherwise blames the current content. Local repositories run
    /// the blame against the git backend directly; remote repositories ask
    /// the host over RPC.
    pub fn blame_buffer(
        &self,
        buffer: &Entity<Buffer>,
        version: Option<clock::Global>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Option<Blame>>> {
        let buffer = buffer.read(cx);
        let Some((repo, repo_path)) =
            self.repository_and_path_for_buffer_id(buffer.remote_id(), cx)
        else {
            return Task::ready(Err(anyhow!("failed to find a git repository for buffer")));
        };
        // Snapshot the text to blame before going async.
        let content = match &version {
            Some(version) => buffer.rope_for_version(version),
            None => buffer.as_rope().clone(),
        };
        let line_ending = buffer.line_ending();
        let version = version.unwrap_or(buffer.version());
        let buffer_id = buffer.remote_id();

        // Hold only a weak handle so the task doesn't keep the repo alive.
        let repo = repo.downgrade();
        cx.spawn(async move |_, cx| {
            let repository_state = repo
                .update(cx, |repo, _| repo.repository_state.clone())?
                .await
                .map_err(|err| anyhow::anyhow!(err))?;
            match repository_state {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => backend
                    .blame(repo_path.clone(), content, line_ending)
                    .await
                    .with_context(|| format!("Failed to blame {:?}", repo_path.as_ref()))
                    .map(Some),
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let response = client
                        .request(proto::BlameBuffer {
                            project_id: project_id.to_proto(),
                            buffer_id: buffer_id.into(),
                            version: serialize_version(&version),
                        })
                        .await?;
                    Ok(deserialize_blame_buffer_response(response))
                }
            }
        })
    }
1193
1194 pub fn file_history(
1195 &self,
1196 repo: &Entity<Repository>,
1197 path: RepoPath,
1198 cx: &mut App,
1199 ) -> Task<Result<git::repository::FileHistory>> {
1200 let rx = repo.update(cx, |repo, _| repo.file_history(path));
1201
1202 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1203 }
1204
1205 pub fn file_history_paginated(
1206 &self,
1207 repo: &Entity<Repository>,
1208 path: RepoPath,
1209 skip: usize,
1210 limit: Option<usize>,
1211 cx: &mut App,
1212 ) -> Task<Result<git::repository::FileHistory>> {
1213 let rx = repo.update(cx, |repo, _| repo.file_history_paginated(path, skip, limit));
1214
1215 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
1216 }
1217
    /// Builds a permalink URL to `selection` (a line range) in `buffer` on
    /// its git hosting provider.
    ///
    /// Local repositories resolve the remote URL and HEAD SHA themselves;
    /// remote repositories ask the host over RPC. Buffers outside any
    /// repository fall back to Cargo-registry crate metadata for Rust files.
    pub fn get_permalink_to_line(
        &self,
        buffer: &Entity<Buffer>,
        selection: Range<u32>,
        cx: &mut App,
    ) -> Task<Result<url::Url>> {
        let Some(file) = File::from_dyn(buffer.read(cx).file()) else {
            return Task::ready(Err(anyhow!("buffer has no file")));
        };

        let Some((repo, repo_path)) = self.repository_and_path_for_project_path(
            &(file.worktree.read(cx).id(), file.path.clone()).into(),
            cx,
        ) else {
            // If we're not in a Git repo, check whether this is a Rust source
            // file in the Cargo registry (presumably opened with go-to-definition
            // from a normal Rust file). If so, we can put together a permalink
            // using crate metadata.
            if buffer
                .read(cx)
                .language()
                .is_none_or(|lang| lang.name() != "Rust")
            {
                return Task::ready(Err(anyhow!("no permalink available")));
            }
            let file_path = file.worktree.read(cx).absolutize(&file.path);
            return cx.spawn(async move |cx| {
                let provider_registry = cx.update(GitHostingProviderRegistry::default_global);
                get_permalink_in_rust_registry_src(provider_registry, file_path, selection)
                    .context("no permalink available")
            });
        };

        let buffer_id = buffer.read(cx).remote_id();
        // Prefer the current branch's upstream remote; fall back to "origin".
        let branch = repo.read(cx).branch.clone();
        let remote = branch
            .as_ref()
            .and_then(|b| b.upstream.as_ref())
            .and_then(|b| b.remote_name())
            .unwrap_or("origin")
            .to_string();

        let rx = repo.update(cx, |repo, _| {
            repo.send_job(None, move |state, cx| async move {
                match state {
                    RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                        let origin_url = backend
                            .remote_url(&remote)
                            .await
                            .with_context(|| format!("remote \"{remote}\" not found"))?;

                        let sha = backend.head_sha().await.context("reading HEAD SHA")?;

                        let provider_registry =
                            cx.update(GitHostingProviderRegistry::default_global);

                        let (provider, remote) =
                            parse_git_remote_url(provider_registry, &origin_url)
                                .context("parsing Git remote URL")?;

                        Ok(provider.build_permalink(
                            remote,
                            BuildPermalinkParams::new(&sha, &repo_path, Some(selection)),
                        ))
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        let response = client
                            .request(proto::GetPermalinkToLine {
                                project_id: project_id.to_proto(),
                                buffer_id: buffer_id.into(),
                                selection: Some(proto::Range {
                                    start: selection.start as u64,
                                    end: selection.end as u64,
                                }),
                            })
                            .await?;

                        url::Url::parse(&response.permalink).context("failed to parse permalink")
                    }
                }
            })
        });
        // Flatten the job-channel result into the returned task.
        cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
    }
1302
1303 fn downstream_client(&self) -> Option<(AnyProtoClient, ProjectId)> {
1304 match &self.state {
1305 GitStoreState::Local {
1306 downstream: downstream_client,
1307 ..
1308 } => downstream_client
1309 .as_ref()
1310 .map(|state| (state.client.clone(), state.project_id)),
1311 GitStoreState::Remote {
1312 downstream: downstream_client,
1313 ..
1314 } => downstream_client.clone(),
1315 }
1316 }
1317
1318 fn upstream_client(&self) -> Option<AnyProtoClient> {
1319 match &self.state {
1320 GitStoreState::Local { .. } => None,
1321 GitStoreState::Remote {
1322 upstream_client, ..
1323 } => Some(upstream_client.clone()),
1324 }
1325 }
1326
    /// Reacts to worktree-store events on the *local* side: notifies
    /// repositories about changed paths, registers/updates repositories when
    /// a worktree's git repos change, and prunes repositories whose last
    /// worktree was removed. No-op for remote stores.
    fn on_worktree_store_event(
        &mut self,
        worktree_store: Entity<WorktreeStore>,
        event: &WorktreeStoreEvent,
        cx: &mut Context<Self>,
    ) {
        let GitStoreState::Local {
            project_environment,
            downstream,
            next_repository_id,
            fs,
        } = &self.state
        else {
            return;
        };

        match event {
            WorktreeStoreEvent::WorktreeUpdatedEntries(worktree_id, updated_entries) => {
                if let Some(worktree) = self
                    .worktree_store
                    .read(cx)
                    .worktree_for_id(*worktree_id, cx)
                {
                    // Group the updated entries by the repository containing
                    // them, then notify each repository asynchronously.
                    let paths_by_git_repo =
                        self.process_updated_entries(&worktree, updated_entries, cx);
                    let downstream = downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone());
                    cx.spawn(async move |_, cx| {
                        let paths_by_git_repo = paths_by_git_repo.await;
                        for (repo, paths) in paths_by_git_repo {
                            repo.update(cx, |repo, cx| {
                                repo.paths_changed(paths, downstream.clone(), cx);
                            });
                        }
                    })
                    .detach();
                }
            }
            WorktreeStoreEvent::WorktreeUpdatedGitRepositories(worktree_id, changed_repos) => {
                let Some(worktree) = worktree_store.read(cx).worktree_for_id(*worktree_id, cx)
                else {
                    return;
                };
                // Invisible worktrees don't contribute repositories.
                if !worktree.read(cx).is_visible() {
                    log::debug!(
                        "not adding repositories for local worktree {:?} because it's not visible",
                        worktree.read(cx).abs_path()
                    );
                    return;
                }
                self.update_repositories_from_worktree(
                    *worktree_id,
                    project_environment.clone(),
                    next_repository_id.clone(),
                    downstream
                        .as_ref()
                        .map(|downstream| downstream.updates_tx.clone()),
                    changed_repos.clone(),
                    fs.clone(),
                    cx,
                );
                self.local_worktree_git_repos_changed(worktree, changed_repos, cx);
            }
            WorktreeStoreEvent::WorktreeRemoved(_entity_id, worktree_id) => {
                // Detach the worktree from every repository; collect repos
                // that no longer belong to any worktree.
                let repos_without_worktree: Vec<RepositoryId> = self
                    .worktree_ids
                    .iter_mut()
                    .filter_map(|(repo_id, worktree_ids)| {
                        worktree_ids.remove(worktree_id);
                        if worktree_ids.is_empty() {
                            Some(*repo_id)
                        } else {
                            None
                        }
                    })
                    .collect();
                let is_active_repo_removed = repos_without_worktree
                    .iter()
                    .any(|repo_id| self.active_repo_id == Some(*repo_id));

                for repo_id in repos_without_worktree {
                    self.repositories.remove(&repo_id);
                    self.worktree_ids.remove(&repo_id);
                    // Tell downstream collaborators the repository is gone.
                    if let Some(updates_tx) =
                        downstream.as_ref().map(|downstream| &downstream.updates_tx)
                    {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::RemoveRepository(repo_id))
                            .ok();
                    }
                }

                // If the active repository was pruned, fall back to any
                // remaining repository (or none).
                if is_active_repo_removed {
                    if let Some((&repo_id, _)) = self.repositories.iter().next() {
                        self.active_repo_id = Some(repo_id);
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(repo_id)));
                    } else {
                        self.active_repo_id = None;
                        cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
                    }
                }
            }
            _ => {}
        }
    }
    /// Propagates a repository change to per-buffer git state: re-evaluates
    /// the conflict flag for every buffer belonging to this repository
    /// (reparsing conflict markers when the flag flips), then re-emits the
    /// event at the store level.
    fn on_repository_event(
        &mut self,
        repo: Entity<Repository>,
        event: &RepositoryEvent,
        cx: &mut Context<Self>,
    ) {
        let id = repo.read(cx).id;
        let repo_snapshot = repo.read(cx).snapshot.clone();
        for (buffer_id, diff) in self.diffs.iter() {
            if let Some((buffer_repo, repo_path)) =
                self.repository_and_path_for_buffer_id(*buffer_id, cx)
                && buffer_repo == repo
            {
                diff.update(cx, |diff, cx| {
                    if let Some(conflict_set) = &diff.conflict_set {
                        // Update the buffer's conflict flag from the fresh snapshot;
                        // the update may fail if the weak conflict set is gone.
                        let conflict_status_changed =
                            conflict_set.update(cx, |conflict_set, cx| {
                                let has_conflict = repo_snapshot.has_conflict(&repo_path);
                                conflict_set.set_has_conflict(has_conflict, cx)
                            })?;
                        // Only reparse markers when the flag actually flipped.
                        if conflict_status_changed {
                            let buffer_store = self.buffer_store.read(cx);
                            if let Some(buffer) = buffer_store.get(*buffer_id) {
                                let _ = diff
                                    .reparse_conflict_markers(buffer.read(cx).text_snapshot(), cx);
                            }
                        }
                    }
                    anyhow::Ok(())
                })
                .ok();
            }
        }
        cx.emit(GitStoreEvent::RepositoryUpdated(
            id,
            event.clone(),
            self.active_repo_id == Some(id),
        ))
    }
1472
    /// Re-emits a repository's job-queue change as a store-level event.
    fn on_jobs_updated(&mut self, _: Entity<Repository>, _: &JobsUpdated, cx: &mut Context<Self>) {
        cx.emit(GitStoreEvent::JobsUpdated)
    }
1476
    /// Update our list of repositories and schedule git scans in response to a notification from a worktree,
    ///
    /// Matches each repository update against existing repositories by work
    /// directory: updates and rescans matches, creates new local repositories
    /// for unmatched updates, and removes repositories whose last worktree
    /// association disappeared.
    fn update_repositories_from_worktree(
        &mut self,
        worktree_id: WorktreeId,
        project_environment: Entity<ProjectEnvironment>,
        next_repository_id: Arc<AtomicU64>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        updated_git_repositories: UpdatedGitRepositoriesSet,
        fs: Arc<dyn Fs>,
        cx: &mut Context<Self>,
    ) {
        let mut removed_ids = Vec::new();
        for update in updated_git_repositories.iter() {
            // An update matches an existing repository when either its old or
            // new work directory equals the repository's work directory.
            if let Some((id, existing)) = self.repositories.iter().find(|(_, repo)| {
                let existing_work_directory_abs_path =
                    repo.read(cx).work_directory_abs_path.clone();
                Some(&existing_work_directory_abs_path)
                    == update.old_work_directory_abs_path.as_ref()
                    || Some(&existing_work_directory_abs_path)
                        == update.new_work_directory_abs_path.as_ref()
            }) {
                let repo_id = *id;
                if let Some(new_work_directory_abs_path) =
                    update.new_work_directory_abs_path.clone()
                {
                    // Repository still exists (possibly moved): associate it
                    // with this worktree and rescan.
                    self.worktree_ids
                        .entry(repo_id)
                        .or_insert_with(HashSet::new)
                        .insert(worktree_id);
                    existing.update(cx, |existing, cx| {
                        existing.snapshot.work_directory_abs_path = new_work_directory_abs_path;
                        existing.schedule_scan(updates_tx.clone(), cx);
                    });
                } else {
                    // Repository vanished from this worktree; remove it once
                    // no other worktree references it.
                    if let Some(worktree_ids) = self.worktree_ids.get_mut(&repo_id) {
                        worktree_ids.remove(&worktree_id);
                        if worktree_ids.is_empty() {
                            removed_ids.push(repo_id);
                        }
                    }
                }
            } else if let UpdatedGitRepository {
                new_work_directory_abs_path: Some(work_directory_abs_path),
                dot_git_abs_path: Some(dot_git_abs_path),
                repository_dir_abs_path: Some(_repository_dir_abs_path),
                common_dir_abs_path: Some(common_dir_abs_path),
                ..
            } = update
            {
                // Brand-new repository: spin up a local Repository entity.
                let original_repo_abs_path: Arc<Path> =
                    git::repository::original_repo_path_from_common_dir(common_dir_abs_path).into();
                let id = RepositoryId(next_repository_id.fetch_add(1, atomic::Ordering::Release));
                // Only trust the repository if its worktree is trusted.
                let is_trusted = TrustedWorktrees::try_get_global(cx)
                    .map(|trusted_worktrees| {
                        trusted_worktrees.update(cx, |trusted_worktrees, cx| {
                            trusted_worktrees.can_trust(&self.worktree_store, worktree_id, cx)
                        })
                    })
                    .unwrap_or(false);
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    let mut repo = Repository::local(
                        id,
                        work_directory_abs_path.clone(),
                        original_repo_abs_path.clone(),
                        dot_git_abs_path.clone(),
                        project_environment.downgrade(),
                        fs.clone(),
                        is_trusted,
                        git_store,
                        cx,
                    );
                    if let Some(updates_tx) = updates_tx.as_ref() {
                        // trigger an empty `UpdateRepository` to ensure remote active_repo_id is set correctly
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(repo.snapshot()))
                            .ok();
                    }
                    repo.schedule_scan(updates_tx.clone(), cx);
                    repo
                });
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_repository_event));
                self._subscriptions
                    .push(cx.subscribe(&repo, Self::on_jobs_updated));
                self.repositories.insert(id, repo);
                self.worktree_ids.insert(id, HashSet::from([worktree_id]));
                cx.emit(GitStoreEvent::RepositoryAdded);
                // The first repository automatically becomes the active one.
                self.active_repo_id.get_or_insert_with(|| {
                    cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                    id
                });
            }
        }

        // Drop repositories whose last worktree association went away and
        // notify downstream collaborators.
        for id in removed_ids {
            if self.active_repo_id == Some(id) {
                self.active_repo_id = None;
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
            }
            self.repositories.remove(&id);
            if let Some(updates_tx) = updates_tx.as_ref() {
                updates_tx
                    .unbounded_send(DownstreamUpdate::RemoveRepository(id))
                    .ok();
            }
        }
    }
1585
1586 fn on_trusted_worktrees_event(
1587 &mut self,
1588 _: Entity<TrustedWorktreesStore>,
1589 event: &TrustedWorktreesEvent,
1590 cx: &mut Context<Self>,
1591 ) {
1592 if !matches!(self.state, GitStoreState::Local { .. }) {
1593 return;
1594 }
1595
1596 let (is_trusted, event_paths) = match event {
1597 TrustedWorktreesEvent::Trusted(_, trusted_paths) => (true, trusted_paths),
1598 TrustedWorktreesEvent::Restricted(_, restricted_paths) => (false, restricted_paths),
1599 };
1600
1601 for (repo_id, worktree_ids) in &self.worktree_ids {
1602 if worktree_ids
1603 .iter()
1604 .any(|worktree_id| event_paths.contains(&PathTrust::Worktree(*worktree_id)))
1605 {
1606 if let Some(repo) = self.repositories.get(repo_id) {
1607 let repository_state = repo.read(cx).repository_state.clone();
1608 cx.background_spawn(async move {
1609 if let Ok(RepositoryState::Local(state)) = repository_state.await {
1610 state.backend.set_trusted(is_trusted);
1611 }
1612 })
1613 .detach();
1614 }
1615 }
1616 }
1617 }
1618
    /// Keeps per-buffer git state in sync with buffer lifecycle events from
    /// the [`BufferStore`].
    fn on_buffer_store_event(
        &mut self,
        _: Entity<BufferStore>,
        event: &BufferStoreEvent,
        cx: &mut Context<Self>,
    ) {
        match event {
            BufferStoreEvent::BufferAdded(buffer) => {
                // Forward language changes to the buffer's git state, if any.
                cx.subscribe(buffer, |this, buffer, event, cx| {
                    if let BufferEvent::LanguageChanged(_) = event {
                        let buffer_id = buffer.read(cx).remote_id();
                        if let Some(diff_state) = this.diffs.get(&buffer_id) {
                            diff_state.update(cx, |diff_state, cx| {
                                diff_state.buffer_language_changed(buffer, cx);
                            });
                        }
                    }
                })
                .detach();
            }
            BufferStoreEvent::SharedBufferClosed(peer_id, buffer_id) => {
                // Drop the diff shared with this particular peer only.
                if let Some(diffs) = self.shared_diffs.get_mut(peer_id) {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferDropped(buffer_id) => {
                // The buffer is gone entirely; drop all associated diff state.
                self.diffs.remove(buffer_id);
                for diffs in self.shared_diffs.values_mut() {
                    diffs.remove(buffer_id);
                }
            }
            BufferStoreEvent::BufferChangedFilePath { buffer, .. } => {
                // Whenever a buffer's file path changes, it's possible that the
                // new path is actually a path that is being tracked by a git
                // repository. In that case, we'll want to update the buffer's
                // `BufferDiffState`, in case it already has one.
                let buffer_id = buffer.read(cx).remote_id();
                let diff_state = self.diffs.get(&buffer_id);
                let repo = self.repository_and_path_for_buffer_id(buffer_id, cx);

                if let Some(diff_state) = diff_state
                    && let Some((repo, repo_path)) = repo
                {
                    let buffer = buffer.clone();
                    let diff_state = diff_state.clone();

                    // Reload the committed text for the new path and rebase
                    // the buffer's diff on it; errors are logged, not surfaced.
                    cx.spawn(async move |_git_store, cx| {
                        async {
                            let diff_bases_change = repo
                                .update(cx, |repo, cx| {
                                    repo.load_committed_text(buffer_id, repo_path, cx)
                                })
                                .await?;

                            diff_state.update(cx, |diff_state, cx| {
                                let buffer_snapshot = buffer.read(cx).text_snapshot();
                                diff_state.diff_bases_changed(
                                    buffer_snapshot,
                                    Some(diff_bases_change),
                                    cx,
                                );
                            });
                            anyhow::Ok(())
                        }
                        .await
                        .log_err();
                    })
                    .detach();
                }
            }
        }
    }
1691
    /// Recomputes diffs and reparses conflict markers for the given buffers,
    /// returning a future that resolves once all recalculations complete.
    pub fn recalculate_buffer_diffs(
        &mut self,
        buffers: Vec<Entity<Buffer>>,
        cx: &mut Context<Self>,
    ) -> impl Future<Output = ()> + use<> {
        let mut futures = Vec::new();
        for buffer in buffers {
            // Only buffers that already have git state participate.
            if let Some(diff_state) = self.diffs.get_mut(&buffer.read(cx).remote_id()) {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.recalculate_diffs(buffer.clone(), cx);
                    // `wait_for_recalculation` yields a future only when a
                    // recalculation is actually in flight.
                    futures.extend(diff_state.wait_for_recalculation().map(FutureExt::boxed));
                });
                futures.push(diff_state.update(cx, |diff_state, cx| {
                    diff_state
                        .reparse_conflict_markers(buffer, cx)
                        .map(|_| {})
                        .boxed()
                }));
            }
        }
        async move {
            futures::future::join_all(futures).await;
        }
    }
1717
    /// Responds to hunk stage/unstage operations on a buffer diff by writing
    /// the new index text to the repository. On failure, pending hunks are
    /// cleared and an `IndexWriteError` event is emitted.
    fn on_buffer_diff_event(
        &mut self,
        diff: Entity<buffer_diff::BufferDiff>,
        event: &BufferDiffEvent,
        cx: &mut Context<Self>,
    ) {
        if let BufferDiffEvent::HunksStagedOrUnstaged(new_index_text) = event {
            let buffer_id = diff.read(cx).buffer_id;
            if let Some(diff_state) = self.diffs.get(&buffer_id) {
                // Bump the per-buffer staging-operation counter; the write job
                // receives it (presumably to detect stale writes — confirm in
                // `spawn_set_index_text_job`).
                let hunk_staging_operation_count = diff_state.update(cx, |diff_state, _| {
                    diff_state.hunk_staging_operation_count += 1;
                    diff_state.hunk_staging_operation_count
                });
                if let Some((repo, path)) = self.repository_and_path_for_buffer_id(buffer_id, cx) {
                    let recv = repo.update(cx, |repo, cx| {
                        log::debug!("hunks changed for {}", path.as_unix_str());
                        repo.spawn_set_index_text_job(
                            path,
                            new_index_text.as_ref().map(|rope| rope.to_string()),
                            Some(hunk_staging_operation_count),
                            cx,
                        )
                    });
                    let diff = diff.downgrade();
                    cx.spawn(async move |this, cx| {
                        // If writing the index failed, roll back the pending
                        // hunk state and surface the error to listeners.
                        if let Ok(Err(error)) = cx.background_spawn(recv).await {
                            diff.update(cx, |diff, cx| {
                                diff.clear_pending_hunks(cx);
                            })
                            .ok();
                            this.update(cx, |_, cx| cx.emit(GitStoreEvent::IndexWriteError(error)))
                                .ok();
                        }
                    })
                    .detach();
                }
            }
        }
    }
1757
1758 fn local_worktree_git_repos_changed(
1759 &mut self,
1760 worktree: Entity<Worktree>,
1761 changed_repos: &UpdatedGitRepositoriesSet,
1762 cx: &mut Context<Self>,
1763 ) {
1764 log::debug!("local worktree repos changed");
1765 debug_assert!(worktree.read(cx).is_local());
1766
1767 for repository in self.repositories.values() {
1768 repository.update(cx, |repository, cx| {
1769 let repo_abs_path = &repository.work_directory_abs_path;
1770 if changed_repos.iter().any(|update| {
1771 update.old_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1772 || update.new_work_directory_abs_path.as_ref() == Some(repo_abs_path)
1773 }) {
1774 repository.reload_buffer_diff_bases(cx);
1775 }
1776 });
1777 }
1778 }
1779
    /// All repositories tracked by this store, keyed by repository id.
    pub fn repositories(&self) -> &HashMap<RepositoryId, Entity<Repository>> {
        &self.repositories
    }
1783
1784 pub fn status_for_buffer_id(&self, buffer_id: BufferId, cx: &App) -> Option<FileStatus> {
1785 let (repo, path) = self.repository_and_path_for_buffer_id(buffer_id, cx)?;
1786 let status = repo.read(cx).snapshot.status_for_path(&path)?;
1787 Some(status.status)
1788 }
1789
1790 pub fn repository_and_path_for_buffer_id(
1791 &self,
1792 buffer_id: BufferId,
1793 cx: &App,
1794 ) -> Option<(Entity<Repository>, RepoPath)> {
1795 let buffer = self.buffer_store.read(cx).get(buffer_id)?;
1796 let project_path = buffer.read(cx).project_path(cx)?;
1797 self.repository_and_path_for_project_path(&project_path, cx)
1798 }
1799
1800 pub fn repository_and_path_for_project_path(
1801 &self,
1802 path: &ProjectPath,
1803 cx: &App,
1804 ) -> Option<(Entity<Repository>, RepoPath)> {
1805 let abs_path = self.worktree_store.read(cx).absolutize(path, cx)?;
1806 self.repositories
1807 .values()
1808 .filter_map(|repo| {
1809 let repo_path = repo.read(cx).abs_path_to_repo_path(&abs_path)?;
1810 Some((repo.clone(), repo_path))
1811 })
1812 .max_by_key(|(repo, _)| repo.read(cx).work_directory_abs_path.clone())
1813 }
1814
1815 pub fn git_init(
1816 &self,
1817 path: Arc<Path>,
1818 fallback_branch_name: String,
1819 cx: &App,
1820 ) -> Task<Result<()>> {
1821 match &self.state {
1822 GitStoreState::Local { fs, .. } => {
1823 let fs = fs.clone();
1824 cx.background_executor()
1825 .spawn(async move { fs.git_init(&path, fallback_branch_name).await })
1826 }
1827 GitStoreState::Remote {
1828 upstream_client,
1829 upstream_project_id: project_id,
1830 ..
1831 } => {
1832 let client = upstream_client.clone();
1833 let project_id = *project_id;
1834 cx.background_executor().spawn(async move {
1835 client
1836 .request(proto::GitInit {
1837 project_id: project_id,
1838 abs_path: path.to_string_lossy().into_owned(),
1839 fallback_branch_name,
1840 })
1841 .await?;
1842 Ok(())
1843 })
1844 }
1845 }
1846 }
1847
1848 pub fn git_clone(
1849 &self,
1850 repo: String,
1851 path: impl Into<Arc<std::path::Path>>,
1852 cx: &App,
1853 ) -> Task<Result<()>> {
1854 let path = path.into();
1855 match &self.state {
1856 GitStoreState::Local { fs, .. } => {
1857 let fs = fs.clone();
1858 cx.background_executor()
1859 .spawn(async move { fs.git_clone(&repo, &path).await })
1860 }
1861 GitStoreState::Remote {
1862 upstream_client,
1863 upstream_project_id,
1864 ..
1865 } => {
1866 if upstream_client.is_via_collab() {
1867 return Task::ready(Err(anyhow!(
1868 "Git Clone isn't supported for project guests"
1869 )));
1870 }
1871 let request = upstream_client.request(proto::GitClone {
1872 project_id: *upstream_project_id,
1873 abs_path: path.to_string_lossy().into_owned(),
1874 remote_repo: repo,
1875 });
1876
1877 cx.background_spawn(async move {
1878 let result = request.await?;
1879
1880 match result.success {
1881 true => Ok(()),
1882 false => Err(anyhow!("Git Clone failed")),
1883 }
1884 })
1885 }
1886 }
1887 }
1888
    /// Applies an `UpdateRepository` message from the upstream host, creating
    /// the remote repository entity on first sight, and re-forwards the
    /// update to any downstream collaborators.
    async fn handle_update_repository(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::UpdateRepository>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        this.update(&mut cx, |this, cx| {
            let path_style = this.worktree_store.read(cx).path_style();
            let mut update = envelope.payload;

            let id = RepositoryId::from_proto(update.id);
            let client = this.upstream_client().context("no upstream client")?;

            let original_repo_abs_path: Option<Arc<Path>> = update
                .original_repo_abs_path
                .as_deref()
                .map(|p| Path::new(p).into());

            // Create the repository entity lazily on the first update for
            // this id; the subscription is captured out of the closure so it
            // can be registered on `this` afterwards.
            let mut repo_subscription = None;
            let repo = this.repositories.entry(id).or_insert_with(|| {
                let git_store = cx.weak_entity();
                let repo = cx.new(|cx| {
                    Repository::remote(
                        id,
                        Path::new(&update.abs_path).into(),
                        original_repo_abs_path.clone(),
                        path_style,
                        ProjectId(update.project_id),
                        client,
                        git_store,
                        cx,
                    )
                });
                repo_subscription = Some(cx.subscribe(&repo, Self::on_repository_event));
                cx.emit(GitStoreEvent::RepositoryAdded);
                repo
            });
            this._subscriptions.extend(repo_subscription);

            repo.update(cx, {
                let update = update.clone();
                |repo, cx| repo.apply_remote_update(update, cx)
            })?;

            // The first repository automatically becomes the active one.
            this.active_repo_id.get_or_insert_with(|| {
                cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
                id
            });

            // Relay the update to downstream collaborators under our own
            // project id.
            if let Some((client, project_id)) = this.downstream_client() {
                update.project_id = project_id.to_proto();
                client.send(update).log_err();
            }
            Ok(())
        })
    }
1944
1945 async fn handle_remove_repository(
1946 this: Entity<Self>,
1947 envelope: TypedEnvelope<proto::RemoveRepository>,
1948 mut cx: AsyncApp,
1949 ) -> Result<()> {
1950 this.update(&mut cx, |this, cx| {
1951 let mut update = envelope.payload;
1952 let id = RepositoryId::from_proto(update.id);
1953 this.repositories.remove(&id);
1954 if let Some((client, project_id)) = this.downstream_client() {
1955 update.project_id = project_id.to_proto();
1956 client.send(update).log_err();
1957 }
1958 if this.active_repo_id == Some(id) {
1959 this.active_repo_id = None;
1960 cx.emit(GitStoreEvent::ActiveRepositoryChanged(None));
1961 }
1962 cx.emit(GitStoreEvent::RepositoryRemoved(id));
1963 });
1964 Ok(())
1965 }
1966
1967 async fn handle_git_init(
1968 this: Entity<Self>,
1969 envelope: TypedEnvelope<proto::GitInit>,
1970 cx: AsyncApp,
1971 ) -> Result<proto::Ack> {
1972 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1973 let name = envelope.payload.fallback_branch_name;
1974 cx.update(|cx| this.read(cx).git_init(path, name, cx))
1975 .await?;
1976
1977 Ok(proto::Ack {})
1978 }
1979
1980 async fn handle_git_clone(
1981 this: Entity<Self>,
1982 envelope: TypedEnvelope<proto::GitClone>,
1983 cx: AsyncApp,
1984 ) -> Result<proto::GitCloneResponse> {
1985 let path: Arc<Path> = PathBuf::from(envelope.payload.abs_path).into();
1986 let repo_name = envelope.payload.remote_repo;
1987 let result = cx
1988 .update(|cx| this.read(cx).git_clone(repo_name, path, cx))
1989 .await;
1990
1991 Ok(proto::GitCloneResponse {
1992 success: result.is_ok(),
1993 })
1994 }
1995
1996 async fn handle_fetch(
1997 this: Entity<Self>,
1998 envelope: TypedEnvelope<proto::Fetch>,
1999 mut cx: AsyncApp,
2000 ) -> Result<proto::RemoteMessageResponse> {
2001 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2002 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2003 let fetch_options = FetchOptions::from_proto(envelope.payload.remote);
2004 let askpass_id = envelope.payload.askpass_id;
2005
2006 let askpass = make_remote_delegate(
2007 this,
2008 envelope.payload.project_id,
2009 repository_id,
2010 askpass_id,
2011 &mut cx,
2012 );
2013
2014 let remote_output = repository_handle
2015 .update(&mut cx, |repository_handle, cx| {
2016 repository_handle.fetch(fetch_options, askpass, cx)
2017 })
2018 .await??;
2019
2020 Ok(proto::RemoteMessageResponse {
2021 stdout: remote_output.stdout,
2022 stderr: remote_output.stderr,
2023 })
2024 }
2025
    /// Handles a `Push` RPC: runs `git push` on the requested repository,
    /// relaying askpass prompts back to the requesting peer.
    async fn handle_push(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Push>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let askpass_id = envelope.payload.askpass_id;
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `options()` decodes the raw enum value; the outer `as_ref().map`
        // preserves `None` when no options field was set.
        let options = envelope
            .payload
            .options
            .as_ref()
            .map(|_| match envelope.payload.options() {
                proto::push::PushOptions::SetUpstream => git::repository::PushOptions::SetUpstream,
                proto::push::PushOptions::Force => git::repository::PushOptions::Force,
            });

        let branch_name = envelope.payload.branch_name.into();
        let remote_branch_name = envelope.payload.remote_branch_name.into();
        let remote_name = envelope.payload.remote_name.into();

        let remote_output = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.push(
                    branch_name,
                    remote_branch_name,
                    remote_name,
                    options,
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::RemoteMessageResponse {
            stdout: remote_output.stdout,
            stderr: remote_output.stderr,
        })
    }
2073
    /// Handles a remote `Pull` request: pulls from the given remote (optionally
    /// a specific branch, optionally rebasing), forwarding any askpass
    /// credential prompts back to the requesting peer.
    async fn handle_pull(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Pull>,
        mut cx: AsyncApp,
    ) -> Result<proto::RemoteMessageResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;
        // Delegate that relays credential prompts to the peer that initiated the pull.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        // `branch_name` is optional; what a branchless pull does is decided by
        // `Repository::pull`.
        let branch_name = envelope.payload.branch_name.map(|name| name.into());
        let remote_name = envelope.payload.remote_name.into();
        let rebase = envelope.payload.rebase;

        let remote_message = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.pull(branch_name, remote_name, rebase, askpass, cx)
            })
            .await??;

        Ok(proto::RemoteMessageResponse {
            stdout: remote_message.stdout,
            stderr: remote_message.stderr,
        })
    }
2105
2106 async fn handle_stage(
2107 this: Entity<Self>,
2108 envelope: TypedEnvelope<proto::Stage>,
2109 mut cx: AsyncApp,
2110 ) -> Result<proto::Ack> {
2111 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2112 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2113
2114 let entries = envelope
2115 .payload
2116 .paths
2117 .into_iter()
2118 .map(|path| RepoPath::new(&path))
2119 .collect::<Result<Vec<_>>>()?;
2120
2121 repository_handle
2122 .update(&mut cx, |repository_handle, cx| {
2123 repository_handle.stage_entries(entries, cx)
2124 })
2125 .await?;
2126 Ok(proto::Ack {})
2127 }
2128
2129 async fn handle_unstage(
2130 this: Entity<Self>,
2131 envelope: TypedEnvelope<proto::Unstage>,
2132 mut cx: AsyncApp,
2133 ) -> Result<proto::Ack> {
2134 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2135 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2136
2137 let entries = envelope
2138 .payload
2139 .paths
2140 .into_iter()
2141 .map(|path| RepoPath::new(&path))
2142 .collect::<Result<Vec<_>>>()?;
2143
2144 repository_handle
2145 .update(&mut cx, |repository_handle, cx| {
2146 repository_handle.unstage_entries(entries, cx)
2147 })
2148 .await?;
2149
2150 Ok(proto::Ack {})
2151 }
2152
2153 async fn handle_stash(
2154 this: Entity<Self>,
2155 envelope: TypedEnvelope<proto::Stash>,
2156 mut cx: AsyncApp,
2157 ) -> Result<proto::Ack> {
2158 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2159 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2160
2161 let entries = envelope
2162 .payload
2163 .paths
2164 .into_iter()
2165 .map(|path| RepoPath::new(&path))
2166 .collect::<Result<Vec<_>>>()?;
2167
2168 repository_handle
2169 .update(&mut cx, |repository_handle, cx| {
2170 repository_handle.stash_entries(entries, cx)
2171 })
2172 .await?;
2173
2174 Ok(proto::Ack {})
2175 }
2176
2177 async fn handle_stash_pop(
2178 this: Entity<Self>,
2179 envelope: TypedEnvelope<proto::StashPop>,
2180 mut cx: AsyncApp,
2181 ) -> Result<proto::Ack> {
2182 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2183 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2184 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2185
2186 repository_handle
2187 .update(&mut cx, |repository_handle, cx| {
2188 repository_handle.stash_pop(stash_index, cx)
2189 })
2190 .await?;
2191
2192 Ok(proto::Ack {})
2193 }
2194
2195 async fn handle_stash_apply(
2196 this: Entity<Self>,
2197 envelope: TypedEnvelope<proto::StashApply>,
2198 mut cx: AsyncApp,
2199 ) -> Result<proto::Ack> {
2200 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2201 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2202 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2203
2204 repository_handle
2205 .update(&mut cx, |repository_handle, cx| {
2206 repository_handle.stash_apply(stash_index, cx)
2207 })
2208 .await?;
2209
2210 Ok(proto::Ack {})
2211 }
2212
2213 async fn handle_stash_drop(
2214 this: Entity<Self>,
2215 envelope: TypedEnvelope<proto::StashDrop>,
2216 mut cx: AsyncApp,
2217 ) -> Result<proto::Ack> {
2218 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2219 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2220 let stash_index = envelope.payload.stash_index.map(|i| i as usize);
2221
2222 repository_handle
2223 .update(&mut cx, |repository_handle, cx| {
2224 repository_handle.stash_drop(stash_index, cx)
2225 })
2226 .await??;
2227
2228 Ok(proto::Ack {})
2229 }
2230
    /// Overwrites the staged (index) content of a single path with the
    /// provided text.
    async fn handle_set_index_text(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::SetIndexText>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let repo_path = RepoPath::from_proto(&envelope.payload.path)?;

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.spawn_set_index_text_job(
                    repo_path,
                    envelope.payload.text,
                    // NOTE(review): the meaning of this `None` argument isn't
                    // visible here — confirm against `spawn_set_index_text_job`.
                    None,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2252
2253 async fn handle_run_hook(
2254 this: Entity<Self>,
2255 envelope: TypedEnvelope<proto::RunGitHook>,
2256 mut cx: AsyncApp,
2257 ) -> Result<proto::Ack> {
2258 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2259 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2260 let hook = RunHook::from_proto(envelope.payload.hook).context("invalid hook")?;
2261 repository_handle
2262 .update(&mut cx, |repository_handle, cx| {
2263 repository_handle.run_hook(hook, cx)
2264 })
2265 .await??;
2266 Ok(proto::Ack {})
2267 }
2268
    /// Creates a commit in the requested repository with the given message,
    /// optionally overriding the author identity and the amend/signoff flags.
    async fn handle_commit(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::Commit>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let askpass_id = envelope.payload.askpass_id;

        // Delegate that relays any askpass prompts raised during the commit
        // back to the requesting peer.
        let askpass = make_remote_delegate(
            this,
            envelope.payload.project_id,
            repository_id,
            askpass_id,
            &mut cx,
        );

        let message = SharedString::from(envelope.payload.message);
        let name = envelope.payload.name.map(SharedString::from);
        let email = envelope.payload.email.map(SharedString::from);
        let options = envelope.payload.options.unwrap_or_default();

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.commit(
                    message,
                    // The author override is applied only when both a name and
                    // an email were supplied (`zip` yields `None` otherwise).
                    name.zip(email),
                    CommitOptions {
                        amend: options.amend,
                        signoff: options.signoff,
                    },
                    askpass,
                    cx,
                )
            })
            .await??;
        Ok(proto::Ack {})
    }
2307
2308 async fn handle_get_remotes(
2309 this: Entity<Self>,
2310 envelope: TypedEnvelope<proto::GetRemotes>,
2311 mut cx: AsyncApp,
2312 ) -> Result<proto::GetRemotesResponse> {
2313 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2314 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2315
2316 let branch_name = envelope.payload.branch_name;
2317 let is_push = envelope.payload.is_push;
2318
2319 let remotes = repository_handle
2320 .update(&mut cx, |repository_handle, _| {
2321 repository_handle.get_remotes(branch_name, is_push)
2322 })
2323 .await??;
2324
2325 Ok(proto::GetRemotesResponse {
2326 remotes: remotes
2327 .into_iter()
2328 .map(|remotes| proto::get_remotes_response::Remote {
2329 name: remotes.name.to_string(),
2330 })
2331 .collect::<Vec<_>>(),
2332 })
2333 }
2334
2335 async fn handle_get_worktrees(
2336 this: Entity<Self>,
2337 envelope: TypedEnvelope<proto::GitGetWorktrees>,
2338 mut cx: AsyncApp,
2339 ) -> Result<proto::GitWorktreesResponse> {
2340 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2341 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2342
2343 let worktrees = repository_handle
2344 .update(&mut cx, |repository_handle, _| {
2345 repository_handle.worktrees()
2346 })
2347 .await??;
2348
2349 Ok(proto::GitWorktreesResponse {
2350 worktrees: worktrees
2351 .into_iter()
2352 .map(|worktree| worktree_to_proto(&worktree))
2353 .collect::<Vec<_>>(),
2354 })
2355 }
2356
2357 async fn handle_create_worktree(
2358 this: Entity<Self>,
2359 envelope: TypedEnvelope<proto::GitCreateWorktree>,
2360 mut cx: AsyncApp,
2361 ) -> Result<proto::Ack> {
2362 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2363 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2364 let directory = PathBuf::from(envelope.payload.directory);
2365 let name = envelope.payload.name;
2366 let commit = envelope.payload.commit;
2367
2368 repository_handle
2369 .update(&mut cx, |repository_handle, _| {
2370 repository_handle.create_worktree(name, directory, commit)
2371 })
2372 .await??;
2373
2374 Ok(proto::Ack {})
2375 }
2376
2377 async fn handle_get_branches(
2378 this: Entity<Self>,
2379 envelope: TypedEnvelope<proto::GitGetBranches>,
2380 mut cx: AsyncApp,
2381 ) -> Result<proto::GitBranchesResponse> {
2382 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2383 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2384
2385 let branches = repository_handle
2386 .update(&mut cx, |repository_handle, _| repository_handle.branches())
2387 .await??;
2388
2389 Ok(proto::GitBranchesResponse {
2390 branches: branches
2391 .into_iter()
2392 .map(|branch| branch_to_proto(&branch))
2393 .collect::<Vec<_>>(),
2394 })
2395 }
    /// Returns the repository's default branch name, if one can be determined.
    async fn handle_get_default_branch(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetDefaultBranch>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetDefaultBranchResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let branch = repository_handle
            .update(&mut cx, |repository_handle, _| {
                // NOTE(review): the meaning of the `false` flag isn't visible
                // here — confirm against `Repository::default_branch`.
                repository_handle.default_branch(false)
            })
            .await??
            .map(Into::into);

        Ok(proto::GetDefaultBranchResponse { branch })
    }
2413 async fn handle_create_branch(
2414 this: Entity<Self>,
2415 envelope: TypedEnvelope<proto::GitCreateBranch>,
2416 mut cx: AsyncApp,
2417 ) -> Result<proto::Ack> {
2418 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2419 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2420 let branch_name = envelope.payload.branch_name;
2421
2422 repository_handle
2423 .update(&mut cx, |repository_handle, _| {
2424 repository_handle.create_branch(branch_name, None)
2425 })
2426 .await??;
2427
2428 Ok(proto::Ack {})
2429 }
2430
2431 async fn handle_change_branch(
2432 this: Entity<Self>,
2433 envelope: TypedEnvelope<proto::GitChangeBranch>,
2434 mut cx: AsyncApp,
2435 ) -> Result<proto::Ack> {
2436 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2437 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2438 let branch_name = envelope.payload.branch_name;
2439
2440 repository_handle
2441 .update(&mut cx, |repository_handle, _| {
2442 repository_handle.change_branch(branch_name)
2443 })
2444 .await??;
2445
2446 Ok(proto::Ack {})
2447 }
2448
2449 async fn handle_rename_branch(
2450 this: Entity<Self>,
2451 envelope: TypedEnvelope<proto::GitRenameBranch>,
2452 mut cx: AsyncApp,
2453 ) -> Result<proto::Ack> {
2454 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2455 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2456 let branch = envelope.payload.branch;
2457 let new_name = envelope.payload.new_name;
2458
2459 repository_handle
2460 .update(&mut cx, |repository_handle, _| {
2461 repository_handle.rename_branch(branch, new_name)
2462 })
2463 .await??;
2464
2465 Ok(proto::Ack {})
2466 }
2467
2468 async fn handle_create_remote(
2469 this: Entity<Self>,
2470 envelope: TypedEnvelope<proto::GitCreateRemote>,
2471 mut cx: AsyncApp,
2472 ) -> Result<proto::Ack> {
2473 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2474 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2475 let remote_name = envelope.payload.remote_name;
2476 let remote_url = envelope.payload.remote_url;
2477
2478 repository_handle
2479 .update(&mut cx, |repository_handle, _| {
2480 repository_handle.create_remote(remote_name, remote_url)
2481 })
2482 .await??;
2483
2484 Ok(proto::Ack {})
2485 }
2486
2487 async fn handle_delete_branch(
2488 this: Entity<Self>,
2489 envelope: TypedEnvelope<proto::GitDeleteBranch>,
2490 mut cx: AsyncApp,
2491 ) -> Result<proto::Ack> {
2492 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2493 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2494 let branch_name = envelope.payload.branch_name;
2495
2496 repository_handle
2497 .update(&mut cx, |repository_handle, _| {
2498 repository_handle.delete_branch(branch_name)
2499 })
2500 .await??;
2501
2502 Ok(proto::Ack {})
2503 }
2504
2505 async fn handle_remove_remote(
2506 this: Entity<Self>,
2507 envelope: TypedEnvelope<proto::GitRemoveRemote>,
2508 mut cx: AsyncApp,
2509 ) -> Result<proto::Ack> {
2510 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2511 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2512 let remote_name = envelope.payload.remote_name;
2513
2514 repository_handle
2515 .update(&mut cx, |repository_handle, _| {
2516 repository_handle.remove_remote(remote_name)
2517 })
2518 .await??;
2519
2520 Ok(proto::Ack {})
2521 }
2522
    /// Looks up a single commit's metadata (sha, message, author, timestamp)
    /// and returns it in wire form.
    async fn handle_show(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitShow>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitCommitDetails> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.show(envelope.payload.commit)
            })
            .await??;
        Ok(proto::GitCommitDetails {
            sha: commit.sha.into(),
            message: commit.message.into(),
            commit_timestamp: commit.commit_timestamp,
            author_email: commit.author_email.into(),
            author_name: commit.author_name.into(),
        })
    }
2544
    /// Loads the full diff for a commit: for each changed file, the old and
    /// new text plus a binary-content flag are serialized for the client.
    async fn handle_load_commit_diff(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::LoadCommitDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::LoadCommitDiffResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let commit_diff = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.load_commit_diff(envelope.payload.commit)
            })
            .await??;
        Ok(proto::LoadCommitDiffResponse {
            files: commit_diff
                .files
                .into_iter()
                .map(|file| proto::CommitFile {
                    path: file.path.to_proto(),
                    old_text: file.old_text,
                    new_text: file.new_text,
                    is_binary: file.is_binary,
                })
                .collect(),
        })
    }
2571
    /// Returns one page of a file's commit history, using `skip`/`limit` for
    /// pagination. The response echoes the path reported by the repository.
    async fn handle_file_history(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitFileHistory>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitFileHistoryResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let path = RepoPath::from_proto(&envelope.payload.path)?;
        let skip = envelope.payload.skip as usize;
        // `limit` is optional: `None` means no page-size cap from the caller.
        let limit = envelope.payload.limit.map(|l| l as usize);

        let file_history = repository_handle
            .update(&mut cx, |repository_handle, _| {
                repository_handle.file_history_paginated(path, skip, limit)
            })
            .await??;

        Ok(proto::GitFileHistoryResponse {
            entries: file_history
                .entries
                .into_iter()
                .map(|entry| proto::FileHistoryEntry {
                    sha: entry.sha.to_string(),
                    subject: entry.subject.to_string(),
                    message: entry.message.to_string(),
                    commit_timestamp: entry.commit_timestamp,
                    author_name: entry.author_name.to_string(),
                    author_email: entry.author_email.to_string(),
                })
                .collect(),
            path: file_history.path.to_proto(),
        })
    }
2605
    /// Resets the repository to the given commit. Only soft and mixed resets
    /// are representable on the wire (the proto enum has no other variants).
    async fn handle_reset(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitReset>,
        mut cx: AsyncApp,
    ) -> Result<proto::Ack> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let mode = match envelope.payload.mode() {
            git_reset::ResetMode::Soft => ResetMode::Soft,
            git_reset::ResetMode::Mixed => ResetMode::Mixed,
        };

        repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.reset(envelope.payload.commit, mode, cx)
            })
            .await??;
        Ok(proto::Ack {})
    }
2626
2627 async fn handle_checkout_files(
2628 this: Entity<Self>,
2629 envelope: TypedEnvelope<proto::GitCheckoutFiles>,
2630 mut cx: AsyncApp,
2631 ) -> Result<proto::Ack> {
2632 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2633 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2634 let paths = envelope
2635 .payload
2636 .paths
2637 .iter()
2638 .map(|s| RepoPath::from_proto(s))
2639 .collect::<Result<Vec<_>>>()?;
2640
2641 repository_handle
2642 .update(&mut cx, |repository_handle, cx| {
2643 repository_handle.checkout_files(&envelope.payload.commit, paths, cx)
2644 })
2645 .await?;
2646 Ok(proto::Ack {})
2647 }
2648
    /// Opens (or creates) the commit-message buffer for a repository on behalf
    /// of a remote peer, replicates the buffer to that peer, and returns its
    /// remote id.
    async fn handle_open_commit_message_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::OpenCommitMessageBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenBufferResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let buffer = repository
            .update(&mut cx, |repository, cx| {
                repository.open_commit_buffer(None, this.read(cx).buffer_store.clone(), cx)
            })
            .await?;

        let buffer_id = buffer.read_with(&cx, |buffer, _| buffer.remote_id());
        this.update(&mut cx, |this, cx| {
            this.buffer_store.update(cx, |buffer_store, cx| {
                // Replicate to the original requester (falling back to the
                // direct sender) so it can edit the commit message locally.
                buffer_store
                    .create_buffer_for_peer(
                        &buffer,
                        envelope.original_sender_id.unwrap_or(envelope.sender_id),
                        cx,
                    )
                    .detach_and_log_err(cx);
            })
        });

        Ok(proto::OpenBufferResponse {
            buffer_id: buffer_id.to_proto(),
        })
    }
2679
    /// Answers an askpass prompt for an in-flight remote git operation by
    /// forwarding it to the delegate registered under `askpass_id`.
    async fn handle_askpass(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::AskPassRequest>,
        mut cx: AsyncApp,
    ) -> Result<proto::AskPassResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository = Self::repository_for_request(&this, repository_id, &mut cx)?;

        let delegates = cx.update(|cx| repository.read(cx).askpass_delegates.clone());
        // Take the delegate out of the map while prompting (so the lock isn't
        // held across the await); it is re-inserted below for later prompts.
        let Some(mut askpass) = delegates.lock().remove(&envelope.payload.askpass_id) else {
            debug_panic!("no askpass found");
            anyhow::bail!("no askpass found");
        };

        // `None` from the delegate means the user dismissed the prompt.
        let response = askpass
            .ask_password(envelope.payload.prompt)
            .await
            .ok_or_else(|| anyhow::anyhow!("askpass cancelled"))?;

        delegates
            .lock()
            .insert(envelope.payload.askpass_id, askpass);

        // In fact, we don't quite know what we're doing here, as we're sending askpass password unencrypted, but..
        Ok(proto::AskPassResponse {
            response: response.decrypt(IKnowWhatIAmDoingAndIHaveReadTheDocs)?,
        })
    }
2708
2709 async fn handle_check_for_pushed_commits(
2710 this: Entity<Self>,
2711 envelope: TypedEnvelope<proto::CheckForPushedCommits>,
2712 mut cx: AsyncApp,
2713 ) -> Result<proto::CheckForPushedCommitsResponse> {
2714 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2715 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2716
2717 let branches = repository_handle
2718 .update(&mut cx, |repository_handle, _| {
2719 repository_handle.check_for_pushed_commits()
2720 })
2721 .await??;
2722 Ok(proto::CheckForPushedCommitsResponse {
2723 pushed_to: branches
2724 .into_iter()
2725 .map(|commit| commit.to_string())
2726 .collect(),
2727 })
2728 }
2729
2730 async fn handle_git_diff(
2731 this: Entity<Self>,
2732 envelope: TypedEnvelope<proto::GitDiff>,
2733 mut cx: AsyncApp,
2734 ) -> Result<proto::GitDiffResponse> {
2735 let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
2736 let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
2737 let diff_type = match envelope.payload.diff_type() {
2738 proto::git_diff::DiffType::HeadToIndex => DiffType::HeadToIndex,
2739 proto::git_diff::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
2740 proto::git_diff::DiffType::MergeBase => {
2741 let base_ref = envelope
2742 .payload
2743 .merge_base_ref
2744 .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
2745 DiffType::MergeBase {
2746 base_ref: base_ref.into(),
2747 }
2748 }
2749 };
2750
2751 let mut diff = repository_handle
2752 .update(&mut cx, |repository_handle, cx| {
2753 repository_handle.diff(diff_type, cx)
2754 })
2755 .await??;
2756 const ONE_MB: usize = 1_000_000;
2757 if diff.len() > ONE_MB {
2758 diff = diff.chars().take(ONE_MB).collect()
2759 }
2760
2761 Ok(proto::GitDiffResponse { diff })
2762 }
2763
    /// Computes per-file added/deleted line counts for the requested diff type
    /// and serializes them as one entry per path.
    async fn handle_git_diff_stat(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GitDiffStat>,
        mut cx: AsyncApp,
    ) -> Result<proto::GitDiffStatResponse> {
        let repository_id = RepositoryId::from_proto(envelope.payload.repository_id);
        let repository_handle = Self::repository_for_request(&this, repository_id, &mut cx)?;
        let diff_type = match envelope.payload.diff_type() {
            proto::git_diff_stat::DiffType::HeadToIndex => DiffType::HeadToIndex,
            proto::git_diff_stat::DiffType::HeadToWorktree => DiffType::HeadToWorktree,
            proto::git_diff_stat::DiffType::MergeBase => {
                // A merge-base diff is meaningless without a base ref.
                let base_ref = envelope
                    .payload
                    .merge_base_ref
                    .ok_or_else(|| anyhow!("merge_base_ref is required for MergeBase diff type"))?;
                DiffType::MergeBase {
                    base_ref: base_ref.into(),
                }
            }
        };

        let stats = repository_handle
            .update(&mut cx, |repository_handle, cx| {
                repository_handle.diff_stat(diff_type, cx)
            })
            .await??;

        let entries = stats
            .into_iter()
            .map(|(path, stat)| proto::GitDiffStatEntry {
                path: path.to_proto(),
                added: stat.added,
                deleted: stat.deleted,
            })
            .collect();

        Ok(proto::GitDiffStatResponse { entries })
    }
2802
2803 async fn handle_tree_diff(
2804 this: Entity<Self>,
2805 request: TypedEnvelope<proto::GetTreeDiff>,
2806 mut cx: AsyncApp,
2807 ) -> Result<proto::GetTreeDiffResponse> {
2808 let repository_id = RepositoryId(request.payload.repository_id);
2809 let diff_type = if request.payload.is_merge {
2810 DiffTreeType::MergeBase {
2811 base: request.payload.base.into(),
2812 head: request.payload.head.into(),
2813 }
2814 } else {
2815 DiffTreeType::Since {
2816 base: request.payload.base.into(),
2817 head: request.payload.head.into(),
2818 }
2819 };
2820
2821 let diff = this
2822 .update(&mut cx, |this, cx| {
2823 let repository = this.repositories().get(&repository_id)?;
2824 Some(repository.update(cx, |repo, cx| repo.diff_tree(diff_type, cx)))
2825 })
2826 .context("missing repository")?
2827 .await??;
2828
2829 Ok(proto::GetTreeDiffResponse {
2830 entries: diff
2831 .entries
2832 .into_iter()
2833 .map(|(path, status)| proto::TreeDiffStatus {
2834 path: path.as_ref().to_proto(),
2835 status: match status {
2836 TreeDiffStatus::Added {} => proto::tree_diff_status::Status::Added.into(),
2837 TreeDiffStatus::Modified { .. } => {
2838 proto::tree_diff_status::Status::Modified.into()
2839 }
2840 TreeDiffStatus::Deleted { .. } => {
2841 proto::tree_diff_status::Status::Deleted.into()
2842 }
2843 },
2844 oid: match status {
2845 TreeDiffStatus::Deleted { old } | TreeDiffStatus::Modified { old } => {
2846 Some(old.to_string())
2847 }
2848 TreeDiffStatus::Added => None,
2849 },
2850 })
2851 .collect(),
2852 })
2853 }
2854
2855 async fn handle_get_blob_content(
2856 this: Entity<Self>,
2857 request: TypedEnvelope<proto::GetBlobContent>,
2858 mut cx: AsyncApp,
2859 ) -> Result<proto::GetBlobContentResponse> {
2860 let oid = git::Oid::from_str(&request.payload.oid)?;
2861 let repository_id = RepositoryId(request.payload.repository_id);
2862 let content = this
2863 .update(&mut cx, |this, cx| {
2864 let repository = this.repositories().get(&repository_id)?;
2865 Some(repository.update(cx, |repo, cx| repo.load_blob_content(oid, cx)))
2866 })
2867 .context("missing repository")?
2868 .await?;
2869 Ok(proto::GetBlobContentResponse { content })
2870 }
2871
    /// Opens the unstaged diff (index vs. working copy) for a buffer, records
    /// it as shared with the requesting peer, and returns the index base text.
    async fn handle_open_unstaged_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUnstagedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUnstagedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_unstaged_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Retain the diff for the requesting peer, keyed by peer id then
        // buffer id.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().unstaged = Some(diff.clone());
        });
        // May be `None` if the diff has no base text.
        let staged_text = diff.read_with(&cx, |diff, cx| diff.base_text_string(cx));
        Ok(proto::OpenUnstagedDiffResponse { staged_text })
    }
2895
    /// Opens the uncommitted diff (HEAD vs. working copy, with the unstaged
    /// diff attached as a secondary) for a buffer, records it as shared with
    /// the requesting peer, and serializes the base texts needed by the client.
    async fn handle_open_uncommitted_diff(
        this: Entity<Self>,
        request: TypedEnvelope<proto::OpenUncommittedDiff>,
        mut cx: AsyncApp,
    ) -> Result<proto::OpenUncommittedDiffResponse> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        let diff = this
            .update(&mut cx, |this, cx| {
                let buffer = this.buffer_store.read(cx).get(buffer_id)?;
                Some(this.open_uncommitted_diff(buffer, cx))
            })
            .context("missing buffer")?
            .await?;
        // Retain the diff for the requesting peer, keyed by peer id then
        // buffer id.
        this.update(&mut cx, |this, _| {
            let shared_diffs = this
                .shared_diffs
                .entry(request.original_sender_id.unwrap_or(request.sender_id))
                .or_default();
            shared_diffs.entry(buffer_id).or_default().uncommitted = Some(diff.clone());
        });
        Ok(diff.read_with(&cx, |diff, cx| {
            use proto::open_uncommitted_diff_response::Mode;

            // The secondary (unstaged) diff's base text is the index snapshot,
            // when one exists.
            let unstaged_diff = diff.secondary_diff();
            let index_snapshot = unstaged_diff.and_then(|diff| {
                let diff = diff.read(cx);
                diff.base_text_exists().then(|| diff.base_text(cx))
            });

            // Decide which base texts to send: when the index snapshot is the
            // same snapshot as HEAD's (equal remote ids), only the committed
            // text is sent and the mode tells the client to reuse it.
            let mode;
            let staged_text;
            let committed_text;
            if diff.base_text_exists() {
                let committed_snapshot = diff.base_text(cx);
                committed_text = Some(committed_snapshot.text());
                if let Some(index_text) = index_snapshot {
                    if index_text.remote_id() == committed_snapshot.remote_id() {
                        mode = Mode::IndexMatchesHead;
                        staged_text = None;
                    } else {
                        mode = Mode::IndexAndHead;
                        staged_text = Some(index_text.text());
                    }
                } else {
                    mode = Mode::IndexAndHead;
                    staged_text = None;
                }
            } else {
                // No HEAD base text: send whatever index text exists.
                mode = Mode::IndexAndHead;
                committed_text = None;
                staged_text = index_snapshot.as_ref().map(|buffer| buffer.text());
            }

            proto::OpenUncommittedDiffResponse {
                committed_text,
                staged_text,
                mode: mode.into(),
            }
        }))
    }
2956
    /// Applies new diff base texts (pushed by the remote side) to a buffer's
    /// diff state. Silently does nothing if the buffer or its diff state no
    /// longer exists locally.
    async fn handle_update_diff_bases(
        this: Entity<Self>,
        request: TypedEnvelope<proto::UpdateDiffBases>,
        mut cx: AsyncApp,
    ) -> Result<()> {
        let buffer_id = BufferId::new(request.payload.buffer_id)?;
        this.update(&mut cx, |this, cx| {
            if let Some(diff_state) = this.diffs.get_mut(&buffer_id)
                && let Some(buffer) = this.buffer_store.read(cx).get(buffer_id)
            {
                let buffer = buffer.read(cx).text_snapshot();
                diff_state.update(cx, |diff_state, cx| {
                    diff_state.handle_base_texts_updated(buffer, request.payload, cx);
                })
            }
        });
        Ok(())
    }
2975
    /// Computes git blame for a buffer at the requested version, first waiting
    /// for the local replica to catch up to that version.
    async fn handle_blame_buffer(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::BlameBuffer>,
        mut cx: AsyncApp,
    ) -> Result<proto::BlameBufferResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        let version = deserialize_version(&envelope.payload.version);
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        // Don't blame until this replica has seen all edits up to `version`.
        buffer
            .update(&mut cx, |buffer, _| {
                buffer.wait_for_version(version.clone())
            })
            .await?;
        let blame = this
            .update(&mut cx, |this, cx| {
                this.blame_buffer(&buffer, Some(version), cx)
            })
            .await?;
        Ok(serialize_blame_buffer_response(blame))
    }
2998
    /// Builds a permalink URL to the selected line range of a buffer's file on
    /// its git hosting provider.
    async fn handle_get_permalink_to_line(
        this: Entity<Self>,
        envelope: TypedEnvelope<proto::GetPermalinkToLine>,
        mut cx: AsyncApp,
    ) -> Result<proto::GetPermalinkToLineResponse> {
        let buffer_id = BufferId::new(envelope.payload.buffer_id)?;
        // NOTE(review): unlike `handle_blame_buffer`, the payload's version is
        // neither deserialized nor awaited here, so the permalink is computed
        // against the buffer's current state — confirm this is intended.
        let selection = {
            let proto_selection = envelope
                .payload
                .selection
                .context("no selection to get permalink for defined")?;
            proto_selection.start as u32..proto_selection.end as u32
        };
        let buffer = this.read_with(&cx, |this, cx| {
            this.buffer_store.read(cx).get_existing(buffer_id)
        })?;
        let permalink = this
            .update(&mut cx, |this, cx| {
                this.get_permalink_to_line(&buffer, selection, cx)
            })
            .await?;
        Ok(proto::GetPermalinkToLineResponse {
            permalink: permalink.to_string(),
        })
    }
3025
3026 fn repository_for_request(
3027 this: &Entity<Self>,
3028 id: RepositoryId,
3029 cx: &mut AsyncApp,
3030 ) -> Result<Entity<Repository>> {
3031 this.read_with(cx, |this, _| {
3032 this.repositories
3033 .get(&id)
3034 .context("missing repository handle")
3035 .cloned()
3036 })
3037 }
3038
3039 pub fn repo_snapshots(&self, cx: &App) -> HashMap<RepositoryId, RepositorySnapshot> {
3040 self.repositories
3041 .iter()
3042 .map(|(id, repo)| (*id, repo.read(cx).snapshot.clone()))
3043 .collect()
3044 }
3045
    /// Groups a batch of updated worktree entries by the repository that
    /// contains them, converting each entry's absolute path into a
    /// repo-relative `RepoPath`. A path contained in nested repositories is
    /// attributed to the innermost repository only. All path matching runs on
    /// the background executor.
    fn process_updated_entries(
        &self,
        worktree: &Entity<Worktree>,
        updated_entries: &[(Arc<RelPath>, ProjectEntryId, PathChange)],
        cx: &mut App,
    ) -> Task<HashMap<Entity<Repository>, Vec<RepoPath>>> {
        let path_style = worktree.read(cx).path_style();
        // Pair each repository with its work-directory absolute path; these are
        // the prefixes the updated paths will be matched against.
        let mut repo_paths = self
            .repositories
            .values()
            .map(|repo| (repo.read(cx).work_directory_abs_path.clone(), repo.clone()))
            .collect::<Vec<_>>();
        let mut entries: Vec<_> = updated_entries
            .iter()
            .map(|(path, _, _)| path.clone())
            .collect();
        // Sorting enables the binary search (`partition_point`) and the
        // contiguous-range scan below.
        entries.sort();
        let worktree = worktree.read(cx);

        let entries = entries
            .into_iter()
            .map(|path| worktree.absolutize(&path))
            .collect::<Arc<[_]>>();

        let executor = cx.background_executor().clone();
        cx.background_executor().spawn(async move {
            // Sort repositories by work-directory path so that a nested repo
            // sorts after its containing repo.
            repo_paths.sort_by(|lhs, rhs| lhs.0.cmp(&rhs.0));
            let mut paths_by_git_repo = HashMap::<_, Vec<_>>::default();
            let mut tasks = FuturesOrdered::new();
            for (repo_path, repo) in repo_paths.into_iter().rev() {
                let entries = entries.clone();
                // One background task per repository; `repo_path` here is the
                // repository's work-directory *absolute* path.
                let task = executor.spawn(async move {
                    // Find all repository paths that belong to this repo
                    let mut ix = entries.partition_point(|path| path < &*repo_path);
                    if ix == entries.len() {
                        return None;
                    };

                    let mut paths = Vec::new();
                    // All paths prefixed by a given repo will constitute a continuous range.
                    while let Some(path) = entries.get(ix)
                        && let Some(repo_path) = RepositorySnapshot::abs_path_to_repo_path_inner(
                            &repo_path, path, path_style,
                        )
                    {
                        // Remember the entry index so duplicates across nested
                        // repos can be filtered out below.
                        paths.push((repo_path, ix));
                        ix += 1;
                    }
                    if paths.is_empty() {
                        None
                    } else {
                        Some((repo, paths))
                    }
                });
                tasks.push_back(task);
            }

            // Now, let's filter out the "duplicate" entries that were processed by multiple distinct repos.
            let mut path_was_used = vec![false; entries.len()];
            let tasks = tasks.collect::<Vec<_>>().await;
            // Process tasks from the back: iterating backwards allows us to see more-specific paths first.
            // We always want to assign a path to it's innermost repository.
            for t in tasks {
                let Some((repo, paths)) = t else {
                    continue;
                };
                let entry = paths_by_git_repo.entry(repo).or_default();
                for (repo_path, ix) in paths {
                    if path_was_used[ix] {
                        continue;
                    }
                    path_was_used[ix] = true;
                    entry.push(repo_path);
                }
            }

            paths_by_git_repo
        })
    }
3125}
3126
3127impl BufferGitState {
3128 fn new(_git_store: WeakEntity<GitStore>) -> Self {
3129 Self {
3130 unstaged_diff: Default::default(),
3131 uncommitted_diff: Default::default(),
3132 oid_diffs: Default::default(),
3133 recalculate_diff_task: Default::default(),
3134 language: Default::default(),
3135 language_registry: Default::default(),
3136 recalculating_tx: postage::watch::channel_with(false).0,
3137 hunk_staging_operation_count: 0,
3138 hunk_staging_operation_count_as_of_write: 0,
3139 head_text: Default::default(),
3140 index_text: Default::default(),
3141 oid_texts: Default::default(),
3142 head_changed: Default::default(),
3143 index_changed: Default::default(),
3144 language_changed: Default::default(),
3145 conflict_updated_futures: Default::default(),
3146 conflict_set: Default::default(),
3147 reparse_conflict_markers_task: Default::default(),
3148 }
3149 }
3150
3151 #[ztracing::instrument(skip_all)]
3152 fn buffer_language_changed(&mut self, buffer: Entity<Buffer>, cx: &mut Context<Self>) {
3153 self.language = buffer.read(cx).language().cloned();
3154 self.language_changed = true;
3155 let _ = self.recalculate_diffs(buffer.read(cx).text_snapshot(), cx);
3156 }
3157
    /// Re-parses git conflict markers in `buffer` on a background thread and
    /// publishes the new snapshot to the associated [`ConflictSet`].
    ///
    /// The returned receiver resolves once the conflict set has been updated.
    /// When there is no live conflict set, or no conflicts were previously
    /// present, no work is scheduled and the receiver is simply canceled
    /// (its sender is dropped).
    fn reparse_conflict_markers(
        &mut self,
        buffer: text::BufferSnapshot,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<()> {
        let (tx, rx) = oneshot::channel();

        let Some(conflict_set) = self
            .conflict_set
            .as_ref()
            .and_then(|conflict_set| conflict_set.upgrade())
        else {
            return rx;
        };

        // Only reparse when conflicts were present before; otherwise there is
        // no old snapshot to compare a new parse against.
        let old_snapshot = conflict_set.read_with(cx, |conflict_set, _| {
            if conflict_set.has_conflict {
                Some(conflict_set.snapshot())
            } else {
                None
            }
        });

        if let Some(old_snapshot) = old_snapshot {
            self.conflict_updated_futures.push(tx);
            // Replacing `reparse_conflict_markers_task` cancels any previous
            // in-flight reparse.
            self.reparse_conflict_markers_task = Some(cx.spawn(async move |this, cx| {
                // Parse and diff against the old snapshot off the main thread.
                let (snapshot, changed_range) = cx
                    .background_spawn(async move {
                        let new_snapshot = ConflictSet::parse(&buffer);
                        let changed_range = old_snapshot.compare(&new_snapshot, &buffer);
                        (new_snapshot, changed_range)
                    })
                    .await;
                this.update(cx, |this, cx| {
                    if let Some(conflict_set) = &this.conflict_set {
                        conflict_set
                            .update(cx, |conflict_set, cx| {
                                conflict_set.set_snapshot(snapshot, changed_range, cx);
                            })
                            .ok();
                    }
                    // Resolve every waiter, including ones queued by earlier
                    // reparse requests that this run superseded.
                    let futures = std::mem::take(&mut this.conflict_updated_futures);
                    for tx in futures {
                        tx.send(()).ok();
                    }
                })
            }))
        }

        rx
    }
3209
3210 fn unstaged_diff(&self) -> Option<Entity<BufferDiff>> {
3211 self.unstaged_diff.as_ref().and_then(|set| set.upgrade())
3212 }
3213
3214 fn uncommitted_diff(&self) -> Option<Entity<BufferDiff>> {
3215 self.uncommitted_diff.as_ref().and_then(|set| set.upgrade())
3216 }
3217
3218 fn oid_diff(&self, oid: Option<git::Oid>) -> Option<Entity<BufferDiff>> {
3219 self.oid_diffs.get(&oid).and_then(|weak| weak.upgrade())
3220 }
3221
3222 fn handle_base_texts_updated(
3223 &mut self,
3224 buffer: text::BufferSnapshot,
3225 message: proto::UpdateDiffBases,
3226 cx: &mut Context<Self>,
3227 ) {
3228 use proto::update_diff_bases::Mode;
3229
3230 let Some(mode) = Mode::from_i32(message.mode) else {
3231 return;
3232 };
3233
3234 let diff_bases_change = match mode {
3235 Mode::HeadOnly => DiffBasesChange::SetHead(message.committed_text),
3236 Mode::IndexOnly => DiffBasesChange::SetIndex(message.staged_text),
3237 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(message.committed_text),
3238 Mode::IndexAndHead => DiffBasesChange::SetEach {
3239 index: message.staged_text,
3240 head: message.committed_text,
3241 },
3242 };
3243
3244 self.diff_bases_changed(buffer, Some(diff_bases_change), cx);
3245 }
3246
3247 pub fn wait_for_recalculation(&mut self) -> Option<impl Future<Output = ()> + use<>> {
3248 if *self.recalculating_tx.borrow() {
3249 let mut rx = self.recalculating_tx.subscribe();
3250 Some(async move {
3251 loop {
3252 let is_recalculating = rx.recv().await;
3253 if is_recalculating != Some(true) {
3254 break;
3255 }
3256 }
3257 })
3258 } else {
3259 None
3260 }
3261 }
3262
3263 fn diff_bases_changed(
3264 &mut self,
3265 buffer: text::BufferSnapshot,
3266 diff_bases_change: Option<DiffBasesChange>,
3267 cx: &mut Context<Self>,
3268 ) {
3269 match diff_bases_change {
3270 Some(DiffBasesChange::SetIndex(index)) => {
3271 self.index_text = index.map(|mut index| {
3272 text::LineEnding::normalize(&mut index);
3273 Arc::from(index.as_str())
3274 });
3275 self.index_changed = true;
3276 }
3277 Some(DiffBasesChange::SetHead(head)) => {
3278 self.head_text = head.map(|mut head| {
3279 text::LineEnding::normalize(&mut head);
3280 Arc::from(head.as_str())
3281 });
3282 self.head_changed = true;
3283 }
3284 Some(DiffBasesChange::SetBoth(text)) => {
3285 let text = text.map(|mut text| {
3286 text::LineEnding::normalize(&mut text);
3287 Arc::from(text.as_str())
3288 });
3289 self.head_text = text.clone();
3290 self.index_text = text;
3291 self.head_changed = true;
3292 self.index_changed = true;
3293 }
3294 Some(DiffBasesChange::SetEach { index, head }) => {
3295 self.index_text = index.map(|mut index| {
3296 text::LineEnding::normalize(&mut index);
3297 Arc::from(index.as_str())
3298 });
3299 self.index_changed = true;
3300 self.head_text = head.map(|mut head| {
3301 text::LineEnding::normalize(&mut head);
3302 Arc::from(head.as_str())
3303 });
3304 self.head_changed = true;
3305 }
3306 None => {}
3307 }
3308
3309 self.recalculate_diffs(buffer, cx)
3310 }
3311
3312 #[ztracing::instrument(skip_all)]
3313 fn recalculate_diffs(&mut self, buffer: text::BufferSnapshot, cx: &mut Context<Self>) {
3314 *self.recalculating_tx.borrow_mut() = true;
3315
3316 let language = self.language.clone();
3317 let language_registry = self.language_registry.clone();
3318 let unstaged_diff = self.unstaged_diff();
3319 let uncommitted_diff = self.uncommitted_diff();
3320 let head = self.head_text.clone();
3321 let index = self.index_text.clone();
3322 let index_changed = self.index_changed;
3323 let head_changed = self.head_changed;
3324 let language_changed = self.language_changed;
3325 let prev_hunk_staging_operation_count = self.hunk_staging_operation_count_as_of_write;
3326 let index_matches_head = match (self.index_text.as_ref(), self.head_text.as_ref()) {
3327 (Some(index), Some(head)) => Arc::ptr_eq(index, head),
3328 (None, None) => true,
3329 _ => false,
3330 };
3331
3332 let oid_diffs: Vec<(Option<git::Oid>, Entity<BufferDiff>, Option<Arc<str>>)> = self
3333 .oid_diffs
3334 .iter()
3335 .filter_map(|(oid, weak)| {
3336 let base_text = oid.and_then(|oid| self.oid_texts.get(&oid).cloned());
3337 weak.upgrade().map(|diff| (*oid, diff, base_text))
3338 })
3339 .collect();
3340
3341 self.oid_diffs.retain(|oid, weak| {
3342 let alive = weak.upgrade().is_some();
3343 if !alive {
3344 if let Some(oid) = oid {
3345 self.oid_texts.remove(oid);
3346 }
3347 }
3348 alive
3349 });
3350 self.recalculate_diff_task = Some(cx.spawn(async move |this, cx| {
3351 log::debug!(
3352 "start recalculating diffs for buffer {}",
3353 buffer.remote_id()
3354 );
3355
3356 let mut new_unstaged_diff = None;
3357 if let Some(unstaged_diff) = &unstaged_diff {
3358 new_unstaged_diff = Some(
3359 cx.update(|cx| {
3360 unstaged_diff.read(cx).update_diff(
3361 buffer.clone(),
3362 index,
3363 index_changed.then_some(false),
3364 language.clone(),
3365 cx,
3366 )
3367 })
3368 .await,
3369 );
3370 }
3371
3372 // Dropping BufferDiff can be expensive, so yield back to the event loop
3373 // for a bit
3374 yield_now().await;
3375
3376 let mut new_uncommitted_diff = None;
3377 if let Some(uncommitted_diff) = &uncommitted_diff {
3378 new_uncommitted_diff = if index_matches_head {
3379 new_unstaged_diff.clone()
3380 } else {
3381 Some(
3382 cx.update(|cx| {
3383 uncommitted_diff.read(cx).update_diff(
3384 buffer.clone(),
3385 head,
3386 head_changed.then_some(true),
3387 language.clone(),
3388 cx,
3389 )
3390 })
3391 .await,
3392 )
3393 }
3394 }
3395
3396 // Dropping BufferDiff can be expensive, so yield back to the event loop
3397 // for a bit
3398 yield_now().await;
3399
3400 let cancel = this.update(cx, |this, _| {
3401 // This checks whether all pending stage/unstage operations
3402 // have quiesced (i.e. both the corresponding write and the
3403 // read of that write have completed). If not, then we cancel
3404 // this recalculation attempt to avoid invalidating pending
3405 // state too quickly; another recalculation will come along
3406 // later and clear the pending state once the state of the index has settled.
3407 if this.hunk_staging_operation_count > prev_hunk_staging_operation_count {
3408 *this.recalculating_tx.borrow_mut() = false;
3409 true
3410 } else {
3411 false
3412 }
3413 })?;
3414 if cancel {
3415 log::debug!(
3416 concat!(
3417 "aborting recalculating diffs for buffer {}",
3418 "due to subsequent hunk operations",
3419 ),
3420 buffer.remote_id()
3421 );
3422 return Ok(());
3423 }
3424
3425 let unstaged_changed_range = if let Some((unstaged_diff, new_unstaged_diff)) =
3426 unstaged_diff.as_ref().zip(new_unstaged_diff.clone())
3427 {
3428 let task = unstaged_diff.update(cx, |diff, cx| {
3429 // For git index buffer we skip assigning the language as we do not really need to perform any syntax highlighting on
3430 // it. As a result, by skipping it we are potentially shaving off a lot of RSS plus we get a snappier feel for large diff
3431 // view multibuffers.
3432 diff.set_snapshot(new_unstaged_diff, &buffer, cx)
3433 });
3434 Some(task.await)
3435 } else {
3436 None
3437 };
3438
3439 yield_now().await;
3440
3441 if let Some((uncommitted_diff, new_uncommitted_diff)) =
3442 uncommitted_diff.as_ref().zip(new_uncommitted_diff.clone())
3443 {
3444 uncommitted_diff
3445 .update(cx, |diff, cx| {
3446 if language_changed {
3447 diff.language_changed(language.clone(), language_registry, cx);
3448 }
3449 diff.set_snapshot_with_secondary(
3450 new_uncommitted_diff,
3451 &buffer,
3452 unstaged_changed_range.flatten(),
3453 true,
3454 cx,
3455 )
3456 })
3457 .await;
3458 }
3459
3460 yield_now().await;
3461
3462 for (oid, oid_diff, base_text) in oid_diffs {
3463 let new_oid_diff = cx
3464 .update(|cx| {
3465 oid_diff.read(cx).update_diff(
3466 buffer.clone(),
3467 base_text,
3468 None,
3469 language.clone(),
3470 cx,
3471 )
3472 })
3473 .await;
3474
3475 oid_diff
3476 .update(cx, |diff, cx| diff.set_snapshot(new_oid_diff, &buffer, cx))
3477 .await;
3478
3479 log::debug!(
3480 "finished recalculating oid diff for buffer {} oid {:?}",
3481 buffer.remote_id(),
3482 oid
3483 );
3484
3485 yield_now().await;
3486 }
3487
3488 log::debug!(
3489 "finished recalculating diffs for buffer {}",
3490 buffer.remote_id()
3491 );
3492
3493 if let Some(this) = this.upgrade() {
3494 this.update(cx, |this, _| {
3495 this.index_changed = false;
3496 this.head_changed = false;
3497 this.language_changed = false;
3498 *this.recalculating_tx.borrow_mut() = false;
3499 });
3500 }
3501
3502 Ok(())
3503 }));
3504 }
3505}
3506
/// Builds an askpass delegate that forwards credential prompts for a git
/// operation to the downstream client over RPC and relays the (encrypted)
/// answer back through `tx`.
fn make_remote_delegate(
    this: Entity<GitStore>,
    project_id: u64,
    repository_id: RepositoryId,
    askpass_id: u64,
    cx: &mut AsyncApp,
) -> AskPassDelegate {
    AskPassDelegate::new(cx, move |prompt, tx, cx| {
        this.update(cx, |this, cx| {
            // Without a downstream client there is nowhere to forward the
            // prompt; dropping `tx` cancels this askpass attempt.
            let Some((client, _)) = this.downstream_client() else {
                return;
            };
            let response = client.request(proto::AskPassRequest {
                project_id,
                repository_id: repository_id.to_proto(),
                askpass_id,
                prompt,
            });
            cx.spawn(async move |_, _| {
                let mut response = response.await?.response;
                tx.send(EncryptedPassword::try_from(response.as_ref())?)
                    .ok();
                // Scrub the plaintext copy of the secret from memory.
                response.zeroize();
                anyhow::Ok(())
            })
            .detach_and_log_err(cx);
        });
    })
}
3536
3537impl RepositoryId {
3538 pub fn to_proto(self) -> u64 {
3539 self.0
3540 }
3541
3542 pub fn from_proto(id: u64) -> Self {
3543 RepositoryId(id)
3544 }
3545}
3546
3547impl RepositorySnapshot {
3548 fn empty(
3549 id: RepositoryId,
3550 work_directory_abs_path: Arc<Path>,
3551 original_repo_abs_path: Option<Arc<Path>>,
3552 path_style: PathStyle,
3553 ) -> Self {
3554 Self {
3555 id,
3556 statuses_by_path: Default::default(),
3557 original_repo_abs_path: original_repo_abs_path
3558 .unwrap_or_else(|| work_directory_abs_path.clone()),
3559 work_directory_abs_path,
3560 branch: None,
3561 head_commit: None,
3562 scan_id: 0,
3563 merge: Default::default(),
3564 remote_origin_url: None,
3565 remote_upstream_url: None,
3566 stash_entries: Default::default(),
3567 path_style,
3568 }
3569 }
3570
3571 fn initial_update(&self, project_id: u64) -> proto::UpdateRepository {
3572 proto::UpdateRepository {
3573 branch_summary: self.branch.as_ref().map(branch_to_proto),
3574 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3575 updated_statuses: self
3576 .statuses_by_path
3577 .iter()
3578 .map(|entry| entry.to_proto())
3579 .collect(),
3580 removed_statuses: Default::default(),
3581 current_merge_conflicts: self
3582 .merge
3583 .merge_heads_by_conflicted_path
3584 .iter()
3585 .map(|(repo_path, _)| repo_path.to_proto())
3586 .collect(),
3587 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3588 project_id,
3589 id: self.id.to_proto(),
3590 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3591 entry_ids: vec![self.id.to_proto()],
3592 scan_id: self.scan_id,
3593 is_last_update: true,
3594 stash_entries: self
3595 .stash_entries
3596 .entries
3597 .iter()
3598 .map(stash_to_proto)
3599 .collect(),
3600 remote_upstream_url: self.remote_upstream_url.clone(),
3601 remote_origin_url: self.remote_origin_url.clone(),
3602 original_repo_abs_path: Some(
3603 self.original_repo_abs_path.to_string_lossy().into_owned(),
3604 ),
3605 }
3606 }
3607
3608 fn build_update(&self, old: &Self, project_id: u64) -> proto::UpdateRepository {
3609 let mut updated_statuses: Vec<proto::StatusEntry> = Vec::new();
3610 let mut removed_statuses: Vec<String> = Vec::new();
3611
3612 let mut new_statuses = self.statuses_by_path.iter().peekable();
3613 let mut old_statuses = old.statuses_by_path.iter().peekable();
3614
3615 let mut current_new_entry = new_statuses.next();
3616 let mut current_old_entry = old_statuses.next();
3617 loop {
3618 match (current_new_entry, current_old_entry) {
3619 (Some(new_entry), Some(old_entry)) => {
3620 match new_entry.repo_path.cmp(&old_entry.repo_path) {
3621 Ordering::Less => {
3622 updated_statuses.push(new_entry.to_proto());
3623 current_new_entry = new_statuses.next();
3624 }
3625 Ordering::Equal => {
3626 if new_entry.status != old_entry.status {
3627 updated_statuses.push(new_entry.to_proto());
3628 }
3629 current_old_entry = old_statuses.next();
3630 current_new_entry = new_statuses.next();
3631 }
3632 Ordering::Greater => {
3633 removed_statuses.push(old_entry.repo_path.to_proto());
3634 current_old_entry = old_statuses.next();
3635 }
3636 }
3637 }
3638 (None, Some(old_entry)) => {
3639 removed_statuses.push(old_entry.repo_path.to_proto());
3640 current_old_entry = old_statuses.next();
3641 }
3642 (Some(new_entry), None) => {
3643 updated_statuses.push(new_entry.to_proto());
3644 current_new_entry = new_statuses.next();
3645 }
3646 (None, None) => break,
3647 }
3648 }
3649
3650 proto::UpdateRepository {
3651 branch_summary: self.branch.as_ref().map(branch_to_proto),
3652 head_commit_details: self.head_commit.as_ref().map(commit_details_to_proto),
3653 updated_statuses,
3654 removed_statuses,
3655 current_merge_conflicts: self
3656 .merge
3657 .merge_heads_by_conflicted_path
3658 .iter()
3659 .map(|(path, _)| path.to_proto())
3660 .collect(),
3661 merge_message: self.merge.message.as_ref().map(|msg| msg.to_string()),
3662 project_id,
3663 id: self.id.to_proto(),
3664 abs_path: self.work_directory_abs_path.to_string_lossy().into_owned(),
3665 entry_ids: vec![],
3666 scan_id: self.scan_id,
3667 is_last_update: true,
3668 stash_entries: self
3669 .stash_entries
3670 .entries
3671 .iter()
3672 .map(stash_to_proto)
3673 .collect(),
3674 remote_upstream_url: self.remote_upstream_url.clone(),
3675 remote_origin_url: self.remote_origin_url.clone(),
3676 original_repo_abs_path: Some(
3677 self.original_repo_abs_path.to_string_lossy().into_owned(),
3678 ),
3679 }
3680 }
3681
3682 pub fn status(&self) -> impl Iterator<Item = StatusEntry> + '_ {
3683 self.statuses_by_path.iter().cloned()
3684 }
3685
3686 pub fn status_summary(&self) -> GitSummary {
3687 self.statuses_by_path.summary().item_summary
3688 }
3689
3690 pub fn status_for_path(&self, path: &RepoPath) -> Option<StatusEntry> {
3691 self.statuses_by_path
3692 .get(&PathKey(path.as_ref().clone()), ())
3693 .cloned()
3694 }
3695
3696 pub fn abs_path_to_repo_path(&self, abs_path: &Path) -> Option<RepoPath> {
3697 Self::abs_path_to_repo_path_inner(&self.work_directory_abs_path, abs_path, self.path_style)
3698 }
3699
3700 fn repo_path_to_abs_path(&self, repo_path: &RepoPath) -> PathBuf {
3701 self.path_style
3702 .join(&self.work_directory_abs_path, repo_path.as_std_path())
3703 .unwrap()
3704 .into()
3705 }
3706
3707 #[inline]
3708 fn abs_path_to_repo_path_inner(
3709 work_directory_abs_path: &Path,
3710 abs_path: &Path,
3711 path_style: PathStyle,
3712 ) -> Option<RepoPath> {
3713 let rel_path = path_style.strip_prefix(abs_path, work_directory_abs_path)?;
3714 Some(RepoPath::from_rel_path(&rel_path))
3715 }
3716
3717 pub fn had_conflict_on_last_merge_head_change(&self, repo_path: &RepoPath) -> bool {
3718 self.merge
3719 .merge_heads_by_conflicted_path
3720 .contains_key(repo_path)
3721 }
3722
3723 pub fn has_conflict(&self, repo_path: &RepoPath) -> bool {
3724 let had_conflict_on_last_merge_head_change = self
3725 .merge
3726 .merge_heads_by_conflicted_path
3727 .contains_key(repo_path);
3728 let has_conflict_currently = self
3729 .status_for_path(repo_path)
3730 .is_some_and(|entry| entry.status.is_conflicted());
3731 had_conflict_on_last_merge_head_change || has_conflict_currently
3732 }
3733
3734 /// This is the name that will be displayed in the repository selector for this repository.
3735 pub fn display_name(&self) -> SharedString {
3736 self.work_directory_abs_path
3737 .file_name()
3738 .unwrap_or_default()
3739 .to_string_lossy()
3740 .to_string()
3741 .into()
3742 }
3743}
3744
3745pub fn stash_to_proto(entry: &StashEntry) -> proto::StashEntry {
3746 proto::StashEntry {
3747 oid: entry.oid.as_bytes().to_vec(),
3748 message: entry.message.clone(),
3749 branch: entry.branch.clone(),
3750 index: entry.index as u64,
3751 timestamp: entry.timestamp,
3752 }
3753}
3754
3755pub fn proto_to_stash(entry: &proto::StashEntry) -> Result<StashEntry> {
3756 Ok(StashEntry {
3757 oid: Oid::from_bytes(&entry.oid)?,
3758 message: entry.message.clone(),
3759 index: entry.index as usize,
3760 branch: entry.branch.clone(),
3761 timestamp: entry.timestamp,
3762 })
3763}
3764
3765impl MergeDetails {
3766 async fn update(
3767 &mut self,
3768 backend: &Arc<dyn GitRepository>,
3769 current_conflicted_paths: Vec<RepoPath>,
3770 ) -> Result<bool> {
3771 log::debug!("load merge details");
3772 self.message = backend.merge_message().await.map(SharedString::from);
3773 let heads = backend
3774 .revparse_batch(vec![
3775 "MERGE_HEAD".into(),
3776 "CHERRY_PICK_HEAD".into(),
3777 "REBASE_HEAD".into(),
3778 "REVERT_HEAD".into(),
3779 "APPLY_HEAD".into(),
3780 ])
3781 .await
3782 .log_err()
3783 .unwrap_or_default()
3784 .into_iter()
3785 .map(|opt| opt.map(SharedString::from))
3786 .collect::<Vec<_>>();
3787
3788 let mut conflicts_changed = false;
3789
3790 // Record the merge state for newly conflicted paths
3791 for path in ¤t_conflicted_paths {
3792 if self.merge_heads_by_conflicted_path.get(&path).is_none() {
3793 conflicts_changed = true;
3794 self.merge_heads_by_conflicted_path
3795 .insert(path.clone(), heads.clone());
3796 }
3797 }
3798
3799 // Clear state for paths that are no longer conflicted and for which the merge heads have changed
3800 self.merge_heads_by_conflicted_path
3801 .retain(|path, old_merge_heads| {
3802 let keep = current_conflicted_paths.contains(path)
3803 || (old_merge_heads == &heads
3804 && old_merge_heads.iter().any(|head| head.is_some()));
3805 if !keep {
3806 conflicts_changed = true;
3807 }
3808 keep
3809 });
3810
3811 Ok(conflicts_changed)
3812 }
3813}
3814
3815impl Repository {
3816 pub fn is_trusted(&self) -> bool {
3817 match self.repository_state.peek() {
3818 Some(Ok(RepositoryState::Local(state))) => state.backend.is_trusted(),
3819 _ => false,
3820 }
3821 }
3822
3823 pub fn snapshot(&self) -> RepositorySnapshot {
3824 self.snapshot.clone()
3825 }
3826
3827 pub fn pending_ops(&self) -> impl Iterator<Item = PendingOps> + '_ {
3828 self.pending_ops.iter().cloned()
3829 }
3830
3831 pub fn pending_ops_summary(&self) -> PathSummary<PendingOpsSummary> {
3832 self.pending_ops.summary().clone()
3833 }
3834
3835 pub fn pending_ops_for_path(&self, path: &RepoPath) -> Option<PendingOps> {
3836 self.pending_ops
3837 .get(&PathKey(path.as_ref().clone()), ())
3838 .cloned()
3839 }
3840
    /// Builds a repository handle backed by a local git repository on disk.
    ///
    /// Backend initialization runs asynchronously; `repository_state` is a
    /// shared future that queued git jobs await before executing.
    fn local(
        id: RepositoryId,
        work_directory_abs_path: Arc<Path>,
        original_repo_abs_path: Arc<Path>,
        dot_git_abs_path: Arc<Path>,
        project_environment: WeakEntity<ProjectEnvironment>,
        fs: Arc<dyn Fs>,
        is_trusted: bool,
        git_store: WeakEntity<GitStore>,
        cx: &mut Context<Self>,
    ) -> Self {
        let snapshot = RepositorySnapshot::empty(
            id,
            work_directory_abs_path.clone(),
            Some(original_repo_abs_path),
            PathStyle::local(),
        );
        // Errors are stringified so the future's output is `Clone`, which
        // `Shared` requires.
        let state = cx
            .spawn(async move |_, cx| {
                LocalRepositoryState::new(
                    work_directory_abs_path,
                    dot_git_abs_path,
                    project_environment,
                    fs,
                    is_trusted,
                    cx,
                )
                .await
                .map_err(|err| err.to_string())
            })
            .shared();
        // The worker gets the raw local-state future; the handle itself keeps
        // a wrapper yielding the `RepositoryState::Local` variant.
        let job_sender = Repository::spawn_local_git_worker(state.clone(), cx);
        let state = cx
            .spawn(async move |_, _| {
                let state = state.await?;
                Ok(RepositoryState::Local(state))
            })
            .shared();

        // Invalidate cached commit-graph data when the branch changes after
        // the initial scan.
        cx.subscribe_self(move |this, event: &RepositoryEvent, _| match event {
            RepositoryEvent::BranchChanged => {
                if this.scan_id > 1 {
                    this.initial_graph_data.clear();
                }
            }
            _ => {}
        })
        .detach();

        Repository {
            this: cx.weak_entity(),
            git_store,
            snapshot,
            pending_ops: Default::default(),
            repository_state: state,
            commit_message_buffer: None,
            askpass_delegates: Default::default(),
            paths_needing_status_update: Default::default(),
            latest_askpass_id: 0,
            job_sender,
            job_id: 0,
            active_jobs: Default::default(),
            initial_graph_data: Default::default(),
            commit_data: Default::default(),
            graph_commit_data_handler: GraphCommitHandlerState::Closed,
        }
    }
3908
3909 fn remote(
3910 id: RepositoryId,
3911 work_directory_abs_path: Arc<Path>,
3912 original_repo_abs_path: Option<Arc<Path>>,
3913 path_style: PathStyle,
3914 project_id: ProjectId,
3915 client: AnyProtoClient,
3916 git_store: WeakEntity<GitStore>,
3917 cx: &mut Context<Self>,
3918 ) -> Self {
3919 let snapshot = RepositorySnapshot::empty(
3920 id,
3921 work_directory_abs_path,
3922 original_repo_abs_path,
3923 path_style,
3924 );
3925 let repository_state = RemoteRepositoryState { project_id, client };
3926 let job_sender = Self::spawn_remote_git_worker(repository_state.clone(), cx);
3927 let repository_state = Task::ready(Ok(RepositoryState::Remote(repository_state))).shared();
3928 Self {
3929 this: cx.weak_entity(),
3930 snapshot,
3931 commit_message_buffer: None,
3932 git_store,
3933 pending_ops: Default::default(),
3934 paths_needing_status_update: Default::default(),
3935 job_sender,
3936 repository_state,
3937 askpass_delegates: Default::default(),
3938 latest_askpass_id: 0,
3939 active_jobs: Default::default(),
3940 job_id: 0,
3941 initial_graph_data: Default::default(),
3942 commit_data: Default::default(),
3943 graph_commit_data_handler: GraphCommitHandlerState::Closed,
3944 }
3945 }
3946
3947 pub fn git_store(&self) -> Option<Entity<GitStore>> {
3948 self.git_store.upgrade()
3949 }
3950
    /// Reloads index and HEAD base texts for every open buffer that belongs to
    /// this repository, then applies any changes locally and mirrors them to
    /// the downstream collaborator, if any.
    ///
    /// Keyed on [`GitJobKey::ReloadBufferDiffBases`] so rapid-fire requests
    /// can coalesce in the worker queue. The returned receiver is discarded:
    /// this is fire-and-forget.
    fn reload_buffer_diff_bases(&mut self, cx: &mut Context<Self>) {
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadBufferDiffBases),
            None,
            |state, mut cx| async move {
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    log::error!("tried to recompute diffs for a non-local repository");
                    return Ok(());
                };

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };

                // Collect, for each open buffer in this repository, its repo
                // path and the base texts its live diffs currently hold.
                let repo_diff_state_updates = this.update(&mut cx, |this, cx| {
                    git_store.update(cx, |git_store, cx| {
                        git_store
                            .diffs
                            .iter()
                            .filter_map(|(buffer_id, diff_state)| {
                                let buffer_store = git_store.buffer_store.read(cx);
                                let buffer = buffer_store.get(*buffer_id)?;
                                let file = File::from_dyn(buffer.read(cx).file())?;
                                let abs_path = file.worktree.read(cx).absolutize(&file.path);
                                let repo_path = this.abs_path_to_repo_path(&abs_path)?;
                                log::debug!(
                                    "start reload diff bases for repo path {}",
                                    repo_path.as_unix_str()
                                );
                                diff_state.update(cx, |diff_state, _| {
                                    // Only fetch base texts for diffs that are
                                    // still upgradable (i.e. still in use).
                                    let has_unstaged_diff = diff_state
                                        .unstaged_diff
                                        .as_ref()
                                        .is_some_and(|diff| diff.is_upgradable());
                                    let has_uncommitted_diff = diff_state
                                        .uncommitted_diff
                                        .as_ref()
                                        .is_some_and(|set| set.is_upgradable());

                                    Some((
                                        buffer,
                                        repo_path,
                                        has_unstaged_diff.then(|| diff_state.index_text.clone()),
                                        has_uncommitted_diff.then(|| diff_state.head_text.clone()),
                                    ))
                                })
                            })
                            .collect::<Vec<_>>()
                    })
                })?;

                // Load fresh base texts off the main thread and compute which
                // bases actually changed for each buffer.
                let buffer_diff_base_changes = cx
                    .background_spawn(async move {
                        let mut changes = Vec::new();
                        for (buffer, repo_path, current_index_text, current_head_text) in
                            &repo_diff_state_updates
                        {
                            let index_text = if current_index_text.is_some() {
                                backend.load_index_text(repo_path.clone()).await
                            } else {
                                None
                            };
                            let head_text = if current_head_text.is_some() {
                                backend.load_committed_text(repo_path.clone()).await
                            } else {
                                None
                            };

                            // Pick the narrowest DiffBasesChange variant that
                            // covers what differs from the current state.
                            let change =
                                match (current_index_text.as_ref(), current_head_text.as_ref()) {
                                    (Some(current_index), Some(current_head)) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        if index_changed && head_changed {
                                            if index_text == head_text {
                                                Some(DiffBasesChange::SetBoth(head_text))
                                            } else {
                                                Some(DiffBasesChange::SetEach {
                                                    index: index_text,
                                                    head: head_text,
                                                })
                                            }
                                        } else if index_changed {
                                            Some(DiffBasesChange::SetIndex(index_text))
                                        } else if head_changed {
                                            Some(DiffBasesChange::SetHead(head_text))
                                        } else {
                                            None
                                        }
                                    }
                                    (Some(current_index), None) => {
                                        let index_changed =
                                            index_text.as_deref() != current_index.as_deref();
                                        index_changed
                                            .then_some(DiffBasesChange::SetIndex(index_text))
                                    }
                                    (None, Some(current_head)) => {
                                        let head_changed =
                                            head_text.as_deref() != current_head.as_deref();
                                        head_changed.then_some(DiffBasesChange::SetHead(head_text))
                                    }
                                    (None, None) => None,
                                };

                            changes.push((buffer.clone(), change))
                        }
                        changes
                    })
                    .await;

                // Apply each change locally and forward it downstream.
                git_store.update(&mut cx, |git_store, cx| {
                    for (buffer, diff_bases_change) in buffer_diff_base_changes {
                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                        let buffer_id = buffer_snapshot.remote_id();
                        let Some(diff_state) = git_store.diffs.get(&buffer_id) else {
                            continue;
                        };

                        let downstream_client = git_store.downstream_client();
                        diff_state.update(cx, |diff_state, cx| {
                            use proto::update_diff_bases::Mode;

                            if let Some((diff_bases_change, (client, project_id))) =
                                diff_bases_change.clone().zip(downstream_client)
                            {
                                let (staged_text, committed_text, mode) = match diff_bases_change {
                                    DiffBasesChange::SetIndex(index) => {
                                        (index, None, Mode::IndexOnly)
                                    }
                                    DiffBasesChange::SetHead(head) => (None, head, Mode::HeadOnly),
                                    DiffBasesChange::SetEach { index, head } => {
                                        (index, head, Mode::IndexAndHead)
                                    }
                                    DiffBasesChange::SetBoth(text) => {
                                        (None, text, Mode::IndexMatchesHead)
                                    }
                                };
                                client
                                    .send(proto::UpdateDiffBases {
                                        project_id: project_id.to_proto(),
                                        buffer_id: buffer_id.to_proto(),
                                        staged_text,
                                        committed_text,
                                        mode: mode as i32,
                                    })
                                    .log_err();
                            }

                            diff_state.diff_bases_changed(buffer_snapshot, diff_bases_change, cx);
                        });
                    }
                })
            },
        );
    }
4110
    /// Enqueues an unkeyed job on this repository's git worker; see
    /// [`Self::send_keyed_job`] for the `status` and result semantics.
    pub fn send_job<F, Fut, R>(
        &mut self,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        self.send_keyed_job(None, status, job)
    }
4123
    /// Enqueues a job on this repository's serialized git worker.
    ///
    /// When `key` is provided, the worker may coalesce queued jobs carrying
    /// the same key. When `status` is provided, the job is recorded in
    /// `active_jobs` (with its start time and message) while it runs, for
    /// progress reporting. The returned receiver yields the job's result, or
    /// is canceled if the worker shuts down before the job runs.
    fn send_keyed_job<F, Fut, R>(
        &mut self,
        key: Option<GitJobKey>,
        status: Option<SharedString>,
        job: F,
    ) -> oneshot::Receiver<R>
    where
        F: FnOnce(RepositoryState, AsyncApp) -> Fut + 'static,
        Fut: Future<Output = R> + 'static,
        R: Send + 'static,
    {
        let (result_tx, result_rx) = futures::channel::oneshot::channel();
        let job_id = post_inc(&mut self.job_id);
        let this = self.this.clone();
        self.job_sender
            .unbounded_send(GitJob {
                key,
                job: Box::new(move |state, cx: &mut AsyncApp| {
                    let job = job(state, cx.clone());
                    cx.spawn(async move |cx| {
                        // Register the job for progress display before it runs.
                        if let Some(s) = status.clone() {
                            this.update(cx, |this, cx| {
                                this.active_jobs.insert(
                                    job_id,
                                    JobInfo {
                                        start: Instant::now(),
                                        message: s.clone(),
                                    },
                                );

                                cx.notify();
                            })
                            .ok();
                        }
                        let result = job.await;

                        // Unregister unconditionally; insertion only happened
                        // when a status was supplied, so removal may be a no-op.
                        this.update(cx, |this, cx| {
                            this.active_jobs.remove(&job_id);
                            cx.notify();
                        })
                        .ok();

                        // The receiver may have been dropped; that's fine.
                        result_tx.send(result).ok();
                    })
                }),
            })
            .ok();
        result_rx
    }
4173
4174 pub fn set_as_active_repository(&self, cx: &mut Context<Self>) {
4175 let Some(git_store) = self.git_store.upgrade() else {
4176 return;
4177 };
4178 let entity = cx.entity();
4179 git_store.update(cx, |git_store, cx| {
4180 let Some((&id, _)) = git_store
4181 .repositories
4182 .iter()
4183 .find(|(_, handle)| *handle == &entity)
4184 else {
4185 return;
4186 };
4187 git_store.active_repo_id = Some(id);
4188 cx.emit(GitStoreEvent::ActiveRepositoryChanged(Some(id)));
4189 });
4190 }
4191
    /// Iterates the git status entries from the cached repository snapshot;
    /// does not touch the filesystem or run git.
    pub fn cached_status(&self) -> impl '_ + Iterator<Item = StatusEntry> {
        self.snapshot.status()
    }
4195
    /// Returns a clone of the stash entries from the cached repository
    /// snapshot; does not run git.
    pub fn cached_stash(&self) -> GitStash {
        self.snapshot.stash_entries.clone()
    }
4199
4200 pub fn repo_path_to_project_path(&self, path: &RepoPath, cx: &App) -> Option<ProjectPath> {
4201 let git_store = self.git_store.upgrade()?;
4202 let worktree_store = git_store.read(cx).worktree_store.read(cx);
4203 let abs_path = self.snapshot.repo_path_to_abs_path(path);
4204 let abs_path = SanitizedPath::new(&abs_path);
4205 let (worktree, relative_path) = worktree_store.find_worktree(abs_path, cx)?;
4206 Some(ProjectPath {
4207 worktree_id: worktree.read(cx).id(),
4208 path: relative_path,
4209 })
4210 }
4211
    /// Translates a project path into a repository-relative path. Returns
    /// `None` when the git store is gone, the path cannot be absolutized, or
    /// the absolute path lies outside this repository.
    pub fn project_path_to_repo_path(&self, path: &ProjectPath, cx: &App) -> Option<RepoPath> {
        let git_store = self.git_store.upgrade()?;
        let worktree_store = git_store.read(cx).worktree_store.read(cx);
        let abs_path = worktree_store.absolutize(path, cx)?;
        self.snapshot.abs_path_to_repo_path(&abs_path)
    }
4218
4219 pub fn contains_sub_repo(&self, other: &Entity<Self>, cx: &App) -> bool {
4220 other
4221 .read(cx)
4222 .snapshot
4223 .work_directory_abs_path
4224 .starts_with(&self.snapshot.work_directory_abs_path)
4225 }
4226
    /// Returns the shared buffer used for composing commit messages, creating
    /// it on first use (subsequent calls return the cached buffer).
    ///
    /// Locally this creates a fresh buffer via the `BufferStore`; for remote
    /// repositories it asks the host to open one, waits for the replicated
    /// buffer, and assigns the "Git Commit" language when a registry is
    /// available. The buffer is cached in `commit_message_buffer`.
    pub fn open_commit_buffer(
        &mut self,
        languages: Option<Arc<LanguageRegistry>>,
        buffer_store: Entity<BufferStore>,
        cx: &mut Context<Self>,
    ) -> Task<Result<Entity<Buffer>>> {
        let id = self.id;
        // Fast path: reuse the previously created buffer.
        if let Some(buffer) = self.commit_message_buffer.clone() {
            return Task::ready(Ok(buffer));
        }
        let this = cx.weak_entity();

        let rx = self.send_job(None, move |state, mut cx| async move {
            let Some(this) = this.upgrade() else {
                bail!("git store was dropped");
            };
            match state {
                RepositoryState::Local(..) => {
                    this.update(&mut cx, |_, cx| {
                        Self::open_local_commit_buffer(languages, buffer_store, cx)
                    })
                    .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let request = client.request(proto::OpenCommitMessageBuffer {
                        project_id: project_id.0,
                        repository_id: id.to_proto(),
                    });
                    let response = request.await.context("requesting to open commit buffer")?;
                    let buffer_id = BufferId::new(response.buffer_id)?;
                    // Wait for the host's buffer to be replicated locally.
                    let buffer = buffer_store
                        .update(&mut cx, |buffer_store, cx| {
                            buffer_store.wait_for_remote_buffer(buffer_id, cx)
                        })
                        .await?;
                    if let Some(language_registry) = languages {
                        let git_commit_language =
                            language_registry.language_for_name("Git Commit").await?;
                        buffer.update(&mut cx, |buffer, cx| {
                            buffer.set_language(Some(git_commit_language), cx);
                        });
                    }
                    this.update(&mut cx, |this, _| {
                        this.commit_message_buffer = Some(buffer.clone());
                    });
                    Ok(buffer)
                }
            }
        });

        cx.spawn(|_, _: &mut AsyncApp| async move { rx.await? })
    }
4279
4280 fn open_local_commit_buffer(
4281 language_registry: Option<Arc<LanguageRegistry>>,
4282 buffer_store: Entity<BufferStore>,
4283 cx: &mut Context<Self>,
4284 ) -> Task<Result<Entity<Buffer>>> {
4285 cx.spawn(async move |repository, cx| {
4286 let git_commit_language = match language_registry {
4287 Some(language_registry) => {
4288 Some(language_registry.language_for_name("Git Commit").await?)
4289 }
4290 None => None,
4291 };
4292 let buffer = buffer_store
4293 .update(cx, |buffer_store, cx| {
4294 buffer_store.create_buffer(git_commit_language, false, cx)
4295 })
4296 .await?;
4297
4298 repository.update(cx, |repository, _| {
4299 repository.commit_message_buffer = Some(buffer.clone());
4300 })?;
4301 Ok(buffer)
4302 })
4303 }
4304
    /// Restores the given `paths` to their content at `commit`
    /// (`git checkout <commit> -- <paths>`), tracking the files as pending
    /// "Reverted" operations while the job runs. Remote repositories proxy
    /// the request to the host over RPC.
    pub fn checkout_files(
        &mut self,
        commit: &str,
        paths: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<Result<()>> {
        let commit = commit.to_string();
        let id = self.id;

        self.spawn_job_with_tracking(
            paths.clone(),
            pending_op::GitStatus::Reverted,
            cx,
            async move |this, cx| {
                this.update(cx, |this, _cx| {
                    this.send_job(
                        Some(format!("git checkout {}", commit).into()),
                        move |git_repo, _| async move {
                            match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    backend
                                        .checkout_files(commit, paths, environment.clone())
                                        .await
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    client
                                        .request(proto::GitCheckoutFiles {
                                            project_id: project_id.0,
                                            repository_id: id.to_proto(),
                                            commit,
                                            paths: paths
                                                .into_iter()
                                                .map(|p| p.to_proto())
                                                .collect(),
                                        })
                                        .await?;

                                    Ok(())
                                }
                            }
                        },
                    )
                })?
                .await?
            },
        )
    }
4359
    /// Resets HEAD to `commit` with the given `reset_mode`.
    ///
    /// The remote arm matches `ResetMode` exhaustively when mapping to the
    /// RPC enum, so adding a new mode forces a compile error here.
    pub fn reset(
        &mut self,
        commit: String,
        reset_mode: ResetMode,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;

        self.send_job(None, move |git_repo, _| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.reset(commit, reset_mode, environment).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    client
                        .request(proto::GitReset {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                            mode: match reset_mode {
                                ResetMode::Soft => git_reset::ResetMode::Soft.into(),
                                ResetMode::Mixed => git_reset::ResetMode::Mixed.into(),
                            },
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
4393
    /// Loads the details (sha, message, author, timestamp) of a single
    /// commit, proxying over RPC for remote repositories.
    pub fn show(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDetails>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.show(commit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    let resp = client
                        .request(proto::GitShow {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;

                    Ok(CommitDetails {
                        sha: resp.sha.into(),
                        message: resp.message.into(),
                        commit_timestamp: resp.commit_timestamp,
                        author_email: resp.author_email.into(),
                        author_name: resp.author_name.into(),
                    })
                }
            }
        })
    }
4421
    /// Loads the full diff of a single commit: for each changed file, its
    /// old and new text (or a binary marker). Remote repositories fetch the
    /// diff from the host over RPC.
    pub fn load_commit_diff(&mut self, commit: String) -> oneshot::Receiver<Result<CommitDiff>> {
        let id = self.id;
        self.send_job(None, move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.load_commit(commit, cx).await
                }
                RepositoryState::Remote(RemoteRepositoryState {
                    client, project_id, ..
                }) => {
                    let response = client
                        .request(proto::LoadCommitDiff {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            commit,
                        })
                        .await?;
                    Ok(CommitDiff {
                        files: response
                            .files
                            .into_iter()
                            .map(|file| {
                                Ok(CommitFile {
                                    path: RepoPath::from_proto(&file.path)?,
                                    old_text: file.old_text,
                                    new_text: file.new_text,
                                    is_binary: file.is_binary,
                                })
                            })
                            .collect::<Result<Vec<_>>>()?,
                    })
                }
            }
        })
    }
4457
    /// Loads the entire commit history touching `path`; convenience for
    /// [`Self::file_history_paginated`] with `skip = 0` and no limit.
    pub fn file_history(
        &mut self,
        path: RepoPath,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        self.file_history_paginated(path, 0, None)
    }
4464
    /// Loads the commit history touching `path`, skipping the first `skip`
    /// commits and returning at most `limit` entries when provided. Remote
    /// repositories fetch the history from the host over RPC.
    pub fn file_history_paginated(
        &mut self,
        path: RepoPath,
        skip: usize,
        limit: Option<usize>,
    ) -> oneshot::Receiver<Result<git::repository::FileHistory>> {
        let id = self.id;
        self.send_job(None, move |git_repo, _cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.file_history_paginated(path, skip, limit).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GitFileHistory {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            path: path.to_proto(),
                            skip: skip as u64,
                            limit: limit.map(|l| l as u64),
                        })
                        .await?;
                    Ok(git::repository::FileHistory {
                        entries: response
                            .entries
                            .into_iter()
                            .map(|entry| git::repository::FileHistoryEntry {
                                sha: entry.sha.into(),
                                subject: entry.subject.into(),
                                message: entry.message.into(),
                                commit_timestamp: entry.commit_timestamp,
                                author_name: entry.author_name.into(),
                                author_email: entry.author_email.into(),
                            })
                            .collect(),
                        path: RepoPath::from_proto(&response.path)?,
                    })
                }
            }
        })
    }
4506
    /// Returns the cached graph data for `(log_source, log_order)` if it has
    /// been requested before via `graph_data`; never triggers a fetch.
    pub fn get_graph_data(
        &self,
        log_source: LogSource,
        log_order: LogOrder,
    ) -> Option<&InitialGitGraphData> {
        self.initial_graph_data.get(&(log_source, log_order))
    }
4514
4515 pub fn graph_data(
4516 &mut self,
4517 log_source: LogSource,
4518 log_order: LogOrder,
4519 range: Range<usize>,
4520 cx: &mut Context<Self>,
4521 ) -> GraphDataResponse<'_> {
4522 let initial_commit_data = self
4523 .initial_graph_data
4524 .entry((log_source.clone(), log_order))
4525 .or_insert_with(|| {
4526 let state = self.repository_state.clone();
4527 let log_source = log_source.clone();
4528
4529 let fetch_task = cx.spawn(async move |repository, cx| {
4530 let state = state.await;
4531 let result = match state {
4532 Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => {
4533 Self::local_git_graph_data(
4534 repository.clone(),
4535 backend,
4536 log_source.clone(),
4537 log_order,
4538 cx,
4539 )
4540 .await
4541 }
4542 Ok(RepositoryState::Remote(_)) => {
4543 Err("Git graph is not supported for collab yet".into())
4544 }
4545 Err(e) => Err(SharedString::from(e)),
4546 };
4547
4548 if let Err(fetch_task_error) = result {
4549 repository
4550 .update(cx, |repository, _| {
4551 if let Some(data) = repository
4552 .initial_graph_data
4553 .get_mut(&(log_source, log_order))
4554 {
4555 data.error = Some(fetch_task_error);
4556 } else {
4557 debug_panic!(
4558 "This task would be dropped if this entry doesn't exist"
4559 );
4560 }
4561 })
4562 .ok();
4563 }
4564 });
4565
4566 InitialGitGraphData {
4567 fetch_task,
4568 error: None,
4569 commit_data: Vec::new(),
4570 commit_oid_to_index: HashMap::default(),
4571 }
4572 });
4573
4574 let max_start = initial_commit_data.commit_data.len().saturating_sub(1);
4575 let max_end = initial_commit_data.commit_data.len();
4576
4577 GraphDataResponse {
4578 commits: &initial_commit_data.commit_data
4579 [range.start.min(max_start)..range.end.min(max_end)],
4580 is_loading: !initial_commit_data.fetch_task.is_ready(),
4581 error: initial_commit_data.error.clone(),
4582 }
4583 }
4584
    /// Streams the initial commit-graph data for a local repository from the
    /// backend into `initial_graph_data`, emitting
    /// `GitGraphEvent::CountUpdated` as commits arrive.
    ///
    /// The backend produces batches of commits over a channel from a
    /// background task; this function appends each batch on the foreground,
    /// maintaining the sha -> index map, and finally returns the background
    /// task's terminal result.
    async fn local_git_graph_data(
        this: WeakEntity<Self>,
        backend: Arc<dyn GitRepository>,
        log_source: LogSource,
        log_order: LogOrder,
        cx: &mut AsyncApp,
    ) -> Result<(), SharedString> {
        let (request_tx, request_rx) =
            smol::channel::unbounded::<Vec<Arc<InitialGraphCommitData>>>();

        let task = cx.background_executor().spawn({
            let log_source = log_source.clone();
            async move {
                backend
                    .initial_graph_data(log_source, log_order, request_tx)
                    .await
                    .map_err(|err| SharedString::from(err.to_string()))
            }
        });

        let graph_data_key = (log_source, log_order);

        // Drain batches until the backend drops the sender.
        while let Ok(initial_graph_commit_data) = request_rx.recv().await {
            this.update(cx, |repository, cx| {
                let graph_data = repository
                    .initial_graph_data
                    .entry(graph_data_key.clone())
                    .and_modify(|graph_data| {
                        for commit_data in initial_graph_commit_data {
                            graph_data
                                .commit_oid_to_index
                                .insert(commit_data.sha, graph_data.commit_data.len());
                            graph_data.commit_data.push(commit_data);

                            // NOTE(review): emits one event per commit rather
                            // than per batch — confirm this is intentional.
                            cx.emit(RepositoryEvent::GraphEvent(
                                graph_data_key.clone(),
                                GitGraphEvent::CountUpdated(graph_data.commit_data.len()),
                            ));
                        }
                    });

                match &graph_data {
                    Entry::Occupied(_) => {}
                    Entry::Vacant(_) => {
                        debug_panic!("This task should be dropped if data doesn't exist");
                    }
                }
            })
            .ok();
        }

        task.await?;
        Ok(())
    }
4639
    /// Returns the (possibly still loading) commit data for `sha`,
    /// requesting it from the background reader on a cache miss.
    ///
    /// On a miss: if the handler is open, the sha is forwarded and marked
    /// `Loading`; if it is closed, the handler is (re)started — the caller is
    /// expected to call again on the next notify. While starting, misses
    /// simply report `Loading`.
    pub fn fetch_commit_data(&mut self, sha: Oid, cx: &mut Context<Self>) -> &CommitDataState {
        if !self.commit_data.contains_key(&sha) {
            match &self.graph_commit_data_handler {
                GraphCommitHandlerState::Open(handler) => {
                    if handler.commit_data_request.try_send(sha).is_ok() {
                        let old_value = self.commit_data.insert(sha, CommitDataState::Loading);
                        debug_assert!(old_value.is_none(), "We should never overwrite commit data");
                    }
                }
                GraphCommitHandlerState::Closed => {
                    self.open_graph_commit_data_handler(cx);
                }
                GraphCommitHandlerState::Starting => {}
            }
        }

        self.commit_data
            .get(&sha)
            .unwrap_or(&CommitDataState::Loading)
    }
4660
    /// Starts the background commit-data reader pipeline:
    /// request channel -> backend reader -> result channel -> foreground cache.
    ///
    /// The background loop exits after 10 seconds without a request (or when
    /// either channel closes); the foreground task then marks the handler
    /// `Closed` so the next cache miss in `fetch_commit_data` restarts it.
    fn open_graph_commit_data_handler(&mut self, cx: &mut Context<Self>) {
        self.graph_commit_data_handler = GraphCommitHandlerState::Starting;

        let state = self.repository_state.clone();
        let (result_tx, result_rx) = smol::channel::bounded::<(Oid, GraphCommitData)>(64);
        let (request_tx, request_rx) = smol::channel::unbounded::<Oid>();

        // Foreground side: fold completed reads into `commit_data`.
        let foreground_task = cx.spawn(async move |this, cx| {
            while let Ok((sha, commit_data)) = result_rx.recv().await {
                let result = this.update(cx, |this, cx| {
                    let old_value = this
                        .commit_data
                        .insert(sha, CommitDataState::Loaded(Arc::new(commit_data)));
                    debug_assert!(
                        !matches!(old_value, Some(CommitDataState::Loaded(_))),
                        "We should never overwrite commit data"
                    );

                    cx.notify();
                });
                if result.is_err() {
                    break;
                }
            }

            // Result channel closed (or entity dropped): allow a restart.
            this.update(cx, |this, _cx| {
                this.graph_commit_data_handler = GraphCommitHandlerState::Closed;
            })
            .ok();
        });

        let request_tx_for_handler = request_tx;
        let background_executor = cx.background_executor().clone();

        // Background side: serve read requests against the backend reader.
        cx.background_spawn(async move {
            let backend = match state.await {
                Ok(RepositoryState::Local(LocalRepositoryState { backend, .. })) => backend,
                Ok(RepositoryState::Remote(_)) => {
                    log::error!("commit_data_reader not supported for remote repositories");
                    return;
                }
                Err(error) => {
                    log::error!("failed to get repository state: {error}");
                    return;
                }
            };

            let reader = match backend.commit_data_reader() {
                Ok(reader) => reader,
                Err(error) => {
                    log::error!("failed to create commit data reader: {error:?}");
                    return;
                }
            };

            loop {
                // Idle timeout: tear the reader down after 10s without work.
                let timeout = background_executor.timer(std::time::Duration::from_secs(10));

                futures::select_biased! {
                    sha = futures::FutureExt::fuse(request_rx.recv()) => {
                        let Ok(sha) = sha else {
                            break;
                        };

                        match reader.read(sha).await {
                            Ok(commit_data) => {
                                if result_tx.send((sha, commit_data)).await.is_err() {
                                    break;
                                }
                            }
                            Err(error) => {
                                log::error!("failed to read commit data for {sha}: {error:?}");
                            }
                        }
                    }
                    _ = futures::FutureExt::fuse(timeout) => {
                        break;
                    }
                }
            }

            // Dropping the sender lets the foreground task observe closure.
            drop(result_tx);
        })
        .detach();

        self.graph_commit_data_handler = GraphCommitHandlerState::Open(GraphCommitDataHandler {
            _task: foreground_task,
            commit_data_request: request_tx_for_handler,
        });
    }
4751
4752 fn buffer_store(&self, cx: &App) -> Option<Entity<BufferStore>> {
4753 Some(self.git_store.upgrade()?.read(cx).buffer_store.clone())
4754 }
4755
    /// Collects save tasks for every open, dirty buffer among `entries`, so
    /// their on-disk contents are current before a git operation runs.
    /// Buffers whose backing file no longer exists on disk are skipped, as
    /// are paths with no open buffer.
    fn save_buffers<'a>(
        &self,
        entries: impl IntoIterator<Item = &'a RepoPath>,
        cx: &mut Context<Self>,
    ) -> Vec<Task<anyhow::Result<()>>> {
        let mut save_futures = Vec::new();
        if let Some(buffer_store) = self.buffer_store(cx) {
            buffer_store.update(cx, |buffer_store, cx| {
                for path in entries {
                    let Some(project_path) = self.repo_path_to_project_path(path, cx) else {
                        continue;
                    };
                    if let Some(buffer) = buffer_store.get_by_path(&project_path)
                        && buffer
                            .read(cx)
                            .file()
                            .is_some_and(|file| file.disk_state().exists())
                        && buffer.read(cx).has_unsaved_edits()
                    {
                        save_futures.push(buffer_store.save_buffer(buffer, cx));
                    }
                }
            })
        }
        save_futures
    }
4782
    /// Stages the given paths (`git add`), saving any dirty open buffers
    /// first.
    pub fn stage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(true, entries, cx)
    }
4790
    /// Unstages the given paths (`git reset`), saving any dirty open buffers
    /// first.
    pub fn unstage_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        self.stage_or_unstage_entries(false, entries, cx)
    }
4798
    /// Stages (`stage == true`) or unstages the given paths, coordinating
    /// with in-editor diff state.
    ///
    /// Order of operations:
    /// 1. save any dirty open buffers for the paths;
    /// 2. optimistically mark all hunks of each affected uncommitted diff as
    ///    (un)staged and bump its `hunk_staging_operation_count`;
    /// 3. perform the actual git index write (locally, or via RPC);
    /// 4. on success, record the operation counts as "written"; on failure,
    ///    roll back the optimistic pending hunks.
    ///
    /// The job is keyed by `GitJobKey::WriteIndex(entries)` so identical
    /// index writes can be coalesced, and the paths are tracked as pending
    /// Staged/Unstaged ops for the duration.
    fn stage_or_unstage_entries(
        &mut self,
        stage: bool,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        if entries.is_empty() {
            return Task::ready(Ok(()));
        }
        let Some(git_store) = self.git_store.upgrade() else {
            return Task::ready(Ok(()));
        };
        let id = self.id;
        let save_tasks = self.save_buffers(&entries, cx);
        // Human-readable command line shown as the job's status message.
        let paths = entries
            .iter()
            .map(|p| p.as_unix_str())
            .collect::<Vec<_>>()
            .join(" ");
        let status = if stage {
            format!("git add {paths}")
        } else {
            format!("git reset {paths}")
        };
        let job_key = GitJobKey::WriteIndex(entries.clone());

        self.spawn_job_with_tracking(
            entries.clone(),
            if stage {
                pending_op::GitStatus::Staged
            } else {
                pending_op::GitStatus::Unstaged
            },
            cx,
            async move |this, cx| {
                for save_task in save_tasks {
                    save_task.await?;
                }

                this.update(cx, |this, cx| {
                    let weak_this = cx.weak_entity();
                    this.send_keyed_job(
                        Some(job_key),
                        Some(status.into()),
                        move |git_repo, mut cx| async move {
                            // Optimistically flip every hunk of each affected
                            // diff, remembering the resulting operation count
                            // per diff state so we can confirm or roll back.
                            let hunk_staging_operation_counts = weak_this
                                .update(&mut cx, |this, cx| {
                                    let mut hunk_staging_operation_counts = HashMap::default();
                                    for path in &entries {
                                        let Some(project_path) =
                                            this.repo_path_to_project_path(path, cx)
                                        else {
                                            continue;
                                        };
                                        let Some(buffer) = git_store
                                            .read(cx)
                                            .buffer_store
                                            .read(cx)
                                            .get_by_path(&project_path)
                                        else {
                                            continue;
                                        };
                                        let Some(diff_state) = git_store
                                            .read(cx)
                                            .diffs
                                            .get(&buffer.read(cx).remote_id())
                                            .cloned()
                                        else {
                                            continue;
                                        };
                                        let Some(uncommitted_diff) =
                                            diff_state.read(cx).uncommitted_diff.as_ref().and_then(
                                                |uncommitted_diff| uncommitted_diff.upgrade(),
                                            )
                                        else {
                                            continue;
                                        };
                                        let buffer_snapshot = buffer.read(cx).text_snapshot();
                                        let file_exists = buffer
                                            .read(cx)
                                            .file()
                                            .is_some_and(|file| file.disk_state().exists());
                                        let hunk_staging_operation_count =
                                            diff_state.update(cx, |diff_state, cx| {
                                                uncommitted_diff.update(
                                                    cx,
                                                    |uncommitted_diff, cx| {
                                                        uncommitted_diff
                                                            .stage_or_unstage_all_hunks(
                                                                stage,
                                                                &buffer_snapshot,
                                                                file_exists,
                                                                cx,
                                                            );
                                                    },
                                                );

                                                diff_state.hunk_staging_operation_count += 1;
                                                diff_state.hunk_staging_operation_count
                                            });
                                        hunk_staging_operation_counts.insert(
                                            diff_state.downgrade(),
                                            hunk_staging_operation_count,
                                        );
                                    }
                                    hunk_staging_operation_counts
                                })
                                .unwrap_or_default();

                            // Perform the actual index write.
                            let result = match git_repo {
                                RepositoryState::Local(LocalRepositoryState {
                                    backend,
                                    environment,
                                    ..
                                }) => {
                                    if stage {
                                        backend.stage_paths(entries, environment.clone()).await
                                    } else {
                                        backend.unstage_paths(entries, environment.clone()).await
                                    }
                                }
                                RepositoryState::Remote(RemoteRepositoryState {
                                    project_id,
                                    client,
                                }) => {
                                    if stage {
                                        client
                                            .request(proto::Stage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending stage request")
                                            .map(|_| ())
                                    } else {
                                        client
                                            .request(proto::Unstage {
                                                project_id: project_id.0,
                                                repository_id: id.to_proto(),
                                                paths: entries
                                                    .into_iter()
                                                    .map(|repo_path| repo_path.to_proto())
                                                    .collect(),
                                            })
                                            .await
                                            .context("sending unstage request")
                                            .map(|_| ())
                                    }
                                }
                            };

                            // Confirm the optimistic updates on success, or
                            // clear the pending hunks on failure.
                            for (diff_state, hunk_staging_operation_count) in
                                hunk_staging_operation_counts
                            {
                                diff_state
                                    .update(&mut cx, |diff_state, cx| {
                                        if result.is_ok() {
                                            diff_state.hunk_staging_operation_count_as_of_write =
                                                hunk_staging_operation_count;
                                        } else if let Some(uncommitted_diff) =
                                            &diff_state.uncommitted_diff
                                        {
                                            uncommitted_diff
                                                .update(cx, |uncommitted_diff, cx| {
                                                    uncommitted_diff.clear_pending_hunks(cx);
                                                })
                                                .ok();
                                        }
                                    })
                                    .ok();
                            }

                            result
                        },
                    )
                })?
                .await?
            },
        )
    }
4983
4984 pub fn stage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
4985 let to_stage = self
4986 .cached_status()
4987 .filter_map(|entry| {
4988 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
4989 if ops.staging() || ops.staged() {
4990 None
4991 } else {
4992 Some(entry.repo_path)
4993 }
4994 } else if entry.status.staging().is_fully_staged() {
4995 None
4996 } else {
4997 Some(entry.repo_path)
4998 }
4999 })
5000 .collect();
5001 self.stage_or_unstage_entries(true, to_stage, cx)
5002 }
5003
5004 pub fn unstage_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
5005 let to_unstage = self
5006 .cached_status()
5007 .filter_map(|entry| {
5008 if let Some(ops) = self.pending_ops_for_path(&entry.repo_path) {
5009 if !ops.staging() && !ops.staged() {
5010 None
5011 } else {
5012 Some(entry.repo_path)
5013 }
5014 } else if entry.status.staging().is_fully_unstaged() {
5015 None
5016 } else {
5017 Some(entry.repo_path)
5018 }
5019 })
5020 .collect();
5021 self.stage_or_unstage_entries(false, to_unstage, cx)
5022 }
5023
    /// Stashes every entry in the cached status.
    pub fn stash_all(&mut self, cx: &mut Context<Self>) -> Task<anyhow::Result<()>> {
        let to_stash = self.cached_status().map(|entry| entry.repo_path).collect();

        self.stash_entries(to_stash, cx)
    }
5029
    /// Stashes the given paths, proxying to the host for remote
    /// repositories.
    pub fn stash_entries(
        &mut self,
        entries: Vec<RepoPath>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;

        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_paths(entries, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::Stash {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    paths: entries
                                        .into_iter()
                                        .map(|repo_path| repo_path.to_proto())
                                        .collect(),
                                })
                                .await?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5066
    /// Pops the stash entry at `index` (or the latest entry when `None`),
    /// applying it and removing it from the stash list.
    pub fn stash_pop(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_pop(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashPop {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash pop request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5100
    /// Applies the stash entry at `index` (or the latest entry when `None`)
    /// without removing it from the stash list.
    pub fn stash_apply(
        &mut self,
        index: Option<usize>,
        cx: &mut Context<Self>,
    ) -> Task<anyhow::Result<()>> {
        let id = self.id;
        cx.spawn(async move |this, cx| {
            this.update(cx, |this, _| {
                this.send_job(None, move |git_repo, _cx| async move {
                    match git_repo {
                        RepositoryState::Local(LocalRepositoryState {
                            backend,
                            environment,
                            ..
                        }) => backend.stash_apply(index, environment).await,
                        RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                            client
                                .request(proto::StashApply {
                                    project_id: project_id.0,
                                    repository_id: id.to_proto(),
                                    stash_index: index.map(|i| i as u64),
                                })
                                .await
                                .context("sending stash apply request")?;
                            Ok(())
                        }
                    }
                })
            })?
            .await??;
            Ok(())
        })
    }
5134
5135 pub fn stash_drop(
5136 &mut self,
5137 index: Option<usize>,
5138 cx: &mut Context<Self>,
5139 ) -> oneshot::Receiver<anyhow::Result<()>> {
5140 let id = self.id;
5141 let updates_tx = self
5142 .git_store()
5143 .and_then(|git_store| match &git_store.read(cx).state {
5144 GitStoreState::Local { downstream, .. } => downstream
5145 .as_ref()
5146 .map(|downstream| downstream.updates_tx.clone()),
5147 _ => None,
5148 });
5149 let this = cx.weak_entity();
5150 self.send_job(None, move |git_repo, mut cx| async move {
5151 match git_repo {
5152 RepositoryState::Local(LocalRepositoryState {
5153 backend,
5154 environment,
5155 ..
5156 }) => {
5157 // TODO would be nice to not have to do this manually
5158 let result = backend.stash_drop(index, environment).await;
5159 if result.is_ok()
5160 && let Ok(stash_entries) = backend.stash_entries().await
5161 {
5162 let snapshot = this.update(&mut cx, |this, cx| {
5163 this.snapshot.stash_entries = stash_entries;
5164 cx.emit(RepositoryEvent::StashEntriesChanged);
5165 this.snapshot.clone()
5166 })?;
5167 if let Some(updates_tx) = updates_tx {
5168 updates_tx
5169 .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
5170 .ok();
5171 }
5172 }
5173
5174 result
5175 }
5176 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5177 client
5178 .request(proto::StashDrop {
5179 project_id: project_id.0,
5180 repository_id: id.to_proto(),
5181 stash_index: index.map(|i| i as u64),
5182 })
5183 .await
5184 .context("sending stash pop request")?;
5185 Ok(())
5186 }
5187 }
5188 })
5189 }
5190
    /// Runs the named git hook for this repository, proxying to the host for
    /// remote repositories.
    pub fn run_hook(&mut self, hook: RunHook, _cx: &mut App) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        self.send_job(
            Some(format!("git hook {}", hook.as_str()).into()),
            move |git_repo, _cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => backend.run_hook(hook, environment.clone()).await,
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::RunGitHook {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                hook: hook.to_proto(),
                            })
                            .await?;

                        Ok(())
                    }
                }
            },
        )
    }
5217
    /// Commits staged changes with `message`, optionally overriding the
    /// author via `name_and_email`, after the pre-commit hook succeeds.
    ///
    /// For remote repositories, an askpass delegate is registered under a
    /// fresh `askpass_id` for the duration of the request so host-side
    /// credential prompts can be relayed back to this client.
    pub fn commit(
        &mut self,
        message: SharedString,
        name_and_email: Option<(SharedString, SharedString)>,
        options: CommitOptions,
        askpass: AskPassDelegate,
        cx: &mut App,
    ) -> oneshot::Receiver<Result<()>> {
        let id = self.id;
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);

        // Queue the pre-commit hook ahead of the commit job itself.
        let rx = self.run_hook(RunHook::PreCommit, cx);

        self.send_job(Some("git commit".into()), move |git_repo, _cx| async move {
            // Abort the commit if the pre-commit hook failed or was canceled.
            rx.await??;

            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => {
                    backend
                        .commit(message, name_and_email, options, askpass, environment)
                        .await
                }
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });
                    let (name, email) = name_and_email.unzip();
                    client
                        .request(proto::Commit {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            message: String::from(message),
                            name: name.map(String::from),
                            email: email.map(String::from),
                            options: Some(proto::commit::CommitOptions {
                                amend: options.amend,
                                signoff: options.signoff,
                            }),
                            askpass_id,
                        })
                        .await?;

                    Ok(())
                }
            }
        })
    }
5272
    /// Runs `git fetch` with the given options, returning the captured
    /// stdout/stderr of the underlying command.
    ///
    /// For remote repositories, an askpass delegate is registered under a
    /// fresh `askpass_id` for the duration of the request so host-side
    /// credential prompts can be relayed back to this client.
    pub fn fetch(
        &mut self,
        fetch_options: FetchOptions,
        askpass: AskPassDelegate,
        _cx: &mut App,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        self.send_job(Some("git fetch".into()), move |git_repo, cx| async move {
            match git_repo {
                RepositoryState::Local(LocalRepositoryState {
                    backend,
                    environment,
                    ..
                }) => backend.fetch(fetch_options, askpass, environment, cx).await,
                RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                    askpass_delegates.lock().insert(askpass_id, askpass);
                    let _defer = util::defer(|| {
                        let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                        debug_assert!(askpass_delegate.is_some());
                    });

                    let response = client
                        .request(proto::Fetch {
                            project_id: project_id.0,
                            repository_id: id.to_proto(),
                            askpass_id,
                            remote: fetch_options.to_proto(),
                        })
                        .await?;

                    Ok(RemoteCommandOutput {
                        stdout: response.stdout,
                        stderr: response.stderr,
                    })
                }
            }
        })
    }
5314
    /// Pushes `branch` to `remote_branch` on `remote`, reporting progress via
    /// the repository's job queue.
    ///
    /// For local repositories, a successful push re-reads the head branch
    /// (its upstream tracking info may have changed), emits
    /// `RepositoryEvent::BranchChanged`, and forwards the refreshed snapshot
    /// to any downstream collaborators. For remote repositories the push is
    /// proxied to the host over RPC.
    pub fn push(
        &mut self,
        branch: SharedString,
        remote_branch: SharedString,
        remote: SharedString,
        options: Option<PushOptions>,
        askpass: AskPassDelegate,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
        let askpass_delegates = self.askpass_delegates.clone();
        // Unique id correlating askpass (credential) prompts with this push.
        let askpass_id = util::post_inc(&mut self.latest_askpass_id);
        let id = self.id;

        // Flag text for the human-readable job status only; the option itself
        // is passed structurally via `options` below.
        let args = options
            .map(|option| match option {
                PushOptions::SetUpstream => " --set-upstream",
                PushOptions::Force => " --force-with-lease",
            })
            .unwrap_or("");

        // Channel for propagating the refreshed snapshot downstream; only
        // present when this side is the local host with active collaborators.
        let updates_tx = self
            .git_store()
            .and_then(|git_store| match &git_store.read(cx).state {
                GitStoreState::Local { downstream, .. } => downstream
                    .as_ref()
                    .map(|downstream| downstream.updates_tx.clone()),
                _ => None,
            });

        let this = cx.weak_entity();
        self.send_job(
            Some(format!("git push {} {} {}:{}", args, remote, branch, remote_branch).into()),
            move |git_repo, mut cx| async move {
                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        backend,
                        environment,
                        ..
                    }) => {
                        let result = backend
                            .push(
                                branch.to_string(),
                                remote_branch.to_string(),
                                remote.to_string(),
                                options,
                                askpass,
                                environment.clone(),
                                cx.clone(),
                            )
                            .await;
                        // TODO would be nice to not have to do this manually
                        // On success, refresh the cached head branch and
                        // notify listeners + downstream projects.
                        if result.is_ok() {
                            let branches = backend.branches().await?;
                            let branch = branches.into_iter().find(|branch| branch.is_head);
                            log::info!("head branch after scan is {branch:?}");
                            let snapshot = this.update(&mut cx, |this, cx| {
                                this.snapshot.branch = branch;
                                cx.emit(RepositoryEvent::BranchChanged);
                                this.snapshot.clone()
                            })?;
                            if let Some(updates_tx) = updates_tx {
                                updates_tx
                                    .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                                    .ok();
                            }
                        }
                        result
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        // Register the askpass delegate so credential prompts
                        // from the host are routed back here; always remove it
                        // when the request completes (success or failure).
                        askpass_delegates.lock().insert(askpass_id, askpass);
                        let _defer = util::defer(|| {
                            let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
                            debug_assert!(askpass_delegate.is_some());
                        });
                        let response = client
                            .request(proto::Push {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                askpass_id,
                                branch_name: branch.to_string(),
                                remote_branch_name: remote_branch.to_string(),
                                remote_name: remote.to_string(),
                                options: options.map(|options| match options {
                                    PushOptions::Force => proto::push::PushOptions::Force,
                                    PushOptions::SetUpstream => {
                                        proto::push::PushOptions::SetUpstream
                                    }
                                }
                                as i32),
                            })
                            .await?;

                        Ok(RemoteCommandOutput {
                            stdout: response.stdout,
                            stderr: response.stderr,
                        })
                    }
                }
            },
        )
    }
5416
5417 pub fn pull(
5418 &mut self,
5419 branch: Option<SharedString>,
5420 remote: SharedString,
5421 rebase: bool,
5422 askpass: AskPassDelegate,
5423 _cx: &mut App,
5424 ) -> oneshot::Receiver<Result<RemoteCommandOutput>> {
5425 let askpass_delegates = self.askpass_delegates.clone();
5426 let askpass_id = util::post_inc(&mut self.latest_askpass_id);
5427 let id = self.id;
5428
5429 let mut status = "git pull".to_string();
5430 if rebase {
5431 status.push_str(" --rebase");
5432 }
5433 status.push_str(&format!(" {}", remote));
5434 if let Some(b) = &branch {
5435 status.push_str(&format!(" {}", b));
5436 }
5437
5438 self.send_job(Some(status.into()), move |git_repo, cx| async move {
5439 match git_repo {
5440 RepositoryState::Local(LocalRepositoryState {
5441 backend,
5442 environment,
5443 ..
5444 }) => {
5445 backend
5446 .pull(
5447 branch.as_ref().map(|b| b.to_string()),
5448 remote.to_string(),
5449 rebase,
5450 askpass,
5451 environment.clone(),
5452 cx,
5453 )
5454 .await
5455 }
5456 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5457 askpass_delegates.lock().insert(askpass_id, askpass);
5458 let _defer = util::defer(|| {
5459 let askpass_delegate = askpass_delegates.lock().remove(&askpass_id);
5460 debug_assert!(askpass_delegate.is_some());
5461 });
5462 let response = client
5463 .request(proto::Pull {
5464 project_id: project_id.0,
5465 repository_id: id.to_proto(),
5466 askpass_id,
5467 rebase,
5468 branch_name: branch.as_ref().map(|b| b.to_string()),
5469 remote_name: remote.to_string(),
5470 })
5471 .await?;
5472
5473 Ok(RemoteCommandOutput {
5474 stdout: response.stdout,
5475 stderr: response.stderr,
5476 })
5477 }
5478 }
5479 })
5480 }
5481
    /// Queues a job that overwrites the index (staged) contents of `path`
    /// with `content`.
    ///
    /// The job is keyed by the path (`GitJobKey::WriteIndex`), so repeated
    /// writes to the same path coalesce to the most recent one in the job
    /// queue. After the write completes, the given
    /// `hunk_staging_operation_count` (if any) is recorded on the buffer's
    /// diff state as `hunk_staging_operation_count_as_of_write`.
    ///
    /// NOTE(review): the meaning of `content == None` is defined by the
    /// backend / RPC handler — presumably it clears the staged text; confirm.
    fn spawn_set_index_text_job(
        &mut self,
        path: RepoPath,
        content: Option<String>,
        hunk_staging_operation_count: Option<usize>,
        cx: &mut Context<Self>,
    ) -> oneshot::Receiver<anyhow::Result<()>> {
        let id = self.id;
        let this = cx.weak_entity();
        let git_store = self.git_store.clone();
        let abs_path = self.snapshot.repo_path_to_abs_path(&path);
        self.send_keyed_job(
            Some(GitJobKey::WriteIndex(vec![path.clone()])),
            None,
            move |git_repo, mut cx| async move {
                log::debug!(
                    "start updating index text for buffer {}",
                    path.as_unix_str()
                );

                match git_repo {
                    RepositoryState::Local(LocalRepositoryState {
                        fs,
                        backend,
                        environment,
                        ..
                    }) => {
                        // Preserve the file's executable bit in the index;
                        // missing files or metadata errors count as
                        // non-executable.
                        let executable = match fs.metadata(&abs_path).await {
                            Ok(Some(meta)) => meta.is_executable,
                            Ok(None) => false,
                            Err(_err) => false,
                        };
                        backend
                            .set_index_text(path.clone(), content, environment.clone(), executable)
                            .await?;
                    }
                    RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
                        client
                            .request(proto::SetIndexText {
                                project_id: project_id.0,
                                repository_id: id.to_proto(),
                                path: path.to_proto(),
                                text: content,
                            })
                            .await?;
                    }
                }
                log::debug!(
                    "finish updating index text for buffer {}",
                    path.as_unix_str()
                );

                // Record the hunk-staging operation count as of this write on
                // the buffer's diff state. Missing buffer/diff state is
                // silently ignored (the inner closure returns Option).
                if let Some(hunk_staging_operation_count) = hunk_staging_operation_count {
                    let project_path = this
                        .read_with(&cx, |this, cx| this.repo_path_to_project_path(&path, cx))
                        .ok()
                        .flatten();
                    git_store
                        .update(&mut cx, |git_store, cx| {
                            let buffer_id = git_store
                                .buffer_store
                                .read(cx)
                                .get_by_path(&project_path?)?
                                .read(cx)
                                .remote_id();
                            let diff_state = git_store.diffs.get(&buffer_id)?;
                            diff_state.update(cx, |diff_state, _| {
                                diff_state.hunk_staging_operation_count_as_of_write =
                                    hunk_staging_operation_count;
                            });
                            Some(())
                        })
                        .context("Git store dropped")?;
                }
                Ok(())
            },
        )
    }
5560
5561 pub fn create_remote(
5562 &mut self,
5563 remote_name: String,
5564 remote_url: String,
5565 ) -> oneshot::Receiver<Result<()>> {
5566 let id = self.id;
5567 self.send_job(
5568 Some(format!("git remote add {remote_name} {remote_url}").into()),
5569 move |repo, _cx| async move {
5570 match repo {
5571 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5572 backend.create_remote(remote_name, remote_url).await
5573 }
5574 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5575 client
5576 .request(proto::GitCreateRemote {
5577 project_id: project_id.0,
5578 repository_id: id.to_proto(),
5579 remote_name,
5580 remote_url,
5581 })
5582 .await?;
5583
5584 Ok(())
5585 }
5586 }
5587 },
5588 )
5589 }
5590
5591 pub fn remove_remote(&mut self, remote_name: String) -> oneshot::Receiver<Result<()>> {
5592 let id = self.id;
5593 self.send_job(
5594 Some(format!("git remove remote {remote_name}").into()),
5595 move |repo, _cx| async move {
5596 match repo {
5597 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5598 backend.remove_remote(remote_name).await
5599 }
5600 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5601 client
5602 .request(proto::GitRemoveRemote {
5603 project_id: project_id.0,
5604 repository_id: id.to_proto(),
5605 remote_name,
5606 })
5607 .await?;
5608
5609 Ok(())
5610 }
5611 }
5612 },
5613 )
5614 }
5615
5616 pub fn get_remotes(
5617 &mut self,
5618 branch_name: Option<String>,
5619 is_push: bool,
5620 ) -> oneshot::Receiver<Result<Vec<Remote>>> {
5621 let id = self.id;
5622 self.send_job(None, move |repo, _cx| async move {
5623 match repo {
5624 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5625 let remote = if let Some(branch_name) = branch_name {
5626 if is_push {
5627 backend.get_push_remote(branch_name).await?
5628 } else {
5629 backend.get_branch_remote(branch_name).await?
5630 }
5631 } else {
5632 None
5633 };
5634
5635 match remote {
5636 Some(remote) => Ok(vec![remote]),
5637 None => backend.get_all_remotes().await,
5638 }
5639 }
5640 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5641 let response = client
5642 .request(proto::GetRemotes {
5643 project_id: project_id.0,
5644 repository_id: id.to_proto(),
5645 branch_name,
5646 is_push,
5647 })
5648 .await?;
5649
5650 let remotes = response
5651 .remotes
5652 .into_iter()
5653 .map(|remotes| Remote {
5654 name: remotes.name.into(),
5655 })
5656 .collect();
5657
5658 Ok(remotes)
5659 }
5660 }
5661 })
5662 }
5663
5664 pub fn branches(&mut self) -> oneshot::Receiver<Result<Vec<Branch>>> {
5665 let id = self.id;
5666 self.send_job(None, move |repo, _| async move {
5667 match repo {
5668 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5669 backend.branches().await
5670 }
5671 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5672 let response = client
5673 .request(proto::GitGetBranches {
5674 project_id: project_id.0,
5675 repository_id: id.to_proto(),
5676 })
5677 .await?;
5678
5679 let branches = response
5680 .branches
5681 .into_iter()
5682 .map(|branch| proto_to_branch(&branch))
5683 .collect();
5684
5685 Ok(branches)
5686 }
5687 }
5688 })
5689 }
5690
5691 pub fn worktrees(&mut self) -> oneshot::Receiver<Result<Vec<GitWorktree>>> {
5692 let id = self.id;
5693 self.send_job(None, move |repo, _| async move {
5694 match repo {
5695 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5696 backend.worktrees().await
5697 }
5698 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5699 let response = client
5700 .request(proto::GitGetWorktrees {
5701 project_id: project_id.0,
5702 repository_id: id.to_proto(),
5703 })
5704 .await?;
5705
5706 let worktrees = response
5707 .worktrees
5708 .into_iter()
5709 .map(|worktree| proto_to_worktree(&worktree))
5710 .collect();
5711
5712 Ok(worktrees)
5713 }
5714 }
5715 })
5716 }
5717
5718 pub fn create_worktree(
5719 &mut self,
5720 name: String,
5721 directory: PathBuf,
5722 commit: Option<String>,
5723 ) -> oneshot::Receiver<Result<()>> {
5724 let id = self.id;
5725 self.send_job(
5726 Some("git worktree add".into()),
5727 move |repo, _cx| async move {
5728 match repo {
5729 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5730 backend.create_worktree(name, directory, commit).await
5731 }
5732 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5733 client
5734 .request(proto::GitCreateWorktree {
5735 project_id: project_id.0,
5736 repository_id: id.to_proto(),
5737 name,
5738 directory: directory.to_string_lossy().to_string(),
5739 commit,
5740 })
5741 .await?;
5742
5743 Ok(())
5744 }
5745 }
5746 },
5747 )
5748 }
5749
5750 pub fn remove_worktree(&mut self, path: PathBuf, force: bool) -> oneshot::Receiver<Result<()>> {
5751 self.send_job(
5752 Some("git worktree remove".into()),
5753 move |repo, _cx| async move {
5754 match repo {
5755 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5756 backend.remove_worktree(path, force).await
5757 }
5758 RepositoryState::Remote(_) => {
5759 anyhow::bail!(
5760 "Removing worktrees on remote repositories is not yet supported"
5761 )
5762 }
5763 }
5764 },
5765 )
5766 }
5767
5768 pub fn default_branch(
5769 &mut self,
5770 include_remote_name: bool,
5771 ) -> oneshot::Receiver<Result<Option<SharedString>>> {
5772 let id = self.id;
5773 self.send_job(None, move |repo, _| async move {
5774 match repo {
5775 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5776 backend.default_branch(include_remote_name).await
5777 }
5778 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5779 let response = client
5780 .request(proto::GetDefaultBranch {
5781 project_id: project_id.0,
5782 repository_id: id.to_proto(),
5783 })
5784 .await?;
5785
5786 anyhow::Ok(response.branch.map(SharedString::from))
5787 }
5788 }
5789 })
5790 }
5791
    /// Computes a tree-level diff (per-path added/modified/deleted statuses)
    /// for the given comparison, locally or via RPC.
    pub fn diff_tree(
        &mut self,
        diff_type: DiffTreeType,
        _cx: &App,
    ) -> oneshot::Receiver<Result<TreeDiff>> {
        let repository_id = self.snapshot.id;
        self.send_job(None, move |repo, _cx| async move {
            match repo {
                RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
                    backend.diff_tree(diff_type).await
                }
                RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
                    let response = client
                        .request(proto::GetTreeDiff {
                            project_id: project_id.0,
                            repository_id: repository_id.0,
                            is_merge: matches!(diff_type, DiffTreeType::MergeBase { .. }),
                            base: diff_type.base().to_string(),
                            head: diff_type.head().to_string(),
                        })
                        .await?;

                    // Decode each entry. Entries with a missing/unparseable
                    // oid or an undecodable path are logged (`log_err`) and
                    // dropped rather than failing the whole diff.
                    let entries = response
                        .entries
                        .into_iter()
                        .filter_map(|entry| {
                            let status = match entry.status() {
                                proto::tree_diff_status::Status::Added => TreeDiffStatus::Added,
                                proto::tree_diff_status::Status::Modified => {
                                    TreeDiffStatus::Modified {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                                proto::tree_diff_status::Status::Deleted => {
                                    TreeDiffStatus::Deleted {
                                        old: git::Oid::from_str(
                                            &entry.oid.context("missing oid").log_err()?,
                                        )
                                        .log_err()?,
                                    }
                                }
                            };
                            Some((
                                RepoPath::from_rel_path(
                                    &RelPath::from_proto(&entry.path).log_err()?,
                                ),
                                status,
                            ))
                        })
                        .collect();

                    Ok(TreeDiff { entries })
                }
            }
        })
    }
5851
5852 pub fn diff(&mut self, diff_type: DiffType, _cx: &App) -> oneshot::Receiver<Result<String>> {
5853 let id = self.id;
5854 self.send_job(None, move |repo, _cx| async move {
5855 match repo {
5856 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5857 backend.diff(diff_type).await
5858 }
5859 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5860 let (proto_diff_type, merge_base_ref) = match &diff_type {
5861 DiffType::HeadToIndex => {
5862 (proto::git_diff::DiffType::HeadToIndex.into(), None)
5863 }
5864 DiffType::HeadToWorktree => {
5865 (proto::git_diff::DiffType::HeadToWorktree.into(), None)
5866 }
5867 DiffType::MergeBase { base_ref } => (
5868 proto::git_diff::DiffType::MergeBase.into(),
5869 Some(base_ref.to_string()),
5870 ),
5871 };
5872 let response = client
5873 .request(proto::GitDiff {
5874 project_id: project_id.0,
5875 repository_id: id.to_proto(),
5876 diff_type: proto_diff_type,
5877 merge_base_ref,
5878 })
5879 .await?;
5880
5881 Ok(response.diff)
5882 }
5883 }
5884 })
5885 }
5886
5887 /// Fetches per-line diff statistics (additions/deletions) via `git diff --numstat`.
5888 pub fn diff_stat(
5889 &mut self,
5890 diff_type: DiffType,
5891 _cx: &App,
5892 ) -> oneshot::Receiver<
5893 Result<collections::HashMap<git::repository::RepoPath, git::status::DiffStat>>,
5894 > {
5895 let id = self.id;
5896 self.send_job(None, move |repo, _cx| async move {
5897 match repo {
5898 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5899 backend.diff_stat(diff_type).await
5900 }
5901 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5902 let (proto_diff_type, merge_base_ref) = match &diff_type {
5903 DiffType::HeadToIndex => {
5904 (proto::git_diff_stat::DiffType::HeadToIndex.into(), None)
5905 }
5906 DiffType::HeadToWorktree => {
5907 (proto::git_diff_stat::DiffType::HeadToWorktree.into(), None)
5908 }
5909 DiffType::MergeBase { base_ref } => (
5910 proto::git_diff_stat::DiffType::MergeBase.into(),
5911 Some(base_ref.to_string()),
5912 ),
5913 };
5914 let response = client
5915 .request(proto::GitDiffStat {
5916 project_id: project_id.0,
5917 repository_id: id.to_proto(),
5918 diff_type: proto_diff_type,
5919 merge_base_ref,
5920 })
5921 .await?;
5922
5923 let stats = response
5924 .entries
5925 .into_iter()
5926 .filter_map(|entry| {
5927 let path = RepoPath::from_proto(&entry.path).log_err()?;
5928 Some((
5929 path,
5930 git::status::DiffStat {
5931 added: entry.added,
5932 deleted: entry.deleted,
5933 },
5934 ))
5935 })
5936 .collect();
5937
5938 Ok(stats)
5939 }
5940 }
5941 })
5942 }
5943
5944 pub fn create_branch(
5945 &mut self,
5946 branch_name: String,
5947 base_branch: Option<String>,
5948 ) -> oneshot::Receiver<Result<()>> {
5949 let id = self.id;
5950 let status_msg = if let Some(ref base) = base_branch {
5951 format!("git switch -c {branch_name} {base}").into()
5952 } else {
5953 format!("git switch -c {branch_name}").into()
5954 };
5955 self.send_job(Some(status_msg), move |repo, _cx| async move {
5956 match repo {
5957 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5958 backend.create_branch(branch_name, base_branch).await
5959 }
5960 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5961 client
5962 .request(proto::GitCreateBranch {
5963 project_id: project_id.0,
5964 repository_id: id.to_proto(),
5965 branch_name,
5966 })
5967 .await?;
5968
5969 Ok(())
5970 }
5971 }
5972 })
5973 }
5974
5975 pub fn change_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
5976 let id = self.id;
5977 self.send_job(
5978 Some(format!("git switch {branch_name}").into()),
5979 move |repo, _cx| async move {
5980 match repo {
5981 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
5982 backend.change_branch(branch_name).await
5983 }
5984 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
5985 client
5986 .request(proto::GitChangeBranch {
5987 project_id: project_id.0,
5988 repository_id: id.to_proto(),
5989 branch_name,
5990 })
5991 .await?;
5992
5993 Ok(())
5994 }
5995 }
5996 },
5997 )
5998 }
5999
6000 pub fn delete_branch(&mut self, branch_name: String) -> oneshot::Receiver<Result<()>> {
6001 let id = self.id;
6002 self.send_job(
6003 Some(format!("git branch -d {branch_name}").into()),
6004 move |repo, _cx| async move {
6005 match repo {
6006 RepositoryState::Local(state) => state.backend.delete_branch(branch_name).await,
6007 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6008 client
6009 .request(proto::GitDeleteBranch {
6010 project_id: project_id.0,
6011 repository_id: id.to_proto(),
6012 branch_name,
6013 })
6014 .await?;
6015
6016 Ok(())
6017 }
6018 }
6019 },
6020 )
6021 }
6022
6023 pub fn rename_branch(
6024 &mut self,
6025 branch: String,
6026 new_name: String,
6027 ) -> oneshot::Receiver<Result<()>> {
6028 let id = self.id;
6029 self.send_job(
6030 Some(format!("git branch -m {branch} {new_name}").into()),
6031 move |repo, _cx| async move {
6032 match repo {
6033 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6034 backend.rename_branch(branch, new_name).await
6035 }
6036 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6037 client
6038 .request(proto::GitRenameBranch {
6039 project_id: project_id.0,
6040 repository_id: id.to_proto(),
6041 branch,
6042 new_name,
6043 })
6044 .await?;
6045
6046 Ok(())
6047 }
6048 }
6049 },
6050 )
6051 }
6052
6053 pub fn check_for_pushed_commits(&mut self) -> oneshot::Receiver<Result<Vec<SharedString>>> {
6054 let id = self.id;
6055 self.send_job(None, move |repo, _cx| async move {
6056 match repo {
6057 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6058 backend.check_for_pushed_commit().await
6059 }
6060 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6061 let response = client
6062 .request(proto::CheckForPushedCommits {
6063 project_id: project_id.0,
6064 repository_id: id.to_proto(),
6065 })
6066 .await?;
6067
6068 let branches = response.pushed_to.into_iter().map(Into::into).collect();
6069
6070 Ok(branches)
6071 }
6072 }
6073 })
6074 }
6075
6076 pub fn checkpoint(&mut self) -> oneshot::Receiver<Result<GitRepositoryCheckpoint>> {
6077 self.send_job(None, |repo, _cx| async move {
6078 match repo {
6079 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6080 backend.checkpoint().await
6081 }
6082 RepositoryState::Remote(..) => anyhow::bail!("not implemented yet"),
6083 }
6084 })
6085 }
6086
6087 pub fn restore_checkpoint(
6088 &mut self,
6089 checkpoint: GitRepositoryCheckpoint,
6090 ) -> oneshot::Receiver<Result<()>> {
6091 self.send_job(None, move |repo, _cx| async move {
6092 match repo {
6093 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6094 backend.restore_checkpoint(checkpoint).await
6095 }
6096 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6097 }
6098 })
6099 }
6100
    /// Applies an `UpdateRepository` message from the upstream (host) project
    /// to this replica's snapshot, emitting change events for anything that
    /// actually differs.
    pub(crate) fn apply_remote_update(
        &mut self,
        update: proto::UpdateRepository,
        cx: &mut Context<Self>,
    ) -> Result<()> {
        if let Some(main_path) = &update.original_repo_abs_path {
            self.snapshot.original_repo_abs_path = Path::new(main_path.as_str()).into();
        }

        // Emit BranchChanged before overwriting, comparing old vs. new.
        let new_branch = update.branch_summary.as_ref().map(proto_to_branch);
        let new_head_commit = update
            .head_commit_details
            .as_ref()
            .map(proto_to_commit_details);
        if self.snapshot.branch != new_branch || self.snapshot.head_commit != new_head_commit {
            cx.emit(RepositoryEvent::BranchChanged)
        }
        self.snapshot.branch = new_branch;
        self.snapshot.head_commit = new_head_commit;

        // We don't store any merge head state for downstream projects; the upstream
        // will track it and we will just get the updated conflicts
        let new_merge_heads = TreeMap::from_ordered_entries(
            update
                .current_merge_conflicts
                .into_iter()
                .filter_map(|path| Some((RepoPath::from_proto(&path).ok()?, vec![]))),
        );
        let conflicts_changed =
            self.snapshot.merge.merge_heads_by_conflicted_path != new_merge_heads;
        self.snapshot.merge.merge_heads_by_conflicted_path = new_merge_heads;
        self.snapshot.merge.message = update.merge_message.map(SharedString::from);
        // Stash entries that fail to decode are silently dropped.
        let new_stash_entries = GitStash {
            entries: update
                .stash_entries
                .iter()
                .filter_map(|entry| proto_to_stash(entry).ok())
                .collect(),
        };
        if self.snapshot.stash_entries != new_stash_entries {
            cx.emit(RepositoryEvent::StashEntriesChanged)
        }
        self.snapshot.stash_entries = new_stash_entries;
        self.snapshot.remote_upstream_url = update.remote_upstream_url;
        self.snapshot.remote_origin_url = update.remote_origin_url;

        // Apply status changes as one batch of removals followed by upserts;
        // undecodable paths/statuses are logged and skipped.
        let edits = update
            .removed_statuses
            .into_iter()
            .filter_map(|path| {
                Some(sum_tree::Edit::Remove(PathKey(
                    RelPath::from_proto(&path).log_err()?,
                )))
            })
            .chain(
                update
                    .updated_statuses
                    .into_iter()
                    .filter_map(|updated_status| {
                        Some(sum_tree::Edit::Insert(updated_status.try_into().log_err()?))
                    }),
            )
            .collect::<Vec<_>>();
        if conflicts_changed || !edits.is_empty() {
            cx.emit(RepositoryEvent::StatusesChanged);
        }
        self.snapshot.statuses_by_path.edit(edits, ());
        // Only adopt the upstream scan id once the last message of the
        // update batch has been applied.
        if update.is_last_update {
            self.snapshot.scan_id = update.scan_id;
        }
        self.clear_pending_ops(cx);
        Ok(())
    }
6174
6175 pub fn compare_checkpoints(
6176 &mut self,
6177 left: GitRepositoryCheckpoint,
6178 right: GitRepositoryCheckpoint,
6179 ) -> oneshot::Receiver<Result<bool>> {
6180 self.send_job(None, move |repo, _cx| async move {
6181 match repo {
6182 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6183 backend.compare_checkpoints(left, right).await
6184 }
6185 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6186 }
6187 })
6188 }
6189
6190 pub fn diff_checkpoints(
6191 &mut self,
6192 base_checkpoint: GitRepositoryCheckpoint,
6193 target_checkpoint: GitRepositoryCheckpoint,
6194 ) -> oneshot::Receiver<Result<String>> {
6195 self.send_job(None, move |repo, _cx| async move {
6196 match repo {
6197 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6198 backend
6199 .diff_checkpoints(base_checkpoint, target_checkpoint)
6200 .await
6201 }
6202 RepositoryState::Remote { .. } => anyhow::bail!("not implemented yet"),
6203 }
6204 })
6205 }
6206
    /// Prunes finished pending ops, keeping only paths that still have at
    /// least one running op, and emits `PendingOpsChanged` if anything was
    /// pruned.
    fn clear_pending_ops(&mut self, cx: &mut Context<Self>) {
        let updated = SumTree::from_iter(
            self.pending_ops.iter().filter_map(|ops| {
                // Keep only the still-running ops for this path; drop the
                // whole entry when none remain.
                let inner_ops: Vec<PendingOp> =
                    ops.ops.iter().filter(|op| op.running()).cloned().collect();
                if inner_ops.is_empty() {
                    None
                } else {
                    Some(PendingOps {
                        repo_path: ops.repo_path.clone(),
                        ops: inner_ops,
                    })
                }
            }),
            (),
        );

        if updated != self.pending_ops {
            // NOTE(review): the event payload is the *previous* pending ops
            // (cloned before the assignment below), not `updated` — confirm
            // listeners expect the pre-prune value.
            cx.emit(RepositoryEvent::PendingOpsChanged {
                pending_ops: self.pending_ops.clone(),
            })
        }

        self.pending_ops = updated;
    }
6232
    /// Schedules a full git status rescan on the job queue.
    ///
    /// Keyed by `GitJobKey::ReloadGitState`, so repeated requests queued
    /// behind one another coalesce into a single scan. On completion the
    /// fresh snapshot replaces the current one, finished pending ops are
    /// pruned, events are emitted, and (if `updates_tx` is provided) the
    /// snapshot is forwarded downstream.
    fn schedule_scan(
        &mut self,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::ReloadGitState),
            None,
            |state, mut cx| async move {
                log::debug!("run scheduled git status scan");

                let Some(this) = this.upgrade() else {
                    return Ok(());
                };
                // Scanning needs direct backend access; replicas get their
                // state via `apply_remote_update` instead.
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };
                // Prepare the snapshot computation on the main thread
                // (clearing the queued dirty paths), then run it in the
                // background.
                let compute_snapshot = this.update(&mut cx, |this, _| {
                    this.paths_needing_status_update.clear();
                    compute_snapshot(
                        this.id,
                        this.work_directory_abs_path.clone(),
                        this.snapshot.clone(),
                        backend.clone(),
                    )
                });
                let (snapshot, events) = cx.background_spawn(compute_snapshot).await?;
                this.update(&mut cx, |this, cx| {
                    this.snapshot = snapshot.clone();
                    this.clear_pending_ops(cx);
                    for event in events {
                        cx.emit(event);
                    }
                });
                if let Some(updates_tx) = updates_tx {
                    updates_tx
                        .unbounded_send(DownstreamUpdate::UpdateRepository(snapshot))
                        .ok();
                }
                Ok(())
            },
        );
    }
6277
    /// Spawns the single worker task that executes queued git jobs for a
    /// local repository, returning the channel on which jobs are submitted.
    ///
    /// Jobs run one at a time, in order. When a keyed job is followed in the
    /// queue by another job with the same key, the earlier job is dropped so
    /// that only the most recent keyed job actually executes.
    fn spawn_local_git_worker(
        state: Shared<Task<Result<LocalRepositoryState, String>>>,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            // Wait for repository initialization before running any job.
            let state = state.await.map_err(|err| anyhow::anyhow!(err))?;
            if let Some(git_hosting_provider_registry) =
                cx.update(|cx| GitHostingProviderRegistry::try_global(cx))
            {
                git_hosting_providers::register_additional_providers(
                    git_hosting_provider_registry,
                    state.backend.clone(),
                )
                .await;
            }
            let state = RepositoryState::Local(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later queued job shares its key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6323
    /// Spawns the worker task that executes queued git jobs against a remote
    /// (RPC-backed) repository; same queueing semantics as
    /// `spawn_local_git_worker`: jobs run one at a time, and a keyed job is
    /// dropped when a later queued job carries the same key.
    fn spawn_remote_git_worker(
        state: RemoteRepositoryState,
        cx: &mut Context<Self>,
    ) -> mpsc::UnboundedSender<GitJob> {
        let (job_tx, mut job_rx) = mpsc::unbounded::<GitJob>();

        cx.spawn(async move |_, cx| {
            let state = RepositoryState::Remote(state);
            let mut jobs = VecDeque::new();
            loop {
                // Drain everything currently queued so we can look ahead for
                // duplicate keys before running the next job.
                while let Ok(Some(next_job)) = job_rx.try_next() {
                    jobs.push_back(next_job);
                }

                if let Some(job) = jobs.pop_front() {
                    // Skip this job if a later queued job shares its key.
                    if let Some(current_key) = &job.key
                        && jobs
                            .iter()
                            .any(|other_job| other_job.key.as_ref() == Some(current_key))
                    {
                        continue;
                    }
                    (job.job)(state.clone(), cx).await;
                } else if let Some(job) = job_rx.next().await {
                    // Queue is empty: block until the next job arrives.
                    jobs.push_back(job);
                } else {
                    // All senders dropped; shut the worker down.
                    break;
                }
            }
            anyhow::Ok(())
        })
        .detach_and_log_err(cx);

        job_tx
    }
6359
6360 fn load_staged_text(
6361 &mut self,
6362 buffer_id: BufferId,
6363 repo_path: RepoPath,
6364 cx: &App,
6365 ) -> Task<Result<Option<String>>> {
6366 let rx = self.send_job(None, move |state, _| async move {
6367 match state {
6368 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6369 anyhow::Ok(backend.load_index_text(repo_path).await)
6370 }
6371 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6372 let response = client
6373 .request(proto::OpenUnstagedDiff {
6374 project_id: project_id.to_proto(),
6375 buffer_id: buffer_id.to_proto(),
6376 })
6377 .await?;
6378 Ok(response.staged_text)
6379 }
6380 }
6381 });
6382 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6383 }
6384
6385 fn load_committed_text(
6386 &mut self,
6387 buffer_id: BufferId,
6388 repo_path: RepoPath,
6389 cx: &App,
6390 ) -> Task<Result<DiffBasesChange>> {
6391 let rx = self.send_job(None, move |state, _| async move {
6392 match state {
6393 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6394 let committed_text = backend.load_committed_text(repo_path.clone()).await;
6395 let staged_text = backend.load_index_text(repo_path).await;
6396 let diff_bases_change = if committed_text == staged_text {
6397 DiffBasesChange::SetBoth(committed_text)
6398 } else {
6399 DiffBasesChange::SetEach {
6400 index: staged_text,
6401 head: committed_text,
6402 }
6403 };
6404 anyhow::Ok(diff_bases_change)
6405 }
6406 RepositoryState::Remote(RemoteRepositoryState { project_id, client }) => {
6407 use proto::open_uncommitted_diff_response::Mode;
6408
6409 let response = client
6410 .request(proto::OpenUncommittedDiff {
6411 project_id: project_id.to_proto(),
6412 buffer_id: buffer_id.to_proto(),
6413 })
6414 .await?;
6415 let mode = Mode::from_i32(response.mode).context("Invalid mode")?;
6416 let bases = match mode {
6417 Mode::IndexMatchesHead => DiffBasesChange::SetBoth(response.committed_text),
6418 Mode::IndexAndHead => DiffBasesChange::SetEach {
6419 head: response.committed_text,
6420 index: response.staged_text,
6421 },
6422 };
6423 Ok(bases)
6424 }
6425 }
6426 });
6427
6428 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6429 }
6430
6431 fn load_blob_content(&mut self, oid: Oid, cx: &App) -> Task<Result<String>> {
6432 let repository_id = self.snapshot.id;
6433 let rx = self.send_job(None, move |state, _| async move {
6434 match state {
6435 RepositoryState::Local(LocalRepositoryState { backend, .. }) => {
6436 backend.load_blob_content(oid).await
6437 }
6438 RepositoryState::Remote(RemoteRepositoryState { client, project_id }) => {
6439 let response = client
6440 .request(proto::GetBlobContent {
6441 project_id: project_id.to_proto(),
6442 repository_id: repository_id.0,
6443 oid: oid.to_string(),
6444 })
6445 .await?;
6446 Ok(response.content)
6447 }
6448 }
6449 });
6450 cx.spawn(|_: &mut AsyncApp| async move { rx.await? })
6451 }
6452
    /// Records that `paths` may have changed on disk and queues an
    /// incremental git status refresh.
    ///
    /// Keyed by `GitJobKey::RefreshStatuses`, so bursts of file events
    /// coalesce into one refresh covering all accumulated paths. Only
    /// statuses that actually differ from the previous snapshot produce
    /// edits (and a `StatusesChanged` event); stash entries are re-read and
    /// compared on the same pass.
    fn paths_changed(
        &mut self,
        paths: Vec<RepoPath>,
        updates_tx: Option<mpsc::UnboundedSender<DownstreamUpdate>>,
        cx: &mut Context<Self>,
    ) {
        if !paths.is_empty() {
            self.paths_needing_status_update.push(paths);
        }

        let this = cx.weak_entity();
        let _ = self.send_keyed_job(
            Some(GitJobKey::RefreshStatuses),
            None,
            |state, mut cx| async move {
                // Take ownership of every dirty-path batch accumulated so far.
                let (prev_snapshot, changed_paths) = this.update(&mut cx, |this, _| {
                    (
                        this.snapshot.clone(),
                        mem::take(&mut this.paths_needing_status_update),
                    )
                })?;
                let RepositoryState::Local(LocalRepositoryState { backend, .. }) = state else {
                    bail!("not a local repository")
                };

                if changed_paths.is_empty() {
                    return Ok(());
                }

                let stash_entries = backend.stash_entries().await?;
                let changed_path_statuses = cx
                    .background_spawn(async move {
                        let mut changed_paths =
                            changed_paths.into_iter().flatten().collect::<BTreeSet<_>>();
                        let statuses = backend
                            .status(&changed_paths.iter().cloned().collect::<Vec<_>>())
                            .await?;
                        let mut changed_path_statuses = Vec::new();
                        let prev_statuses = prev_snapshot.statuses_by_path.clone();
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());

                        // Upsert statuses that differ from the previous
                        // snapshot. Paths removed from `changed_paths` here
                        // were reported by git; whatever remains afterwards
                        // no longer has a status at all.
                        for (repo_path, status) in &*statuses.entries {
                            changed_paths.remove(repo_path);
                            if cursor.seek_forward(&PathTarget::Path(repo_path), Bias::Left)
                                && cursor.item().is_some_and(|entry| entry.status == *status)
                            {
                                // Unchanged; skip to avoid a spurious edit.
                                continue;
                            }

                            changed_path_statuses.push(Edit::Insert(StatusEntry {
                                repo_path: repo_path.clone(),
                                status: *status,
                            }));
                        }
                        // Remove entries for paths that previously had a
                        // status but were not reported this time. (Fresh
                        // cursor: `changed_paths` is sorted, so a single
                        // forward pass suffices.)
                        let mut cursor = prev_statuses.cursor::<PathProgress>(());
                        for path in changed_paths.into_iter() {
                            if cursor.seek_forward(&PathTarget::Path(&path), Bias::Left) {
                                changed_path_statuses
                                    .push(Edit::Remove(PathKey(path.as_ref().clone())));
                            }
                        }
                        anyhow::Ok(changed_path_statuses)
                    })
                    .await?;

                this.update(&mut cx, |this, cx| {
                    if this.snapshot.stash_entries != stash_entries {
                        cx.emit(RepositoryEvent::StashEntriesChanged);
                        this.snapshot.stash_entries = stash_entries;
                    }

                    if !changed_path_statuses.is_empty() {
                        cx.emit(RepositoryEvent::StatusesChanged);
                        this.snapshot
                            .statuses_by_path
                            .edit(changed_path_statuses, ());
                        this.snapshot.scan_id += 1;
                    }

                    if let Some(updates_tx) = updates_tx {
                        updates_tx
                            .unbounded_send(DownstreamUpdate::UpdateRepository(
                                this.snapshot.clone(),
                            ))
                            .ok();
                    }
                })
            },
        );
    }
6543
6544 /// currently running git command and when it started
6545 pub fn current_job(&self) -> Option<JobInfo> {
6546 self.active_jobs.values().next().cloned()
6547 }
6548
    /// Enqueues a no-op job and returns a receiver that resolves once it has
    /// run, letting callers wait for all previously queued git jobs to finish.
    pub fn barrier(&mut self) -> oneshot::Receiver<()> {
        self.send_job(None, |_, _| async {})
    }
6552
    /// Runs `f` while tracking a pending git operation (tagged `git_status`)
    /// for each of `paths`, so per-path in-flight state can be observed.
    ///
    /// After `f` resolves, every op created here is marked `Finished`,
    /// `Skipped` (when the job was canceled), or `Error`; the original error,
    /// if any, is propagated to the caller.
    fn spawn_job_with_tracking<AsyncFn>(
        &mut self,
        paths: Vec<RepoPath>,
        git_status: pending_op::GitStatus,
        cx: &mut Context<Self>,
        f: AsyncFn,
    ) -> Task<Result<()>>
    where
        AsyncFn: AsyncFnOnce(WeakEntity<Repository>, &mut AsyncApp) -> Result<()> + 'static,
    {
        let ids = self.new_pending_ops_for_paths(paths, git_status);

        cx.spawn(async move |this, cx| {
            // A `Canceled` error means the job was dropped before completing;
            // record it as skipped rather than failed.
            let (job_status, result) = match f(this.clone(), cx).await {
                Ok(()) => (pending_op::JobStatus::Finished, Ok(())),
                Err(err) if err.is::<Canceled>() => (pending_op::JobStatus::Skipped, Ok(())),
                Err(err) => (pending_op::JobStatus::Error, Err(err)),
            };

            this.update(cx, |this, _| {
                // Re-read each path's op set (it may have gained other ops
                // while the job ran) and update only the op created above.
                let mut edits = Vec::with_capacity(ids.len());
                for (id, entry) in ids {
                    if let Some(mut ops) = this
                        .pending_ops
                        .get(&PathKey(entry.as_ref().clone()), ())
                        .cloned()
                    {
                        if let Some(op) = ops.op_by_id_mut(id) {
                            op.job_status = job_status;
                        }
                        edits.push(sum_tree::Edit::Insert(ops));
                    }
                }
                this.pending_ops.edit(edits, ());
            })?;

            result
        })
    }
6592
6593 fn new_pending_ops_for_paths(
6594 &mut self,
6595 paths: Vec<RepoPath>,
6596 git_status: pending_op::GitStatus,
6597 ) -> Vec<(PendingOpId, RepoPath)> {
6598 let mut edits = Vec::with_capacity(paths.len());
6599 let mut ids = Vec::with_capacity(paths.len());
6600 for path in paths {
6601 let mut ops = self
6602 .pending_ops
6603 .get(&PathKey(path.as_ref().clone()), ())
6604 .cloned()
6605 .unwrap_or_else(|| PendingOps::new(&path));
6606 let id = ops.max_id() + 1;
6607 ops.ops.push(PendingOp {
6608 id,
6609 git_status,
6610 job_status: pending_op::JobStatus::Running,
6611 });
6612 edits.push(sum_tree::Edit::Insert(ops));
6613 ids.push((id, path));
6614 }
6615 self.pending_ops.edit(edits, ());
6616 ids
6617 }
6618 pub fn default_remote_url(&self) -> Option<String> {
6619 self.remote_upstream_url
6620 .clone()
6621 .or(self.remote_origin_url.clone())
6622 }
6623}
6624
6625fn get_permalink_in_rust_registry_src(
6626 provider_registry: Arc<GitHostingProviderRegistry>,
6627 path: PathBuf,
6628 selection: Range<u32>,
6629) -> Result<url::Url> {
6630 #[derive(Deserialize)]
6631 struct CargoVcsGit {
6632 sha1: String,
6633 }
6634
6635 #[derive(Deserialize)]
6636 struct CargoVcsInfo {
6637 git: CargoVcsGit,
6638 path_in_vcs: String,
6639 }
6640
6641 #[derive(Deserialize)]
6642 struct CargoPackage {
6643 repository: String,
6644 }
6645
6646 #[derive(Deserialize)]
6647 struct CargoToml {
6648 package: CargoPackage,
6649 }
6650
6651 let Some((dir, cargo_vcs_info_json)) = path.ancestors().skip(1).find_map(|dir| {
6652 let json = std::fs::read_to_string(dir.join(".cargo_vcs_info.json")).ok()?;
6653 Some((dir, json))
6654 }) else {
6655 bail!("No .cargo_vcs_info.json found in parent directories")
6656 };
6657 let cargo_vcs_info = serde_json::from_str::<CargoVcsInfo>(&cargo_vcs_info_json)?;
6658 let cargo_toml = std::fs::read_to_string(dir.join("Cargo.toml"))?;
6659 let manifest = toml::from_str::<CargoToml>(&cargo_toml)?;
6660 let (provider, remote) = parse_git_remote_url(provider_registry, &manifest.package.repository)
6661 .context("parsing package.repository field of manifest")?;
6662 let path = PathBuf::from(cargo_vcs_info.path_in_vcs).join(path.strip_prefix(dir).unwrap());
6663 let permalink = provider.build_permalink(
6664 remote,
6665 BuildPermalinkParams::new(
6666 &cargo_vcs_info.git.sha1,
6667 &RepoPath::from_rel_path(
6668 &RelPath::new(&path, PathStyle::local()).context("invalid path")?,
6669 ),
6670 Some(selection),
6671 ),
6672 );
6673 Ok(permalink)
6674}
6675
6676fn serialize_blame_buffer_response(blame: Option<git::blame::Blame>) -> proto::BlameBufferResponse {
6677 let Some(blame) = blame else {
6678 return proto::BlameBufferResponse {
6679 blame_response: None,
6680 };
6681 };
6682
6683 let entries = blame
6684 .entries
6685 .into_iter()
6686 .map(|entry| proto::BlameEntry {
6687 sha: entry.sha.as_bytes().into(),
6688 start_line: entry.range.start,
6689 end_line: entry.range.end,
6690 original_line_number: entry.original_line_number,
6691 author: entry.author,
6692 author_mail: entry.author_mail,
6693 author_time: entry.author_time,
6694 author_tz: entry.author_tz,
6695 committer: entry.committer_name,
6696 committer_mail: entry.committer_email,
6697 committer_time: entry.committer_time,
6698 committer_tz: entry.committer_tz,
6699 summary: entry.summary,
6700 previous: entry.previous,
6701 filename: entry.filename,
6702 })
6703 .collect::<Vec<_>>();
6704
6705 let messages = blame
6706 .messages
6707 .into_iter()
6708 .map(|(oid, message)| proto::CommitMessage {
6709 oid: oid.as_bytes().into(),
6710 message,
6711 })
6712 .collect::<Vec<_>>();
6713
6714 proto::BlameBufferResponse {
6715 blame_response: Some(proto::blame_buffer_response::BlameResponse { entries, messages }),
6716 }
6717}
6718
6719fn deserialize_blame_buffer_response(
6720 response: proto::BlameBufferResponse,
6721) -> Option<git::blame::Blame> {
6722 let response = response.blame_response?;
6723 let entries = response
6724 .entries
6725 .into_iter()
6726 .filter_map(|entry| {
6727 Some(git::blame::BlameEntry {
6728 sha: git::Oid::from_bytes(&entry.sha).ok()?,
6729 range: entry.start_line..entry.end_line,
6730 original_line_number: entry.original_line_number,
6731 committer_name: entry.committer,
6732 committer_time: entry.committer_time,
6733 committer_tz: entry.committer_tz,
6734 committer_email: entry.committer_mail,
6735 author: entry.author,
6736 author_mail: entry.author_mail,
6737 author_time: entry.author_time,
6738 author_tz: entry.author_tz,
6739 summary: entry.summary,
6740 previous: entry.previous,
6741 filename: entry.filename,
6742 })
6743 })
6744 .collect::<Vec<_>>();
6745
6746 let messages = response
6747 .messages
6748 .into_iter()
6749 .filter_map(|message| Some((git::Oid::from_bytes(&message.oid).ok()?, message.message)))
6750 .collect::<HashMap<_, _>>();
6751
6752 Some(Blame { entries, messages })
6753}
6754
6755fn branch_to_proto(branch: &git::repository::Branch) -> proto::Branch {
6756 proto::Branch {
6757 is_head: branch.is_head,
6758 ref_name: branch.ref_name.to_string(),
6759 unix_timestamp: branch
6760 .most_recent_commit
6761 .as_ref()
6762 .map(|commit| commit.commit_timestamp as u64),
6763 upstream: branch.upstream.as_ref().map(|upstream| proto::GitUpstream {
6764 ref_name: upstream.ref_name.to_string(),
6765 tracking: upstream
6766 .tracking
6767 .status()
6768 .map(|upstream| proto::UpstreamTracking {
6769 ahead: upstream.ahead as u64,
6770 behind: upstream.behind as u64,
6771 }),
6772 }),
6773 most_recent_commit: branch
6774 .most_recent_commit
6775 .as_ref()
6776 .map(|commit| proto::CommitSummary {
6777 sha: commit.sha.to_string(),
6778 subject: commit.subject.to_string(),
6779 commit_timestamp: commit.commit_timestamp,
6780 author_name: commit.author_name.to_string(),
6781 }),
6782 }
6783}
6784
6785fn worktree_to_proto(worktree: &git::repository::Worktree) -> proto::Worktree {
6786 proto::Worktree {
6787 path: worktree.path.to_string_lossy().to_string(),
6788 ref_name: worktree.ref_name.to_string(),
6789 sha: worktree.sha.to_string(),
6790 }
6791}
6792
6793fn proto_to_worktree(proto: &proto::Worktree) -> git::repository::Worktree {
6794 git::repository::Worktree {
6795 path: PathBuf::from(proto.path.clone()),
6796 ref_name: proto.ref_name.clone().into(),
6797 sha: proto.sha.clone().into(),
6798 }
6799}
6800
6801fn proto_to_branch(proto: &proto::Branch) -> git::repository::Branch {
6802 git::repository::Branch {
6803 is_head: proto.is_head,
6804 ref_name: proto.ref_name.clone().into(),
6805 upstream: proto
6806 .upstream
6807 .as_ref()
6808 .map(|upstream| git::repository::Upstream {
6809 ref_name: upstream.ref_name.to_string().into(),
6810 tracking: upstream
6811 .tracking
6812 .as_ref()
6813 .map(|tracking| {
6814 git::repository::UpstreamTracking::Tracked(UpstreamTrackingStatus {
6815 ahead: tracking.ahead as u32,
6816 behind: tracking.behind as u32,
6817 })
6818 })
6819 .unwrap_or(git::repository::UpstreamTracking::Gone),
6820 }),
6821 most_recent_commit: proto.most_recent_commit.as_ref().map(|commit| {
6822 git::repository::CommitSummary {
6823 sha: commit.sha.to_string().into(),
6824 subject: commit.subject.to_string().into(),
6825 commit_timestamp: commit.commit_timestamp,
6826 author_name: commit.author_name.to_string().into(),
6827 has_parent: true,
6828 }
6829 }),
6830 }
6831}
6832
6833fn commit_details_to_proto(commit: &CommitDetails) -> proto::GitCommitDetails {
6834 proto::GitCommitDetails {
6835 sha: commit.sha.to_string(),
6836 message: commit.message.to_string(),
6837 commit_timestamp: commit.commit_timestamp,
6838 author_email: commit.author_email.to_string(),
6839 author_name: commit.author_name.to_string(),
6840 }
6841}
6842
6843fn proto_to_commit_details(proto: &proto::GitCommitDetails) -> CommitDetails {
6844 CommitDetails {
6845 sha: proto.sha.clone().into(),
6846 message: proto.message.clone().into(),
6847 commit_timestamp: proto.commit_timestamp,
6848 author_email: proto.author_email.clone().into(),
6849 author_name: proto.author_name.clone().into(),
6850 }
6851}
6852
/// Recomputes a full [`RepositorySnapshot`] from the git backend and diffs it
/// against `prev_snapshot` to derive the [`RepositoryEvent`]s describing what
/// changed (statuses and/or branch/head).
async fn compute_snapshot(
    id: RepositoryId,
    work_directory_abs_path: Arc<Path>,
    prev_snapshot: RepositorySnapshot,
    backend: Arc<dyn GitRepository>,
) -> Result<(RepositorySnapshot, Vec<RepositoryEvent>)> {
    let mut events = Vec::new();
    let branches = backend.branches().await?;
    let branch = branches.into_iter().find(|branch| branch.is_head);
    // Querying status for "." covers the entire work directory.
    let statuses = backend
        .status(&[RepoPath::from_rel_path(
            &RelPath::new(".".as_ref(), PathStyle::local()).unwrap(),
        )])
        .await?;
    let stash_entries = backend.stash_entries().await?;
    let mut conflicted_paths = Vec::new();
    // Build the status tree, collecting conflicted paths along the way so the
    // merge details can be refreshed below.
    let statuses_by_path = SumTree::from_iter(
        statuses.entries.iter().map(|(repo_path, status)| {
            if status.is_conflicted() {
                conflicted_paths.push(repo_path.clone());
            }
            StatusEntry {
                repo_path: repo_path.clone(),
                status: *status,
            }
        }),
        (),
    );
    let mut merge_details = prev_snapshot.merge;
    let conflicts_changed = merge_details.update(&backend, conflicted_paths).await?;
    log::debug!("new merge details: {merge_details:?}");

    if conflicts_changed || statuses_by_path != prev_snapshot.statuses_by_path {
        events.push(RepositoryEvent::StatusesChanged)
    }

    // Useful when branch is None in detached head state
    let head_commit = match backend.head_sha().await {
        Some(head_sha) => backend.show(head_sha).await.log_err(),
        None => None,
    };

    if branch != prev_snapshot.branch || head_commit != prev_snapshot.head_commit {
        events.push(RepositoryEvent::BranchChanged);
    }

    let remote_origin_url = backend.remote_url("origin").await;
    let remote_upstream_url = backend.remote_url("upstream").await;

    let snapshot = RepositorySnapshot {
        id,
        statuses_by_path,
        work_directory_abs_path,
        original_repo_abs_path: prev_snapshot.original_repo_abs_path,
        path_style: prev_snapshot.path_style,
        // Every recomputation bumps the scan id, even if nothing changed.
        scan_id: prev_snapshot.scan_id + 1,
        branch,
        head_commit,
        merge: merge_details,
        remote_origin_url,
        remote_upstream_url,
        stash_entries,
    };

    Ok((snapshot, events))
}
6919
/// Decodes a [`FileStatus`] from the wire format.
///
/// When the structured `status` variant is present it takes precedence;
/// otherwise `simple_status` is interpreted as a legacy flat status code.
fn status_from_proto(
    simple_status: i32,
    status: Option<proto::GitFileStatus>,
) -> anyhow::Result<FileStatus> {
    use proto::git_file_status::Variant;

    // Fallback path: no structured variant, decode the legacy flat code.
    // Each simple code maps to a fixed worktree/index combination.
    let Some(variant) = status.and_then(|status| status.variant) else {
        let code = proto::GitStatus::from_i32(simple_status)
            .with_context(|| format!("Invalid git status code: {simple_status}"))?;
        let result = match code {
            proto::GitStatus::Added => TrackedStatus {
                worktree_status: StatusCode::Added,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Modified => TrackedStatus {
                worktree_status: StatusCode::Modified,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            proto::GitStatus::Conflict => UnmergedStatus {
                first_head: UnmergedStatusCode::Updated,
                second_head: UnmergedStatusCode::Updated,
            }
            .into(),
            proto::GitStatus::Deleted => TrackedStatus {
                worktree_status: StatusCode::Deleted,
                index_status: StatusCode::Unmodified,
            }
            .into(),
            _ => anyhow::bail!("Invalid code for simple status: {simple_status}"),
        };
        return Ok(result);
    };

    let result = match variant {
        Variant::Untracked(_) => FileStatus::Untracked,
        Variant::Ignored(_) => FileStatus::Ignored,
        Variant::Unmerged(unmerged) => {
            // Decode both heads with the same mapping, then fail if either
            // carried an invalid code.
            let [first_head, second_head] =
                [unmerged.first_head, unmerged.second_head].map(|head| {
                    let code = proto::GitStatus::from_i32(head)
                        .with_context(|| format!("Invalid git status code: {head}"))?;
                    let result = match code {
                        proto::GitStatus::Added => UnmergedStatusCode::Added,
                        proto::GitStatus::Updated => UnmergedStatusCode::Updated,
                        proto::GitStatus::Deleted => UnmergedStatusCode::Deleted,
                        _ => anyhow::bail!("Invalid code for unmerged status: {code:?}"),
                    };
                    Ok(result)
                });
            let [first_head, second_head] = [first_head?, second_head?];
            UnmergedStatus {
                first_head,
                second_head,
            }
            .into()
        }
        Variant::Tracked(tracked) => {
            // Same pattern as above, for index/worktree status pairs.
            let [index_status, worktree_status] = [tracked.index_status, tracked.worktree_status]
                .map(|status| {
                    let code = proto::GitStatus::from_i32(status)
                        .with_context(|| format!("Invalid git status code: {status}"))?;
                    let result = match code {
                        proto::GitStatus::Modified => StatusCode::Modified,
                        proto::GitStatus::TypeChanged => StatusCode::TypeChanged,
                        proto::GitStatus::Added => StatusCode::Added,
                        proto::GitStatus::Deleted => StatusCode::Deleted,
                        proto::GitStatus::Renamed => StatusCode::Renamed,
                        proto::GitStatus::Copied => StatusCode::Copied,
                        proto::GitStatus::Unmodified => StatusCode::Unmodified,
                        _ => anyhow::bail!("Invalid code for tracked status: {code:?}"),
                    };
                    Ok(result)
                });
            let [index_status, worktree_status] = [index_status?, worktree_status?];
            TrackedStatus {
                index_status,
                worktree_status,
            }
            .into()
        }
    };
    Ok(result)
}
7005
7006fn status_to_proto(status: FileStatus) -> proto::GitFileStatus {
7007 use proto::git_file_status::{Tracked, Unmerged, Variant};
7008
7009 let variant = match status {
7010 FileStatus::Untracked => Variant::Untracked(Default::default()),
7011 FileStatus::Ignored => Variant::Ignored(Default::default()),
7012 FileStatus::Unmerged(UnmergedStatus {
7013 first_head,
7014 second_head,
7015 }) => Variant::Unmerged(Unmerged {
7016 first_head: unmerged_status_to_proto(first_head),
7017 second_head: unmerged_status_to_proto(second_head),
7018 }),
7019 FileStatus::Tracked(TrackedStatus {
7020 index_status,
7021 worktree_status,
7022 }) => Variant::Tracked(Tracked {
7023 index_status: tracked_status_to_proto(index_status),
7024 worktree_status: tracked_status_to_proto(worktree_status),
7025 }),
7026 };
7027 proto::GitFileStatus {
7028 variant: Some(variant),
7029 }
7030}
7031
7032fn unmerged_status_to_proto(code: UnmergedStatusCode) -> i32 {
7033 match code {
7034 UnmergedStatusCode::Added => proto::GitStatus::Added as _,
7035 UnmergedStatusCode::Deleted => proto::GitStatus::Deleted as _,
7036 UnmergedStatusCode::Updated => proto::GitStatus::Updated as _,
7037 }
7038}
7039
7040fn tracked_status_to_proto(code: StatusCode) -> i32 {
7041 match code {
7042 StatusCode::Added => proto::GitStatus::Added as _,
7043 StatusCode::Deleted => proto::GitStatus::Deleted as _,
7044 StatusCode::Modified => proto::GitStatus::Modified as _,
7045 StatusCode::Renamed => proto::GitStatus::Renamed as _,
7046 StatusCode::TypeChanged => proto::GitStatus::TypeChanged as _,
7047 StatusCode::Copied => proto::GitStatus::Copied as _,
7048 StatusCode::Unmodified => proto::GitStatus::Unmodified as _,
7049 }
7050}